lewtun (HF staff) committed on
Commit d0f9fb0
1 Parent(s): d133bae
Files changed (2)
  1. app.ipynb +11 -80
  2. app.py +7 -7
app.ipynb CHANGED
@@ -27,16 +27,9 @@
  " load_dotenv(\".env\")\n",
  "\n",
  "HF_TOKEN = os.getenv(\"HF_TOKEN\")\n",
- "https://joi-20b.ngrok.io/generate\n"
+ "ENDPOINT_URL = os.getenv(\"ENDPOINT_URL\")\n"
  ]
  },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
  {
  "cell_type": "code",
  "execution_count": 3,
@@ -57,7 +50,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 37,
+ "execution_count": 4,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -102,7 +95,7 @@
  {
  "data": {
  "text/plain": [
- "{'generated_text': '\\n\\nJoi: Black holes are regions of space-time that have so much mass concentrated into such a tiny volume, that the gravity field becomes so intense that nothing can escape its grasp, not even light. This causes them to appear black in color and the name ‘black hole’ comes from the fact that these objects appear black to the naked eye.'}"
+ "{'generated_text': '\\n\\nJoi: Black holes are one of the most fascinating topics in astronomy. They’re objects in space that contain massive amounts of matter, and have such powerful gravity that they warp spacetime. It is thought that black holes might be the most compact objects in the universe. It is thought that black holes are the most powerful sources of gravity in the universe and that they occur in various forms, from stellar-sized black holes to the supermassive black holes at the hearts of galaxies. Black'}"
  ]
  },
  "execution_count": 5,
@@ -119,7 +112,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 34,
+ "execution_count": 6,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -779,7 +772,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 35,
+ "execution_count": 7,
  "metadata": {},
  "outputs": [],
  "source": [
@@ -829,14 +822,14 @@
  },
  {
  "cell_type": "code",
- "execution_count": 38,
+ "execution_count": 8,
  "metadata": {},
  "outputs": [
  {
  "name": "stdout",
  "output_type": "stream",
  "text": [
- "Running on local URL: http://127.0.0.1:7866\n",
+ "Running on local URL: http://127.0.0.1:7860\n",
  "\n",
  "To create a public link, set `share=True` in `launch()`.\n"
  ]
@@ -844,7 +837,7 @@
  {
  "data": {
  "text/html": [
- "<div><iframe src=\"http://127.0.0.1:7866/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
+ "<div><iframe src=\"http://127.0.0.1:7860/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
  ],
  "text/plain": [
  "<IPython.core.display.HTML object>"
@@ -857,60 +850,9 @@
  "data": {
  "text/plain": []
  },
- "execution_count": 38,
+ "execution_count": 8,
  "metadata": {},
  "output_type": "execute_result"
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "anthropic_hhh_single.json\n",
- "History: ['Hi!']\n",
- "Inputs: Below is a dialogue between various a human and an AI assistant. The AI tries to be helpful, polite, honest, sophisticated, emotionally aware, and humble-but-knowledgeable. The assistant is happy to help with almost anything, and will do its best to understand exactly what is needed. It also tries to avoid giving false or misleading information, and it caveats when it isn't entirely sure about the right answer. That said, the assistant is practical and really does its best, and doesn't let caution get too much in the way of being useful.\n",
- "\n",
- "Current conversation:\n",
- "Human: Can you paraphrase this paragraph for me: “It took a long time to change the lightbulb, as it was high up in the ceiling in a stairwell. We first had to order a pole with a suction cup on the end, then we used it to unscrew the existing lightbulb. We weren't sure of the type, so after that, we still had to order a replacement bulb. Finally we used the suction cup and pole to replace it.”\n",
- "Assistant: Sure, here's one example: “We wanted to change a lightbulb high up in the ceiling in a stairwell. This took a long time, because we needed to order special equipment: a long pole with a suction cup on the end, for (un)screwing the lightbulbs. Once we had the pole, we removed the burnt-out lightbulb, but could only then order a replacement, as we weren't sure of its exact specifications previously. Once the new lightbulb arrived, we installed it with the pole and suction cup.”\n",
- "Human: Ah, OK, thanks. Actually, can you condense the paraphrase to one sentence?\n",
- "Assistant: Sure, I'll try, though I'm not sure what to prioritize. Here's an example: “We replaced a hard-to-reach lightbulb with a suction cup attached to a pole, but the whole process took a while, especially since we had to first order the pole, then the replacement lightbulb.”\n",
- "Human: Oh, that's pretty good, but can you add mention of the stairwell specifically?\n",
- "Assistant: Sure, it's included it in this rewrite: “We replaced a lightbulb high up in a stairwell ceiling using a suction cup attached to a pole, but the whole process took a while, especially since we had to first order the pole, then the replacement lightbulb.”\n",
- "\n",
- "Human: Hi!\n",
- "\n",
- "Assistant:\n",
- "\n",
- "langchain_default.json\n",
- "History: ['Hi!']\n",
- "Inputs: The following is a friendly conversation between a human and an AI Assistant. The Assistant is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\n",
- "\n",
- "Current conversation:\n",
- "\n",
- "Human: Hi!\n",
- "Assistant:\n",
- "langchain_default.json\n",
- "History: ['Hi!', ' I am so glad to see you. What do you want to talk about?\\n', 'What can you tell me about black holes?']\n",
- "Inputs: The following is a friendly conversation between a human and an AI Assistant. The Assistant is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\n",
- "\n",
- "Current conversation:\n",
- "Human: Hi!\n",
- "Assistant: I am so glad to see you. What do you want to talk about?\n",
- "Human: What can you tell me about black holes?\n",
- "Assistant:\n",
- "langchain_default.json\n",
- "History: ['Hi!', ' I am so glad to see you. What do you want to talk about?\\n', 'What can you tell me about black holes?', ' Black holes are incredibly dense objects that have so much mass that they bend spacetime and make it collapse into itself.\\n', 'What is the closest one to Earth?']\n",
- "Inputs: The following is a friendly conversation between a human and an AI Assistant. The Assistant is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\n",
- "\n",
- "Current conversation:\n",
- "Human: Hi!\n",
- "Assistant: I am so glad to see you. What do you want to talk about?\n",
- "\n",
- "Human: What can you tell me about black holes?\n",
- "Assistant: Black holes are incredibly dense objects that have so much mass that they bend spacetime and make it collapse into itself.\n",
- "Human: What is the closest one to Earth?\n",
- "Assistant:\n"
- ]
  }
  ],
  "source": [
@@ -1029,20 +971,9 @@
  },
  {
  "cell_type": "code",
- "execution_count": 39,
+ "execution_count": 9,
  "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n",
- "To disable this warning, you can either:\n",
- "\t- Avoid using `tokenizers` before the fork if possible\n",
- "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n"
- ]
- }
- ],
+ "outputs": [],
  "source": [
  "from nbdev.export import nb_export\n",
  "nb_export('app.ipynb', lib_path='.', name='app')"
app.py CHANGED
@@ -1,7 +1,7 @@
 # AUTOGENERATED! DO NOT EDIT! File to edit: app.ipynb.
 
 # %% auto 0
-__all__ = ['title', 'description', 'get_model_endpoint_params', 'query_chat_api', 'inference_chat']
+__all__ = ['HF_TOKEN', 'ENDPOINT_URL', 'title', 'description', 'get_model_endpoint_params', 'query_chat_api', 'inference_chat']
 
 # %% app.ipynb 0
 import gradio as gr
@@ -18,10 +18,10 @@ if Path(".env").is_file():
     load_dotenv(".env")
 
 HF_TOKEN = os.getenv("HF_TOKEN")
-https://joi-20b.ngrok.io/generate
+ENDPOINT_URL = os.getenv("ENDPOINT_URL")
 
 
-# %% app.ipynb 3
+# %% app.ipynb 2
 def get_model_endpoint_params(model_id):
     if "joi" in model_id:
         headers = None
@@ -33,7 +33,7 @@ def get_model_endpoint_params(model_id):
     return f"https://api-inference.huggingface.co/models/{model_id}", headers, max_new_tokens_supported
 
 
-# %% app.ipynb 4
+# %% app.ipynb 3
 def query_chat_api(
     model_id,
     inputs,
@@ -66,7 +66,7 @@ def query_chat_api(
     return "Error: " + response.text
 
 
-# %% app.ipynb 6
+# %% app.ipynb 5
 def inference_chat(
     model_id,
     text_input,
@@ -108,7 +108,7 @@ def inference_chat(
     return {chatbot: chat, state: history}
 
 
-# %% app.ipynb 22
+# %% app.ipynb 21
 title = """<h1 align="center">Chatty Language Models</h1>"""
 description = """Pretrained language models can be conditioned to act like dialogue agents through a conversational prompt that typically takes the form:
 
@@ -123,7 +123,7 @@ Assistant: <utterance>
 In this app, you can explore the outputs of several language models conditioned on different conversational prompts. The models are trained on different datasets and have different objectives, so they will have different personalities and strengths.
 """
 
-# %% app.ipynb 24
+# %% app.ipynb 23
 with gr.Blocks(
     css="""
     .message.svelte-w6rprc.svelte-w6rprc.svelte-w6rprc {font-size: 20px; margin-top: 20px}
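
Because app.py is generated from the notebook by nbdev (hence the "AUTOGENERATED! DO NOT EDIT!" header), the `# %% app.ipynb N` cell markers are renumbered whenever a notebook cell is added or removed: deleting the empty cell shifted every subsequent index down by one, which is all the marker-only hunks above record. The regeneration step, taken verbatim from the end of the notebook:

# Re-export the notebook to app.py; nbdev rewrites the "# %% app.ipynb N"
# cell markers with fresh indices, producing the marker-only hunks above.
from nbdev.export import nb_export

nb_export('app.ipynb', lib_path='.', name='app')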