boomiikas committed on
Commit
18bbc31
·
verified ·
1 Parent(s): c75cecd

Upload Untitled0.ipynb

Browse files
Files changed (1) hide show
  1. Untitled0.ipynb +187 -0
Untitled0.ipynb ADDED
@@ -0,0 +1,187 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "nbformat": 4,
3
+ "nbformat_minor": 0,
4
+ "metadata": {
5
+ "colab": {
6
+ "provenance": [],
7
+ "gpuType": "T4"
8
+ },
9
+ "kernelspec": {
10
+ "name": "python3",
11
+ "display_name": "Python 3"
12
+ },
13
+ "language_info": {
14
+ "name": "python"
15
+ },
16
+ "accelerator": "GPU"
17
+ },
18
+ "cells": [
19
+ {
20
+ "cell_type": "code",
21
+ "execution_count": null,
22
+ "metadata": {
23
+ "id": "_qYQS0tiDKEL"
24
+ },
25
+ "outputs": [],
26
+ "source": [
27
+ "!pip install -q langchain openai duckduckgo-search gradio transformers\n"
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "source": [
33
+ "!pip install -q langchain-community langchainhub \"langchain-core<1.0\" \"langchain-text-splitters<1.0\""
34
+ ],
35
+ "metadata": {
36
+ "colab": {
37
+ "base_uri": "https://localhost:8080/"
38
+ },
39
+ "id": "zsmgG_1LD4hj",
40
+ "outputId": "8cc047a0-6963-491c-a80f-bb46732a9689"
41
+ },
42
+ "execution_count": null,
43
+ "outputs": [
44
+ {
45
+ "output_type": "stream",
46
+ "name": "stdout",
47
+ "text": [
48
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.5/2.5 MB\u001b[0m \u001b[31m36.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
49
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m467.2/467.2 kB\u001b[0m \u001b[31m21.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
50
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m38.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
51
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m65.5/65.5 kB\u001b[0m \u001b[31m3.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
52
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m64.7/64.7 kB\u001b[0m \u001b[31m5.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
53
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.9/50.9 kB\u001b[0m \u001b[31m2.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
54
+ "\u001b[?25h\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
55
+ "google-colab 1.0.0 requires requests==2.32.4, but you have requests 2.32.5 which is incompatible.\n",
56
+ "langchain 0.3.27 requires langchain-core<1.0.0,>=0.3.72, but you have langchain-core 1.0.0 which is incompatible.\n",
57
+ "langchain 0.3.27 requires langchain-text-splitters<1.0.0,>=0.3.9, but you have langchain-text-splitters 1.0.0 which is incompatible.\u001b[0m\u001b[31m\n",
58
+ "\u001b[0m"
59
+ ]
60
+ }
61
+ ]
62
+ },
63
+ {
64
+ "cell_type": "code",
65
+ "source": [
66
+ "!pip install -q ddgs"
67
+ ],
68
+ "metadata": {
69
+ "colab": {
70
+ "base_uri": "https://localhost:8080/"
71
+ },
72
+ "id": "1xhRZvQ7Eh9p",
73
+ "outputId": "ebe48afb-2fcd-4698-9930-83bca327e58b"
74
+ },
75
+ "execution_count": null,
76
+ "outputs": [
77
+ {
78
+ "output_type": "stream",
79
+ "name": "stdout",
80
+ "text": [
81
+ "\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/41.6 kB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m41.6/41.6 kB\u001b[0m \u001b[31m3.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
82
+ "\u001b[?25h\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/5.3 MB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[91m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[91mβ•Έ\u001b[0m \u001b[32m5.3/5.3 MB\u001b[0m \u001b[31m194.3 MB/s\u001b[0m eta \u001b[36m0:00:01\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m5.3/5.3 MB\u001b[0m \u001b[31m112.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
83
+ "\u001b[?25h"
84
+ ]
85
+ }
86
+ ]
87
+ },
88
+ {
89
+ "cell_type": "code",
90
+ "source": [
91
+ "from transformers import pipeline\n",
92
+ "from langchain_community.llms import HuggingFacePipeline\n",
93
+ "from langchain_community.tools import DuckDuckGoSearchRun\n",
94
+ "import gradio as gr\n",
95
+ "\n",
96
+ "# 🔹 Create search tool\n",
97
+ "search_tool = DuckDuckGoSearchRun()\n",
98
+ "\n",
99
+ "# 🔹 Create local LLM\n",
100
+ "hf = pipeline(\"text2text-generation\", model=\"google/flan-t5-base\", max_new_tokens=256)\n",
101
+ "llm = HuggingFacePipeline(pipeline=hf)\n",
102
+ "\n",
103
+ "# 🔹 Combined function: first search, then summarize\n",
104
+ "def agent_chat(query):\n",
105
+ " try:\n",
106
+ " # Step 1: Get web search results\n",
107
+ " search_results = search_tool.run(query)\n",
108
+ "\n",
109
+ " # Step 2: Ask model to summarize results\n",
110
+ " prompt = f\"Summarize the following information and answer clearly:\\n\\n{search_results}\"\n",
111
+ " response = llm.invoke(prompt)\n",
112
+ "\n",
113
+ " return response\n",
114
+ " except Exception as e:\n",
115
+ " return f\"❌ Error: {str(e)}\"\n"
116
+ ],
117
+ "metadata": {
118
+ "colab": {
119
+ "base_uri": "https://localhost:8080/"
120
+ },
121
+ "id": "y1LePpZ4DOWy",
122
+ "outputId": "7a40d787-acff-4156-fffe-99f6bc4655eb"
123
+ },
124
+ "execution_count": null,
125
+ "outputs": [
126
+ {
127
+ "output_type": "stream",
128
+ "name": "stderr",
129
+ "text": [
130
+ "Device set to use cuda:0\n"
131
+ ]
132
+ }
133
+ ]
134
+ },
135
+ {
136
+ "cell_type": "code",
137
+ "source": [
138
+ "\n",
139
+ "# πŸ”Ή Gradio Interface\n",
140
+ "demo = gr.Interface(\n",
141
+ " fn=agent_chat,\n",
142
+ " inputs=gr.Textbox(label=\"Ask the AI anything:\"),\n",
143
+ " outputs=\"text\",\n",
144
+ " title=\"AI Assistant\",\n",
145
+ " description=\"This simple agent searches the web and summarizes results using FLAN-T5.\"\n",
146
+ ")\n",
147
+ "\n",
148
+ "demo.launch(debug=True)"
149
+ ],
150
+ "metadata": {
151
+ "colab": {
152
+ "base_uri": "https://localhost:8080/",
153
+ "height": 646
154
+ },
155
+ "id": "QwDaTh7ODe1B",
156
+ "outputId": "b6597c18-f6ad-446f-9e06-85f4ce8ecd18"
157
+ },
158
+ "execution_count": null,
159
+ "outputs": [
160
+ {
161
+ "output_type": "stream",
162
+ "name": "stdout",
163
+ "text": [
164
+ "It looks like you are running Gradio on a hosted Jupyter notebook, which requires `share=True`. Automatically setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n",
165
+ "\n",
166
+ "Colab notebook detected. This cell will run indefinitely so that you can see errors and logs. To turn off, set debug=False in launch().\n",
167
+ "* Running on public URL: https://a884027dcc1d0b4670.gradio.live\n",
168
+ "\n",
169
+ "This share link expires in 1 week. For free permanent hosting and GPU upgrades, run `gradio deploy` from the terminal in the working directory to deploy to Hugging Face Spaces (https://huggingface.co/spaces)\n"
170
+ ]
171
+ },
172
+ {
173
+ "output_type": "display_data",
174
+ "data": {
175
+ "text/plain": [
176
+ "<IPython.core.display.HTML object>"
177
+ ],
178
+ "text/html": [
179
+ "<div><iframe src=\"https://a884027dcc1d0b4670.gradio.live\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
180
+ ]
181
+ },
182
+ "metadata": {}
183
+ }
184
+ ]
185
+ }
186
+ ]
187
+ }