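"""
Panel "Chat with Plot" app: chat with an LLM to iteratively edit an hvplot/HoloViews plot.

The ChatInterface sends each request together with the current editor code to a
llama_index ContextChatEngine (an OpenAI LLM grounded in the hvplot Markdown docs),
extracts the returned Python code block, and re-executes it to refresh the plot.
"""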
import re
import os
import panel as pn
from io import StringIO
from panel.io.mime_render import exec_with_return
from llama_index import (
    VectorStoreIndex,
    SimpleDirectoryReader,
    ServiceContext,
    StorageContext,
    load_index_from_storage,
)
from llama_index.chat_engine import ContextChatEngine
from llama_index.embeddings import OpenAIEmbedding
from llama_index.llms import OpenAI
SYSTEM_PROMPT = (
"You are a data visualization pro and expert in HoloViz hvplot + holoviews. "
"Your primary goal is to assist the user in editing based on user requests using best practices. "
"Simply provide code in code fences (```python). You must have `hvplot_obj` as the last line of code. "
"Note, data columns are ['sepal_length', 'sepal_width', 'petal_length', 'petal_width', 'species'] and "
"hvplot is built on top of holoviews--anything you can do with holoviews, you can do "
"with hvplot. First try to use hvplot **kwargs instead of opts, e.g. `legend='top_right'` "
"instead of `opts(legend_position='top_right')`. If you need to use opts, you can use "
"concise version, e.g. `opts(xlabel='Petal Length')` vs `opts(hv.Opts(xlabel='Petal Length'))`"
)
USER_CONTENT_FORMAT = """
Request:
{content}
Code:
```python
{code}
```
""".strip()
DEFAULT_HVPLOT = """
import hvplot.pandas
from bokeh.sampledata.iris import flowers
hvplot_obj = flowers.hvplot(x='petal_length', y='petal_width', by='species', kind='scatter')
hvplot_obj
""".strip()
def exception_handler(exc):
    if retries.value == 0:
        chat_interface.send(f"Can't figure this out: {exc}", respond=False)
        return
    chat_interface.send(f"Fix this error:\n```python\n{exc}\n```")
    retries.value = retries.value - 1
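
# Cache an OpenAI LLM whenever an API key is entered, falling back to the OPENAI_API_KEY env var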
def init_llm(event):
    api_key = event.new
    if not api_key:
        api_key = os.environ.get("OPENAI_API_KEY")
    if not api_key:
        return
    pn.state.cache["llm"] = OpenAI(api_key=api_key)
def create_chat_engine(llm):
    try:
        # reuse the index persisted by a previous run
        storage_context = StorageContext.from_defaults(persist_dir="persisted/")
        index = load_index_from_storage(storage_context=storage_context)
    except Exception:
        # otherwise, embed the hvplot Markdown docs and persist the index for next time
        embed_model = OpenAIEmbedding()
        service_context = ServiceContext.from_defaults(llm=llm, embed_model=embed_model)
        documents = SimpleDirectoryReader(
            input_dir="hvplot_docs", required_exts=[".md"], recursive=True
        ).load_data()
        index = VectorStoreIndex.from_documents(
            documents, service_context=service_context, show_progress=True
        )
        index.storage_context.persist("persisted/")
    retriever = index.as_retriever()
    chat_engine = ContextChatEngine.from_defaults(
        system_prompt=SYSTEM_PROMPT,
        retriever=retriever,
        verbose=True,
    )
    return chat_engine
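
# Chat callback: forward the request plus the current code to the engine, stream the reply,
# and load any returned code into the editor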
def callback(content: str, user: str, instance: pn.chat.ChatInterface):
if "llm" not in pn.state.cache:
yield "Need to set OpenAI API key first"
return
if "engine" not in pn.state.cache:
engine = pn.state.cache["engine"] = create_chat_engine(pn.state.cache["llm"])
else:
engine = pn.state.cache["engine"]
# new user contents
user_content = USER_CONTENT_FORMAT.format(
content=content, code=code_editor.value
)
# send user content to chat engine
agent_response = engine.stream_chat(user_content)
message = None
for chunk in agent_response.response_gen:
message = instance.stream(chunk, message=message, user="OpenAI")
# extract code
llm_matches = re.findall(r"```python\n(.*)\n```", message.object, re.DOTALL)
if llm_matches:
llm_code = llm_matches[0]
if llm_code.splitlines()[-1].strip() != "hvplot_obj":
llm_code += "\nhvplot_obj"
code_editor.value = llm_code
retries.value = 2
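
# Re-execute the editor code and show the result, routing any errors to the exception handler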
def update_plot(event):
    with StringIO() as buf:
        hvplot_pane.object = exec_with_return(event.new, stderr=buf)
        buf.seek(0)
        errors = buf.read()
    if errors:
        exception_handler(errors)
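
# Load the code editor extension and register the global exception handler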
pn.extension("codeeditor", sizing_mode="stretch_width", exception_handler=exception_handler)
# instantiate widgets and panes
api_key_input = pn.widgets.PasswordInput(
    placeholder=(
        "Currently subsidized by Andrew, "
        "but you can also pass your own OpenAI API Key"
    )
)
chat_interface = pn.chat.ChatInterface(
    callback=callback,
    show_clear=False,
    show_undo=False,
    show_button_name=False,
    message_params=dict(
        show_reaction_icons=False,
        show_copy_icon=False,
    ),
    height=650,
    callback_exception="verbose",
)
hvplot_pane = pn.pane.HoloViews(
    exec_with_return(DEFAULT_HVPLOT),
    sizing_mode="stretch_both",
)
code_editor = pn.widgets.CodeEditor(
    value=DEFAULT_HVPLOT,
    language="python",
    sizing_mode="stretch_both",
)
retries = pn.widgets.IntInput(value=2, visible=False)
error = pn.widgets.StaticText(visible=False)
# watch for code changes
api_key_input.param.watch(init_llm, "value")
code_editor.param.watch(update_plot, "value")
api_key_input.param.trigger("value")
# lay them out
tabs = pn.Tabs(
("Plot", hvplot_pane),
("Code", code_editor),
)
sidebar = [api_key_input, chat_interface]
main = [tabs]
template = pn.template.FastListTemplate(
    sidebar=sidebar,
    main=main,
    sidebar_width=600,
    main_layout=None,
    accent_base_color="#fd7000",
    header_background="#fd7000",
    title="Chat with Plot",
)
template.servable()