minimal working
- .gitignore +1 -0
- Dockerfile +34 -0
- README.md +4 -6
- main.py +137 -0
- pup +238 -0
- sql.py +35 -0
.gitignore
ADDED
@@ -0,0 +1 @@
+__pycache__
Dockerfile
ADDED
@@ -0,0 +1,34 @@
+FROM debian:latest AS base
+
+RUN apt-get update \
+    && apt-get install -y curl \
+    && apt-get clean
+
+# Create user
+RUN useradd -m -u 1000 user
+USER user
+ENV HOME=/home/user \
+    PATH=/home/user/.local/bin:/home/user/.pixi/bin:$PATH \
+    PUP=/home/user/pup
+
+WORKDIR $HOME
+
+CMD ["bash"]
+
+
+#### Puppy ####
+FROM base AS puppy
+
+COPY --chown=user pup .
+
+RUN $PUP
+
+RUN pup py3.11
+
+RUN pup fetch appenv duckdb gradio itables
+
+COPY --chown=user *.py .
+
+EXPOSE 7860
+CMD ["appenv/.venv/bin/uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
+# CMD ["pup", "play"]
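The Dockerfile above builds a two-stage image: a `base` stage with curl and a non-root `user`, and a `puppy` stage that bootstraps `pup`, adds a Python 3.11 Pixi environment, fetches duckdb/gradio/itables into the `appenv` virtual environment, and serves the app with uvicorn on port 7860. A rough smoke test (not part of this commit), assuming the image has been built and started with port 7860 published and that `httpx` is available on the host:

```python
# Hypothetical smoke test against a locally running container
# (e.g. started with port 7860 published); httpx is an assumption,
# not a dependency of this repo.
import base64

import httpx

query = "SELECT 42 AS answer"                      # any read-only SQL
token = base64.b64encode(query.encode()).decode()  # same scheme as Q.base64

resp = httpx.get(f"http://localhost:7860/q/{token}", timeout=30)
print(resp.status_code)   # expect 200
print(resp.text[:200])    # HTML fragment with a DataTables-styled table
```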
README.md
CHANGED
@@ -1,10 +1,8 @@
 ---
-title: Duckdb
-emoji:
+title: Duckdb FastAPI Gradio
+emoji: ๐
 colorFrom: red
 colorTo: pink
 sdk: docker
-pinned:
----
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+pinned: true
+---
main.py
ADDED
@@ -0,0 +1,137 @@
+__doc__ = """
+This FastAPI app uses gradio components with SQL code input
+and HTML table output. The query is executed using DuckDB.
+The query results are shown in an iframe where the table
+is styled and made interactive using Datatables.net scripts.
+
+"""
+
+import gradio as gr
+import pandas as pd
+from fastapi import FastAPI
+from fastapi.responses import HTMLResponse, RedirectResponse
+from itables import to_html_datatable
+
+from sql import Q
+
+app = FastAPI()
+
+def query_from_request(query, request: gr.Request):
+    """Process query from input block or from initial request.
+
+    https://github.com/gradio-app/gradio/issues/7464#issuecomment-1960161591
+    """
+    if not query:
+        query_params = request.query_params
+        base64query = dict(query_params).get("q")
+    else:
+        base64query = Q(query).base64
+    if base64query in (None, "example"):
+        decoded = Q("""SELECT 42 AS answer, 'LU & E' AS question""")
+        base64query = decoded.base64
+    else:
+        decoded = Q.from_base64(base64query)
+    href = format_href(base64query)
+    result = f"""<iframe src="/q/{base64query}" width="90%" height="90%"></iframe>"""
+    return (decoded, href, result)
+
+def format_href(url: str):
+    href = f"localhost:7860/sql/{url}"
+    return f"""<a href="{href}">{href}</a>"""
+
+@app.get("/q/{base64query}", response_class=HTMLResponse)
+def query_db(base64query: str | None = None):
+    decoded = Q.from_base64(base64query)
+    df = decoded.df()
+    html = to_html_datatable(df)
+    return f"""
+    <h3>{decoded}</h3>
+    <div>{html}</div>
+    """
+
+with gr.Blocks() as gradio_sql_interface:
+    with gr.Row():
+        with gr.Column():
+            header = gr.Markdown("# SQL Editor")
+            sql_code = gr.Code(language="sql", label="SQL Query", interactive=True)
+            copy_button = gr.HTML()
+            button = gr.Button("run")
+        with gr.Column():
+            markdown = gr.Markdown("# RESULTS")
+            results = gr.HTML()
+    button.click(query_from_request, [sql_code], [sql_code, copy_button, results])
+    gradio_sql_interface.load(query_from_request, [sql_code], [sql_code, copy_button, results], queue=True)
+
+app = gr.mount_gradio_app(app, gradio_sql_interface, path="/sql")
+
+@app.get("/")
+@app.get("/sql")
+@app.get("/sql/")
+def redirect_to_example():
+    return RedirectResponse("/sql/?q=example")
+
+# no need to mount/unmount dynamically
+# just change path and app blocks?
+# {
+#   "api.routes": [
+#     "...",
+#     "Mount(path='/gradiosql/qq', name='', app=<gradio.routes.App object at 0x71dc7b9d6d10>)"
+#   ]
+# }
+# @app.get("/sql/{query}")
+# def render_query(query: str):
+#     # unmount any previous gradio apps
+#     # for route in app.routes:
+#     #     if "gradio" in route.path:
+#     #         del route
+#     # global app
+#     # routes = [route for route in app.routes if 'gradio' not in route.path]
+#     # app = Starlette(routes=routes)
+#     with gr.Blocks() as gradio_sql_interface:
+#         sql_code = gr.Code(value=query, language="sql", label="SQL Query", interactive=True)
+#         button = gr.Button("run", link=f"/sql/{sql_code.value}")
+#         markdown = gr.Markdown("# RESULTS")
+#         html = gr.HTML(value=f"""<iframe src="/q/{query}" width="90%" height="90%"></iframe>""")
+#         # button.click(None, [], [], js=f"window.open('/sql/{sql_code.value}', '_top')")
+
+#     gradio_path = f"/gradiosql/{sql_code.value}"
+#     gr.mount_gradio_app(app, gradio_sql_interface, path=gradio_path)
+
+#     return RedirectResponse(gradio_path)
+
+# del app.routes[index]
+# @app.get("/redirect")
+# async def redirect_example():
+#     url = "/q/qqq"
+#     return Response(status_code=302, headers={"Location": url})
+
+
+
+# # return """hi<hr><iframe src="/" width="80%" height="80%"></iframe>"""
+
+# @app.get("/qq/{apath}")
+# async def build_sql_interface(apath: str):
+#     with gr.Blocks() as gradio_sql_interface:
+#         sql_code = gr.Code(value=apath, language="sql", label="SQL Query", interactive=True)
+#         html_code = gr.Code(value=html)
+#         html_html = gr.HTML(value=f"""<iframe src="/q/{apath}" width="80%" height="80%"></iframe>""")
+#         grdf = gr.DataFrame(value=df)
+#         # gradio_sql_interface.queue()
+
+#     gr.mount_gradio_app(app, gradio_sql_interface, path="/gradio/{apath}")
+#     # import time; time.sleep(1)
+#     return RedirectResponse("/gradio/{{apath}}")
+#     async with httpx.AsyncClient() as client:
+#         try:
+#             response = await client.get("/gradio")
+#             response.raise_for_status()
+#             return response
+#         except httpx.HTTPError as e:
+#             return {"errresponse": e}
+#             pass  # raise HTTPException(status_code=exc.status_code, detail=exc.msg)
+#     return {"path": path}
+
+# if __name__ == "__main__":
+#     import uvicorn
+#     uvicorn.run(app, host="0.0.0.0", port=7860)
+# appenv/.venv/bin/uvicorn main:app --host 0.0.0.0 --port 7860
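In main.py, `query_from_request` turns either the editor contents or the `?q=` URL parameter into a base64 token, and the results pane is simply an iframe pointing at `/q/{base64query}`, which decodes the token, runs it through DuckDB, and renders the table with itables. A small sketch of that round trip, using only the modules added in this commit and no running server:

```python
# Round trip behind main.py: SQL -> base64 token for the URL,
# token -> Q -> DuckDB result -> interactive HTML table.
from itables import to_html_datatable

from sql import Q

q = Q("SELECT 42 AS answer, 'LU & E' AS question")
token = q.base64                 # goes into /q/{token} and /sql/?q={token}
decoded = Q.from_base64(token)   # what query_db() reconstructs on the server
html = to_html_datatable(decoded.df())

print(token)
print(html[:120])                # start of the HTML shown inside the iframe
```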
pup
ADDED
@@ -0,0 +1,238 @@
+#!/usr/bin/bash
+VERSION="pup v0 @ Apr 2 2024" # for Debian
+
+
+####################
+#### Parameters ####
+####################
+
+LOG=.woof.log
+# PIXI_DEFAULT_TASK="echo" #"$HOME/appenv/.venv/bin/uvicorn main:app --host 0.0.0.0 --port 7860"
+SYMLINK_ON_PATH=$HOME/.local/bin/pup
+USAGE_MAIN="\n
+\e[1m\e[4mUsage\e[0m: (each subcommand shows more help)\n
+\e[0;33mpup\e[0m\n
+\tinitialize and link 🐶 to PATH\n
+\e[0;33mpup py\e[0m\n
+\tinteractively install base python to current folder\n
+\tif you know what you need:\n
+\tpup py3.12 jupyter jupyter-collaboration>=2\n
+\t(additional arguments go into 'pixi add')\n
+\e[0;33mpup fetch\e[0m\n
+\tinteractively install packages with uv into specified virtual env\n
+\tif you know what you need:\n
+\tpup fetch . duckdb\n
+\tpup fetch newviz altair seaborn\n
+\e[0;33mpup install\e[0m\n
+\tsame as pup fetch\n
+\e[0;33mpup home\e[0m\n
+\tshow 🐶's home folder\n
+\e[0;33mpup kernel\e[0m\n
+\tinteractively create Jupyter kernel linked to 🐶's environment\n
+\tif you know what you need:\n
+\tpup kernel . pup3.11-home\n
+\tpup kernel dataenv pup3.11-data\n
+\e[0;33mpup play\e[0m\n
+\truns PIXI_DEFAULT_TASK (default: 'jupyter notebook')\n
+\e[0;33mpup whereami\e[0m\n
+\tin case you got lost: log of 🐶's commands thus far\n
+\e[0;33mpup which\e[0m\n
+\tshow 🐶's current symlink\n
+\n"
+
+
+###################
+#### Functions ####
+###################
+
+log_command() {
+    ts=$(date +'%FT%T')
+    echo "# $ts" >> "$(pup home)/$LOG"
+    echo "$1" >> "$(pup home)/$LOG"
+}
+
+log() {
+    ts=$(date +'%FT%T')
+    for arg in "$@"; do
+        echo "# $ts - $arg" | tee -a "$(pup home)/$LOG"
+    done
+}
+
+
+#######################
+#### CORE COMMANDS ####
+#######################
+
+#### init (no args) ####
+if [ -z "$1" ]; then
+    # init pup and show usage
+    echo -e $USAGE_MAIN
+
+    # link
+    SYMLINK_CURRENT=$(readlink -f "$SYMLINK_ON_PATH")
+    if [ "$SYMLINK_CURRENT" != "$(pwd)/pup" ] && [ -f "./pup" ]; then
+        SYMLINK_PATH_FOLDER=${SYMLINK_ON_PATH%/*}
+        mkdir -p "$SYMLINK_PATH_FOLDER"
+        ln -s -f "$(pwd)"/pup $SYMLINK_ON_PATH
+    fi
+    log_command "$0 $*"
+    log woof!
+    log "🐶 = $(pup which)"
+    # initialize Pixi project in 'pup home'
+    pup pixi init
+    exit 0
+fi
+#### py|python ####
+if [[ "$1" =~ ^py$|^python$ ]]; then
+    USAGE="\n
+\e[1m\e[4mUsage\e[0m: pup $1{version} [ADDITIONAL PACKAGES]\n
+For example:\n
+\tpup py3.12 jupyter jupyter-collaboration>=2\n
+\e[4mNote\e[0m:\n
+\tthis command is meant to be used once during the first base layer setup;\n
+\tbeyond that, use 'pixi add'\n
+\n"
+    echo -e $USAGE
+    read -ei "" -p $'\e[0;33minstall which python version (blank = latest)? \e[0m' PYVER
+    PACKAGES=${@:2}
+    read -ei "${PACKAGES:-notebook>=7}" -p $'\e[0;33many additional packages? \e[0m' PACKAGES
+    pup pixi init
+    COMMAND="pixi add uv python${PYVER:+=$PYVER} $PACKAGES"
+    log "🐶 asked for: '$COMMAND'"
+    $COMMAND
+    exit 0
+fi
+if [[ "$1" =~ ^py3.*$ ]]; then
+    log_command "$0 $*"
+    pup pixi init
+    COMMAND="pixi add uv python=${1#py} ${@:2}"
+    log "🐶 asked for: '$COMMAND'"
+    $COMMAND
+    log "✨ $(pixi run python -VV)"
+    exit 0
+fi
+#### fetch|install ####
+if [[ "$1" =~ ^fetch$|^install$ ]]; then
+    USAGE="\e[1m\e[4mUsage\e[0m: pup $1 [WHERE] [WHAT] [OPTIONS]"
+    if [ -z "$2" ]; then
+        echo -e $USAGE
+        read -ei "." -p "Fetch packages where? " WHERE
+        read -ei "" -p "What packages? " WHAT
+        read -ei "" -p "Options to pass to 'uv pip install'? " OPTIONS
+        COMMAND="pup fetch $WHERE $WHAT $OPTIONS"
+        log "🐶 asked for: '$COMMAND'"
+        $COMMAND
+        exit 0
+    fi
+    if [ $# -eq 2 ]; then
+        echo "`🐶 fetch` needs more arguments"
+        echo -e $USAGE
+    fi
+    if [ $# -gt 2 ] && [ "$2" = "." ]; then
+        PYTHON=$(pixi run which python)
+    fi
+    if [ $# -gt 2 ] && [ "$2" != "." ]; then
+        pup new $2
+        PYTHON="$(pup home)/$2/.venv/bin/python"
+    fi
+    COMMAND="pixi run uv pip install -p $PYTHON ${@:3}"
+    log "🐶 asked for: '$COMMAND'"
+    $COMMAND
+    exit 0
+fi
+#### kernel ####
+if [ "$1" == "kernel" ]; then
+    USAGE="\e[1m\e[4mUsage\e[0m: pup $1 [WHERE] [KERNEL_NAME]"
+    if [ -z "$2" ]; then
+        echo -e $USAGE
+        read -ei "." -p "Install kernel for which environment? " WHERE
+        PYVER=$(pixi run python -V); PYVER=${PYVER#Python }
+        read -ei "pup$PYVER-$WHERE" -p "Unique kernel name? (allowed characters: [a-zA-Z0-9.-_]) " KERNEL_NAME
+        COMMAND="pup kernel $WHERE $KERNEL_NAME"
+        log "🐶 asked for: '$COMMAND'"
+        $COMMAND
+        exit 0
+    fi
+    if [ $# -eq 2 ]; then
+        echo "`🐶 kernel` needs more arguments"
+        echo -e $USAGE
+    fi
+    if [ $# -gt 2 ] && [ "$2" = "." ]; then
+        PYTHON=$(pixi run which python)
+    fi
+    if [ $# -gt 2 ] && [ "$2" != "." ]; then
+        PYTHON="$(pup home)/$2/.venv/bin/python"
+    fi
+    pup fetch $2 ipykernel
+    COMMAND="$PYTHON -m ipykernel install --user --name $3"
+    log "🐶 asked for: '$COMMAND'"
+    $COMMAND
+    exit 0
+fi
+#### new ####
+if [ "$1" == "new" ]; then
+    log_command "$0 $*"
+    VENV_PATH="$(pup home)/$2/.venv"
+    if [ ! -d "$VENV_PATH" ]; then
+        pixi run uv venv -p "$(pixi run which python)" $VENV_PATH
+        log "pup & uv created new environment in $2"
+    else
+        log "found existing uv virtual environment $2"
+    fi
+    exit 0
+fi
+#### play ####
+if [ "$1" == "play" ]; then
+    pixi run start
+    exit 0
+fi
+#########################
+#### HELPER COMMANDS ####
+#########################
+#### home ####
+if [ "$1" == "home" ]; then
+    if [ -L "$0" ]; then
+        dirname $(readlink -f $SYMLINK_ON_PATH)
+    else
+        realpath $(dirname $0)
+    fi
+    exit 0
+fi
+#### pixi ####
+if [ "$1" == "pixi" ]; then
+    if [ "$2" == "init" ]; then
+        # install and init Pixi project in pup's home dir
+        if ! command -v pixi &> /dev/null; then
+            export SHELL=bash
+            curl -fsSL https://pixi.sh/install.sh | bash
+        fi
+        PUPHOME=$(pup home)
+        if [[ ! -f "$PUPHOME"/pixi.toml ]]; then
+            pixi init "$PUPHOME"
+            # pixi task add start "$PIXI_DEFAULT_TASK"
+        fi
+        exit 0
+    fi
+    if [ "$2" == "rm" ]; then
+        # remove pixi files
+        rm -r "$(pup home)"/.pixi "$(pup home)"/pixi*
+        exit 0
+    fi
+fi
+#### version ####
+if [ "$1" == "version" ]; then
+    echo $VERSION
+    exit 0
+fi
+#### which ####
+if [ "$1" == "which" ]; then
+    echo $(ls "$SYMLINK_ON_PATH") -\> $(readlink -f $SYMLINK_ON_PATH)
+    exit 0
+fi
+#### whereami ####
+if [ "$1" == "whereami" ]; then
+    cat "$(pup home)/$LOG"
+    exit 0
+fi
+#### unknown command ####
+echo -e "\e[0;33m🐶 does not know 'pup $*'\e[0m\nrun 'pup' to list known commands"
sql.py
ADDED
@@ -0,0 +1,35 @@
+import base64
+import duckdb
+class Q(str):
+    UNSAFE = ["CREATE", "DELETE", "DROP", "INSERT", "UPDATE"]
+
+    def __init__(self, query: str):
+        self.is_safe = not any([cmd in query.upper() for cmd in self.UNSAFE])
+
+    def run(self, sql_engine=None):
+        try:
+            if sql_engine is None:
+                return self.run_duckdb()
+            else:
+                return self.run_sql(sql_engine)
+        except Exception as e:
+            pass
+
+    def run_duckdb(self):
+        return duckdb.sql(self)
+
+    def df(self, sql_engine=None):
+        result = self.run(sql_engine=sql_engine)
+        if result is None: return
+        result_df = result.df()
+        return result_df
+
+
+    @property
+    def base64(self):
+        return base64.b64encode(self.encode()).decode()
+
+    @classmethod
+    def from_base64(cls, b64):
+        """Initializing from base64-encoded URL paths."""
+        return cls(base64.b64decode(b64).decode())
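`Q` subclasses `str`, so a query carries its own helpers: a keyword-based `is_safe` check, DuckDB execution via `run()`/`df()`, and base64 encoding for embedding in URL paths. A brief usage sketch, assuming only that duckdb and pandas are installed:

```python
# Usage sketch for sql.Q; requires duckdb (and pandas for .df()).
from sql import Q

q = Q("SELECT 1 AS one, 'two' AS two")
print(q.is_safe)        # True: no CREATE/DELETE/DROP/INSERT/UPDATE keyword

token = q.base64        # base64 of the query text, as used in /q/{...} paths
roundtrip = Q.from_base64(token)
print(roundtrip == q)   # True: Q is a str subclass, compares as plain text

df = roundtrip.df()     # pandas DataFrame via duckdb.sql(...).df()
print(df)
```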