Commit bcbb85b
Parent(s): 71c2318
fix: limit token lengths
app.py CHANGED
@@ -56,7 +56,7 @@ def vectorize_dataset(split: str, column: str):
     global df
     global ds
     df = ds[split].to_polars()
-    embeddings = model.encode(df[column], max_length=512
+    embeddings = model.encode(df[column], max_length=512)
     df = df.with_columns(pl.Series(embeddings).alias("embeddings"))


@@ -153,4 +153,5 @@ with gr.Blocks() as demo:
     )

     btn_run.click(fn=run_query, inputs=query_input, outputs=results_output)
+
 demo.launch()
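For context, the first hunk lives in vectorize_dataset, which converts a dataset split to a Polars DataFrame and stores one embedding per row of the chosen column. Below is a minimal, hypothetical sketch of that pattern, not the Space's actual code: the dataset and model names are placeholders, and it assumes a sentence-transformers encoder, where the 512-token cap is set via model.max_seq_length rather than passed to encode() as in the diff. Unlike the Space, the sketch returns the frame instead of mutating the global df.

# Hypothetical sketch of the pattern the first hunk touches: encode one text
# column of a dataset split and attach the vectors as a Polars column.
# Dataset and model names are placeholders; the Space's own model apparently
# accepts max_length directly in encode().
import polars as pl
from datasets import load_dataset
from sentence_transformers import SentenceTransformer

ds = load_dataset("imdb")                        # placeholder dataset
model = SentenceTransformer("all-MiniLM-L6-v2")  # placeholder model
model.max_seq_length = 512                       # limit token lengths, the commit's intent

def vectorize_dataset(split: str, column: str) -> pl.DataFrame:
    df = ds[split].to_polars()
    # one fixed-size float vector per row of the chosen text column
    embeddings = model.encode(df[column].to_list())
    return df.with_columns(pl.Series(embeddings).alias("embeddings"))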
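The second hunk only inserts a blank line before demo.launch(). For orientation, here is a hypothetical skeleton of the Gradio Blocks UI that the wiring line implies; only run_query, query_input, results_output, and btn_run come from the diff, everything else is illustrative.

# Hypothetical Gradio skeleton around the second hunk; the component and
# callback names come from the diff, the layout and run_query body do not.
import gradio as gr

def run_query(query: str) -> str:
    # placeholder: the Space presumably embeds the query and searches the
    # precomputed "embeddings" column
    return f"results for {query!r}"

with gr.Blocks() as demo:
    query_input = gr.Textbox(label="Query")
    btn_run = gr.Button("Run")
    results_output = gr.Textbox(label="Results")

    btn_run.click(fn=run_query, inputs=query_input, outputs=results_output)

demo.launch()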