update code
- app.py +3 -3
- model_cache/.gitignore +4 -0
app.py CHANGED
@@ -75,7 +75,7 @@ def llm_pipeline(tokenizer, base_model, input_text):
         tokenizer=tokenizer,
         max_length=600,
         min_length=300,
-        truncation=True
+        truncation=True
     )
     print("Summarizing...")
     result = pipe_sum(input_text)
@@ -120,7 +120,7 @@ def main():
             trust_remote_code=True,
         )
         base_model = AutoModelForSeq2SeqLM.from_pretrained(
-            checkpoint, torch_dtype=torch.float32, trust_remote_code=True
+            checkpoint, torch_dtype=torch.float32, trust_remote_code=True, cache_dir="model_cache"
         )
     else:  # default Flan T5 small
         checkpoint = "MBZUAI/LaMini-Flan-T5-77M"
@@ -131,7 +131,7 @@ def main():
             model_max_length=1000,
         )
         base_model = AutoModelForSeq2SeqLM.from_pretrained(
-            checkpoint, torch_dtype=torch.float32
+            checkpoint, torch_dtype=torch.float32, cache_dir="model_cache"
         )
     with col2:
         st.write("Skip any pages?")
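The functional change above points the Transformers download cache at a repo-local directory via cache_dir="model_cache", so checkpoint files are stored next to the app instead of under the default ~/.cache/huggingface location. Below is a minimal sketch of the resulting load path: the checkpoint name, dtype, tokenizer setting, and pipeline arguments mirror the diff, while the "summarization" task string, the sample input, and the surrounding scaffolding are assumptions for illustration, not the app's actual code.

# Sketch only: checkpoint and arguments mirror the diff; task string and input text are placeholders.
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline

checkpoint = "MBZUAI/LaMini-Flan-T5-77M"
tokenizer = AutoTokenizer.from_pretrained(checkpoint, model_max_length=1000)
base_model = AutoModelForSeq2SeqLM.from_pretrained(
    checkpoint, torch_dtype=torch.float32, cache_dir="model_cache"  # weights cached under ./model_cache
)
pipe_sum = pipeline(
    "summarization",
    model=base_model,
    tokenizer=tokenizer,
    max_length=600,
    min_length=300,
    truncation=True,
)
result = pipe_sum("Long input text to summarize ...")
print(result[0]["summary_text"])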
model_cache/.gitignore ADDED
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
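Taken together with the app.py change, the new .gitignore keeps the model_cache/ directory in the repository while excluding its contents from version control: the * pattern ignores every downloaded checkpoint file, and !.gitignore re-includes the ignore file itself so the otherwise-empty directory can still be committed.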