Update app.py
app.py CHANGED
@@ -4,7 +4,7 @@ from langchain_core.prompts import PromptTemplate
 from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
 from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline,BitsAndBytesConfig
 import os
-
+aa=''
 
 HF_TOKEN = os.environ["HF_TOKEN"]
 # quants = BitsAndBytesConfig(load_in_4bit=True)
@@ -50,7 +50,8 @@ def main():
     hf = HuggingFacePipeline(pipeline=pipe)
     chain = LLMChain(llm=hf,prompt=prompt,verbose=True)
     aa = chain.invoke({"topic": topic,"words":word_count,"role":role})
-
+
+    st.write(aa)
     st.write("Will Come here")
 
 if __name__ == "__main__":
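For context, below is a minimal, self-contained sketch of the flow this commit completes: build a `transformers` pipeline, wrap it in `HuggingFacePipeline`, run it through an `LLMChain`, and display the result with `st.write(aa)`. Everything outside the diff hunks (the model choice, the prompt template, and the input values) is not in the commit and is assumed here for illustration only.

```python
import os

import streamlit as st
from langchain.chains import LLMChain
from langchain_core.prompts import PromptTemplate
from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Hugging Face access token, read from the environment as in app.py.
HF_TOKEN = os.environ["HF_TOKEN"]

def main():
    # Hypothetical prompt with the three variables the diff passes to invoke();
    # the real template is not shown in the commit.
    prompt = PromptTemplate.from_template(
        "As a {role}, write about {topic} in roughly {words} words."
    )

    # Illustrative small model; the Space's actual model is not in the diff.
    tok = AutoTokenizer.from_pretrained("gpt2", token=HF_TOKEN)
    model = AutoModelForCausalLM.from_pretrained("gpt2", token=HF_TOKEN)
    pipe = pipeline("text-generation", model=model, tokenizer=tok, max_new_tokens=128)

    # The lines the second hunk touches: wrap the pipeline, run the chain,
    # then render the result in the Streamlit app.
    hf = HuggingFacePipeline(pipeline=pipe)
    chain = LLMChain(llm=hf, prompt=prompt, verbose=True)
    aa = chain.invoke({"topic": "space travel", "words": 100, "role": "teacher"})
    # LLMChain.invoke returns a dict; the generated text sits under the "text" key.
    st.write(aa)
    st.write("Will Come here")

if __name__ == "__main__":
    main()
```

One scoping note on the first hunk: the module-level `aa=''` appears intended as a safe default, but the `aa = chain.invoke(...)` assignment inside `main()` creates a separate local variable, so the `st.write(aa)` added in the second hunk always sees the local chain result, not the module-level default.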