from transformers import AutoModelForCausalLM, AutoTokenizer, T5ForConditionalGeneration, T5Tokenizer
import re
import torch
# Make newly created tensors default to CUDA floats; this requires a GPU at import time.
torch.set_default_tensor_type(torch.cuda.FloatTensor)
import os
import io
import warnings
from PIL import Image
from stability_sdk import client
import stability_sdk.interfaces.gooseai.generation.generation_pb2 as generation
import gradio as gr
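# Pipeline overview:
#   1. generate_post    -- few-shot prompts BLOOM-3B to draft a short social-media post.
#   2. generate_caption -- a fine-tuned T5 model condenses that post into an image caption.
#   3. demo_smg         -- wires both together and sends the caption to the Stability AI API for an image.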
def generate_post(model, tokenizer, company_name, description, example1, example2, example3):
    # Few-shot prompt: three complete example posts, then an open-ended line for the model to finish.
    prompt = f""" {company_name} {description}, {example1}.
{company_name} {description}, {example2}.
{company_name} {description}, {example3}.
{company_name} {description}, """
    input_ids = tokenizer(prompt, return_tensors="pt").to(0)  # move the encoded prompt to GPU 0
    sample = model.generate(**input_ids, top_k=0, temperature=0.7, do_sample=True,
                            max_new_tokens=70, repetition_penalty=5.4)
    outputs = tokenizer.decode(sample[0])
    # Keep only the continuation the model generated after the prompt.
    res = outputs.split(prompt)[1]
    # Strip hashtags, mentions, URLs, newlines, and repeated spaces.
    res = re.sub(r'#\w+', ' ', res)
    res = re.sub(r'@[^\s]\w+', ' ', res)
    res = re.sub(r'http\S+', ' ', res)
    res = res.replace("\n", " ")
    res = re.sub(' +', ' ', res)
    return res
def generate_caption(model, text_body, tokenizer, max_length):
    # Prepend the T5-style task prefix before encoding.
    test_sent = 'generate: ' + text_body
    input_ids = tokenizer.encode(test_sent, return_tensors="pt")  # .to('cuda')
    outs = model.generate(input_ids,
                          max_length=max_length,
                          do_sample=True,
                          temperature=0.7,
                          min_length=8,
                          repetition_penalty=5.4,
                          max_time=12,
                          top_p=1.0,
                          top_k=50)
    sent = tokenizer.decode(outs[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
    return sent
def demo_smg(company_name, description, example1, example2, example3):
    # NOTE: tokens and keys below should normally come from environment variables or Space secrets
    # rather than being hard-coded.
    access_token = "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"
    # Models are (re)loaded on every call; caching them at module level would be faster.
    model_cp = T5ForConditionalGeneration.from_pretrained("Abdelmageed95/caption_model", use_auth_token=access_token)
    tokenizer = T5Tokenizer.from_pretrained('t5-base')
    model_bm = AutoModelForCausalLM.from_pretrained("bigscience/bloom-3b", use_auth_token=access_token)
    tokenizer_bm = AutoTokenizer.from_pretrained("bigscience/bloom-3b")
    res = generate_post(model_bm, tokenizer_bm, company_name, description, example1, example2, example3)
    generated_caption = generate_caption(model_cp, res, tokenizer, 30)
    os.environ['STABILITY_HOST'] = "grpc.stability.ai:443"
    os.environ['STABILITY_KEY'] = "sk-t4x1wv6WFgTANF7O1TkWDJZzxXxQZeU6X7oybl6rdCOOiHIk"
    stability_api = client.StabilityInference(
        key=os.environ['STABILITY_KEY'],
        verbose=True)
    # Append style modifiers before sending the caption to the image model.
    generated_caption = generated_caption + ", intricate, highly detailed, smooth, sharp focus, 8k"
    answers = stability_api.generate(prompt=generated_caption,
                                     # seed=34567,
                                     steps=70)
    img = None  # stays None if the safety filter blocks every artifact
    for resp in answers:
        for artifact in resp.artifacts:
            if artifact.finish_reason == generation.FILTER:
                warnings.warn(
                    "Your request activated the API's safety filters and could not be processed. "
                    "Please modify the prompt and try again.")
            if artifact.type == generation.ARTIFACT_IMAGE:
                img = Image.open(io.BytesIO(artifact.binary))
    return res, generated_caption, img
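# Example inputs for a quick one-off test (oilfield-services posts from ADES Group).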
company_name = "ADES Group"
description = "delivers full-scale petroleum services; from onshore and offshore drilling to full oil & gas projects and services, with emphasis on the HSE culture while maintaining excellence in operation targets."
example1 = """Throwback to ADM 680 Team during their Cyber-chair controls Course in August,
Our development strategy at ADES does not only focus on enriching the technical expertise of our teams in their specialization in jack-up rigs,
but also in providing access to latest operational models"""
example2 = """With complexity of oil & gas equipment and the seriousness of failure and its consequences confronting our people,
it has become a necessity to equip our Asset Management Team with leading methodologies and techniques that enable them to think and act proactively"""
example3 = """Part of our people development strategy is providing our senior leadership with the latest industry technologies
and world class practices and standards"""

txt, generated_caption, im = demo_smg(company_name, description, example1, example2, example3)
print(txt)
print(generated_caption)
# demo = gr.Interface(
#     fn=demo_smg,
#     inputs=["text", "text", "text", "text", "text"],
#     outputs=["text", "text", "image"],
# )
# demo.launch(share=True)
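# Uncommenting the gr.Interface block above turns the script into an interactive Gradio demo
# with five text inputs (company name, description, three example posts) and text/text/image outputs.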