Update app.py
app.py CHANGED
@@ -1,34 +1,21 @@
 from fastai.vision.all import *
 import gradio as gr
-
-#from diffusers import FluxPipeline
-#from huggingface_hub import login
-#login()
-
-#pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
-#pipe.enable_model_cpu_offload() #save some VRAM by offloading the model to CPU. Remove this if you have enough GPU power
-
-#prompt = "A cat holding a sign that says hello world"
-#image = pipe(
-#    prompt,
-#    height=1024,
-#    width=1024,
-#    guidance_scale=3.5,
-#    num_inference_steps=50,
-#max_sequence_length=512,
-#    generator=torch.Generator("cpu").manual_seed(0)
-#).images[0]
-#image.save("flux-dev.png")
+import fal_client
 
+def on_queue_update(update):
+    if isinstance(update, fal_client.InProgress):
+        for log in update.logs:
+            print(log["message"])
 
-learn = load_learner('export.pkl')
 
-categories = ('balsamroot', 'bladderpod', 'blazing star', 'bristlecone pine flowers', 'brittlebrush')
 def classify_image(img):
     pred, idx, probs = learn.predict(img)
     return dict(zip(categories, map(float, probs)))
 
 
+learn = load_learner('export.pkl')
+
+categories = ('balsamroot', 'bladderpod', 'blazing star', 'bristlecone pine flowers', 'brittlebrush')
 
 image=gr.Image(height = 192, width = 192)
 label = gr.Label()
@@ -36,3 +23,12 @@ examples = ['https://www.deserthorizonnursery.com/wp-content/uploads/2024/03/Bri
 intf = gr.Interface(fn=classify_image, inputs=image, outputs=label, examples=examples)
 intf.launch(inline=False)
 
+result = fal_client.subscribe(
+    "fal-ai/flux/schnell",
+    arguments={
+        "prompt": "Extreme close-up of a single tiger eye, direct frontal view. Detailed iris and pupil. Sharp focus on eye texture and color. Natural lighting to capture authentic eye shine and depth. The word \"FLUX\" is painted over it in big, white brush strokes with visible texture."
+    },
+    with_logs=True,
+    on_queue_update=on_queue_update,
+)
+print(result)
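The new fal_client.subscribe call in this commit only prints the raw response. Below is a rough sketch of how that response might be consumed and saved to disk; the "images"/"url" keys, the output filename, and the shorter prompt are assumptions for illustration and are not shown in this diff.

import urllib.request

import fal_client

def on_queue_update(update):
    # Stream queue/progress logs while the request runs (mirrors the handler added in this commit).
    if isinstance(update, fal_client.InProgress):
        for log in update.logs:
            print(log["message"])

result = fal_client.subscribe(
    "fal-ai/flux/schnell",
    arguments={"prompt": "A field of desert wildflowers at golden hour"},  # illustrative prompt
    with_logs=True,
    on_queue_update=on_queue_update,
)

# Assumed response shape: {"images": [{"url": ...}, ...], ...}; adjust the keys if the payload differs.
image_url = result["images"][0]["url"]
urllib.request.urlretrieve(image_url, "flux-schnell.png")
print(f"saved {image_url} to flux-schnell.png")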