staindart committed on
Commit
de2a93c
·
verified ·
1 Parent(s): de4cd1f

Deploy TinyModel1Space from GitHub Actions

Browse files
Files changed (3) hide show
  1. README.md +7 -5
  2. app.py +77 -0
  3. requirements.txt +3 -0
README.md CHANGED
@@ -1,12 +1,14 @@
1
  ---
2
  title: TinyModel1Space
3
- emoji: 👁
4
- colorFrom: indigo
5
- colorTo: gray
6
  sdk: gradio
7
- sdk_version: 6.12.0
8
  app_file: app.py
9
  pinned: false
10
  ---
11
 
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
1
  ---
2
  title: TinyModel1Space
3
+ emoji: 🤗
4
+ colorFrom: blue
5
+ colorTo: indigo
6
  sdk: gradio
7
+ sdk_version: 5.49.1
8
  app_file: app.py
9
  pinned: false
10
  ---
11
 
12
+ # TinyModel1Space
13
+
14
+ Interactive demo for `HyperlinksSpace/TinyModel1`.
app.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import gradio as gr
3
+ from transformers import pipeline
4
+
5
+ MODEL_ID = "HyperlinksSpace/TinyModel1"
6
+ SPACE_URL = "https://hyperlinksspace-tinymodel1space.hf.space"
7
+ _clf = None
8
+
9
+
10
+ def get_pipeline():
11
+ global _clf
12
+ if _clf is not None:
13
+ return _clf
14
+ token = os.getenv("HF_TOKEN")
15
+ kwargs = {}
16
+ if token:
17
+ kwargs["token"] = token
18
+ _clf = pipeline(
19
+ "text-classification",
20
+ model=MODEL_ID,
21
+ tokenizer=MODEL_ID,
22
+ top_k=None,
23
+ **kwargs,
24
+ )
25
+ return _clf
26
+
27
+
28
+ def _prediction_list(batch_output):
29
+ # One batch item: either a single {label, score} dict or a list of them.
30
+ if not batch_output:
31
+ return []
32
+ first = batch_output[0]
33
+ if isinstance(first, dict):
34
+ return [first]
35
+ if isinstance(first, list):
36
+ return first
37
+ return []
38
+
39
+
40
+ def predict(text):
41
+ text = (text or "").strip()
42
+ if not text:
43
+ return {}, "Please enter some text first."
44
+ try:
45
+ clf = get_pipeline()
46
+ except Exception as exc:
47
+ return {}, f"Model load failed for {MODEL_ID}: {exc}"
48
+ raw = clf(text, truncation=True, max_length=128)
49
+ preds = _prediction_list(raw)
50
+ if not preds:
51
+ return {}, "Empty model output (unexpected pipeline shape)."
52
+ preds = sorted(preds, key=lambda x: float(x["score"]), reverse=True)
53
+ return {item["label"]: float(item["score"]) for item in preds}, "OK"
54
+
55
+
56
+ EXAMPLES = [
57
+ ["Apple reported strong quarterly revenue growth and raised guidance."],
58
+ ["The team won the championship after a dramatic overtime finish."],
59
+ ["Scientists announced a new breakthrough in battery technology."],
60
+ ["Leaders met to discuss tensions and trade policy in the region."],
61
+ ]
62
+
63
+ with gr.Blocks(title="TinyModel1Space") as demo:
64
+ gr.Markdown("# TinyModel1Space")
65
+ gr.Markdown("Model: `HyperlinksSpace/TinyModel1`")
66
+ gr.Markdown("Public URL: [https://hyperlinksspace-tinymodel1space.hf.space](https://hyperlinksspace-tinymodel1space.hf.space)")
67
+ inp = gr.Textbox(lines=4, label="Input text", placeholder="Paste a news sentence here...")
68
+ out = gr.Label(num_top_classes=4, label="Predicted class probabilities")
69
+ status = gr.Textbox(label="Status", interactive=False)
70
+ run_btn = gr.Button("Run Inference", variant="primary")
71
+ run_btn.click(fn=predict, inputs=inp, outputs=[out, status])
72
+ inp.submit(fn=predict, inputs=inp, outputs=[out, status])
73
+ gr.Examples(examples=EXAMPLES, inputs=inp)
74
+
75
+ if __name__ == "__main__":
76
+ print(f"Space URL: {SPACE_URL}")
77
+ demo.launch(ssr_mode=False)
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ gradio
2
+ transformers
3
+ torch