mwitiderrick committed
Commit 9b61462
1 Parent(s): ad83615

Update app.py

Files changed (1): app.py (+2 -17)
app.py CHANGED
@@ -32,13 +32,6 @@ When deploying a text classification model, decreasing the model’s latency and
 
 '''
 task = "zero_shot_text_classification"
-dense_classification_pipeline = Pipeline.create(
-    task=task,
-    model_path="zoo:nlp/text_classification/distilbert-none/pytorch/huggingface/mnli/base-none",
-    model_scheme="mnli",
-    model_config={"hypothesis_template": "This text is related to {}"},
-)
-
 sparse_classification_pipeline = Pipeline.create(
     task=task,
     model_path="zoo:nlp/text_classification/distilbert-none/pytorch/huggingface/mnli/pruned80_quant-none-vnni",
@@ -46,21 +39,13 @@ sparse_classification_pipeline = Pipeline.create(
     model_config={"hypothesis_template": "This text is related to {}"},
 )
 def run_pipeline(text):
-    dense_start = time.perf_counter()
-
-    dense_output = dense_classification_pipeline(sequences=text, labels=['politics', 'public health', 'Europe'])
-    dense_result = dict(dense_output)
-    dense_end = time.perf_counter()
-    dense_duration = (dense_end - dense_start) * 1000.0
-
     sparse_start = time.perf_counter()
-
     sparse_output = sparse_classification_pipeline(sequences=text, labels=['politics', 'public health', 'Europe'])
     sparse_result = dict(sparse_output)
     sparse_end = time.perf_counter()
     sparse_duration = (sparse_end - sparse_start) * 1000.0
 
-    return sparse_result, sparse_duration, dense_result, dense_duration
+    return sparse_result, sparse_duration
 
 
 with gr.Blocks() as demo:
@@ -85,7 +70,7 @@ with gr.Blocks() as demo:
     btn.click(
         run_pipeline,
         inputs=[text],
-        outputs=[sparse_answers, sparse_duration, dense_answers, dense_duration],
+        outputs=[sparse_answers, sparse_duration],
     )
 
 if __name__ == "__main__":
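
With the dense baseline removed, app.py now builds only the sparse DeepSparse pipeline, and the Gradio callback returns two values (labels and latency) instead of four. Below is a minimal sketch of the remaining flow outside Gradio; it reuses the model stub and hypothesis template shown in the diff, the model_scheme="mnli" argument is assumed to match the deleted dense pipeline (the sparse pipeline's own line falls between the hunks), and the sample input text is illustrative only.

# Minimal sketch (not part of the commit): run the sparse zero-shot pipeline
# once and time it in milliseconds, mirroring run_pipeline in app.py.
import time

from deepsparse import Pipeline

sparse_classification_pipeline = Pipeline.create(
    task="zero_shot_text_classification",
    model_path="zoo:nlp/text_classification/distilbert-none/pytorch/huggingface/mnli/pruned80_quant-none-vnni",
    model_scheme="mnli",  # assumed to match the removed dense pipeline's setting
    model_config={"hypothesis_template": "This text is related to {}"},
)

def run_pipeline(text):
    # Time a single inference and convert the result to a plain dict,
    # exactly as the Gradio callback in app.py does.
    start = time.perf_counter()
    output = sparse_classification_pipeline(
        sequences=text,
        labels=["politics", "public health", "Europe"],
    )
    duration_ms = (time.perf_counter() - start) * 1000.0
    return dict(output), duration_ms

if __name__ == "__main__":
    # Illustrative input; any short sentence works.
    result, latency_ms = run_pipeline("The EU is debating new public health rules.")
    print(result, f"{latency_ms:.1f} ms")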