mwitiderrick committed on
Commit e155e73 · 1 Parent(s): fcbc27a

Update app.py

Files changed (1):
  1. app.py (+2 -20)
app.py CHANGED
@@ -27,11 +27,6 @@ For example, you may want to process text and store the entities in different co
 [Want to train a sparse model on your data? Checkout the documentation on sparse transfer learning](https://docs.neuralmagic.com/use-cases/natural-language-processing/question-answering)
 '''
 task = "ner"
-dense_qa_pipeline = Pipeline.create(
-    task=task,
-    model_path="zoo:nlp/token_classification/bert-base/pytorch/huggingface/conll2003/base-none",
-)
-
 sparse_qa_pipeline = Pipeline.create(
     task=task,
     model_path="zoo:nlp/token_classification/bert-base/pytorch/huggingface/conll2003/12layer_pruned80_quant-none-vnni",
@@ -65,28 +60,15 @@ def map_ner(inference):
     return entities
 
 def run_pipeline(text):
-    dense_start = time.perf_counter()
-
-    dense_output = dense_qa_pipeline(text)
-    dense_entities = map_ner(dense_output)
-
-    dense_output = {"text": text, "entities": dense_entities}
-
-    dense_end = time.perf_counter()
-    dense_duration = (dense_end - dense_start) * 1000.0
-
     sparse_start = time.perf_counter()
-
     sparse_output = sparse_qa_pipeline(text)
     sparse_entities = map_ner(sparse_output)
-
     sparse_output = {"text": text, "entities": sparse_entities}
-
     sparse_result = dict(sparse_output)
     sparse_end = time.perf_counter()
     sparse_duration = (sparse_end - sparse_start) * 1000.0
 
-    return sparse_output, sparse_duration, dense_output, dense_duration
+    return sparse_output, sparse_duration
 
 
 with gr.Blocks() as demo:
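With the dense branch removed, run_pipeline only times the sparse path. The sketch below shows what the function reduces to after this commit, reusing the sparse_qa_pipeline created above; map_ner is defined earlier in app.py and its body is not part of this diff, so a hypothetical stub stands in for it here.

```python
import time

def map_ner(inference):
    # Hypothetical stand-in: the real helper earlier in app.py converts
    # pipeline predictions into Gradio-style entity dicts.
    return []

def run_pipeline(text):
    # Time only the sparse pipeline and report its latency in milliseconds.
    sparse_start = time.perf_counter()
    sparse_output = sparse_qa_pipeline(text)
    sparse_entities = map_ner(sparse_output)
    sparse_output = {"text": text, "entities": sparse_entities}
    sparse_result = dict(sparse_output)  # kept by the diff, though no longer returned
    sparse_end = time.perf_counter()
    sparse_duration = (sparse_end - sparse_start) * 1000.0
    return sparse_output, sparse_duration
```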
@@ -113,7 +95,7 @@ with gr.Blocks() as demo:
     btn.click(
         run_pipeline,
         inputs=[text],
-        outputs=[sparse_answers,sparse_duration,dense_answers,dense_duration],
+        outputs=[sparse_answers,sparse_duration],
     )
 
 if __name__ == "__main__":
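The btn.click wiring now only maps the two sparse outputs onto UI components. The sketch below shows one plausible layout for the Blocks demo; the component types (Textbox, HighlightedText, Number) and the button label are assumptions, since the diff only shows the variable names text, btn, sparse_answers, and sparse_duration.

```python
import gradio as gr

with gr.Blocks() as demo:
    # Component types below are assumptions; the diff only shows variable names.
    text = gr.Textbox(label="Input text")
    btn = gr.Button("Run inference")
    sparse_answers = gr.HighlightedText(label="Sparse model entities")
    sparse_duration = gr.Number(label="Sparse latency (ms)")

    # run_pipeline returns ({"text": ..., "entities": [...]}, latency_ms),
    # matching the two output components in order.
    btn.click(
        run_pipeline,
        inputs=[text],
        outputs=[sparse_answers, sparse_duration],
    )

if __name__ == "__main__":
    demo.launch()
```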
 