import gradio as gr
from functools import partial
from transformers import pipeline, pipelines


######################
##### INFERENCE ######
######################

# Classification: map every predicted label to its score for a gr.Label output.
def cls_inference(input: list[str], pipe: pipeline) -> dict:
    results = pipe(input, top_k=None)
    return {x["label"]: x["score"] for x in results}


# POSP tagging: return the text plus the raw entities for a gr.HighlightedText output.
def tagging(text: str, pipe: pipeline):
    output = pipe(text)
    return {"text": text, "entities": output}


# Text analysis: run every pipeline on the same text, dispatching on the pipeline type.
def text_analysis(text, pipes: list[pipeline]):
    outputs = []
    for pipe in pipes:
        if isinstance(pipe, pipelines.token_classification.TokenClassificationPipeline):
            outputs.append(tagging(text, pipe))
        else:
            outputs.append(cls_inference(text, pipe))
    return outputs
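
# Illustrative output shapes only (the actual labels depend on whichever models the Space loads):
#   cls_inference("...", pipe) -> {"positive": 0.91, "negative": 0.06, "neutral": 0.03}
#   tagging("...", pipe)       -> {"text": "...", "entities": [{"entity": "NOUN", "word": "...",
#                                  "score": 0.99, "start": 0, "end": 4}, ...]}
# gr.Label renders the first shape directly; gr.HighlightedText accepts the second.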


######################
##### INTERFACE ######
######################

# Classification demo: partial() binds the pipeline, so Gradio only passes the textbox value.
def text_interface(pipe: pipeline, examples: list[str], output_label: str, title: str, desc: str):
    return gr.Interface(
        fn=partial(cls_inference, pipe=pipe),
        inputs=[
            gr.Textbox(lines=5, label="Input Text"),
        ],
        title=title,
        description=desc,
        outputs=[gr.Label(label=output_label)],
        examples=examples,
        allow_flagging="never",
    )


# Token-classification demo: highlighted tags instead of a label distribution.
def token_classification_interface(pipe: pipeline, examples: list[str], output_label: str, title: str, desc: str):
    return gr.Interface(
        fn=partial(tagging, pipe=pipe),
        inputs=[
            # Placeholder text is Indonesian for "Enter a sentence here...".
            gr.Textbox(placeholder="Masukan kalimat di sini...", label="Input Text"),
        ],
        outputs=[gr.HighlightedText(label=output_label)],
        title=title,
        examples=examples,
        description=desc,
        allow_flagging="never",
    )


# Text-analysis dashboard: one textbox drives every pipeline at once inside a Blocks layout.
def text_analysis_interface(pipe: list, examples: list[str], output_label: list[str], title: str, desc: str):
    with gr.Blocks() as text_analysis_interface:
        gr.Markdown(title)
        gr.Markdown(desc)
        input_text = gr.Textbox(lines=5, label="Input Text")
        with gr.Row():
            # Token-classification pipelines render as highlighted text; the rest as label scores.
            outputs = [
                (
                    gr.HighlightedText(label=label)
                    if isinstance(p, pipelines.token_classification.TokenClassificationPipeline)
                    else gr.Label(label=label)
                )
                for label, p in zip(output_label, pipe)
            ]
        btn = gr.Button("Analyze")
        btn.click(
            fn=partial(text_analysis, pipes=pipe),
            inputs=[input_text],
            outputs=outputs,
        )
        gr.Examples(
            examples=examples,
            inputs=input_text,
            outputs=outputs,
        )
    return text_analysis_interface
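

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original Space): shows how the three
# builders above could be combined and launched. The model checkpoints and the
# example sentence are placeholders, not the models this Space actually serves.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    sentiment_pipe = pipeline("text-classification", model="org/placeholder-sentiment-model")  # hypothetical
    pos_pipe = pipeline("token-classification", model="org/placeholder-pos-tagging-model")     # hypothetical
    example_sentences = ["Saya sangat senang hari ini."]  # "I am very happy today."

    demo = gr.TabbedInterface(
        [
            text_interface(sentiment_pipe, example_sentences, "Sentiment", "Sentiment Analysis",
                           "Classify the sentiment of a sentence."),
            token_classification_interface(pos_pipe, example_sentences, "POS Tags", "POS Tagging",
                                           "Tag each token with its part of speech."),
            text_analysis_interface([sentiment_pipe, pos_pipe], example_sentences,
                                    ["Sentiment", "POS Tags"], "Text Analysis",
                                    "Run every model on the same sentence."),
        ],
        tab_names=["Sentiment", "POS Tagging", "Text Analysis"],
    )
    demo.launch()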