import gradio as gr
from transformers import AutoModel, AutoConfig
from main_idea_with_torch import predict_mainidea_sent_old
from main_idea_with_pipeline import predict_mainidea_sent

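# Load the custom DistilBERT sentence-labeling model from the Hugging Face Hub.
# trust_remote_code=True is required because the model class is defined in the model repo itself.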
config = AutoConfig.from_pretrained("yutingg/custom-distill-bert-for-sentence-label", trust_remote_code=True)
model = AutoModel.from_pretrained("yutingg/custom-distill-bert-for-sentence-label", trust_remote_code=True, config=config)

def predict_main_idea(essay):
    # Run both implementations on the essay so their outputs can be compared side by side.
    return predict_mainidea_sent(essay, model), predict_mainidea_sent_old(essay, model)

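# UI layout: an essay textbox, a predict button, and two side-by-side tables
# showing the pipeline-based and torch-based predictions for comparison.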
with gr.Blocks() as main_idea_demo:
    with gr.Row():
        essay_input = gr.Textbox(label="essay", lines=10)
    with gr.Row():
        predict_button = gr.Button("Predict Main Idea Sentence")
    with gr.Row():
        with gr.Column(scale=1, min_width=600):
            output_1 = gr.Dataframe(
                label="pipeline output",
                headers=['label: is main idea', 'sentence'],
                datatype=["str", "str"],
                col_count=(2, "fixed"),
            )
        with gr.Column(scale=1, min_width=600):
            output_2 = gr.Dataframe(
                label="torch output with Triage",
                headers=['label: is main idea', 'sentence'],
                datatype=["str", "str"],
                col_count=(2, "fixed"),
            )
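    # The tuple returned by predict_main_idea fills the two dataframes in order.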
    predict_button.click(predict_main_idea, inputs=essay_input, outputs=[output_1, output_2])

main_idea_demo.launch()