RMakushkin committed
Commit e9a748e · Parent(s): d7c4403
Create app.py
app.py ADDED
import streamlit as st
import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer


# Base Russian GPT-2 model from Sber; the fine-tuned weights are loaded below.
model_name_or_path = "sberbank-ai/rugpt3small_based_on_gpt2"
tokenizer = GPT2Tokenizer.from_pretrained(model_name_or_path)
model = GPT2LMHeadModel.from_pretrained(
    model_name_or_path,
    output_attentions=False,
    output_hidden_states=False,
)

# Load the saved fine-tuned weights (Pelevin-style checkpoint).
model_weights_path = "hunter_pelevin.pt"
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model.load_state_dict(torch.load(model_weights_path, map_location=device))
model.to(device)
model.eval()


def generate_text(user_input, max_length, num_sequences, temperature,
                  model=model, tokenizer=tokenizer):
    """Generate one or more continuations of the user's prompt."""
    input_ids = tokenizer.encode(user_input, return_tensors="pt").to(device)
    with torch.no_grad():
        out = model.generate(
            input_ids,
            max_length=max_length,
            num_beams=10,
            do_sample=True,
            temperature=temperature,
            top_k=500,
            top_p=0.8,
            no_repeat_ngram_size=3,
            num_return_sequences=num_sequences,
        )
    # Decode every returned sequence, not just the first one,
    # so the "number of generations" slider actually affects the output.
    return [tokenizer.decode(ids) for ids in out]


st.title("Простое веб-приложение на Streamlit")

# Get the prompt and generation settings from the user.
user_input = st.text_area("Введите текст:")
slider1 = st.slider("Выберите длину текста:", min_value=10, max_value=100, value=50)  # max generation length
slider2 = st.slider("Выберите количество генераций", min_value=1, max_value=5, value=2)  # number of samples
slider3 = st.slider("Выберите степень безумия:", min_value=0.1, max_value=3.0, value=1.2, step=0.1)  # sampling temperature

if user_input:
    for gen_text in generate_text(user_input, max_length=slider1,
                                  num_sequences=slider2, temperature=slider3):
        st.write(gen_text)
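
For a quick check of the checkpoint outside the Streamlit UI, a minimal smoke-test sketch like the one below could be used. The file name smoke_test.py, the example prompt, and CPU-only execution are assumptions rather than part of this commit; it presumes hunter_pelevin.pt is available in the working directory, just as the app expects.

# smoke_test.py — hypothetical helper, not part of this commit.
# Loads the same base model plus the fine-tuned weights and prints one sample.
import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer

model_name_or_path = "sberbank-ai/rugpt3small_based_on_gpt2"
tokenizer = GPT2Tokenizer.from_pretrained(model_name_or_path)
model = GPT2LMHeadModel.from_pretrained(model_name_or_path)
model.load_state_dict(torch.load("hunter_pelevin.pt", map_location="cpu"))
model.eval()

prompt = "Пустота"  # example prompt (assumption)
input_ids = tokenizer.encode(prompt, return_tensors="pt")
with torch.no_grad():
    out = model.generate(
        input_ids,
        max_length=50,
        do_sample=True,
        temperature=1.2,
        top_k=500,
        top_p=0.8,
        no_repeat_ngram_size=3,
    )
print(tokenizer.decode(out[0]))

Running the app itself should only require streamlit run app.py in an environment with streamlit, torch, and transformers installed.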