Spaces:
Runtime error
Runtime error
Remove Llama from tokenizer options (needs login)
Browse files
app.py
CHANGED
@@ -3,7 +3,6 @@ from prompt_generator import generate_prompt

 tokenizers = [
     "google/gemma-7b",
-    "meta-llama/Llama-2-7b",
     "mistralai/Mistral-7B-v0.1",
     "facebook/opt-2.7b",
     "microsoft/phi-2",
@@ -25,7 +24,7 @@ def generate(model_id, num_tokens):

 demo = gr.Interface(
     fn=generate,
-    title="Prompt Generator",
+    title="Test Prompt Generator",
     description="Generate prompts with a given length for testing transformer models. "
     "Prompt source: https://archive.org/stream/alicesadventures19033gut/19033.txt",
     inputs=[