# Gradio Space: generate images for one prompt across multiple hosted models.
import gradio as gr
from random import randint
from all_models import models
from datetime import datetime
from concurrent.futures import TimeoutError, ThreadPoolExecutor
import numpy as np
import time
import requests
import logging

logging.basicConfig(level=logging.WARNING)

# Module-level state shared by the generation callbacks below.
now2 = 0  # scratch timestamp holder (only ever shadowed locally in get_current_time)
index_tracker = 0  # Index tracker for the current model
model_scores = {model: 0 for model in models}  # Dictionary to track scores for each model
processed_models_count = 0  # models finished in the current round-robin cycle
kii = " blonde mohawk femboy playing game with self at computer with programmer socks on, still a wip"
combined_prompt = ""
def get_current_time():
    """Return the default prompt ``kii`` suffixed with the current local timestamp.

    The original bound ``now2`` as a local (shadowing the module global of the
    same name without updating it) — that dead assignment is removed here.
    """
    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    return f'{kii} {current_time}'
# Sanitize file names and truncate them
def sanitize_file_name(file_name, max_length=100):
    """Shortens and removes unsafe characters from file name."""
    truncated = file_name[:max_length]
    for unsafe in (" ", "/"):
        truncated = truncated.replace(unsafe, "_")
    return truncated
def load_fn(models):
    """Populate the global ``models_load`` mapping with one interface per model.

    A model that fails to load gets a stub ``gr.Interface`` that always yields
    ``None``, so the UI layout stays consistent even when a model is down.
    """
    global models_load
    models_load = {}
    for model in models:
        if model in models_load:  # skip duplicates in the input list
            continue
        try:
            m = gr.load(f'models/{model}')
            print(f"{m}\n")
        except Exception as error:
            print(f"Error loading model {model}: {error}\n")
            m = gr.Interface(lambda _: None, inputs=gr.Textbox(), outputs=gr.Image(), queue=False)
        models_load.update({model: m})
# Eagerly load every model, then mirror the full list as the default selection
# (models[:len(models)] is simply a shallow copy).
load_fn(models)

num_models = len(models)
default_models = models[:num_models]
def extend_choices(choices):
    """Pad *choices* with 'NA' placeholders until it is num_models long."""
    padding = ['NA'] * (num_models - len(choices))
    return choices + padding
def update_imgbox(choices):
    """Return one gr.Image per model slot; 'NA' placeholder slots are hidden."""
    padded = extend_choices(choices)
    boxes = []
    for model_name in padded:
        boxes.append(gr.Image(None, label=model_name, visible=model_name != 'NA'))
    return boxes
# Shared pool so each generation can be bounded with a timeout.
executor = ThreadPoolExecutor(max_workers=num_models)


def _finish_model_round():
    """Count one finished model; report and reset scores when a cycle completes."""
    global processed_models_count
    processed_models_count += 1
    if processed_models_count == len(models):
        print("\nCycle Complete! Updated Scores:")
        print(model_scores)
        processed_models_count = 0


def gen_fn(model_str, prompt):
    """Generate an image for *prompt* using the model named *model_str*.

    The prompt is truncated/sanitized and a random numeric suffix is appended
    (presumably to bypass upstream result caching — confirm). Returns the
    model's response, or None for 'NA' slots, timeouts, and errors.

    NOTE(review): index_tracker/processed_models_count are mutated without a
    lock; fine for single-threaded Gradio callbacks, not for concurrent ones.
    """
    global index_tracker, model_scores, processed_models_count
    if model_str == 'NA':
        return None
    try:
        index_tracker = (index_tracker + 1) % len(models)
        current_model_index = index_tracker
        current_model_name = models[current_model_index]

        max_prompt_length = 100
        truncated_prompt = sanitize_file_name(prompt[:max_prompt_length])
        combined_prompt = f"{truncated_prompt}_{randint(0, 9999)}"

        # Run in the pool so we can enforce the 150 s timeout below.
        future = executor.submit(models_load[model_str], f"{combined_prompt}")
        response = future.result(timeout=150)

        if isinstance(response, gr.Image):
            return response
        elif isinstance(response, tuple):
            return None
        elif isinstance(response, str):
            if processed_models_count == 0:
                print("**************")
                print("**************")
            model_scores[current_model_name] += 1
            print(f"OOO n:{processed_models_count} x:{current_model_index} r[{model_scores[current_model_name]}] {model_str}")
            _finish_model_round()
            return response
        # Any other response type falls through and returns None implicitly,
        # matching the original behavior (no cycle accounting in that case).
    except TimeoutError:
        print(f"TimeoutError: Model '{model_str}' did not respond within 150 seconds.")
        _finish_model_round()
        return None
    except Exception as e:
        if processed_models_count == 0:
            print("**************")
            print("**************")
        print(f"--- n:{processed_models_count} x:{current_model_index} r[{model_scores[current_model_name]}] {model_str}")
        _finish_model_round()
        return None
def make_me():
    """Build the UI: prompt box, generate/stop buttons, one image box per model."""
    with gr.Row():
        txt_input = gr.Textbox(lines=2, value=kii, label=None)
        gen_button = gr.Button('Generate images')
        stop_button = gr.Button('Stop', variant='secondary', interactive=False)
        # Enable the stop button once generation starts.
        gen_button.click(lambda _: gr.update(interactive=True), None, stop_button)
        # NOTE(review): this second click handler has no output component and
        # appears redundant — confirm before removing.
        gen_button.click(lambda _: gr.update(interactive=True), None)
    gr.HTML(""" <div style="text-align: center; max-width: 100%; margin: 0 auto;"> <body> </body> </div> """)
    with gr.Row():
        output = [gr.Image(label=m) for m in default_models]
        current_models = [gr.Textbox(m, visible=False) for m in default_models]
        for m, o in zip(current_models, output):
            gen_event = gen_button.click(gen_fn, [m, txt_input], o, queue=False)
            # stop_button.click(lambda _: gr.update(interactive=False), None, stop_button, cancels=[gen_event])
    with gr.Accordion('Model selection', visible=False):
        model_choice = gr.CheckboxGroup(models, label=f' {num_models} different models selected', value=default_models, interactive=True)
        model_choice.change(update_imgbox, model_choice, output)
        model_choice.change(extend_choices, model_choice, current_models)
# JS injected into the page: while a toast is shown, no-op window.scrollTo so
# Gradio's auto-scroll can't yank the viewport; restore it after the timeout.
js_code = """<script>const originalScroll = window.scrollTo; const originalShowToast = gradio.Toast.show;
gradio.Toast.show = function() { originalShowToast.apply(this, arguments); window.scrollTo = function() {};};
setTimeout(() => { window.scrollTo = originalScroll; }, 1000); // Restore scroll function after 1 second</script>"""
# App assembly. CSS fixes vs. the original: '!imoprtant' typo corrected to
# '!important' (the rule was silently ignored) and a stray trailing '}' removed.
with gr.Blocks(css="""
label.float.svelte-i3tvor { top:auto!important; bottom: 0; position: absolute; background: rgba(0,0,0,0.0); left: var(--block-label-margin); color: rgba(200,200,200,.7);}
.genbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
.stopbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
.float.svelte-1mwvhlq { position: absolute; top: var(--block-label-margin); left: var(--block-label-margin); background: none; border: none;}
textarea:hover { background:#55555555;}
textarea { overflow-y: scroll; top:0px; width: 100%; height:100%!important; font-size: 1.5em; letter-spacing: 3px; color: limegreen; border: none!important; background: none; outline: none !important; }
.form.svelte-633qhp{ flex-grow: 1; position: absolute; right: 0px; border-radius: 6px; z-index: 400000; resize: both; left: 52%; background: rgba(103, 103, 114, 0.35); height: 46px; width: 48%!important;}
label.svelte-173056l.svelte-173056l { display: block; width: 100%; height: 100%;}
.input-container.svelte-173056l.svelte-173056l { /* display: flex; */ position: absolute; border: 1px solid; padding: 0px; /* height: calc(100% - 32px); */ /* align-items: flex-end; */ border-radius: 6px; margin: 0px; top: 0px; left: 0px; /* bottom: -16px; */ width: 100%; min-height: 100%;}
textarea{ position: absolute; font-size: 1em !important; padding: 4px; background: none; height: 100% !important; height: 100%;}
.svelte-11xb1hd.padded{background:none;}span.svelte-1gfkn6j:not(.has-info) { margin-bottom: var(--spacing-lg); display: none;}
.lg.secondary{ min-width:20%!important; width: 150px !important; flex: none !important;}
.unpadded_box.svelte-1oiin9d { margin-top: 0; margin-left: auto!important; max-height: 134px!important; min-height: 156px!important; margin-right: auto!important; min-width: 133px !important;}
""") as demo:
    gr.Markdown("<script>" + js_code + "</script>")
    make_me()
    demo.queue()

# NOTE(review): rebinding demo.queue to False and poking demo.config["queue"]
# shadows the queue() method — looks like a hack to disable queueing after the
# fact; confirm it still has the intended effect on the installed Gradio version.
demo.queue = False
demo.config["queue"] = False
demo.launch(max_threads=200)