import os
import gradio as gr
from haystack.nodes import PromptNode, PromptTemplate, TransformersImageToText
from haystack import Pipeline

description = """
# Captionate ✨ 📸
## Create Instagram captions for your pics!
Built by [Bilge Yucel](https://twitter.com/bilgeycl) using [Haystack](https://github.com/deepset-ai/haystack) 💙
"""
image_to_text = TransformersImageToText(
    model_name_or_path="nlpconnect/vit-gpt2-image-captioning",
    progress_bar=True
)
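
# Prompt that rewrites the photo description into an Instagram caption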
prompt_template = PromptTemplate(prompt="""
You will receive a descriptive text of a photo.
Try to come up with a nice Instagram caption that has a phrase rhyming with the text. Include emojis in the caption.
Descriptive text: {documents};
Caption:
""")
hf_api_key = os.environ["HF_API_KEY"]
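
# Build the pipeline on each request so the PromptNode uses the model chosen in the UI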
def generate_caption(image_file_paths, model_name):
    captioning_pipeline = Pipeline()
    prompt_node = PromptNode(
        model_name_or_path=model_name,
        api_key=hf_api_key,
        default_prompt_template=prompt_template,
        model_kwargs={"trust_remote_code": True},
    )
    captioning_pipeline.add_node(component=image_to_text, name="image_to_text", inputs=["File"])
    captioning_pipeline.add_node(component=prompt_node, name="prompt_node", inputs=["image_to_text"])
    caption = captioning_pipeline.run(file_paths=[image_file_paths])
    return caption["results"][0]
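
# Gradio UI: upload a photo, pick a model, and get the generated caption back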
with gr.Blocks(theme="monochrome") as demo:
    gr.Markdown(value=description)
    with gr.Row():
        image = gr.Image(type="filepath")
        model_name = gr.Dropdown(
            ["tiiuae/falcon-7b-instruct", "tiiuae/falcon-7b", "EleutherAI/gpt-neox-20b", "HuggingFaceH4/starchat-beta", "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5", "bigscience/bloom"],
            value="tiiuae/falcon-7b-instruct",
            label="Choose your model!",
        )
    submit_btn = gr.Button("✨ Captionate ✨")
    caption = gr.Textbox(label="Caption", show_copy_button=True)
    submit_btn.click(fn=generate_caption, inputs=[image, model_name], outputs=[caption])

if __name__ == "__main__":
    demo.launch()