Update README.md
#1 by MohamedRashad - opened

README.md CHANGED
@@ -14,18 +14,21 @@ pipeline_tag: image-to-text
 
 Usage is as follows:
 
-```
+```python
 from transformers import InstructBlipProcessor, InstructBlipForConditionalGeneration
 import torch
 from PIL import Image
 import requests
+
 model = InstructBlipForConditionalGeneration.from_pretrained("UBC-NLP/Peacock")
 processor = InstructBlipProcessor.from_pretrained("UBC-NLP/Peacock")
 device = "cuda" if torch.cuda.is_available() else "cpu"
 model.to(device)
+
 url = "https://upload.wikimedia.org/wikipedia/commons/8/83/Socotra_dragon_tree.JPG"
 image = Image.open(requests.get(url, stream=True).raw).convert("RGB")
 prompt = "اوصف الصوره"
+
 inputs = processor(images=image, text=prompt, return_tensors="pt").to(device)
 outputs = model.generate(
 **inputs,
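
The hunk ends partway through the `generate(...)` call, so the remaining generation arguments and the decoding step are not visible in this diff. For context, here is a minimal sketch of how such an InstructBLIP usage snippet is commonly completed; the generation parameters and the `batch_decode` step below are illustrative assumptions, not content taken from the README:

```python
# Minimal sketch, assuming typical InstructBLIP generation settings
# (the README's actual generate() arguments are cut off in this hunk).
from transformers import InstructBlipProcessor, InstructBlipForConditionalGeneration
import torch
from PIL import Image
import requests

model = InstructBlipForConditionalGeneration.from_pretrained("UBC-NLP/Peacock")
processor = InstructBlipProcessor.from_pretrained("UBC-NLP/Peacock")
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)

url = "https://upload.wikimedia.org/wikipedia/commons/8/83/Socotra_dragon_tree.JPG"
image = Image.open(requests.get(url, stream=True).raw).convert("RGB")
prompt = "اوصف الصوره"  # Arabic for "Describe the image"

inputs = processor(images=image, text=prompt, return_tensors="pt").to(device)
outputs = model.generate(
    **inputs,
    do_sample=False,       # assumed: deterministic beam search
    num_beams=5,           # assumed value
    max_new_tokens=256,    # assumed value
)
# Decode the generated token ids back to text.
print(processor.batch_decode(outputs, skip_special_tokens=True)[0].strip())
```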