ybelkada committed · Commit d47ad64 · 1 Parent(s): 9fd56c0

Update README.md

Files changed (1): README.md (+2 -2)
README.md CHANGED
```diff
@@ -39,7 +39,7 @@ pipe = pipeline("image-to-text", model=model_id)
 url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/tasks/ai2d-demo.jpg"
 
 image = Image.open(requests.get(url, stream=True).raw)
-prompt = "<image>\nUSER: What does the label 15 represent? (1) lava (2) core (3) tunnel (4) ash cloud\nASSISTANT:"
+prompt = "USER: <image>\nWhat does the label 15 represent? (1) lava (2) core (3) tunnel (4) ash cloud\nASSISTANT:"
 
 outputs = pipe(image, prompt=prompt, generate_kwargs={"max_new_tokens": 200})
 print(outputs)
@@ -59,7 +59,7 @@ from transformers import AutoProcessor, LlavaForConditionalGeneration
 
 model_id = "llava-hf/bakLlava-v1-hf"
 
-prompt = "<image> \nUSER: What are these?\nASSISTANT:"
+prompt = "USER: <image>\nWhat are these?\nASSISTANT:"
 image_file = "http://images.cocodataset.org/val2017/000000039769.jpg"
 
 model = LlavaForConditionalGeneration.from_pretrained(
```
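Both hunks make the same fix: the `USER:` role tag now precedes the `<image>` placeholder, matching the conversation template the llava-hf checkpoints expect. For reference, here is a minimal runnable sketch of the pipeline snippet the first hunk patches, assembled from the diff context and its hunk header; the imports are assumptions, since the diff shows only the changed region.

```python
# Minimal sketch of the first snippet; imports and pipeline setup are
# implied by the diff context but not shown in full there.
import requests
from PIL import Image
from transformers import pipeline

model_id = "llava-hf/bakLlava-v1-hf"  # taken from the second hunk
pipe = pipeline("image-to-text", model=model_id)

url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/tasks/ai2d-demo.jpg"
image = Image.open(requests.get(url, stream=True).raw)

# The patched prompt: the "USER:" role tag comes before the <image> token.
prompt = "USER: <image>\nWhat does the label 15 represent? (1) lava (2) core (3) tunnel (4) ash cloud\nASSISTANT:"

outputs = pipe(image, prompt=prompt, generate_kwargs={"max_new_tokens": 200})
print(outputs)
```

The second hunk patches the lower-level snippet. Its `from_pretrained(` call is truncated in the diff, so the plain-default loading below is an assumption, not part of the patch.

```python
import requests
from PIL import Image
from transformers import AutoProcessor, LlavaForConditionalGeneration

model_id = "llava-hf/bakLlava-v1-hf"

# The patched prompt, with the role tag before the image placeholder.
prompt = "USER: <image>\nWhat are these?\nASSISTANT:"
image_file = "http://images.cocodataset.org/val2017/000000039769.jpg"

# from_pretrained arguments are cut off in the diff; defaults assumed here.
model = LlavaForConditionalGeneration.from_pretrained(model_id)
processor = AutoProcessor.from_pretrained(model_id)

image = Image.open(requests.get(image_file, stream=True).raw)
inputs = processor(text=prompt, images=image, return_tensors="pt")

output = model.generate(**inputs, max_new_tokens=200)
print(processor.decode(output[0], skip_special_tokens=True))
```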