Added CUDA config to the sample code
#6
by
mahimairaja
- opened
README.md
CHANGED
@@ -104,11 +104,15 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
# Sample usage (CPU): load microsoft/phi-1_5 and complete a Python prompt.
# trust_remote_code=True is required because phi-1_5 ships custom modeling code;
# torch_dtype="auto" lets transformers pick the checkpoint's native dtype.
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained("microsoft/phi-1_5", trust_remote_code=True, torch_dtype="auto")
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1_5", trust_remote_code=True, torch_dtype="auto")

# The prompt is an unfinished function definition; the model completes it.
# return_attention_mask=False: generate() builds the mask itself for a single sequence.
inputs = tokenizer('''```python
def print_prime(n):
   """
   Print all primes between 1 and n
   """''', return_tensors="pt", return_attention_mask=False)

outputs = model.generate(**inputs, max_length=200)
text = tokenizer.batch_decode(outputs)[0]
# Sample usage (GPU): load microsoft/phi-1_5 on CUDA and complete a Python prompt.
# Fixes vs. the PR as submitted: `import torch` was missing, and the
# `device = torch.device("cuda:0")` binding was dead code — the snippet then
# called model.cuda() and .to('cuda') instead. Using `device` consistently
# keeps model and inputs pinned to the same GPU (cuda:0).
import torch

from transformers import AutoModelForCausalLM, AutoTokenizer

# trust_remote_code=True is required because phi-1_5 ships custom modeling code;
# torch_dtype="auto" lets transformers pick the checkpoint's native dtype.
model = AutoModelForCausalLM.from_pretrained("microsoft/phi-1_5", trust_remote_code=True, torch_dtype="auto")
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1_5", trust_remote_code=True, torch_dtype="auto")

# Pin everything to the first CUDA device, once, up front.
device = torch.device("cuda:0")
model.to(device)

# The prompt is an unfinished function definition; the model completes it.
# Input tensors must live on the same device as the model before generate().
inputs = tokenizer('''```python
def print_prime(n):
   """
   Print all primes between 1 and n
   """''', return_tensors="pt", return_attention_mask=False).to(device)

outputs = model.generate(**inputs, max_length=200)
text = tokenizer.batch_decode(outputs)[0]