Lahiru Menikdiwela committed on
Commit 793459b · 1 Parent(s): 173b5f1

fix cuda issue in newly added llama code

Files changed (1)
  1. summarizer.py +1 -1
summarizer.py CHANGED
@@ -60,7 +60,7 @@ def summarizer_summarize(model_type,tokenizer, base_summarizer, text:str,summari
         ]
     prompt = tokenizer.apply_chat_template(chat, tokenize=False, add_generation_prompt=True)
     inputs = tokenizer(prompt,
-                       return_tensors="pt", truncation=True).to('cuda')
+                       return_tensors="pt", truncation=True).to('cpu')
     attention_mask = inputs["attention_mask"]
     approximate_tokens = int(len(text)//10)
     output = base_summarizer.generate(inputs['input_ids'],
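
The fix hard-codes CPU placement for the tokenized inputs. A minimal, device-agnostic sketch of the same step is shown below; it is only an illustration, assuming a Hugging Face transformers tokenizer/model pair like the ones passed into summarizer_summarize. The chat content, function name, and generation arguments here are placeholders, not the repository's actual values.

from transformers import PreTrainedModel, PreTrainedTokenizer

def summarize_on_model_device(tokenizer: PreTrainedTokenizer,
                              base_summarizer: PreTrainedModel,
                              text: str) -> str:
    # Place the inputs on whatever device the model already occupies,
    # so the code works whether the model was loaded on CUDA or CPU.
    device = next(base_summarizer.parameters()).device

    # Illustrative chat prompt; the real prompt built in summarizer.py is not shown in the diff.
    chat = [{"role": "user", "content": f"Summarize the following text:\n{text}"}]
    prompt = tokenizer.apply_chat_template(chat, tokenize=False, add_generation_prompt=True)
    inputs = tokenizer(prompt, return_tensors="pt", truncation=True).to(device)

    approximate_tokens = int(len(text) // 10)  # same length heuristic as in the diff
    output = base_summarizer.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_new_tokens=approximate_tokens,
    )
    return tokenizer.decode(output[0], skip_special_tokens=True)

With this pattern the inputs always follow the model's own device, so switching the model between GPU and CPU does not require touching the tokenization code.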