# Tokenize the prompt without special tokens and move the ids to the model's device.
input_ids = tokenizer(prompt, add_special_tokens=False, return_tensors='pt').input_ids.to(model.device)
# Total generation length: the requested number of new tokens plus the prompt length,
# since max_length counts the prompt tokens as well.
max_length=request['max_tokens'] + len(input_ids[0]),
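
The following is a minimal, self-contained sketch of how these two fragments typically fit together in a Hugging Face transformers generation path. The names `model`, `tokenizer`, `prompt`, and `request` come from the excerpt itself; the checkpoint, the shape of the `request` payload, and the `generate()` call wrapped around the `max_length` argument are illustrative assumptions, not the original surrounding code.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('gpt2')  # assumed checkpoint
model = AutoModelForCausalLM.from_pretrained('gpt2').eval()

prompt = 'The quick brown fox'
request = {'max_tokens': 32}  # assumed request payload shape

# Tokenize without special tokens so len(input_ids[0]) is exactly the
# prompt length, then move the ids to the model's device.
input_ids = tokenizer(prompt, add_special_tokens=False, return_tensors='pt').input_ids.to(model.device)

with torch.no_grad():
    output_ids = model.generate(
        input_ids,
        # max_length counts the prompt tokens, so the requested number
        # of new tokens is added to the prompt length.
        max_length=request['max_tokens'] + len(input_ids[0]),
        do_sample=False,
        pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token
    )

# Strip the prompt tokens before decoding so only the completion is printed.
print(tokenizer.decode(output_ids[0][len(input_ids[0]):], skip_special_tokens=True))
```

The same behavior is usually expressed with `max_new_tokens=request['max_tokens']`, which lets `generate()` handle the prompt-length arithmetic itself; the explicit `max_length` form shown in the excerpt is equivalent.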