Dongfu Jiang committed on
Commit
0d37d21
1 Parent(s): 342b809

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -48,7 +48,7 @@ def tokenize_pair(sources:List[str], candidate1s:List[str], candidate2s:List[str
48
  candidate1_ids = tokenizer.encode(cand1_prefix + candidate1s[i], max_length=candidate_max_length, truncation=True)
49
  candidate2_ids = tokenizer.encode(cand2_prefix + candidate2s[i], max_length=candidate_max_length, truncation=True)
50
  ids.append(source_ids + candidate1_ids + candidate2_ids)
51
- encodings = tokenizer.pad({"input_ids": ids}, return_tensors="pt", padding=True, max_length=max_length)
52
  return encodings
53
 
54
  encodings = tokenize_pair(inputs, candidates_A, candidates_B)
 
48
  candidate1_ids = tokenizer.encode(cand1_prefix + candidate1s[i], max_length=candidate_max_length, truncation=True)
49
  candidate2_ids = tokenizer.encode(cand2_prefix + candidate2s[i], max_length=candidate_max_length, truncation=True)
50
  ids.append(source_ids + candidate1_ids + candidate2_ids)
51
+ encodings = tokenizer.pad({"input_ids": ids}, return_tensors="pt", padding="max_length", max_length=max_length)
52
  return encodings
53
 
54
  encodings = tokenize_pair(inputs, candidates_A, candidates_B)