---
# OpenNMT-py inference configuration for Mixtral (AWQ-quantized checkpoint),
# run with tensor parallelism across two GPUs.
# Tokenization transforms applied to the input (SentencePiece).
transforms: [sentencepiece]
#### Subword
# Same SentencePiece model is used for both source and target sides.
src_subword_model: "/mnt/InternalCrucial4/dataAI/mixtral/tokenizer.model"
tgt_subword_model: "/mnt/InternalCrucial4/dataAI/mixtral/tokenizer.model"
# Model info
# AWQ-quantized Mixtral checkpoint converted to OpenNMT-py format.
model: "/mnt/InternalCrucial4/dataAI/mixtral/mixtral-onmt-awq.pt"
# Inference
seed: 42          # fixed RNG seed for reproducible decoding
max_length: 256   # maximum number of generated tokens per input
gpu: 0
batch_type: sents # batch by sentence count, not token count
batch_size: 1     # one sentence per batch
# Model is sharded across 2 GPUs (ranks 0 and 1) in tensor-parallel mode.
world_size: 2
gpu_ranks: [0, 1]
parallel_mode: "tensor_parallel"
precision: fp16
# Sampling options left disabled; with beam_size/n_best of 1 below,
# decoding is effectively greedy.
#random_sampling_topk: 1
#random_sampling_topp: 0.6
#random_sampling_temp: 0.9
beam_size: 1      # no beam search
n_best: 1         # return only the single best hypothesis
profile: false
report_time: true # log decoding time statistics
# NOTE(review): YAML parses unquoted `None` as the *string* "None", not null.
# Presumably a placeholder overridden on the command line (-src <file>), as in
# the OpenNMT-py recipes — confirm before relying on it; use `null` if the
# loader expects a true null value.
src: None
#tgt: None