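# Training configuration for a multi-task sequence-labeling run: joint Chinese
# word segmentation (seg) and part-of-speech tagging (pos) heads on top of a
# pretrained BERT encoder. The comments below are inferred from field names and
# values, not from the training code itself.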
batch_size: 32
clip_grad: 5
data_strategy: shuffle_batches
dev_datasets:
  pos_ours:
    dir: data
    mode: BMES
    name: pos_ours
    repeat_times: 1
    samples_num: 1000
    split: test
  seg_ours:
    dir: data
    mode: BMES
    name: seg_ours
    repeat_times: 1
    samples_num: 1000
    split: test
epoch_num: 1
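# Shared settings for the per-task classification heads; use_crf presumably
# toggles a CRF decoding layer on top of each head.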
head_config:
  dropout: 0.1
  layers_num: 1
  use_crf: false
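# Tasks trained jointly: word segmentation (seg) and POS tagging (pos).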
heads:
- seg
- pos
learning_rate: 1.0e-05
part_data: false
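# Tag inventory used by the pos head.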
pos_labels:
- w
- e
- g
- o
- mq
- f
- h
- a
- j
- id
- k
- v
- c
- u
- nz
- r
- q
- s
- x
- m
- np
- p
- d
- ns
- t
- n
- ni
pretrained_bert_model: /data03/private/chengzhili/pretrain/output/2023-06-09_21-23-00/save
saved_path: output/pos
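# BMES segmentation labels: Begin, Middle, and End of a multi-character word,
# plus Single-character word.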
seg_labels:
- B
- M
- E
- S
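# Training sets, one per task; samples_num: 0 is assumed to mean "use every
# available sample" from the train split.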
train_datasets:
  pos_ours:
    dir: data
    mode: BMES
    name: pos_ours
    repeat_times: 1
    samples_num: 0
    split: train
  seg_ours:
    dir: data
    mode: BMES
    name: seg_ours
    repeat_times: 1
    samples_num: 0
    split: train
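# warmup_steps appears to be a warmup ratio (10% of total steps) rather than an
# absolute step count, given the fractional value.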
warmup_steps: 0.1
weight_decay: 0.01