mrcuddle committed on
Commit 6b7b2db · verified · 1 Parent(s): c93bf50

Upload merge.py

Files changed (1)
  1. merge.py +165 -0
merge.py ADDED
@@ -0,0 +1,165 @@
+ import argparse
+ import numpy as np
+ import os
+ import shutil
+ import torch
+ import torch.nn.functional as F
+ from safetensors.torch import safe_open, save_file
+
+ def merge_tensors(tensor1, tensor2, p):
+     # Calculate the delta of the weights
+     delta = tensor2 - tensor1
+     # Generate the mask m^t from Bernoulli distribution
+     m = torch.from_numpy(np.random.binomial(1, p, delta.shape)).to(tensor1.dtype)
+     # Apply the mask to the delta to get δ̃^t
+     delta_tilde = m * delta
+     # Scale the masked delta by the dropout rate to get δ̂^t
+     delta_hat = delta_tilde / (1 - p)
+     return delta_hat
+
+ def merge_safetensors(file_path1, file_path2, p, lambda_val):
+     merged_tensors = {}
+
+     with safe_open(file_path1, framework="pt", device="cpu") as f1, safe_open(file_path2, framework="pt", device="cpu") as f2:
+         keys1 = set(f1.keys())
+         keys2 = set(f2.keys())
+         common_keys = keys1.intersection(keys2)
+
+         for key in common_keys:
+             tensor1 = f1.get_tensor(key)
+             tensor2 = f2.get_tensor(key)
+             tensor1, tensor2 = resize_tensors(tensor1, tensor2)
+             merged_tensors[key] = tensor1 + lambda_val * merge_tensors(tensor1, tensor2, p)
+             print("merging", key)
+
+     return merged_tensors
+
+ # Minimal wrapper so .bin checkpoints expose the same get_tensor interface as safe_open
+ class BinDataHandler():
+     def __init__(self, data):
+         self.data = data
+
+     def get_tensor(self, key):
+         return self.data[key]
+
+ def read_tensors(file_path, ext):
+     if ext == ".safetensors" and file_path.endswith(".safetensors"):
+         f = safe_open(file_path, framework="pt", device="cpu")
+         return f, set(f.keys())
+     if ext == ".bin" and file_path.endswith(".bin"):
+         data = torch.load(file_path, map_location=torch.device('cpu'))
+         f = BinDataHandler(data)
+         return f, set(data.keys())
+     return None, None
+
+ def resize_tensors(tensor1, tensor2):
+     # Zero-pad the smaller tensor so both shapes match; only 1-D and 2-D tensors are handled
+     if len(tensor1.shape) not in [1, 2]:
+         return tensor1, tensor2
+
+     # Pad along the last dimension (width); (0, padding_size) works for both 1-D and 2-D tensors
+     if tensor1.shape[-1] < tensor2.shape[-1]:
+         padding_size = tensor2.shape[-1] - tensor1.shape[-1]
+         tensor1 = F.pad(tensor1, (0, padding_size))
+     elif tensor2.shape[-1] < tensor1.shape[-1]:
+         padding_size = tensor1.shape[-1] - tensor2.shape[-1]
+         tensor2 = F.pad(tensor2, (0, padding_size))
+
+     # Pad along the first dimension (height)
+     if tensor1.shape[0] < tensor2.shape[0]:
+         padding_size = tensor2.shape[0] - tensor1.shape[0]
+         tensor1 = F.pad(tensor1, (0, 0, 0, padding_size))
+     elif tensor2.shape[0] < tensor1.shape[0]:
+         padding_size = tensor1.shape[0] - tensor2.shape[0]
+         tensor2 = F.pad(tensor2, (0, 0, 0, padding_size))
+
+     return tensor1, tensor2
+
+ def merge_folder(tensor_map, directory_path, p, lambda_val):
+     keys1 = set(tensor_map.keys())
+     # Some repos have both bin and safetensors, choose safetensors if so
+     ext = None
+     for filename in os.listdir(directory_path):
+         # Default to safetensors
+         if filename.endswith(".safetensors"):
+             ext = ".safetensors"
+         if filename.endswith(".bin") and ext is None:
+             ext = ".bin"
+     if ext is None:
+         raise FileNotFoundError("Could not find model files")
+
+     for filename in os.listdir(directory_path):
+         file_path = os.path.join(directory_path, filename)
+         f, keys2 = read_tensors(file_path, ext)
+         if keys2:
+             common_keys = keys1.intersection(keys2)
+             for key in common_keys:
+                 # Average MoE router gate weights instead of applying the masked delta
+                 if "block_sparse_moe.gate" in key:
+                     tensor1 = tensor_map[key]['tensor']
+                     tensor2 = f.get_tensor(key)
+                     tensor_map[key]['tensor'] = (tensor1 + tensor2) / 2.0
+                     continue
+                 tensor1 = tensor_map[key]['tensor']
+                 tensor2 = f.get_tensor(key)
+                 tensor1, tensor2 = resize_tensors(tensor1, tensor2)
+                 tensor_map[key]['tensor'] = tensor1 + lambda_val * merge_tensors(tensor1, tensor2, p)
+     return tensor_map
+
+ def map_tensors_to_files(directory_path):
+     tensor_map = {}
+
+     for filename in os.listdir(directory_path):
+         file_path = os.path.join(directory_path, filename)
+         f, keys = read_tensors(file_path, '.safetensors')
+         if keys:
+             for key in keys:
+                 tensor = f.get_tensor(key)
+                 tensor_map[key] = {'filename': filename, 'shape': tensor.shape, 'tensor': tensor}
+
+     return tensor_map
+
+ def copy_nontensor_files(from_path, to_path):
+     # Copy config/tokenizer files, skipping weight files, hidden files, READMEs and directories
+     for filename in os.listdir(from_path):
+         file_path = os.path.join(from_path, filename)
+         if from_path != to_path and not filename.startswith(".") and not filename.startswith("README") and not filename.endswith(".bin") and not filename.endswith(".safetensors") and not filename.endswith(".pt") and not os.path.isdir(file_path):
+             print(f"Copying {file_path} to {to_path}")
+             shutil.copyfile(file_path, to_path + '/' + filename)
+
+ def save_tensor_map(tensor_map, output_folder):
+     # Group tensors back into their original shard files and write each shard as safetensors
+     metadata = {'format': 'pt'}
+     by_filename = {}
+
+     for key, value in tensor_map.items():
+         filename = value["filename"]
+         tensor = value["tensor"]
+         if filename not in by_filename:
+             by_filename[filename] = {}
+         by_filename[filename][key] = tensor
+
+     for filename in sorted(by_filename.keys()):
+         output_file = output_folder + '/' + filename
+         print("Saving:", output_file)
+         save_file(by_filename[filename], output_file, metadata=metadata)
+
+ def main():
+     # Parse command-line arguments
+     parser = argparse.ArgumentParser(description='Merge two safetensors model files.')
+     parser.add_argument('base_model', type=str, help='The base model safetensors file or directory')
+     parser.add_argument('second_model', type=str, help='The second model safetensors file or directory')
+     parser.add_argument('output_model', type=str, help='The output merged model safetensors file or directory')
+     parser.add_argument('-p', type=float, default=0.5, help='Dropout probability')
+     parser.add_argument('-lambda', dest='lambda_val', type=float, default=1.0, help='Scaling factor for the weight delta')
+     args = parser.parse_args()
+
+     if os.path.isdir(args.base_model):
+         if not os.path.exists(args.output_model):
+             os.makedirs(args.output_model)
+
+         tensor_map = map_tensors_to_files(args.base_model)
+         tensor_map = merge_folder(tensor_map, args.second_model, args.p, args.lambda_val)
+         copy_nontensor_files(args.base_model, args.output_model)
+         save_tensor_map(tensor_map, args.output_model)
+     else:
+         merged = merge_safetensors(args.base_model, args.second_model, args.p, args.lambda_val)
+         save_file(merged, args.output_model)
+
+ if __name__ == '__main__':
+     main()
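
For reference, a minimal sketch of what the script computes per tensor, assuming merge.py is importable from the working directory; the toy tensors, seed, and printed values are illustrative only and not part of the upload. With the defaults p = 0.5 and lambda = 1.0, each element of a merged weight is either the base value (delta dropped) or the base value plus twice the delta (kept and rescaled by 1/(1 - p)), so over many mask draws the average tends back toward the full delta.

import numpy as np
import torch
from merge import merge_tensors  # assumes merge.py is on the import path

np.random.seed(0)  # merge_tensors draws its Bernoulli mask from NumPy's RNG

t1 = torch.zeros(4, 4)  # stand-in for a base-model weight
t2 = torch.ones(4, 4)   # stand-in for the second model's weight
p, lambda_val = 0.5, 1.0

# The same per-tensor update used in merge_safetensors and merge_folder:
merged = t1 + lambda_val * merge_tensors(t1, t2, p)
print(merged)  # each entry is 0.0 (delta dropped) or 2.0 (delta kept, rescaled by 1/(1-p))

# Averaged over many masks, the merged weights approach t1 + (t2 - t1) when p = 0.5:
avg = torch.stack([t1 + lambda_val * merge_tensors(t1, t2, p) for _ in range(1000)]).mean(dim=0)
print(avg)  # ≈ 1.0 everywhere

From the command line, the same defaults correspond to an invocation along the lines of python merge.py ./base_model ./second_model ./merged_model -p 0.5 -lambda 1.0 (paths here are placeholders), where base_model may be either a single .safetensors file or a model directory.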