#!/usr/bin/env python
"""Load two pre-calculated embeddings files (eg: *.allid.*).

Typically, I generate files that cover the full range of token IDs, 0-49405
(using generate-allid-embeddings.py(XL).py).
This script then goes through the full range and calculates the distance
between the embeddings for each token ID, displays a graph of the distances,
and prints out things like the mean distance.
"""

import sys

import torch
from safetensors import safe_open

file1 = sys.argv[1]
file2 = sys.argv[2]

device = torch.device("cuda")

print(f"reading {file1} embeddings now", file=sys.stderr)
model = safe_open(file1, framework="pt", device="cuda")
embs1 = model.get_tensor("embeddings").to(device)
print("Shape of loaded embeds =", embs1.shape)

print(f"reading {file2} embeddings now", file=sys.stderr)
model = safe_open(file2, framework="pt", device="cuda")
embs2 = model.get_tensor("embeddings").to(device)
print("Shape of loaded embeds =", embs2.shape)

if torch.equal(embs1, embs2):
    print("HEY! Both files are identical!")
    sys.exit(0)

print("calculating distances...")

# torch.cdist would calculate a full cross matrix of ALL distances from every
# point to every other point in the other tensor:
##targetdistances = torch.cdist(embs1, embs2, p=2)
# We only want the element-wise distances, i.e. the distance between the two
# embeddings that share the same token ID.
targetdistances = torch.norm(embs2 - embs1, dim=1)

#print(targetdistances.shape)
#tl = targetdistances.tolist()
#print(tl[:10])

print("sum of all distances =", torch.sum(targetdistances))

# Compare that with the distance between the centroids (mean points) of the
# two embedding sets.
embs1_avg = torch.mean(embs1, dim=0)
embs2_avg = torch.mean(embs2, dim=0)
avg_dist = torch.cdist(embs1_avg.unsqueeze(0), embs2_avg.unsqueeze(0), p=2)
print("However, the distance between the avg-point of each is:", avg_dist)

print("Mean of all the distances:", torch.mean(targetdistances))

######################################

import PyQt5  # noqa: F401  (imported so a missing Qt backend fails early)
import matplotlib
matplotlib.use('QT5Agg')  # Set the backend to QT5Agg
import matplotlib.pyplot as plt

fig, ax = plt.subplots()

# One data point per token ID: distance between the two embeddings for that ID.
graph1 = targetdistances.tolist()
ax.set_title(f"{file1} vs \n{file2}")
ax.plot(graph1, label="Distance between same tokenID")

ax.set_ylabel("Distance")
ax.set_xlabel("CLIP TokenID")
ax.legend()
plt.show()
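
# Example invocation (a sketch; the script name and embedding file names here
# are placeholders, not files that ship with this repo):
#
#   python graph-embedding-distances.py model-a.allid.safetensors model-b.allid.safetensors
#
# Both inputs are expected to contain a single "embeddings" tensor of shape
# [num_tokenids, embedding_dim], with matching shapes, as produced by the
# generate-allid-embeddings script mentioned in the docstring.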