#!/usr/bin/env python
"""
Work in progress

Plan:
Modded version of graph-embeddings.py
Just to see if using a different CLIP module changes values significantly
(It does not)

It does have the small bonus feature of being able to accept a purely
numerical token id in lieu of a text prompt, if you use the syntax "#345".
You can input a text string, or a single numeric code, per input.

This code requires
pip install git+https://github.com/openai/CLIP.git
"""

import torch
import clip
import PyQt5  # imported explicitly so a missing Qt binding fails fast
import matplotlib
matplotlib.use('Qt5Agg')  # Set the backend to Qt5Agg
import matplotlib.pyplot as plt

### The Stable Diffusion standard model is ViT-L/14
#CLIPname= "ViT-B/16"
CLIPname= "ViT-L/14"
#CLIPname= "ViT-L/14@336px"
# Available models:
# 'RN50', 'RN101', 'RN50x4', 'RN50x16', 'RN50x64',
# 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px'

device = torch.device("cuda")  # this script assumes a CUDA-capable GPU

print("loading CLIP model", CLIPname)
# clip.load() returns the model plus an image preprocessor (unused here)
model, preprocess = clip.load(CLIPname, device=device)
model.eval()  # clip.load() has already placed the model on `device`
print("done")


def embed_from_tokenid(num):
    # A bit sleazy, but, eh. Tokenize a dummy string, then overwrite
    # slot 1 (the first slot after the start-of-text token) with the
    # requested token id before encoding.
    tokens = clip.tokenize("dummy").to(device)
    tokens[0][1] = num
    with torch.no_grad():
        embed = model.encode_text(tokens)
    return embed


def embed_from_text(text):
    if text.startswith("#"):
        print("Converting string to number")
        return embed_from_tokenid(int(text[1:]))
    tokens = clip.tokenize(text).to(device)
    print("Tokens for", text, "=", tokens)
    with torch.no_grad():
        embed = model.encode_text(tokens)
    return embed


fig, ax = plt.subplots()

text1 = input("First prompt or #tokenid: ")
text2 = input("Second prompt (or leave blank): ")

print("generating embeddings for each now")

emb1 = embed_from_text(text1)
print("shape of emb1:", emb1.shape)
graph1 = emb1[0].tolist()
ax.plot(graph1, label=text1[:20])

if len(text2) > 0:
    emb2 = embed_from_text(text2)
    graph2 = emb2[0].tolist()
    ax.plot(graph2, label=text2[:20])

# Add labels, title, and legend
#ax.set_xlabel('Index')
ax.set_ylabel('Values')
ax.set_title(f"Graph of Embeddings in {CLIPname}")
ax.legend()

# Display the graph
print("Pulling up the graph")
plt.show()
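
# --- Optional check: a minimal sketch, not part of the original plan ---
# A quick numeric companion to the plot when judging whether two embeddings
# differ "significantly": their cosine similarity. This runs after the plot
# window is closed, and only when a second prompt was supplied.
if len(text2) > 0:
    sim = torch.nn.functional.cosine_similarity(emb1.float(), emb2.float())
    print("cosine similarity between the two embeddings:", sim.item())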
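
# --- Optional helper: a minimal sketch, assuming the SimpleTokenizer that ---
# --- ships inside the openai/CLIP package; the helper name is my own.    ---
# Since the script accepts raw token ids via "#345", it can be handy to see
# which BPE string a given id maps back to.
from clip.simple_tokenizer import SimpleTokenizer

_tokenizer = SimpleTokenizer()

def tokenid_to_string(num):
    # decoder maps token id -> BPE string; word-final tokens end in "</w>"
    return _tokenizer.decoder.get(num, "<unknown token id>")

# Usage: print(tokenid_to_string(345))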