Detected Pickle imports (278)
- "__torch__.torch.nn.modules.linear.___torch_mangle_4759.Linear",
- "__torch__.open_clip.model.___torch_mangle_4806.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4948.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4978.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4902.Linear",
- "__torch__.open_clip.model.___torch_mangle_4931.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4763.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4906.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4873.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.container.___torch_mangle_4991.Sequential",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4802.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4911.Identity",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4819.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4985.Identity",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4890.GELU",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4929.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4842.Identity",
- "__torch__.open_clip.model.___torch_mangle_4850.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4899.Linear",
- "__torch__.open_clip.model.___torch_mangle_4766.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4810.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4942.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4945.GELU",
- "__torch__.torch.nn.modules.activation.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4954.Linear",
- "torch.FloatStorage",
- "__torch__.open_clip.model.___torch_mangle_4909.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4898.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4956.GELU",
- "__torch__.open_clip.model.___torch_mangle_4994.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4973.MultiheadAttention",
- "__torch__.open_clip.model.___torch_mangle_4865.LayerNorm",
- "__torch__.open_clip.model.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4884.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.container.___torch_mangle_4914.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4853.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4778.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4885.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4886.Identity",
- "__torch__.torch.nn.modules.container.___torch_mangle_4892.Sequential",
- "__torch__.open_clip.model.___torch_mangle_4805.ResidualAttentionBlock",
- "__torch__.open_clip.model.___torch_mangle_4893.ResidualAttentionBlock",
- "__torch__.open_clip.model.___torch_mangle_4982.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4820.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4833.Linear",
- "__torch__.open_clip.model.___torch_mangle_4905.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_4881.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4917.NonDynamicallyQuantizableLinear",
- "__torch__.open_clip.model.___torch_mangle_4975.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4895.NonDynamicallyQuantizableLinear",
- "__torch__.open_clip.model.___torch_mangle_4751.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4935.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4780.GELU",
- "__torch__.open_clip.model.___torch_mangle_4795.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4821.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4874.MultiheadAttention",
- "__torch__.open_clip.model.___torch_mangle_4843.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4959.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.activation.GELU",
- "__torch__.open_clip.model.___torch_mangle_4920.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4952.Identity",
- "__torch__.open_clip.model.___torch_mangle_4882.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4770.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4800.Linear",
- "__torch__.open_clip.model.___torch_mangle_4915.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4862.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4741.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.NonDynamicallyQuantizableLinear",
- "__torch__.open_clip.model.___torch_mangle_4783.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4928.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.container.___torch_mangle_4969.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4972.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4989.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4803.Linear",
- "__torch__.open_clip.model.___torch_mangle_4735.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4988.Identity",
- "__torch__.open_clip.model.___torch_mangle_4937.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4823.Identity",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4846.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4957.Linear",
- "__torch__.open_clip.model.___torch_mangle_4970.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4786.MultiheadAttention",
- "__torch__.torch.nn.modules.container.___torch_mangle_4804.Sequential",
- "__torch__.open_clip.model.___torch_mangle_4784.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4750.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4738.Linear",
- "__torch__.open_clip.model.___torch_mangle_4788.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4808.MultiheadAttention",
- "__torch__.open_clip.model.Transformer",
- "__torch__.open_clip.model.___torch_mangle_4832.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_4947.Sequential",
- "__torch__.torch.nn.modules.container.___torch_mangle_4958.Sequential",
- "__torch__.open_clip.model.___torch_mangle_4883.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4912.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4977.Identity",
- "__torch__.open_clip.model.___torch_mangle_4854.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_4870.Sequential",
- "__torch__.open_clip.model.___torch_mangle_4827.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4888.Linear",
- "__torch__.open_clip.model.___torch_mangle_4755.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4845.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4946.Linear",
- "__torch__.open_clip.model.___torch_mangle_4926.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4797.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4923.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4768.Identity",
- "__torch__.open_clip.model.___torch_mangle_4772.ResidualAttentionBlock",
- "__torch__.ldm.modules.encoders.modules.FrozenOpenCLIPEmbedder",
- "__torch__.torch.nn.modules.container.___torch_mangle_4936.Sequential",
- "__torch__.torch.nn.modules.container.___torch_mangle_4848.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4966.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4829.NonDynamicallyQuantizableLinear",
- "__torch__.open_clip.model.___torch_mangle_4894.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4941.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4818.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4834.Identity",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4962.MultiheadAttention",
- "__torch__.torch.nn.modules.sparse.Embedding",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4930.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4822.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4812.Identity",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4940.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4869.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4921.Linear",
- "__torch__.open_clip.model.___torch_mangle_4839.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4773.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4907.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4976.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4809.Identity",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4868.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4798.Identity",
- "__torch__.open_clip.model.CLIP",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4857.GELU",
- "__torch__.torch.nn.modules.container.___torch_mangle_4903.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4913.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4840.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4918.MultiheadAttention",
- "__torch__.open_clip.model.___torch_mangle_4981.ResidualAttentionBlock",
- "__torch__.open_clip.model.___torch_mangle_4960.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4789.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4908.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4963.Identity",
- "__torch__.open_clip.model.___torch_mangle_4860.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4880.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4967.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4754.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4757.Identity",
- "__torch__.open_clip.model.___torch_mangle_4927.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4758.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4792.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4831.Identity",
- "__torch__.open_clip.model.___torch_mangle_4838.ResidualAttentionBlock",
- "__torch__.ClipEncoder",
- "__torch__.torch.nn.modules.container.___torch_mangle_4980.Sequential",
- "__torch__.open_clip.model.___torch_mangle_4949.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4825.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4748.Linear",
- "__torch__.open_clip.model.___torch_mangle_4744.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4986.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4799.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4866.Linear",
- "__torch__.open_clip.model.___torch_mangle_4876.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4932.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4933.Identity",
- "__torch__.open_clip.model.___torch_mangle_4861.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4816.ResidualAttentionBlock",
- "__torch__.open_clip.model.___torch_mangle_4953.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_4993.ModuleList",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4785.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4856.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4864.Identity",
- "__torch__.open_clip.model.___torch_mangle_4849.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4924.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4944.Identity",
- "__torch__.open_clip.model.___torch_mangle_4777.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4734.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4900.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4979.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4775.MultiheadAttention",
- "collections.OrderedDict",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4742.MultiheadAttention",
- "__torch__.torch.nn.modules.container.___torch_mangle_4771.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4968.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4830.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4891.Linear",
- "__torch__.open_clip.model.___torch_mangle_4992.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4787.Identity",
- "__torch__.open_clip.model.___torch_mangle_4916.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4844.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4779.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4814.Linear",
- "__torch__.open_clip.model.___torch_mangle_4887.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4922.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4889.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4781.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4790.Identity",
- "__torch__.torch.nn.modules.container.___torch_mangle_4760.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4847.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4756.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4851.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4987.Linear",
- "__torch__.torch.nn.modules.container.___torch_mangle_4815.Sequential",
- "__torch__.open_clip.model.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4747.GELU",
- "__torch__.torch.nn.modules.container.___torch_mangle_4925.Sequential",
- "__torch__.open_clip.model.___torch_mangle_4964.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4745.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4910.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4879.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4983.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4852.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4867.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4878.Identity",
- "__torch__.open_clip.model.___torch_mangle_4794.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4939.NonDynamicallyQuantizableLinear",
- "torch._utils._rebuild_tensor_v2",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4841.MultiheadAttention",
- "__torch__.torch.nn.modules.container.___torch_mangle_4782.Sequential",
- "__torch__.open_clip.model.___torch_mangle_4828.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4950.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4736.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4813.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4858.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4901.GELU",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4863.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4764.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4951.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4752.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4836.Linear",
- "__torch__.open_clip.model.___torch_mangle_4971.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4791.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4743.Identity",
- "__torch__.open_clip.model.___torch_mangle_4762.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4737.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4974.Identity",
- "__torch__.torch.nn.modules.container.___torch_mangle_4837.Sequential",
- "__torch__.open_clip.model.___torch_mangle_4872.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4761.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.container.___torch_mangle_4793.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4877.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4835.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4943.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4919.Identity",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4769.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4811.Linear",
- "__torch__.open_clip.model.___torch_mangle_4904.ResidualAttentionBlock",
- "__torch__.open_clip.model.___torch_mangle_4871.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4776.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4855.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4753.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4984.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4965.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4767.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4807.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4990.Linear",
- "__torch__.torch.nn.modules.container.___torch_mangle_4739.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4875.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4765.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4796.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4934.GELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4801.Identity",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4824.GELU",
- "__torch__.torch.nn.modules.container.___torch_mangle_4749.Sequential",
- "__torch__.torch.nn.modules.container.___torch_mangle_4859.Sequential",
- "__torch__.open_clip.model.___torch_mangle_4817.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4740.LayerNorm",
- "__torch__.open_clip.model.___torch_mangle_4938.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4955.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4774.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4897.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4961.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4896.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4746.Identity",
- "__torch__.torch.nn.modules.container.___torch_mangle_4826.Sequential",
- "torch.DoubleStorage",
- "torch.LongStorage",
- "torch._utils._rebuild_tensor_v2",
- "collections.OrderedDict"
Git LFS Details
- SHA256: ef9706f02a78b2cf93acff22f3036bc3e629d0a5b595c640ada1f73788826f37
- Pointer size: 135 Bytes
- Size of remote file: 1.42 GB
Git Large File Storage (LFS) replaces large files with text pointers inside Git, while storing the file contents on a remote server.
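Concretely, what the Git repository tracks for this file is a small pointer in the standard LFS format, roughly the sketch below; the oid matches the SHA256 above, while the size line is left as a placeholder because only the rounded 1.42 GB figure is shown here. This ~135-byte pointer is what the "Pointer size" field refers to; the actual weights live on the LFS remote and are downloaded on checkout or via git lfs pull.

```
version https://git-lfs.github.com/spec/v1
oid sha256:ef9706f02a78b2cf93acff22f3036bc3e629d0a5b595c640ada1f73788826f37
size <byte count of the 1.42 GB file>
```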