import { describe, expect, it } from "vitest";
import type { ModelEntry } from "./list-models";
import { listModels } from "./list-models";

describe("listModels", () => {
	it("should list models for depth estimation", async () => {
		const results: ModelEntry[] = [];

		for await (const entry of listModels({
			search: { owner: "Intel", task: "depth-estimation" },
		})) {
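			// Zero out volatile fields (download/like counts, last-update time) so the deep-equal snapshot below stays stable across runs.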
			if (typeof entry.downloads === "number") {
				entry.downloads = 0;
			}
			if (typeof entry.likes === "number") {
				entry.likes = 0;
			}
			if (entry.updatedAt instanceof Date && !isNaN(entry.updatedAt.getTime())) {
				entry.updatedAt = new Date(0);
			}

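			// Only the two pinned Intel DPT models are compared in full; any other result from the query just needs to carry the expected task.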
			if (!["Intel/dpt-large", "Intel/dpt-hybrid-midas"].includes(entry.name)) {
				expect(entry.task).to.equal("depth-estimation");
				continue;
			}

			results.push(entry);
		}

		results.sort((a, b) => a.id.localeCompare(b.id));

		expect(results).deep.equal([
			{
				id: "621ffdc136468d709f17e709",
				name: "Intel/dpt-large",
				private: false,
				gated: false,
				downloads: 0,
				likes: 0,
				task: "depth-estimation",
				updatedAt: new Date(0),
			},
			{
				id: "638f07977559bf9a2b2b04ac",
				name: "Intel/dpt-hybrid-midas",
				gated: false,
				private: false,
				downloads: 0,
				likes: 0,
				task: "depth-estimation",
				updatedAt: new Date(0),
			},
		]);
	});

	it("should list indonesian models with gguf format", async () => {
		let count = 0;
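		// "tags" is not part of the default ModelEntry fields, so it is requested via additionalFields.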
		for await (const entry of listModels({
			search: { tags: ["gguf", "id"] },
			additionalFields: ["tags"],
			limit: 2,
		})) {
			count++;
			expect(entry.tags).to.include("gguf");
			expect(entry.tags).to.include("id");
		}

		expect(count).to.equal(2);
	});

	it("should search model by name", async () => {
		let count = 0;
		for await (const entry of listModels({
			search: { query: "t5" },
			limit: 10,
		})) {
			count++;
			expect(entry.name.toLocaleLowerCase()).to.include("t5");
		}

		expect(count).to.equal(10);
	});

	it("should search model by inference provider", async () => {
		let count = 0;
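		// inferenceProviderMapping is requested via additionalFields; when it comes back as an array, each listed model should name "together" among its providers.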
		for await (const entry of listModels({
			search: { inferenceProviders: ["together"] },
			additionalFields: ["inferenceProviderMapping"],
			limit: 10,
		})) {
			count++;
			if (Array.isArray(entry.inferenceProviderMapping)) {
				expect(entry.inferenceProviderMapping.map(({ provider }) => provider)).to.include("together");
			}
		}

		expect(count).to.equal(10);
	});

	it("should search model by several inference providers", async () => {
		let count = 0;
		const inferenceProviders = ["together", "replicate"];
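		// A model qualifies as long as at least one of the requested providers appears in its mapping.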
		for await (const entry of listModels({
			search: { inferenceProviders },
			additionalFields: ["inferenceProviderMapping"],
			limit: 10,
		})) {
			count++;
			if (Array.isArray(entry.inferenceProviderMapping)) {
				expect(
					entry.inferenceProviderMapping.filter(({ provider }) => inferenceProviders.includes(provider)).length
				).toBeGreaterThan(0);
			}
		}

		expect(count).to.equal(10);
	});
});