multimodalart (HF staff) committed on
Commit
fa8cc68
1 Parent(s): 097567a

Delete tests

Browse files
Files changed (1) hide show
  1. tests/inference/test_inference.py +0 -111
tests/inference/test_inference.py DELETED
@@ -1,111 +0,0 @@
1
- import numpy
2
- from PIL import Image
3
- import pytest
4
- from pytest import fixture
5
- import torch
6
- from typing import Tuple
7
-
8
- from sgm.inference.api import (
9
- model_specs,
10
- SamplingParams,
11
- SamplingPipeline,
12
- Sampler,
13
- ModelArchitecture,
14
- )
15
- import sgm.inference.helpers as helpers
16
-
17
-
18
@pytest.mark.inference
class TestInference:
    """GPU smoke tests for the sgm sampling pipelines.

    Every test is parametrized across all samplers; the fixtures are
    class-scoped so each model is loaded once and the CUDA cache is
    flushed before the next parametrized model loads.
    """

    @fixture(scope="class", params=model_specs.keys())
    def pipeline(self, request) -> SamplingPipeline:
        """Yield one SamplingPipeline per registered model spec."""
        loaded = SamplingPipeline(request.param)
        yield loaded
        # Drop our reference and flush cached CUDA allocations so the
        # next parametrized model does not run out of GPU memory.
        del loaded
        torch.cuda.empty_cache()

    @fixture(
        scope="class",
        params=[
            [ModelArchitecture.SDXL_V1_BASE, ModelArchitecture.SDXL_V1_REFINER],
            [ModelArchitecture.SDXL_V0_9_BASE, ModelArchitecture.SDXL_V0_9_REFINER],
        ],
        ids=["SDXL_V1", "SDXL_V0_9"],
    )
    def sdxl_pipelines(self, request) -> Tuple[SamplingPipeline, SamplingPipeline]:
        """Yield a (base, refiner) pipeline pair for each SDXL release."""
        base = SamplingPipeline(request.param[0])
        refiner = SamplingPipeline(request.param[1])
        yield base, refiner
        del base
        del refiner
        torch.cuda.empty_cache()

    def create_init_image(self, h, w):
        """Build a random h-by-w RGB image and convert it to an input tensor.

        Used as the init image for the img2img paths so the tests do not
        depend on any asset files.
        """
        noise = numpy.random.rand(h, w, 3) * 255
        init = Image.fromarray(noise.astype("uint8")).convert("RGB")
        return helpers.get_input_image_tensor(init)

    @pytest.mark.parametrize("sampler_enum", Sampler)
    def test_txt2img(self, pipeline: SamplingPipeline, sampler_enum):
        """text_to_image returns a non-None result for every sampler."""
        result = pipeline.text_to_image(
            params=SamplingParams(sampler=sampler_enum.value, steps=10),
            prompt="A professional photograph of an astronaut riding a pig",
            negative_prompt="",
            samples=1,
        )
        assert result is not None

    @pytest.mark.parametrize("sampler_enum", Sampler)
    def test_img2img(self, pipeline: SamplingPipeline, sampler_enum):
        """image_to_image returns a non-None result for every sampler."""
        result = pipeline.image_to_image(
            params=SamplingParams(sampler=sampler_enum.value, steps=10),
            image=self.create_init_image(pipeline.specs.height, pipeline.specs.width),
            prompt="A professional photograph of an astronaut riding a pig",
            negative_prompt="",
            samples=1,
        )
        assert result is not None

    @pytest.mark.parametrize("sampler_enum", Sampler)
    @pytest.mark.parametrize(
        "use_init_image", [True, False], ids=["img2img", "txt2img"]
    )
    def test_sdxl_with_refiner(
        self,
        sdxl_pipelines: Tuple[SamplingPipeline, SamplingPipeline],
        sampler_enum,
        use_init_image,
    ):
        """Run the base model (txt2img or img2img), then refine its latents.

        Asserts the base pass yields both decoded samples and latents
        before handing the latents to the refiner.
        """
        base_pipeline, refiner_pipeline = sdxl_pipelines

        # return_latents=True so the refiner can consume the base latents.
        if use_init_image:
            base_output = base_pipeline.image_to_image(
                params=SamplingParams(sampler=sampler_enum.value, steps=10),
                image=self.create_init_image(
                    base_pipeline.specs.height, base_pipeline.specs.width
                ),
                prompt="A professional photograph of an astronaut riding a pig",
                negative_prompt="",
                samples=1,
                return_latents=True,
            )
        else:
            base_output = base_pipeline.text_to_image(
                params=SamplingParams(sampler=sampler_enum.value, steps=10),
                prompt="A professional photograph of an astronaut riding a pig",
                negative_prompt="",
                samples=1,
                return_latents=True,
            )

        assert isinstance(base_output, (tuple, list))
        samples, samples_z = base_output
        assert samples is not None
        assert samples_z is not None

        refiner_pipeline.refiner(
            params=SamplingParams(sampler=sampler_enum.value, steps=10),
            image=samples_z,
            prompt="A professional photograph of an astronaut riding a pig",
            negative_prompt="",
            samples=1,
        )