PB Unity committed on
Commit
aac9c1a
1 Parent(s): 24b8444

Upload 3 files

Files changed (4)
  1. .gitattributes +1 -0
  2. RunIris.cs +252 -0
  3. iris_landmark.onnx +3 -0
  4. iris_landmark.sentis +3 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ iris_landmark.sentis filter=lfs diff=lfs merge=lfs -text
RunIris.cs ADDED
@@ -0,0 +1,252 @@
+ using UnityEngine;
+ using Unity.Sentis;
+ using UnityEngine.Video;
+ using UnityEngine.UI;
+ using Lays = Unity.Sentis.Layers;
+ using System.Collections.Generic;
+
+ /*
+  * Iris Inference
+  * ==============
+  *
+  * Basic inference script for the Iris model
+  *
+  * Put this script on the Main Camera
+  * Put iris_landmark.sentis in the Assets/StreamingAssets folder
+  * Create a RawImage in the scene
+  * Put a link to that image in previewUI
+  * Put a video in the Assets/StreamingAssets folder and put its name in videoName
+  * Or put a test image in inputImage
+  * Set inputType to the appropriate input
+  */
+
+
+ public class RunIris : MonoBehaviour
+ {
+     // Drag a link to a raw image here:
+     public RawImage previewUI = null;
+
+     public enum InputType { Image, Video, Webcam };
+
+     public string videoName = "chatting.mp4";
+
+     // Input image for the neural network
+     public Texture2D inputImage;
+
+     public InputType inputType = InputType.Video;
+
+     Vector2Int resolution = new Vector2Int(640, 640);
+     WebCamTexture webcam;
+     VideoPlayer video;
+
+     const BackendType backend = BackendType.GPUCompute;
+
+     RenderTexture targetTexture;
+     Texture2D canvasTexture;
+
+     const int markerWidth = 5;
+     Color32[] markerPixels;
+
+     IWorker worker;
+
+     // Width and height of the model input image
+     const int size = 64;
+
+     Ops ops;
+     ITensorAllocator allocator;
+
+     Model model;
+
+     // Webcam device name:
+     const string deviceName = "";
+
+     bool closing = false;
+
+     void Start()
+     {
+         allocator = new TensorCachingAllocator();
+
+         // (Note: if using a webcam on mobile, get permissions here first)
+
+         SetupTextures();
+         SetupInput();
+         SetupModel();
+         SetupEngine();
+         SetupMarkers();
+     }
+
+     void SetupModel()
+     {
+         model = ModelLoader.Load(Application.streamingAssetsPath + "/iris_landmark.sentis");
+     }
+     public void SetupEngine()
+     {
+         worker = WorkerFactory.CreateWorker(backend, model);
+         ops = WorkerFactory.CreateOps(backend, allocator);
+     }
+     void SetupTextures()
+     {
+         targetTexture = new RenderTexture(resolution.x, resolution.y, 0);
+         canvasTexture = new Texture2D(targetTexture.width, targetTexture.height);
+         previewUI.texture = targetTexture;
+     }
+
+     void SetupMarkers()
+     {
+         markerPixels = new Color32[markerWidth * markerWidth];
+         for (int n = 0; n < markerWidth * markerWidth; n++)
+         {
+             markerPixels[n] = Color.white;
+         }
+         int center = markerWidth / 2;
+         markerPixels[center * markerWidth + center] = Color.black;
+     }
+
+     void SetupInput()
+     {
+         switch (inputType)
+         {
+             case InputType.Webcam:
+             {
+                 webcam = new WebCamTexture(deviceName, resolution.x, resolution.y);
+                 webcam.requestedFPS = 30;
+                 webcam.Play();
+                 break;
+             }
+             case InputType.Video:
+             {
+                 video = gameObject.AddComponent<VideoPlayer>();
+                 video.renderMode = VideoRenderMode.APIOnly;
+                 video.source = VideoSource.Url;
+                 video.url = Application.streamingAssetsPath + "/" + videoName;
+                 video.isLooping = true;
+                 video.Play();
+                 break;
+             }
+             default:
+             {
+                 Graphics.Blit(inputImage, targetTexture);
+                 break;
+             }
+         }
+     }
+
+     void Update()
+     {
+         if (inputType == InputType.Webcam)
+         {
+             // Format the webcam input
+             if (!webcam.didUpdateThisFrame) return;
+
+             var aspect1 = (float)webcam.width / webcam.height;
+             var aspect2 = (float)resolution.x / resolution.y;
+             var gap = aspect2 / aspect1;
+
+             var vflip = webcam.videoVerticallyMirrored;
+             var scale = new Vector2(gap, vflip ? -1 : 1);
+             var offset = new Vector2((1 - gap) / 2, vflip ? 1 : 0);
+
+             Graphics.Blit(webcam, targetTexture, scale, offset);
+         }
+         if (inputType == InputType.Video)
+         {
+             var aspect1 = (float)video.width / video.height;
+             var aspect2 = (float)resolution.x / resolution.y;
+             var gap = aspect2 / aspect1;
+
+             var vflip = false;
+             var scale = new Vector2(gap, vflip ? -1 : 1);
+             var offset = new Vector2((1 - gap) / 2, vflip ? 1 : 0);
+             Graphics.Blit(video.texture, targetTexture, scale, offset);
+         }
+         if (inputType == InputType.Image)
+         {
+             Graphics.Blit(inputImage, targetTexture);
+         }
+
+         if (Input.GetKeyDown(KeyCode.Escape))
+         {
+             closing = true;
+             Application.Quit();
+         }
+
+         if (Input.GetKeyDown(KeyCode.P))
+         {
+             previewUI.enabled = !previewUI.enabled;
+         }
+     }
+
+
+     void LateUpdate()
+     {
+         if (!closing)
+         {
+             RunInference(targetTexture);
+         }
+     }
+
+     void RunInference(Texture source)
+     {
+         var transform = new TextureTransform();
+         transform.SetDimensions(size, size, 3);
+         transform.SetTensorLayout(0, 1, 2, 3);
+         using var image0 = TextureConverter.ToTensor(source, transform);
+
+         // Pre-process the image so the input values are in the range (-1..1)
+         using var image = ops.Mad(image0, 2f, -1f);
+
+         worker.Execute(image);
+
+         using var eyeLandmarks = worker.PeekOutput("output_eyes_contours_and_brows") as TensorFloat;
+         using var irisLandmarks = worker.PeekOutput("output_iris") as TensorFloat;
+
+         float scaleX = targetTexture.width * 1f / size;
+         float scaleY = targetTexture.height * 1f / size;
+         eyeLandmarks.MakeReadable();
+         irisLandmarks.MakeReadable();
+
+         // Draw the markers
+         RenderTexture.active = targetTexture;
+         canvasTexture.ReadPixels(new Rect(0, 0, targetTexture.width, targetTexture.height), 0, 0);
+         DrawLandmarks(irisLandmarks, scaleX, scaleY);
+         DrawLandmarks(eyeLandmarks, scaleX, scaleY);
+         canvasTexture.Apply();
+         Graphics.Blit(canvasTexture, targetTexture);
+         RenderTexture.active = null;
+     }
+
+     void DrawLandmarks(TensorFloat landmarks, float scaleX, float scaleY)
+     {
+         int numLandmarks = landmarks.shape[1] / 3; // each landmark is an (x, y, z) triple
+
+         for (int n = 0; n < numLandmarks; n++)
+         {
+             int px = (int)(landmarks[0, n * 3 + 0] * scaleX) - (markerWidth - 1) / 2;
+             int py = (int)(landmarks[0, n * 3 + 1] * scaleY) - (markerWidth - 1) / 2;
+             int pz = (int)(landmarks[0, n * 3 + 2] * scaleX);
+             int destX = Mathf.Clamp(px, 0, targetTexture.width - 1 - markerWidth);
+             int destY = Mathf.Clamp(targetTexture.height - 1 - py, 0, targetTexture.height - 1 - markerWidth);
+             canvasTexture.SetPixels32(destX, destY, markerWidth, markerWidth, markerPixels);
+         }
+     }
+
+     void CleanUp()
+     {
+         closing = true;
+         ops?.Dispose();
+         allocator?.Dispose();
+         if (webcam) Destroy(webcam);
+         if (video) Destroy(video);
+         RenderTexture.active = null;
+         targetTexture.Release();
+         worker?.Dispose();
+         worker = null;
+     }
+
+     void OnDestroy()
+     {
+         CleanUp();
+     }
+
+ }
+
iris_landmark.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3eb985960b80e6069812c2f20682b7a4194d80cf5a3a918ddb8d0b3ca72a6432
+ size 2645189
iris_landmark.sentis ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba688b3b5e4bcd783b2e478ef001e8007e7eaa57158d17a31848fbf04e10137b
+ size 5948165
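
The commit uploads both the ONNX source model and a pre-serialized Sentis copy that RunIris.cs loads from StreamingAssets. For reference, here is a minimal editor-side sketch of how such a .sentis file can be produced from the imported ONNX asset; it assumes the ModelLoader/ModelWriter API of the Sentis 1.x releases this script targets, and the asset path and menu entry are illustrative only.

#if UNITY_EDITOR
using UnityEditor;
using UnityEngine;
using Unity.Sentis;

// Editor-only helper: serialize an imported ONNX model to StreamingAssets as .sentis.
public static class ExportIrisLandmarkToSentis
{
    [MenuItem("Tools/Export iris_landmark.sentis")]
    static void Export()
    {
        // Assumption: iris_landmark.onnx has been imported by Sentis as a ModelAsset at this path.
        var asset = AssetDatabase.LoadAssetAtPath<ModelAsset>("Assets/iris_landmark.onnx");
        var model = ModelLoader.Load(asset);

        // Write the runtime model so it can be loaded from a file path at runtime,
        // as SetupModel() in RunIris.cs does. StreamingAssets must already exist.
        ModelWriter.Save(Application.streamingAssetsPath + "/iris_landmark.sentis", model);
        AssetDatabase.Refresh();
    }
}
#endif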