Spaces:
Running
Running
Upload folder using huggingface_hub
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .devcontainer/Dockerfile +5 -1
- app.py +42 -2
- client/run.js +77 -0
- client/run.mjs +74 -0
- node_modules/.package-lock.json +147 -0
- node_modules/asynckit/LICENSE +21 -0
- node_modules/asynckit/README.md +233 -0
- node_modules/asynckit/bench.js +76 -0
- node_modules/asynckit/index.js +6 -0
- node_modules/asynckit/lib/abort.js +29 -0
- node_modules/asynckit/lib/async.js +34 -0
- node_modules/asynckit/lib/defer.js +26 -0
- node_modules/asynckit/lib/iterate.js +75 -0
- node_modules/asynckit/lib/readable_asynckit.js +91 -0
- node_modules/asynckit/lib/readable_parallel.js +25 -0
- node_modules/asynckit/lib/readable_serial.js +25 -0
- node_modules/asynckit/lib/readable_serial_ordered.js +29 -0
- node_modules/asynckit/lib/state.js +37 -0
- node_modules/asynckit/lib/streamify.js +141 -0
- node_modules/asynckit/lib/terminator.js +29 -0
- node_modules/asynckit/package.json +63 -0
- node_modules/asynckit/parallel.js +43 -0
- node_modules/asynckit/serial.js +17 -0
- node_modules/asynckit/serialOrdered.js +75 -0
- node_modules/asynckit/stream.js +21 -0
- node_modules/combined-stream/License +19 -0
- node_modules/combined-stream/Readme.md +138 -0
- node_modules/combined-stream/lib/combined_stream.js +208 -0
- node_modules/combined-stream/package.json +25 -0
- node_modules/combined-stream/yarn.lock +17 -0
- node_modules/data-uri-to-buffer/README.md +88 -0
- node_modules/data-uri-to-buffer/dist/index.d.ts +15 -0
- node_modules/data-uri-to-buffer/dist/index.js +53 -0
- node_modules/data-uri-to-buffer/dist/index.js.map +1 -0
- node_modules/data-uri-to-buffer/package.json +62 -0
- node_modules/data-uri-to-buffer/src/index.ts +68 -0
- node_modules/delayed-stream/.npmignore +1 -0
- node_modules/delayed-stream/License +19 -0
- node_modules/delayed-stream/Makefile +7 -0
- node_modules/delayed-stream/Readme.md +141 -0
- node_modules/delayed-stream/lib/delayed_stream.js +107 -0
- node_modules/delayed-stream/package.json +27 -0
- node_modules/fetch-blob/LICENSE +21 -0
- node_modules/fetch-blob/README.md +106 -0
- node_modules/fetch-blob/file.d.ts +2 -0
- node_modules/fetch-blob/file.js +49 -0
- node_modules/fetch-blob/from.d.ts +26 -0
- node_modules/fetch-blob/from.js +100 -0
- node_modules/fetch-blob/index.d.ts +3 -0
- node_modules/fetch-blob/index.js +250 -0
.devcontainer/Dockerfile
CHANGED
@@ -31,7 +31,9 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
|
31 |
python3-pip \
|
32 |
python3-setuptools \
|
33 |
libgl1-mesa-dev \
|
34 |
-
libglib2.0-0
|
|
|
|
|
35 |
|
36 |
RUN git lfs install
|
37 |
|
@@ -42,3 +44,5 @@ RUN python3 -m pip install --upgrade pip \
|
|
42 |
ultralytics \
|
43 |
pillow \
|
44 |
gradio
|
|
|
|
|
|
31 |
python3-pip \
|
32 |
python3-setuptools \
|
33 |
libgl1-mesa-dev \
|
34 |
+
libglib2.0-0 \
|
35 |
+
unzip \
|
36 |
+
curl
|
37 |
|
38 |
RUN git lfs install
|
39 |
|
|
|
44 |
ultralytics \
|
45 |
pillow \
|
46 |
gradio
|
47 |
+
|
48 |
+
RUN curl -fsSL https://fnm.vercel.app/install | bash
|
app.py
CHANGED
@@ -1,6 +1,4 @@
|
|
1 |
-
import asyncio
|
2 |
import os
|
3 |
-
import random
|
4 |
|
5 |
from fastapi import FastAPI, UploadFile
|
6 |
from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse
|
@@ -15,9 +13,17 @@ import uvicorn
|
|
15 |
import cv2
|
16 |
import uuid
|
17 |
from functools import partial
|
|
|
|
|
|
|
18 |
|
19 |
# from solareyes.sam import SAM
|
20 |
|
|
|
|
|
|
|
|
|
|
|
21 |
app = FastAPI()
|
22 |
|
23 |
# Load the model
|
@@ -237,6 +243,40 @@ def extract_image(uploadFile: UploadFile) -> FileResponse:
|
|
237 |
jpeg_image.save(filename)
|
238 |
return FileResponse(filename)
|
239 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
240 |
app = gr.mount_gradio_app(app, demo, path="/")
|
241 |
|
242 |
if __name__ == "__main__":
|
|
|
|
|
1 |
import os
|
|
|
2 |
|
3 |
from fastapi import FastAPI, UploadFile
|
4 |
from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse
|
|
|
13 |
import cv2
|
14 |
import uuid
|
15 |
from functools import partial
|
16 |
+
from openai import OpenAI
|
17 |
+
|
18 |
+
PROMPT = "You are analyzing the spec sheet of a solar panel. Plese answer the following questions, format them with bullets. \n"
|
19 |
|
20 |
# from solareyes.sam import SAM
|
21 |
|
22 |
+
client = OpenAI(
|
23 |
+
# This is the default and can be omitted
|
24 |
+
api_key=os.environ.get("OPENAI_API_KEY"),
|
25 |
+
)
|
26 |
+
|
27 |
app = FastAPI()
|
28 |
|
29 |
# Load the model
|
|
|
243 |
jpeg_image.save(filename)
|
244 |
return FileResponse(filename)
|
245 |
|
246 |
+
def parse_pdf_text(file):
|
247 |
+
pdf = pdfium.PdfDocument(file)
|
248 |
+
all_text = "PDF Extract Text Contents Below: \n\n"
|
249 |
+
for page in pdf:
|
250 |
+
textpage = page.get_textpage()
|
251 |
+
text_all = textpage.get_text_range()
|
252 |
+
all_text += text_all
|
253 |
+
|
254 |
+
#use openai to ask questions about text
|
255 |
+
q1 = "What are module dimensions in L x W x H?"
|
256 |
+
q2 = "What is the module weight in kilograms?"
|
257 |
+
q3 = "What are the cable lengthes in millimeters?"
|
258 |
+
q4 = "What brand, name, or model are the connectors?"
|
259 |
+
q5 = "How many pieces per container? Prefer 40' HQ or HC, if not available try 53'"
|
260 |
+
q6 = "What is the model number?"
|
261 |
+
question = PROMPT + q1 + "\n" + q2 + "\n" + q3 + "\n" + q4 + "\n" + q5 + "\n" + q6 + "\n" + all_text
|
262 |
+
chat_completion = client.chat.completions.create(
|
263 |
+
messages=[
|
264 |
+
{
|
265 |
+
"role": "user",
|
266 |
+
"content": question,
|
267 |
+
}
|
268 |
+
],
|
269 |
+
model="gpt-3.5-turbo",
|
270 |
+
)
|
271 |
+
return chat_completion.choices[0].message.content
|
272 |
+
|
273 |
+
#Accept a PDF file, return a text summary
|
274 |
+
@app.post("/parsePdf")
|
275 |
+
def parse_info(uploadFile: UploadFile):
|
276 |
+
file = uploadFile.file.read()
|
277 |
+
answer = parse_pdf_text(file)
|
278 |
+
return {"answer": answer}
|
279 |
+
|
280 |
app = gr.mount_gradio_app(app, demo, path="/")
|
281 |
|
282 |
if __name__ == "__main__":
|
client/run.js
ADDED
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
const fs = require('fs');
|
2 |
+
const { pipeline } = require('stream');
|
3 |
+
const util = require('util');
|
4 |
+
const FormData = require('form-data');
|
5 |
+
|
6 |
+
// Convert pipeline to Promise for easier async/await usage
|
7 |
+
const pipelineAsync = util.promisify(pipeline);
|
8 |
+
|
9 |
+
class PDFExtractor {
|
10 |
+
constructor(url = 'https://3martini-solar-eyes-dockerized.hf.space') {
|
11 |
+
this.bearerToken = process.env.HF_TOKEN; // Load from environment variable
|
12 |
+
this.url = url;
|
13 |
+
}
|
14 |
+
|
15 |
+
async convert(pdfFilePath, outputFile) {
|
16 |
+
const form = new FormData();
|
17 |
+
form.append('uploadFile', fs.createReadStream(pdfFilePath));
|
18 |
+
|
19 |
+
const options = {
|
20 |
+
method: 'POST',
|
21 |
+
body: form,
|
22 |
+
headers: {
|
23 |
+
'Authorization': `Bearer ${this.bearerToken}`,
|
24 |
+
},
|
25 |
+
};
|
26 |
+
|
27 |
+
try {
|
28 |
+
const fetch = (await import('node-fetch')).default;
|
29 |
+
const response = await fetch(this.url + "/uploadPdf", options);
|
30 |
+
if (!response.ok) {
|
31 |
+
throw new Error(`HTTP error! status: ${response.status}`);
|
32 |
+
}
|
33 |
+
// Save the response as an image file
|
34 |
+
const dest = fs.createWriteStream(outputFile);
|
35 |
+
await pipelineAsync(response.body, dest);
|
36 |
+
console.log(`Image saved as ${outputFile}`);
|
37 |
+
} catch (error) {
|
38 |
+
console.error('Error:', error);
|
39 |
+
}
|
40 |
+
}
|
41 |
+
|
42 |
+
async extractDesc(pdfFilePath) {
|
43 |
+
const form = new FormData();
|
44 |
+
form.append('uploadFile', fs.createReadStream(pdfFilePath));
|
45 |
+
|
46 |
+
const options = {
|
47 |
+
method: 'POST',
|
48 |
+
body: form,
|
49 |
+
headers: {
|
50 |
+
'Authorization': `Bearer ${this.bearerToken}`,
|
51 |
+
},
|
52 |
+
};
|
53 |
+
|
54 |
+
try {
|
55 |
+
const fetch = (await import('node-fetch')).default;
|
56 |
+
const response = await fetch(this.url + "/parsePdf", options);
|
57 |
+
if (!response.ok) {
|
58 |
+
throw new Error(`HTTP error! status: ${response.status}`);
|
59 |
+
}
|
60 |
+
return await response.json();
|
61 |
+
} catch (error) {
|
62 |
+
console.error('Error:', error);
|
63 |
+
}
|
64 |
+
}
|
65 |
+
}
|
66 |
+
|
67 |
+
// Example usage wrapped in an async IIFE
|
68 |
+
(async () => {
|
69 |
+
const converter = new PDFExtractor('http://localhost:7860');
|
70 |
+
const pdfPath = '/workspaces/solar_eyes/pdf_downloads/0a0824f0-a5e7-4643-a834-0cea06f36c49.pdf';
|
71 |
+
const outputPath = __dirname + '/outputImage2.jpg';
|
72 |
+
await converter.convert(pdfPath, outputPath); // Ensure this is awaited if you need sequential execution
|
73 |
+
|
74 |
+
// Moved inside the async IIFE to use await
|
75 |
+
const result = await converter.extractDesc(pdfPath);
|
76 |
+
console.log(result);
|
77 |
+
})();
|
client/run.mjs
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import fetch from 'node-fetch';
|
2 |
+
import FormData from 'form-data';
|
3 |
+
import fs from 'fs';
|
4 |
+
import { pipeline } from 'stream/promises';
|
5 |
+
import { dirname, join } from 'path';
|
6 |
+
import { fileURLToPath } from 'url';
|
7 |
+
|
8 |
+
// If you're using ESM, you might need to resolve __dirname
|
9 |
+
const __filename = fileURLToPath(import.meta.url);
|
10 |
+
const __dirname = dirname(__filename);
|
11 |
+
|
12 |
+
class PDFExtractor {
|
13 |
+
constructor(url = 'https://3martini-solar-eyes-dockerized.hf.space') {
|
14 |
+
this.bearerToken = process.env.HF_TOKEN; // Load from environment variable
|
15 |
+
this.url = url;
|
16 |
+
}
|
17 |
+
|
18 |
+
async extract_panel_image(pdfFilePath, outputFile) {
|
19 |
+
const form = new FormData();
|
20 |
+
form.append('uploadFile', fs.createReadStream(pdfFilePath));
|
21 |
+
|
22 |
+
const options = {
|
23 |
+
method: 'POST',
|
24 |
+
body: form,
|
25 |
+
headers: {
|
26 |
+
'Authorization': `Bearer ${this.bearerToken}`,
|
27 |
+
},
|
28 |
+
};
|
29 |
+
|
30 |
+
try {
|
31 |
+
const response = await fetch(this.url + "/uploadPdf", options);
|
32 |
+
if (!response.ok) {
|
33 |
+
throw new Error(`HTTP error! status: ${response.status}`);
|
34 |
+
}
|
35 |
+
// Assuming the response is a stream of the image
|
36 |
+
const dest = fs.createWriteStream(outputFile);
|
37 |
+
await pipeline(response.body, dest);
|
38 |
+
console.log(`Image saved as ${outputFile}`);
|
39 |
+
} catch (error) {
|
40 |
+
console.error('Error:', error);
|
41 |
+
}
|
42 |
+
}
|
43 |
+
|
44 |
+
async extract_panel_desc(pdfFilePath) {
|
45 |
+
const form = new FormData();
|
46 |
+
form.append('uploadFile', fs.createReadStream(pdfFilePath));
|
47 |
+
|
48 |
+
const options = {
|
49 |
+
method: 'POST',
|
50 |
+
body: form,
|
51 |
+
headers: {
|
52 |
+
'Authorization': `Bearer ${this.bearerToken}`,
|
53 |
+
},
|
54 |
+
};
|
55 |
+
|
56 |
+
try {
|
57 |
+
const response = await fetch(this.url + "/parsePdf", options);
|
58 |
+
if (!response.ok) {
|
59 |
+
throw new Error(`HTTP error! status: ${response.status}`);
|
60 |
+
}
|
61 |
+
const as_json = await response.json();
|
62 |
+
return as_json;
|
63 |
+
} catch (error) {
|
64 |
+
console.error('Error:', error);
|
65 |
+
}
|
66 |
+
}
|
67 |
+
}
|
68 |
+
|
69 |
+
// Usage example
|
70 |
+
const extractor = new PDFExtractor('http://localhost:7860');
|
71 |
+
const pdfPath = join(__dirname, '../pdf_downloads/0a0824f0-a5e7-4643-a834-0cea06f36c49.pdf');
|
72 |
+
const outputPath = join(__dirname, 'outputImage.jpg');
|
73 |
+
extractor.extract_panel_image(pdfPath, outputPath);
|
74 |
+
console.log(await extractor.extract_panel_desc(pdfPath));
|
node_modules/.package-lock.json
ADDED
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"name": "solar_eyes",
|
3 |
+
"lockfileVersion": 3,
|
4 |
+
"requires": true,
|
5 |
+
"packages": {
|
6 |
+
"node_modules/asynckit": {
|
7 |
+
"version": "0.4.0",
|
8 |
+
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
9 |
+
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
10 |
+
},
|
11 |
+
"node_modules/combined-stream": {
|
12 |
+
"version": "1.0.8",
|
13 |
+
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
14 |
+
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
15 |
+
"dependencies": {
|
16 |
+
"delayed-stream": "~1.0.0"
|
17 |
+
},
|
18 |
+
"engines": {
|
19 |
+
"node": ">= 0.8"
|
20 |
+
}
|
21 |
+
},
|
22 |
+
"node_modules/data-uri-to-buffer": {
|
23 |
+
"version": "4.0.1",
|
24 |
+
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz",
|
25 |
+
"integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==",
|
26 |
+
"engines": {
|
27 |
+
"node": ">= 12"
|
28 |
+
}
|
29 |
+
},
|
30 |
+
"node_modules/delayed-stream": {
|
31 |
+
"version": "1.0.0",
|
32 |
+
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
33 |
+
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
34 |
+
"engines": {
|
35 |
+
"node": ">=0.4.0"
|
36 |
+
}
|
37 |
+
},
|
38 |
+
"node_modules/fetch-blob": {
|
39 |
+
"version": "3.2.0",
|
40 |
+
"resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz",
|
41 |
+
"integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
|
42 |
+
"funding": [
|
43 |
+
{
|
44 |
+
"type": "github",
|
45 |
+
"url": "https://github.com/sponsors/jimmywarting"
|
46 |
+
},
|
47 |
+
{
|
48 |
+
"type": "paypal",
|
49 |
+
"url": "https://paypal.me/jimmywarting"
|
50 |
+
}
|
51 |
+
],
|
52 |
+
"dependencies": {
|
53 |
+
"node-domexception": "^1.0.0",
|
54 |
+
"web-streams-polyfill": "^3.0.3"
|
55 |
+
},
|
56 |
+
"engines": {
|
57 |
+
"node": "^12.20 || >= 14.13"
|
58 |
+
}
|
59 |
+
},
|
60 |
+
"node_modules/form-data": {
|
61 |
+
"version": "4.0.0",
|
62 |
+
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
63 |
+
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
64 |
+
"dependencies": {
|
65 |
+
"asynckit": "^0.4.0",
|
66 |
+
"combined-stream": "^1.0.8",
|
67 |
+
"mime-types": "^2.1.12"
|
68 |
+
},
|
69 |
+
"engines": {
|
70 |
+
"node": ">= 6"
|
71 |
+
}
|
72 |
+
},
|
73 |
+
"node_modules/formdata-polyfill": {
|
74 |
+
"version": "4.0.10",
|
75 |
+
"resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
|
76 |
+
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
|
77 |
+
"dependencies": {
|
78 |
+
"fetch-blob": "^3.1.2"
|
79 |
+
},
|
80 |
+
"engines": {
|
81 |
+
"node": ">=12.20.0"
|
82 |
+
}
|
83 |
+
},
|
84 |
+
"node_modules/mime-db": {
|
85 |
+
"version": "1.52.0",
|
86 |
+
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
87 |
+
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
88 |
+
"engines": {
|
89 |
+
"node": ">= 0.6"
|
90 |
+
}
|
91 |
+
},
|
92 |
+
"node_modules/mime-types": {
|
93 |
+
"version": "2.1.35",
|
94 |
+
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
95 |
+
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
96 |
+
"dependencies": {
|
97 |
+
"mime-db": "1.52.0"
|
98 |
+
},
|
99 |
+
"engines": {
|
100 |
+
"node": ">= 0.6"
|
101 |
+
}
|
102 |
+
},
|
103 |
+
"node_modules/node-domexception": {
|
104 |
+
"version": "1.0.0",
|
105 |
+
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
|
106 |
+
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
|
107 |
+
"funding": [
|
108 |
+
{
|
109 |
+
"type": "github",
|
110 |
+
"url": "https://github.com/sponsors/jimmywarting"
|
111 |
+
},
|
112 |
+
{
|
113 |
+
"type": "github",
|
114 |
+
"url": "https://paypal.me/jimmywarting"
|
115 |
+
}
|
116 |
+
],
|
117 |
+
"engines": {
|
118 |
+
"node": ">=10.5.0"
|
119 |
+
}
|
120 |
+
},
|
121 |
+
"node_modules/node-fetch": {
|
122 |
+
"version": "3.3.2",
|
123 |
+
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz",
|
124 |
+
"integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==",
|
125 |
+
"dependencies": {
|
126 |
+
"data-uri-to-buffer": "^4.0.0",
|
127 |
+
"fetch-blob": "^3.1.4",
|
128 |
+
"formdata-polyfill": "^4.0.10"
|
129 |
+
},
|
130 |
+
"engines": {
|
131 |
+
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
|
132 |
+
},
|
133 |
+
"funding": {
|
134 |
+
"type": "opencollective",
|
135 |
+
"url": "https://opencollective.com/node-fetch"
|
136 |
+
}
|
137 |
+
},
|
138 |
+
"node_modules/web-streams-polyfill": {
|
139 |
+
"version": "3.3.3",
|
140 |
+
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz",
|
141 |
+
"integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==",
|
142 |
+
"engines": {
|
143 |
+
"node": ">= 8"
|
144 |
+
}
|
145 |
+
}
|
146 |
+
}
|
147 |
+
}
|
node_modules/asynckit/LICENSE
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
The MIT License (MIT)
|
2 |
+
|
3 |
+
Copyright (c) 2016 Alex Indigo
|
4 |
+
|
5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6 |
+
of this software and associated documentation files (the "Software"), to deal
|
7 |
+
in the Software without restriction, including without limitation the rights
|
8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9 |
+
copies of the Software, and to permit persons to whom the Software is
|
10 |
+
furnished to do so, subject to the following conditions:
|
11 |
+
|
12 |
+
The above copyright notice and this permission notice shall be included in all
|
13 |
+
copies or substantial portions of the Software.
|
14 |
+
|
15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
21 |
+
SOFTWARE.
|
node_modules/asynckit/README.md
ADDED
@@ -0,0 +1,233 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit)
|
2 |
+
|
3 |
+
Minimal async jobs utility library, with streams support.
|
4 |
+
|
5 |
+
[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit)
|
6 |
+
[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit)
|
7 |
+
[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit)
|
8 |
+
|
9 |
+
[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master)
|
10 |
+
[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit)
|
11 |
+
[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit)
|
12 |
+
|
13 |
+
<!-- [![Readme](https://img.shields.io/badge/readme-tested-brightgreen.svg?style=flat)](https://www.npmjs.com/package/reamde) -->
|
14 |
+
|
15 |
+
AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects.
|
16 |
+
Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method.
|
17 |
+
|
18 |
+
It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators.
|
19 |
+
|
20 |
+
| compression | size |
|
21 |
+
| :----------------- | -------: |
|
22 |
+
| asynckit.js | 12.34 kB |
|
23 |
+
| asynckit.min.js | 4.11 kB |
|
24 |
+
| asynckit.min.js.gz | 1.47 kB |
|
25 |
+
|
26 |
+
|
27 |
+
## Install
|
28 |
+
|
29 |
+
```sh
|
30 |
+
$ npm install --save asynckit
|
31 |
+
```
|
32 |
+
|
33 |
+
## Examples
|
34 |
+
|
35 |
+
### Parallel Jobs
|
36 |
+
|
37 |
+
Runs iterator over provided array in parallel. Stores output in the `result` array,
|
38 |
+
on the matching positions. In unlikely event of an error from one of the jobs,
|
39 |
+
will terminate rest of the active jobs (if abort function is provided)
|
40 |
+
and return error along with salvaged data to the main callback function.
|
41 |
+
|
42 |
+
#### Input Array
|
43 |
+
|
44 |
+
```javascript
|
45 |
+
var parallel = require('asynckit').parallel
|
46 |
+
, assert = require('assert')
|
47 |
+
;
|
48 |
+
|
49 |
+
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
50 |
+
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
51 |
+
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
|
52 |
+
, target = []
|
53 |
+
;
|
54 |
+
|
55 |
+
parallel(source, asyncJob, function(err, result)
|
56 |
+
{
|
57 |
+
assert.deepEqual(result, expectedResult);
|
58 |
+
assert.deepEqual(target, expectedTarget);
|
59 |
+
});
|
60 |
+
|
61 |
+
// async job accepts one element from the array
|
62 |
+
// and a callback function
|
63 |
+
function asyncJob(item, cb)
|
64 |
+
{
|
65 |
+
// different delays (in ms) per item
|
66 |
+
var delay = item * 25;
|
67 |
+
|
68 |
+
// pretend different jobs take different time to finish
|
69 |
+
// and not in consequential order
|
70 |
+
var timeoutId = setTimeout(function() {
|
71 |
+
target.push(item);
|
72 |
+
cb(null, item * 2);
|
73 |
+
}, delay);
|
74 |
+
|
75 |
+
// allow to cancel "leftover" jobs upon error
|
76 |
+
// return function, invoking of which will abort this job
|
77 |
+
return clearTimeout.bind(null, timeoutId);
|
78 |
+
}
|
79 |
+
```
|
80 |
+
|
81 |
+
More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js).
|
82 |
+
|
83 |
+
#### Input Object
|
84 |
+
|
85 |
+
Also it supports named jobs, listed via object.
|
86 |
+
|
87 |
+
```javascript
|
88 |
+
var parallel = require('asynckit/parallel')
|
89 |
+
, assert = require('assert')
|
90 |
+
;
|
91 |
+
|
92 |
+
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
|
93 |
+
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
|
94 |
+
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
|
95 |
+
, expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ]
|
96 |
+
, target = []
|
97 |
+
, keys = []
|
98 |
+
;
|
99 |
+
|
100 |
+
parallel(source, asyncJob, function(err, result)
|
101 |
+
{
|
102 |
+
assert.deepEqual(result, expectedResult);
|
103 |
+
assert.deepEqual(target, expectedTarget);
|
104 |
+
assert.deepEqual(keys, expectedKeys);
|
105 |
+
});
|
106 |
+
|
107 |
+
// supports full value, key, callback (shortcut) interface
|
108 |
+
function asyncJob(item, key, cb)
|
109 |
+
{
|
110 |
+
// different delays (in ms) per item
|
111 |
+
var delay = item * 25;
|
112 |
+
|
113 |
+
// pretend different jobs take different time to finish
|
114 |
+
// and not in consequential order
|
115 |
+
var timeoutId = setTimeout(function() {
|
116 |
+
keys.push(key);
|
117 |
+
target.push(item);
|
118 |
+
cb(null, item * 2);
|
119 |
+
}, delay);
|
120 |
+
|
121 |
+
// allow to cancel "leftover" jobs upon error
|
122 |
+
// return function, invoking of which will abort this job
|
123 |
+
return clearTimeout.bind(null, timeoutId);
|
124 |
+
}
|
125 |
+
```
|
126 |
+
|
127 |
+
More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js).
|
128 |
+
|
129 |
+
### Serial Jobs
|
130 |
+
|
131 |
+
Runs iterator over provided array sequentially. Stores output in the `result` array,
|
132 |
+
on the matching positions. In unlikely event of an error from one of the jobs,
|
133 |
+
will not proceed to the rest of the items in the list
|
134 |
+
and return error along with salvaged data to the main callback function.
|
135 |
+
|
136 |
+
#### Input Array
|
137 |
+
|
138 |
+
```javascript
|
139 |
+
var serial = require('asynckit/serial')
|
140 |
+
, assert = require('assert')
|
141 |
+
;
|
142 |
+
|
143 |
+
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
144 |
+
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
145 |
+
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
|
146 |
+
, target = []
|
147 |
+
;
|
148 |
+
|
149 |
+
serial(source, asyncJob, function(err, result)
|
150 |
+
{
|
151 |
+
assert.deepEqual(result, expectedResult);
|
152 |
+
assert.deepEqual(target, expectedTarget);
|
153 |
+
});
|
154 |
+
|
155 |
+
// extended interface (item, key, callback)
|
156 |
+
// also supported for arrays
|
157 |
+
function asyncJob(item, key, cb)
|
158 |
+
{
|
159 |
+
target.push(key);
|
160 |
+
|
161 |
+
// it will be automatically made async
|
162 |
+
// even it iterator "returns" in the same event loop
|
163 |
+
cb(null, item * 2);
|
164 |
+
}
|
165 |
+
```
|
166 |
+
|
167 |
+
More examples could be found in [test/test-serial-array.js](test/test-serial-array.js).
|
168 |
+
|
169 |
+
#### Input Object
|
170 |
+
|
171 |
+
Also it supports named jobs, listed via object.
|
172 |
+
|
173 |
+
```javascript
|
174 |
+
var serial = require('asynckit').serial
|
175 |
+
, assert = require('assert')
|
176 |
+
;
|
177 |
+
|
178 |
+
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
179 |
+
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
180 |
+
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
|
181 |
+
, target = []
|
182 |
+
;
|
183 |
+
|
184 |
+
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
|
185 |
+
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
|
186 |
+
, expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
187 |
+
, target = []
|
188 |
+
;
|
189 |
+
|
190 |
+
|
191 |
+
serial(source, asyncJob, function(err, result)
|
192 |
+
{
|
193 |
+
assert.deepEqual(result, expectedResult);
|
194 |
+
assert.deepEqual(target, expectedTarget);
|
195 |
+
});
|
196 |
+
|
197 |
+
// shortcut interface (item, callback)
|
198 |
+
// works for object as well as for the arrays
|
199 |
+
function asyncJob(item, cb)
|
200 |
+
{
|
201 |
+
target.push(item);
|
202 |
+
|
203 |
+
// it will be automatically made async
|
204 |
+
// even it iterator "returns" in the same event loop
|
205 |
+
cb(null, item * 2);
|
206 |
+
}
|
207 |
+
```
|
208 |
+
|
209 |
+
More examples could be found in [test/test-serial-object.js](test/test-serial-object.js).
|
210 |
+
|
211 |
+
_Note: Since _object_ is an _unordered_ collection of properties,
|
212 |
+
it may produce unexpected results with sequential iterations.
|
213 |
+
Whenever order of the jobs' execution is important please use `serialOrdered` method._
|
214 |
+
|
215 |
+
### Ordered Serial Iterations
|
216 |
+
|
217 |
+
TBD
|
218 |
+
|
219 |
+
For example [compare-property](compare-property) package.
|
220 |
+
|
221 |
+
### Streaming interface
|
222 |
+
|
223 |
+
TBD
|
224 |
+
|
225 |
+
## Want to Know More?
|
226 |
+
|
227 |
+
More examples can be found in [test folder](test/).
|
228 |
+
|
229 |
+
Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions.
|
230 |
+
|
231 |
+
## License
|
232 |
+
|
233 |
+
AsyncKit is licensed under the MIT license.
|
node_modules/asynckit/bench.js
ADDED
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* eslint no-console: "off" */
|
2 |
+
|
3 |
+
var asynckit = require('./')
|
4 |
+
, async = require('async')
|
5 |
+
, assert = require('assert')
|
6 |
+
, expected = 0
|
7 |
+
;
|
8 |
+
|
9 |
+
var Benchmark = require('benchmark');
|
10 |
+
var suite = new Benchmark.Suite;
|
11 |
+
|
12 |
+
var source = [];
|
13 |
+
for (var z = 1; z < 100; z++)
|
14 |
+
{
|
15 |
+
source.push(z);
|
16 |
+
expected += z;
|
17 |
+
}
|
18 |
+
|
19 |
+
suite
|
20 |
+
// add tests
|
21 |
+
|
22 |
+
.add('async.map', function(deferred)
|
23 |
+
{
|
24 |
+
var total = 0;
|
25 |
+
|
26 |
+
async.map(source,
|
27 |
+
function(i, cb)
|
28 |
+
{
|
29 |
+
setImmediate(function()
|
30 |
+
{
|
31 |
+
total += i;
|
32 |
+
cb(null, total);
|
33 |
+
});
|
34 |
+
},
|
35 |
+
function(err, result)
|
36 |
+
{
|
37 |
+
assert.ifError(err);
|
38 |
+
assert.equal(result[result.length - 1], expected);
|
39 |
+
deferred.resolve();
|
40 |
+
});
|
41 |
+
}, {'defer': true})
|
42 |
+
|
43 |
+
|
44 |
+
.add('asynckit.parallel', function(deferred)
|
45 |
+
{
|
46 |
+
var total = 0;
|
47 |
+
|
48 |
+
asynckit.parallel(source,
|
49 |
+
function(i, cb)
|
50 |
+
{
|
51 |
+
setImmediate(function()
|
52 |
+
{
|
53 |
+
total += i;
|
54 |
+
cb(null, total);
|
55 |
+
});
|
56 |
+
},
|
57 |
+
function(err, result)
|
58 |
+
{
|
59 |
+
assert.ifError(err);
|
60 |
+
assert.equal(result[result.length - 1], expected);
|
61 |
+
deferred.resolve();
|
62 |
+
});
|
63 |
+
}, {'defer': true})
|
64 |
+
|
65 |
+
|
66 |
+
// add listeners
|
67 |
+
.on('cycle', function(ev)
|
68 |
+
{
|
69 |
+
console.log(String(ev.target));
|
70 |
+
})
|
71 |
+
.on('complete', function()
|
72 |
+
{
|
73 |
+
console.log('Fastest is ' + this.filter('fastest').map('name'));
|
74 |
+
})
|
75 |
+
// run async
|
76 |
+
.run({ 'async': true });
|
node_modules/asynckit/index.js
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
module.exports =
|
2 |
+
{
|
3 |
+
parallel : require('./parallel.js'),
|
4 |
+
serial : require('./serial.js'),
|
5 |
+
serialOrdered : require('./serialOrdered.js')
|
6 |
+
};
|
node_modules/asynckit/lib/abort.js
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
// API
|
2 |
+
module.exports = abort;
|
3 |
+
|
4 |
+
/**
|
5 |
+
* Aborts leftover active jobs
|
6 |
+
*
|
7 |
+
* @param {object} state - current state object
|
8 |
+
*/
|
9 |
+
function abort(state)
|
10 |
+
{
|
11 |
+
Object.keys(state.jobs).forEach(clean.bind(state));
|
12 |
+
|
13 |
+
// reset leftover jobs
|
14 |
+
state.jobs = {};
|
15 |
+
}
|
16 |
+
|
17 |
+
/**
|
18 |
+
* Cleans up leftover job by invoking abort function for the provided job id
|
19 |
+
*
|
20 |
+
* @this state
|
21 |
+
* @param {string|number} key - job id to abort
|
22 |
+
*/
|
23 |
+
function clean(key)
|
24 |
+
{
|
25 |
+
if (typeof this.jobs[key] == 'function')
|
26 |
+
{
|
27 |
+
this.jobs[key]();
|
28 |
+
}
|
29 |
+
}
|
node_modules/asynckit/lib/async.js
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var defer = require('./defer.js');
|
2 |
+
|
3 |
+
// API
|
4 |
+
module.exports = async;
|
5 |
+
|
6 |
+
/**
|
7 |
+
* Runs provided callback asynchronously
|
8 |
+
* even if callback itself is not
|
9 |
+
*
|
10 |
+
* @param {function} callback - callback to invoke
|
11 |
+
* @returns {function} - augmented callback
|
12 |
+
*/
|
13 |
+
function async(callback)
|
14 |
+
{
|
15 |
+
var isAsync = false;
|
16 |
+
|
17 |
+
// check if async happened
|
18 |
+
defer(function() { isAsync = true; });
|
19 |
+
|
20 |
+
return function async_callback(err, result)
|
21 |
+
{
|
22 |
+
if (isAsync)
|
23 |
+
{
|
24 |
+
callback(err, result);
|
25 |
+
}
|
26 |
+
else
|
27 |
+
{
|
28 |
+
defer(function nextTick_callback()
|
29 |
+
{
|
30 |
+
callback(err, result);
|
31 |
+
});
|
32 |
+
}
|
33 |
+
};
|
34 |
+
}
|
node_modules/asynckit/lib/defer.js
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
module.exports = defer;
|
2 |
+
|
3 |
+
/**
|
4 |
+
* Runs provided function on next iteration of the event loop
|
5 |
+
*
|
6 |
+
* @param {function} fn - function to run
|
7 |
+
*/
|
8 |
+
function defer(fn)
|
9 |
+
{
|
10 |
+
var nextTick = typeof setImmediate == 'function'
|
11 |
+
? setImmediate
|
12 |
+
: (
|
13 |
+
typeof process == 'object' && typeof process.nextTick == 'function'
|
14 |
+
? process.nextTick
|
15 |
+
: null
|
16 |
+
);
|
17 |
+
|
18 |
+
if (nextTick)
|
19 |
+
{
|
20 |
+
nextTick(fn);
|
21 |
+
}
|
22 |
+
else
|
23 |
+
{
|
24 |
+
setTimeout(fn, 0);
|
25 |
+
}
|
26 |
+
}
|
node_modules/asynckit/lib/iterate.js
ADDED
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var async = require('./async.js')
|
2 |
+
, abort = require('./abort.js')
|
3 |
+
;
|
4 |
+
|
5 |
+
// API
|
6 |
+
module.exports = iterate;
|
7 |
+
|
8 |
+
/**
|
9 |
+
* Iterates over each job object
|
10 |
+
*
|
11 |
+
* @param {array|object} list - array or object (named list) to iterate over
|
12 |
+
* @param {function} iterator - iterator to run
|
13 |
+
* @param {object} state - current job status
|
14 |
+
* @param {function} callback - invoked when all elements processed
|
15 |
+
*/
|
16 |
+
function iterate(list, iterator, state, callback)
|
17 |
+
{
|
18 |
+
// store current index
|
19 |
+
var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;
|
20 |
+
|
21 |
+
state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
|
22 |
+
{
|
23 |
+
// don't repeat yourself
|
24 |
+
// skip secondary callbacks
|
25 |
+
if (!(key in state.jobs))
|
26 |
+
{
|
27 |
+
return;
|
28 |
+
}
|
29 |
+
|
30 |
+
// clean up jobs
|
31 |
+
delete state.jobs[key];
|
32 |
+
|
33 |
+
if (error)
|
34 |
+
{
|
35 |
+
// don't process rest of the results
|
36 |
+
// stop still active jobs
|
37 |
+
// and reset the list
|
38 |
+
abort(state);
|
39 |
+
}
|
40 |
+
else
|
41 |
+
{
|
42 |
+
state.results[key] = output;
|
43 |
+
}
|
44 |
+
|
45 |
+
// return salvaged results
|
46 |
+
callback(error, state.results);
|
47 |
+
});
|
48 |
+
}
|
49 |
+
|
50 |
+
/**
|
51 |
+
* Runs iterator over provided job element
|
52 |
+
*
|
53 |
+
* @param {function} iterator - iterator to invoke
|
54 |
+
* @param {string|number} key - key/index of the element in the list of jobs
|
55 |
+
* @param {mixed} item - job description
|
56 |
+
* @param {function} callback - invoked after iterator is done with the job
|
57 |
+
* @returns {function|mixed} - job abort function or something else
|
58 |
+
*/
|
59 |
+
function runJob(iterator, key, item, callback)
|
60 |
+
{
|
61 |
+
var aborter;
|
62 |
+
|
63 |
+
// allow shortcut if iterator expects only two arguments
|
64 |
+
if (iterator.length == 2)
|
65 |
+
{
|
66 |
+
aborter = iterator(item, async(callback));
|
67 |
+
}
|
68 |
+
// otherwise go with full three arguments
|
69 |
+
else
|
70 |
+
{
|
71 |
+
aborter = iterator(item, key, async(callback));
|
72 |
+
}
|
73 |
+
|
74 |
+
return aborter;
|
75 |
+
}
|
node_modules/asynckit/lib/readable_asynckit.js
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var streamify = require('./streamify.js')
|
2 |
+
, defer = require('./defer.js')
|
3 |
+
;
|
4 |
+
|
5 |
+
// API
|
6 |
+
module.exports = ReadableAsyncKit;
|
7 |
+
|
8 |
+
/**
|
9 |
+
* Base constructor for all streams
|
10 |
+
* used to hold properties/methods
|
11 |
+
*/
|
12 |
+
function ReadableAsyncKit()
|
13 |
+
{
|
14 |
+
ReadableAsyncKit.super_.apply(this, arguments);
|
15 |
+
|
16 |
+
// list of active jobs
|
17 |
+
this.jobs = {};
|
18 |
+
|
19 |
+
// add stream methods
|
20 |
+
this.destroy = destroy;
|
21 |
+
this._start = _start;
|
22 |
+
this._read = _read;
|
23 |
+
}
|
24 |
+
|
25 |
+
/**
|
26 |
+
* Destroys readable stream,
|
27 |
+
* by aborting outstanding jobs
|
28 |
+
*
|
29 |
+
* @returns {void}
|
30 |
+
*/
|
31 |
+
function destroy()
|
32 |
+
{
|
33 |
+
if (this.destroyed)
|
34 |
+
{
|
35 |
+
return;
|
36 |
+
}
|
37 |
+
|
38 |
+
this.destroyed = true;
|
39 |
+
|
40 |
+
if (typeof this.terminator == 'function')
|
41 |
+
{
|
42 |
+
this.terminator();
|
43 |
+
}
|
44 |
+
}
|
45 |
+
|
46 |
+
/**
|
47 |
+
* Starts provided jobs in async manner
|
48 |
+
*
|
49 |
+
* @private
|
50 |
+
*/
|
51 |
+
function _start()
|
52 |
+
{
|
53 |
+
// first argument – runner function
|
54 |
+
var runner = arguments[0]
|
55 |
+
// take away first argument
|
56 |
+
, args = Array.prototype.slice.call(arguments, 1)
|
57 |
+
// second argument - input data
|
58 |
+
, input = args[0]
|
59 |
+
// last argument - result callback
|
60 |
+
, endCb = streamify.callback.call(this, args[args.length - 1])
|
61 |
+
;
|
62 |
+
|
63 |
+
args[args.length - 1] = endCb;
|
64 |
+
// third argument - iterator
|
65 |
+
args[1] = streamify.iterator.call(this, args[1]);
|
66 |
+
|
67 |
+
// allow time for proper setup
|
68 |
+
defer(function()
|
69 |
+
{
|
70 |
+
if (!this.destroyed)
|
71 |
+
{
|
72 |
+
this.terminator = runner.apply(null, args);
|
73 |
+
}
|
74 |
+
else
|
75 |
+
{
|
76 |
+
endCb(null, Array.isArray(input) ? [] : {});
|
77 |
+
}
|
78 |
+
}.bind(this));
|
79 |
+
}
|
80 |
+
|
81 |
+
|
82 |
+
/**
|
83 |
+
* Implement _read to comply with Readable streams
|
84 |
+
* Doesn't really make sense for flowing object mode
|
85 |
+
*
|
86 |
+
* @private
|
87 |
+
*/
|
88 |
+
function _read()
|
89 |
+
{
|
90 |
+
|
91 |
+
}
|
node_modules/asynckit/lib/readable_parallel.js
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var parallel = require('../parallel.js');
|
2 |
+
|
3 |
+
// API
|
4 |
+
module.exports = ReadableParallel;
|
5 |
+
|
6 |
+
/**
|
7 |
+
* Streaming wrapper to `asynckit.parallel`
|
8 |
+
*
|
9 |
+
* @param {array|object} list - array or object (named list) to iterate over
|
10 |
+
* @param {function} iterator - iterator to run
|
11 |
+
* @param {function} callback - invoked when all elements processed
|
12 |
+
* @returns {stream.Readable#}
|
13 |
+
*/
|
14 |
+
function ReadableParallel(list, iterator, callback)
|
15 |
+
{
|
16 |
+
if (!(this instanceof ReadableParallel))
|
17 |
+
{
|
18 |
+
return new ReadableParallel(list, iterator, callback);
|
19 |
+
}
|
20 |
+
|
21 |
+
// turn on object mode
|
22 |
+
ReadableParallel.super_.call(this, {objectMode: true});
|
23 |
+
|
24 |
+
this._start(parallel, list, iterator, callback);
|
25 |
+
}
|
node_modules/asynckit/lib/readable_serial.js
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var serial = require('../serial.js');
|
2 |
+
|
3 |
+
// API
|
4 |
+
module.exports = ReadableSerial;
|
5 |
+
|
6 |
+
/**
|
7 |
+
* Streaming wrapper to `asynckit.serial`
|
8 |
+
*
|
9 |
+
* @param {array|object} list - array or object (named list) to iterate over
|
10 |
+
* @param {function} iterator - iterator to run
|
11 |
+
* @param {function} callback - invoked when all elements processed
|
12 |
+
* @returns {stream.Readable#}
|
13 |
+
*/
|
14 |
+
function ReadableSerial(list, iterator, callback)
|
15 |
+
{
|
16 |
+
if (!(this instanceof ReadableSerial))
|
17 |
+
{
|
18 |
+
return new ReadableSerial(list, iterator, callback);
|
19 |
+
}
|
20 |
+
|
21 |
+
// turn on object mode
|
22 |
+
ReadableSerial.super_.call(this, {objectMode: true});
|
23 |
+
|
24 |
+
this._start(serial, list, iterator, callback);
|
25 |
+
}
|
node_modules/asynckit/lib/readable_serial_ordered.js
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var serialOrdered = require('../serialOrdered.js');
|
2 |
+
|
3 |
+
// API
|
4 |
+
module.exports = ReadableSerialOrdered;
|
5 |
+
// expose sort helpers
|
6 |
+
module.exports.ascending = serialOrdered.ascending;
|
7 |
+
module.exports.descending = serialOrdered.descending;
|
8 |
+
|
9 |
+
/**
|
10 |
+
* Streaming wrapper to `asynckit.serialOrdered`
|
11 |
+
*
|
12 |
+
* @param {array|object} list - array or object (named list) to iterate over
|
13 |
+
* @param {function} iterator - iterator to run
|
14 |
+
* @param {function} sortMethod - custom sort function
|
15 |
+
* @param {function} callback - invoked when all elements processed
|
16 |
+
* @returns {stream.Readable#}
|
17 |
+
*/
|
18 |
+
function ReadableSerialOrdered(list, iterator, sortMethod, callback)
|
19 |
+
{
|
20 |
+
if (!(this instanceof ReadableSerialOrdered))
|
21 |
+
{
|
22 |
+
return new ReadableSerialOrdered(list, iterator, sortMethod, callback);
|
23 |
+
}
|
24 |
+
|
25 |
+
// turn on object mode
|
26 |
+
ReadableSerialOrdered.super_.call(this, {objectMode: true});
|
27 |
+
|
28 |
+
this._start(serialOrdered, list, iterator, sortMethod, callback);
|
29 |
+
}
|
node_modules/asynckit/lib/state.js
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
// API
|
2 |
+
module.exports = state;
|
3 |
+
|
4 |
+
/**
|
5 |
+
* Creates initial state object
|
6 |
+
* for iteration over list
|
7 |
+
*
|
8 |
+
* @param {array|object} list - list to iterate over
|
9 |
+
* @param {function|null} sortMethod - function to use for keys sort,
|
10 |
+
* or `null` to keep them as is
|
11 |
+
* @returns {object} - initial state object
|
12 |
+
*/
|
13 |
+
function state(list, sortMethod)
|
14 |
+
{
|
15 |
+
var isNamedList = !Array.isArray(list)
|
16 |
+
, initState =
|
17 |
+
{
|
18 |
+
index : 0,
|
19 |
+
keyedList: isNamedList || sortMethod ? Object.keys(list) : null,
|
20 |
+
jobs : {},
|
21 |
+
results : isNamedList ? {} : [],
|
22 |
+
size : isNamedList ? Object.keys(list).length : list.length
|
23 |
+
}
|
24 |
+
;
|
25 |
+
|
26 |
+
if (sortMethod)
|
27 |
+
{
|
28 |
+
// sort array keys based on it's values
|
29 |
+
// sort object's keys just on own merit
|
30 |
+
initState.keyedList.sort(isNamedList ? sortMethod : function(a, b)
|
31 |
+
{
|
32 |
+
return sortMethod(list[a], list[b]);
|
33 |
+
});
|
34 |
+
}
|
35 |
+
|
36 |
+
return initState;
|
37 |
+
}
|
node_modules/asynckit/lib/streamify.js
ADDED
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var async = require('./async.js');
|
2 |
+
|
3 |
+
// API
|
4 |
+
module.exports = {
|
5 |
+
iterator: wrapIterator,
|
6 |
+
callback: wrapCallback
|
7 |
+
};
|
8 |
+
|
9 |
+
/**
|
10 |
+
* Wraps iterators with long signature
|
11 |
+
*
|
12 |
+
* @this ReadableAsyncKit#
|
13 |
+
* @param {function} iterator - function to wrap
|
14 |
+
* @returns {function} - wrapped function
|
15 |
+
*/
|
16 |
+
function wrapIterator(iterator)
|
17 |
+
{
|
18 |
+
var stream = this;
|
19 |
+
|
20 |
+
return function(item, key, cb)
|
21 |
+
{
|
22 |
+
var aborter
|
23 |
+
, wrappedCb = async(wrapIteratorCallback.call(stream, cb, key))
|
24 |
+
;
|
25 |
+
|
26 |
+
stream.jobs[key] = wrappedCb;
|
27 |
+
|
28 |
+
// it's either shortcut (item, cb)
|
29 |
+
if (iterator.length == 2)
|
30 |
+
{
|
31 |
+
aborter = iterator(item, wrappedCb);
|
32 |
+
}
|
33 |
+
// or long format (item, key, cb)
|
34 |
+
else
|
35 |
+
{
|
36 |
+
aborter = iterator(item, key, wrappedCb);
|
37 |
+
}
|
38 |
+
|
39 |
+
return aborter;
|
40 |
+
};
|
41 |
+
}
|
42 |
+
|
43 |
+
/**
|
44 |
+
* Wraps provided callback function
|
45 |
+
* allowing to execute snitch function before
|
46 |
+
* real callback
|
47 |
+
*
|
48 |
+
* @this ReadableAsyncKit#
|
49 |
+
* @param {function} callback - function to wrap
|
50 |
+
* @returns {function} - wrapped function
|
51 |
+
*/
|
52 |
+
function wrapCallback(callback)
|
53 |
+
{
|
54 |
+
var stream = this;
|
55 |
+
|
56 |
+
var wrapped = function(error, result)
|
57 |
+
{
|
58 |
+
return finisher.call(stream, error, result, callback);
|
59 |
+
};
|
60 |
+
|
61 |
+
return wrapped;
|
62 |
+
}
|
63 |
+
|
64 |
+
/**
|
65 |
+
* Wraps provided iterator callback function
|
66 |
+
* makes sure snitch only called once,
|
67 |
+
* but passes secondary calls to the original callback
|
68 |
+
*
|
69 |
+
* @this ReadableAsyncKit#
|
70 |
+
* @param {function} callback - callback to wrap
|
71 |
+
* @param {number|string} key - iteration key
|
72 |
+
* @returns {function} wrapped callback
|
73 |
+
*/
|
74 |
+
function wrapIteratorCallback(callback, key)
|
75 |
+
{
|
76 |
+
var stream = this;
|
77 |
+
|
78 |
+
return function(error, output)
|
79 |
+
{
|
80 |
+
// don't repeat yourself
|
81 |
+
if (!(key in stream.jobs))
|
82 |
+
{
|
83 |
+
callback(error, output);
|
84 |
+
return;
|
85 |
+
}
|
86 |
+
|
87 |
+
// clean up jobs
|
88 |
+
delete stream.jobs[key];
|
89 |
+
|
90 |
+
return streamer.call(stream, error, {key: key, value: output}, callback);
|
91 |
+
};
|
92 |
+
}
|
93 |
+
|
94 |
+
/**
|
95 |
+
* Stream wrapper for iterator callback
|
96 |
+
*
|
97 |
+
* @this ReadableAsyncKit#
|
98 |
+
* @param {mixed} error - error response
|
99 |
+
* @param {mixed} output - iterator output
|
100 |
+
* @param {function} callback - callback that expects iterator results
|
101 |
+
*/
|
102 |
+
function streamer(error, output, callback)
|
103 |
+
{
|
104 |
+
if (error && !this.error)
|
105 |
+
{
|
106 |
+
this.error = error;
|
107 |
+
this.pause();
|
108 |
+
this.emit('error', error);
|
109 |
+
// send back value only, as expected
|
110 |
+
callback(error, output && output.value);
|
111 |
+
return;
|
112 |
+
}
|
113 |
+
|
114 |
+
// stream stuff
|
115 |
+
this.push(output);
|
116 |
+
|
117 |
+
// back to original track
|
118 |
+
// send back value only, as expected
|
119 |
+
callback(error, output && output.value);
|
120 |
+
}
|
121 |
+
|
122 |
+
/**
|
123 |
+
* Stream wrapper for finishing callback
|
124 |
+
*
|
125 |
+
* @this ReadableAsyncKit#
|
126 |
+
* @param {mixed} error - error response
|
127 |
+
* @param {mixed} output - iterator output
|
128 |
+
* @param {function} callback - callback that expects final results
|
129 |
+
*/
|
130 |
+
function finisher(error, output, callback)
|
131 |
+
{
|
132 |
+
// signal end of the stream
|
133 |
+
// only for successfully finished streams
|
134 |
+
if (!error)
|
135 |
+
{
|
136 |
+
this.push(null);
|
137 |
+
}
|
138 |
+
|
139 |
+
// back to original track
|
140 |
+
callback(error, output);
|
141 |
+
}
|
node_modules/asynckit/lib/terminator.js
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var abort = require('./abort.js')
|
2 |
+
, async = require('./async.js')
|
3 |
+
;
|
4 |
+
|
5 |
+
// API
|
6 |
+
module.exports = terminator;
|
7 |
+
|
8 |
+
/**
|
9 |
+
* Terminates jobs in the attached state context
|
10 |
+
*
|
11 |
+
* @this AsyncKitState#
|
12 |
+
* @param {function} callback - final callback to invoke after termination
|
13 |
+
*/
|
14 |
+
function terminator(callback)
|
15 |
+
{
|
16 |
+
if (!Object.keys(this.jobs).length)
|
17 |
+
{
|
18 |
+
return;
|
19 |
+
}
|
20 |
+
|
21 |
+
// fast forward iteration index
|
22 |
+
this.index = this.size;
|
23 |
+
|
24 |
+
// abort jobs
|
25 |
+
abort(this);
|
26 |
+
|
27 |
+
// send back results we have so far
|
28 |
+
async(callback)(null, this.results);
|
29 |
+
}
|
node_modules/asynckit/package.json
ADDED
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"name": "asynckit",
|
3 |
+
"version": "0.4.0",
|
4 |
+
"description": "Minimal async jobs utility library, with streams support",
|
5 |
+
"main": "index.js",
|
6 |
+
"scripts": {
|
7 |
+
"clean": "rimraf coverage",
|
8 |
+
"lint": "eslint *.js lib/*.js test/*.js",
|
9 |
+
"test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec",
|
10 |
+
"win-test": "tape test/test-*.js",
|
11 |
+
"browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec",
|
12 |
+
"report": "istanbul report",
|
13 |
+
"size": "browserify index.js | size-table asynckit",
|
14 |
+
"debug": "tape test/test-*.js"
|
15 |
+
},
|
16 |
+
"pre-commit": [
|
17 |
+
"clean",
|
18 |
+
"lint",
|
19 |
+
"test",
|
20 |
+
"browser",
|
21 |
+
"report",
|
22 |
+
"size"
|
23 |
+
],
|
24 |
+
"repository": {
|
25 |
+
"type": "git",
|
26 |
+
"url": "git+https://github.com/alexindigo/asynckit.git"
|
27 |
+
},
|
28 |
+
"keywords": [
|
29 |
+
"async",
|
30 |
+
"jobs",
|
31 |
+
"parallel",
|
32 |
+
"serial",
|
33 |
+
"iterator",
|
34 |
+
"array",
|
35 |
+
"object",
|
36 |
+
"stream",
|
37 |
+
"destroy",
|
38 |
+
"terminate",
|
39 |
+
"abort"
|
40 |
+
],
|
41 |
+
"author": "Alex Indigo <iam@alexindigo.com>",
|
42 |
+
"license": "MIT",
|
43 |
+
"bugs": {
|
44 |
+
"url": "https://github.com/alexindigo/asynckit/issues"
|
45 |
+
},
|
46 |
+
"homepage": "https://github.com/alexindigo/asynckit#readme",
|
47 |
+
"devDependencies": {
|
48 |
+
"browserify": "^13.0.0",
|
49 |
+
"browserify-istanbul": "^2.0.0",
|
50 |
+
"coveralls": "^2.11.9",
|
51 |
+
"eslint": "^2.9.0",
|
52 |
+
"istanbul": "^0.4.3",
|
53 |
+
"obake": "^0.1.2",
|
54 |
+
"phantomjs-prebuilt": "^2.1.7",
|
55 |
+
"pre-commit": "^1.1.3",
|
56 |
+
"reamde": "^1.1.0",
|
57 |
+
"rimraf": "^2.5.2",
|
58 |
+
"size-table": "^0.2.0",
|
59 |
+
"tap-spec": "^4.1.1",
|
60 |
+
"tape": "^4.5.1"
|
61 |
+
},
|
62 |
+
"dependencies": {}
|
63 |
+
}
|
node_modules/asynckit/parallel.js
ADDED
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var iterate = require('./lib/iterate.js')
|
2 |
+
, initState = require('./lib/state.js')
|
3 |
+
, terminator = require('./lib/terminator.js')
|
4 |
+
;
|
5 |
+
|
6 |
+
// Public API
|
7 |
+
module.exports = parallel;
|
8 |
+
|
9 |
+
/**
|
10 |
+
* Runs iterator over provided array elements in parallel
|
11 |
+
*
|
12 |
+
* @param {array|object} list - array or object (named list) to iterate over
|
13 |
+
* @param {function} iterator - iterator to run
|
14 |
+
* @param {function} callback - invoked when all elements processed
|
15 |
+
* @returns {function} - jobs terminator
|
16 |
+
*/
|
17 |
+
function parallel(list, iterator, callback)
|
18 |
+
{
|
19 |
+
var state = initState(list);
|
20 |
+
|
21 |
+
while (state.index < (state['keyedList'] || list).length)
|
22 |
+
{
|
23 |
+
iterate(list, iterator, state, function(error, result)
|
24 |
+
{
|
25 |
+
if (error)
|
26 |
+
{
|
27 |
+
callback(error, result);
|
28 |
+
return;
|
29 |
+
}
|
30 |
+
|
31 |
+
// looks like it's the last one
|
32 |
+
if (Object.keys(state.jobs).length === 0)
|
33 |
+
{
|
34 |
+
callback(null, state.results);
|
35 |
+
return;
|
36 |
+
}
|
37 |
+
});
|
38 |
+
|
39 |
+
state.index++;
|
40 |
+
}
|
41 |
+
|
42 |
+
return terminator.bind(state, callback);
|
43 |
+
}
|
node_modules/asynckit/serial.js
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var serialOrdered = require('./serialOrdered.js');
|
2 |
+
|
3 |
+
// Public API
|
4 |
+
module.exports = serial;
|
5 |
+
|
6 |
+
/**
|
7 |
+
* Runs iterator over provided array elements in series
|
8 |
+
*
|
9 |
+
* @param {array|object} list - array or object (named list) to iterate over
|
10 |
+
* @param {function} iterator - iterator to run
|
11 |
+
* @param {function} callback - invoked when all elements processed
|
12 |
+
* @returns {function} - jobs terminator
|
13 |
+
*/
|
14 |
+
function serial(list, iterator, callback)
|
15 |
+
{
|
16 |
+
return serialOrdered(list, iterator, null, callback);
|
17 |
+
}
|
node_modules/asynckit/serialOrdered.js
ADDED
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var iterate = require('./lib/iterate.js')
|
2 |
+
, initState = require('./lib/state.js')
|
3 |
+
, terminator = require('./lib/terminator.js')
|
4 |
+
;
|
5 |
+
|
6 |
+
// Public API
|
7 |
+
module.exports = serialOrdered;
|
8 |
+
// sorting helpers
|
9 |
+
module.exports.ascending = ascending;
|
10 |
+
module.exports.descending = descending;
|
11 |
+
|
12 |
+
/**
|
13 |
+
* Runs iterator over provided sorted array elements in series
|
14 |
+
*
|
15 |
+
* @param {array|object} list - array or object (named list) to iterate over
|
16 |
+
* @param {function} iterator - iterator to run
|
17 |
+
* @param {function} sortMethod - custom sort function
|
18 |
+
* @param {function} callback - invoked when all elements processed
|
19 |
+
* @returns {function} - jobs terminator
|
20 |
+
*/
|
21 |
+
function serialOrdered(list, iterator, sortMethod, callback)
|
22 |
+
{
|
23 |
+
var state = initState(list, sortMethod);
|
24 |
+
|
25 |
+
iterate(list, iterator, state, function iteratorHandler(error, result)
|
26 |
+
{
|
27 |
+
if (error)
|
28 |
+
{
|
29 |
+
callback(error, result);
|
30 |
+
return;
|
31 |
+
}
|
32 |
+
|
33 |
+
state.index++;
|
34 |
+
|
35 |
+
// are we there yet?
|
36 |
+
if (state.index < (state['keyedList'] || list).length)
|
37 |
+
{
|
38 |
+
iterate(list, iterator, state, iteratorHandler);
|
39 |
+
return;
|
40 |
+
}
|
41 |
+
|
42 |
+
// done here
|
43 |
+
callback(null, state.results);
|
44 |
+
});
|
45 |
+
|
46 |
+
return terminator.bind(state, callback);
|
47 |
+
}
|
48 |
+
|
49 |
+
/*
|
50 |
+
* -- Sort methods
|
51 |
+
*/
|
52 |
+
|
53 |
+
/**
|
54 |
+
* sort helper to sort array elements in ascending order
|
55 |
+
*
|
56 |
+
* @param {mixed} a - an item to compare
|
57 |
+
* @param {mixed} b - an item to compare
|
58 |
+
* @returns {number} - comparison result
|
59 |
+
*/
|
60 |
+
function ascending(a, b)
|
61 |
+
{
|
62 |
+
return a < b ? -1 : a > b ? 1 : 0;
|
63 |
+
}
|
64 |
+
|
65 |
+
/**
|
66 |
+
* sort helper to sort array elements in descending order
|
67 |
+
*
|
68 |
+
* @param {mixed} a - an item to compare
|
69 |
+
* @param {mixed} b - an item to compare
|
70 |
+
* @returns {number} - comparison result
|
71 |
+
*/
|
72 |
+
function descending(a, b)
|
73 |
+
{
|
74 |
+
return -1 * ascending(a, b);
|
75 |
+
}
|
node_modules/asynckit/stream.js
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var inherits = require('util').inherits
|
2 |
+
, Readable = require('stream').Readable
|
3 |
+
, ReadableAsyncKit = require('./lib/readable_asynckit.js')
|
4 |
+
, ReadableParallel = require('./lib/readable_parallel.js')
|
5 |
+
, ReadableSerial = require('./lib/readable_serial.js')
|
6 |
+
, ReadableSerialOrdered = require('./lib/readable_serial_ordered.js')
|
7 |
+
;
|
8 |
+
|
9 |
+
// API
|
10 |
+
module.exports =
|
11 |
+
{
|
12 |
+
parallel : ReadableParallel,
|
13 |
+
serial : ReadableSerial,
|
14 |
+
serialOrdered : ReadableSerialOrdered,
|
15 |
+
};
|
16 |
+
|
17 |
+
inherits(ReadableAsyncKit, Readable);
|
18 |
+
|
19 |
+
inherits(ReadableParallel, ReadableAsyncKit);
|
20 |
+
inherits(ReadableSerial, ReadableAsyncKit);
|
21 |
+
inherits(ReadableSerialOrdered, ReadableAsyncKit);
|
node_modules/combined-stream/License
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Copyright (c) 2011 Debuggable Limited <felix@debuggable.com>
|
2 |
+
|
3 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
4 |
+
of this software and associated documentation files (the "Software"), to deal
|
5 |
+
in the Software without restriction, including without limitation the rights
|
6 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
7 |
+
copies of the Software, and to permit persons to whom the Software is
|
8 |
+
furnished to do so, subject to the following conditions:
|
9 |
+
|
10 |
+
The above copyright notice and this permission notice shall be included in
|
11 |
+
all copies or substantial portions of the Software.
|
12 |
+
|
13 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
14 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
15 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
16 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
17 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
18 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
19 |
+
THE SOFTWARE.
|
node_modules/combined-stream/Readme.md
ADDED
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# combined-stream
|
2 |
+
|
3 |
+
A stream that emits multiple other streams one after another.
|
4 |
+
|
5 |
+
**NB** Currently `combined-stream` works with streams version 1 only. There is ongoing effort to switch this library to streams version 2. Any help is welcome. :) Meanwhile you can explore other libraries that provide streams2 support with more or less compatibility with `combined-stream`.
|
6 |
+
|
7 |
+
- [combined-stream2](https://www.npmjs.com/package/combined-stream2): A drop-in streams2-compatible replacement for the combined-stream module.
|
8 |
+
|
9 |
+
- [multistream](https://www.npmjs.com/package/multistream): A stream that emits multiple other streams one after another.
|
10 |
+
|
11 |
+
## Installation
|
12 |
+
|
13 |
+
``` bash
|
14 |
+
npm install combined-stream
|
15 |
+
```
|
16 |
+
|
17 |
+
## Usage
|
18 |
+
|
19 |
+
Here is a simple example that shows how you can use combined-stream to combine
|
20 |
+
two files into one:
|
21 |
+
|
22 |
+
``` javascript
|
23 |
+
var CombinedStream = require('combined-stream');
|
24 |
+
var fs = require('fs');
|
25 |
+
|
26 |
+
var combinedStream = CombinedStream.create();
|
27 |
+
combinedStream.append(fs.createReadStream('file1.txt'));
|
28 |
+
combinedStream.append(fs.createReadStream('file2.txt'));
|
29 |
+
|
30 |
+
combinedStream.pipe(fs.createWriteStream('combined.txt'));
|
31 |
+
```
|
32 |
+
|
33 |
+
While the example above works great, it will pause all source streams until
|
34 |
+
they are needed. If you don't want that to happen, you can set `pauseStreams`
|
35 |
+
to `false`:
|
36 |
+
|
37 |
+
``` javascript
|
38 |
+
var CombinedStream = require('combined-stream');
|
39 |
+
var fs = require('fs');
|
40 |
+
|
41 |
+
var combinedStream = CombinedStream.create({pauseStreams: false});
|
42 |
+
combinedStream.append(fs.createReadStream('file1.txt'));
|
43 |
+
combinedStream.append(fs.createReadStream('file2.txt'));
|
44 |
+
|
45 |
+
combinedStream.pipe(fs.createWriteStream('combined.txt'));
|
46 |
+
```
|
47 |
+
|
48 |
+
However, what if you don't have all the source streams yet, or you don't want
|
49 |
+
to allocate the resources (file descriptors, memory, etc.) for them right away?
|
50 |
+
Well, in that case you can simply provide a callback that supplies the stream
|
51 |
+
by calling a `next()` function:
|
52 |
+
|
53 |
+
``` javascript
|
54 |
+
var CombinedStream = require('combined-stream');
|
55 |
+
var fs = require('fs');
|
56 |
+
|
57 |
+
var combinedStream = CombinedStream.create();
|
58 |
+
combinedStream.append(function(next) {
|
59 |
+
next(fs.createReadStream('file1.txt'));
|
60 |
+
});
|
61 |
+
combinedStream.append(function(next) {
|
62 |
+
next(fs.createReadStream('file2.txt'));
|
63 |
+
});
|
64 |
+
|
65 |
+
combinedStream.pipe(fs.createWriteStream('combined.txt'));
|
66 |
+
```
|
67 |
+
|
68 |
+
## API
|
69 |
+
|
70 |
+
### CombinedStream.create([options])
|
71 |
+
|
72 |
+
Returns a new combined stream object. Available options are:
|
73 |
+
|
74 |
+
* `maxDataSize`
|
75 |
+
* `pauseStreams`
|
76 |
+
|
77 |
+
The effect of those options is described below.
|
78 |
+
|
79 |
+
### combinedStream.pauseStreams = `true`
|
80 |
+
|
81 |
+
Whether to apply back pressure to the underlaying streams. If set to `false`,
|
82 |
+
the underlaying streams will never be paused. If set to `true`, the
|
83 |
+
underlaying streams will be paused right after being appended, as well as when
|
84 |
+
`delayedStream.pipe()` wants to throttle.
|
85 |
+
|
86 |
+
### combinedStream.maxDataSize = `2 * 1024 * 1024`
|
87 |
+
|
88 |
+
The maximum amount of bytes (or characters) to buffer for all source streams.
|
89 |
+
If this value is exceeded, `combinedStream` emits an `'error'` event.
|
90 |
+
|
91 |
+
### combinedStream.dataSize = `0`
|
92 |
+
|
93 |
+
The amount of bytes (or characters) currently buffered by `combinedStream`.
|
94 |
+
|
95 |
+
### combinedStream.append(stream)
|
96 |
+
|
97 |
+
Appends the given `stream` to the combinedStream object. If `pauseStreams` is
|
98 |
+
set to `true, this stream will also be paused right away.
|
99 |
+
|
100 |
+
`streams` can also be a function that takes one parameter called `next`. `next`
|
101 |
+
is a function that must be invoked in order to provide the `next` stream, see
|
102 |
+
example above.
|
103 |
+
|
104 |
+
Regardless of how the `stream` is appended, combined-stream always attaches an
|
105 |
+
`'error'` listener to it, so you don't have to do that manually.
|
106 |
+
|
107 |
+
Special case: `stream` can also be a String or Buffer.
|
108 |
+
|
109 |
+
### combinedStream.write(data)
|
110 |
+
|
111 |
+
You should not call this, `combinedStream` takes care of piping the appended
|
112 |
+
streams into itself for you.
|
113 |
+
|
114 |
+
### combinedStream.resume()
|
115 |
+
|
116 |
+
Causes `combinedStream` to start drain the streams it manages. The function is
|
117 |
+
idempotent, and also emits a `'resume'` event each time which usually goes to
|
118 |
+
the stream that is currently being drained.
|
119 |
+
|
120 |
+
### combinedStream.pause();
|
121 |
+
|
122 |
+
If `combinedStream.pauseStreams` is set to `false`, this does nothing.
|
123 |
+
Otherwise a `'pause'` event is emitted, this goes to the stream that is
|
124 |
+
currently being drained, so you can use it to apply back pressure.
|
125 |
+
|
126 |
+
### combinedStream.end();
|
127 |
+
|
128 |
+
Sets `combinedStream.writable` to false, emits an `'end'` event, and removes
|
129 |
+
all streams from the queue.
|
130 |
+
|
131 |
+
### combinedStream.destroy();
|
132 |
+
|
133 |
+
Same as `combinedStream.end()`, except it emits a `'close'` event instead of
|
134 |
+
`'end'`.
|
135 |
+
|
136 |
+
## License
|
137 |
+
|
138 |
+
combined-stream is licensed under the MIT license.
|
node_modules/combined-stream/lib/combined_stream.js
ADDED
@@ -0,0 +1,208 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var util = require('util');
|
2 |
+
var Stream = require('stream').Stream;
|
3 |
+
var DelayedStream = require('delayed-stream');
|
4 |
+
|
5 |
+
module.exports = CombinedStream;
|
6 |
+
function CombinedStream() {
|
7 |
+
this.writable = false;
|
8 |
+
this.readable = true;
|
9 |
+
this.dataSize = 0;
|
10 |
+
this.maxDataSize = 2 * 1024 * 1024;
|
11 |
+
this.pauseStreams = true;
|
12 |
+
|
13 |
+
this._released = false;
|
14 |
+
this._streams = [];
|
15 |
+
this._currentStream = null;
|
16 |
+
this._insideLoop = false;
|
17 |
+
this._pendingNext = false;
|
18 |
+
}
|
19 |
+
util.inherits(CombinedStream, Stream);
|
20 |
+
|
21 |
+
CombinedStream.create = function(options) {
|
22 |
+
var combinedStream = new this();
|
23 |
+
|
24 |
+
options = options || {};
|
25 |
+
for (var option in options) {
|
26 |
+
combinedStream[option] = options[option];
|
27 |
+
}
|
28 |
+
|
29 |
+
return combinedStream;
|
30 |
+
};
|
31 |
+
|
32 |
+
CombinedStream.isStreamLike = function(stream) {
|
33 |
+
return (typeof stream !== 'function')
|
34 |
+
&& (typeof stream !== 'string')
|
35 |
+
&& (typeof stream !== 'boolean')
|
36 |
+
&& (typeof stream !== 'number')
|
37 |
+
&& (!Buffer.isBuffer(stream));
|
38 |
+
};
|
39 |
+
|
40 |
+
CombinedStream.prototype.append = function(stream) {
|
41 |
+
var isStreamLike = CombinedStream.isStreamLike(stream);
|
42 |
+
|
43 |
+
if (isStreamLike) {
|
44 |
+
if (!(stream instanceof DelayedStream)) {
|
45 |
+
var newStream = DelayedStream.create(stream, {
|
46 |
+
maxDataSize: Infinity,
|
47 |
+
pauseStream: this.pauseStreams,
|
48 |
+
});
|
49 |
+
stream.on('data', this._checkDataSize.bind(this));
|
50 |
+
stream = newStream;
|
51 |
+
}
|
52 |
+
|
53 |
+
this._handleErrors(stream);
|
54 |
+
|
55 |
+
if (this.pauseStreams) {
|
56 |
+
stream.pause();
|
57 |
+
}
|
58 |
+
}
|
59 |
+
|
60 |
+
this._streams.push(stream);
|
61 |
+
return this;
|
62 |
+
};
|
63 |
+
|
64 |
+
CombinedStream.prototype.pipe = function(dest, options) {
|
65 |
+
Stream.prototype.pipe.call(this, dest, options);
|
66 |
+
this.resume();
|
67 |
+
return dest;
|
68 |
+
};
|
69 |
+
|
70 |
+
CombinedStream.prototype._getNext = function() {
|
71 |
+
this._currentStream = null;
|
72 |
+
|
73 |
+
if (this._insideLoop) {
|
74 |
+
this._pendingNext = true;
|
75 |
+
return; // defer call
|
76 |
+
}
|
77 |
+
|
78 |
+
this._insideLoop = true;
|
79 |
+
try {
|
80 |
+
do {
|
81 |
+
this._pendingNext = false;
|
82 |
+
this._realGetNext();
|
83 |
+
} while (this._pendingNext);
|
84 |
+
} finally {
|
85 |
+
this._insideLoop = false;
|
86 |
+
}
|
87 |
+
};
|
88 |
+
|
89 |
+
CombinedStream.prototype._realGetNext = function() {
|
90 |
+
var stream = this._streams.shift();
|
91 |
+
|
92 |
+
|
93 |
+
if (typeof stream == 'undefined') {
|
94 |
+
this.end();
|
95 |
+
return;
|
96 |
+
}
|
97 |
+
|
98 |
+
if (typeof stream !== 'function') {
|
99 |
+
this._pipeNext(stream);
|
100 |
+
return;
|
101 |
+
}
|
102 |
+
|
103 |
+
var getStream = stream;
|
104 |
+
getStream(function(stream) {
|
105 |
+
var isStreamLike = CombinedStream.isStreamLike(stream);
|
106 |
+
if (isStreamLike) {
|
107 |
+
stream.on('data', this._checkDataSize.bind(this));
|
108 |
+
this._handleErrors(stream);
|
109 |
+
}
|
110 |
+
|
111 |
+
this._pipeNext(stream);
|
112 |
+
}.bind(this));
|
113 |
+
};
|
114 |
+
|
115 |
+
CombinedStream.prototype._pipeNext = function(stream) {
|
116 |
+
this._currentStream = stream;
|
117 |
+
|
118 |
+
var isStreamLike = CombinedStream.isStreamLike(stream);
|
119 |
+
if (isStreamLike) {
|
120 |
+
stream.on('end', this._getNext.bind(this));
|
121 |
+
stream.pipe(this, {end: false});
|
122 |
+
return;
|
123 |
+
}
|
124 |
+
|
125 |
+
var value = stream;
|
126 |
+
this.write(value);
|
127 |
+
this._getNext();
|
128 |
+
};
|
129 |
+
|
130 |
+
CombinedStream.prototype._handleErrors = function(stream) {
|
131 |
+
var self = this;
|
132 |
+
stream.on('error', function(err) {
|
133 |
+
self._emitError(err);
|
134 |
+
});
|
135 |
+
};
|
136 |
+
|
137 |
+
CombinedStream.prototype.write = function(data) {
|
138 |
+
this.emit('data', data);
|
139 |
+
};
|
140 |
+
|
141 |
+
CombinedStream.prototype.pause = function() {
|
142 |
+
if (!this.pauseStreams) {
|
143 |
+
return;
|
144 |
+
}
|
145 |
+
|
146 |
+
if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause();
|
147 |
+
this.emit('pause');
|
148 |
+
};
|
149 |
+
|
150 |
+
CombinedStream.prototype.resume = function() {
|
151 |
+
if (!this._released) {
|
152 |
+
this._released = true;
|
153 |
+
this.writable = true;
|
154 |
+
this._getNext();
|
155 |
+
}
|
156 |
+
|
157 |
+
if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume();
|
158 |
+
this.emit('resume');
|
159 |
+
};
|
160 |
+
|
161 |
+
CombinedStream.prototype.end = function() {
|
162 |
+
this._reset();
|
163 |
+
this.emit('end');
|
164 |
+
};
|
165 |
+
|
166 |
+
CombinedStream.prototype.destroy = function() {
|
167 |
+
this._reset();
|
168 |
+
this.emit('close');
|
169 |
+
};
|
170 |
+
|
171 |
+
CombinedStream.prototype._reset = function() {
|
172 |
+
this.writable = false;
|
173 |
+
this._streams = [];
|
174 |
+
this._currentStream = null;
|
175 |
+
};
|
176 |
+
|
177 |
+
CombinedStream.prototype._checkDataSize = function() {
|
178 |
+
this._updateDataSize();
|
179 |
+
if (this.dataSize <= this.maxDataSize) {
|
180 |
+
return;
|
181 |
+
}
|
182 |
+
|
183 |
+
var message =
|
184 |
+
'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.';
|
185 |
+
this._emitError(new Error(message));
|
186 |
+
};
|
187 |
+
|
188 |
+
CombinedStream.prototype._updateDataSize = function() {
|
189 |
+
this.dataSize = 0;
|
190 |
+
|
191 |
+
var self = this;
|
192 |
+
this._streams.forEach(function(stream) {
|
193 |
+
if (!stream.dataSize) {
|
194 |
+
return;
|
195 |
+
}
|
196 |
+
|
197 |
+
self.dataSize += stream.dataSize;
|
198 |
+
});
|
199 |
+
|
200 |
+
if (this._currentStream && this._currentStream.dataSize) {
|
201 |
+
this.dataSize += this._currentStream.dataSize;
|
202 |
+
}
|
203 |
+
};
|
204 |
+
|
205 |
+
CombinedStream.prototype._emitError = function(err) {
|
206 |
+
this._reset();
|
207 |
+
this.emit('error', err);
|
208 |
+
};
|
node_modules/combined-stream/package.json
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"author": "Felix Geisendörfer <felix@debuggable.com> (http://debuggable.com/)",
|
3 |
+
"name": "combined-stream",
|
4 |
+
"description": "A stream that emits multiple other streams one after another.",
|
5 |
+
"version": "1.0.8",
|
6 |
+
"homepage": "https://github.com/felixge/node-combined-stream",
|
7 |
+
"repository": {
|
8 |
+
"type": "git",
|
9 |
+
"url": "git://github.com/felixge/node-combined-stream.git"
|
10 |
+
},
|
11 |
+
"main": "./lib/combined_stream",
|
12 |
+
"scripts": {
|
13 |
+
"test": "node test/run.js"
|
14 |
+
},
|
15 |
+
"engines": {
|
16 |
+
"node": ">= 0.8"
|
17 |
+
},
|
18 |
+
"dependencies": {
|
19 |
+
"delayed-stream": "~1.0.0"
|
20 |
+
},
|
21 |
+
"devDependencies": {
|
22 |
+
"far": "~0.0.7"
|
23 |
+
},
|
24 |
+
"license": "MIT"
|
25 |
+
}
|
node_modules/combined-stream/yarn.lock
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
2 |
+
# yarn lockfile v1
|
3 |
+
|
4 |
+
|
5 |
+
delayed-stream@~1.0.0:
|
6 |
+
version "1.0.0"
|
7 |
+
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
|
8 |
+
|
9 |
+
far@~0.0.7:
|
10 |
+
version "0.0.7"
|
11 |
+
resolved "https://registry.yarnpkg.com/far/-/far-0.0.7.tgz#01c1fd362bcd26ce9cf161af3938aa34619f79a7"
|
12 |
+
dependencies:
|
13 |
+
oop "0.0.3"
|
14 |
+
|
15 |
+
oop@0.0.3:
|
16 |
+
version "0.0.3"
|
17 |
+
resolved "https://registry.yarnpkg.com/oop/-/oop-0.0.3.tgz#70fa405a5650891a194fdc82ca68dad6dabf4401"
|
node_modules/data-uri-to-buffer/README.md
ADDED
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
data-uri-to-buffer
|
2 |
+
==================
|
3 |
+
### Generate a Buffer instance from a [Data URI][rfc] string
|
4 |
+
[![Build Status](https://travis-ci.org/TooTallNate/node-data-uri-to-buffer.svg?branch=master)](https://travis-ci.org/TooTallNate/node-data-uri-to-buffer)
|
5 |
+
|
6 |
+
This module accepts a ["data" URI][rfc] String of data, and returns a
|
7 |
+
node.js `Buffer` instance with the decoded data.
|
8 |
+
|
9 |
+
|
10 |
+
Installation
|
11 |
+
------------
|
12 |
+
|
13 |
+
Install with `npm`:
|
14 |
+
|
15 |
+
``` bash
|
16 |
+
$ npm install data-uri-to-buffer
|
17 |
+
```
|
18 |
+
|
19 |
+
|
20 |
+
Example
|
21 |
+
-------
|
22 |
+
|
23 |
+
``` js
|
24 |
+
import dataUriToBuffer from 'data-uri-to-buffer';
|
25 |
+
|
26 |
+
// plain-text data is supported
|
27 |
+
let uri = 'data:,Hello%2C%20World!';
|
28 |
+
let decoded = dataUriToBuffer(uri);
|
29 |
+
console.log(decoded.toString());
|
30 |
+
// 'Hello, World!'
|
31 |
+
|
32 |
+
// base64-encoded data is supported
|
33 |
+
uri = 'data:text/plain;base64,SGVsbG8sIFdvcmxkIQ%3D%3D';
|
34 |
+
decoded = dataUriToBuffer(uri);
|
35 |
+
console.log(decoded.toString());
|
36 |
+
// 'Hello, World!'
|
37 |
+
```
|
38 |
+
|
39 |
+
|
40 |
+
API
|
41 |
+
---
|
42 |
+
|
43 |
+
### dataUriToBuffer(String uri) → Buffer
|
44 |
+
|
45 |
+
The `type` property on the Buffer instance gets set to the main type portion of
|
46 |
+
the "mediatype" portion of the "data" URI, or defaults to `"text/plain"` if not
|
47 |
+
specified.
|
48 |
+
|
49 |
+
The `typeFull` property on the Buffer instance gets set to the entire
|
50 |
+
"mediatype" portion of the "data" URI (including all parameters), or defaults
|
51 |
+
to `"text/plain;charset=US-ASCII"` if not specified.
|
52 |
+
|
53 |
+
The `charset` property on the Buffer instance gets set to the Charset portion of
|
54 |
+
the "mediatype" portion of the "data" URI, or defaults to `"US-ASCII"` if the
|
55 |
+
entire type is not specified, or defaults to `""` otherwise.
|
56 |
+
|
57 |
+
*Note*: If the only the main type is specified but not the charset, e.g.
|
58 |
+
`"data:text/plain,abc"`, the charset is set to the empty string. The spec only
|
59 |
+
defaults to US-ASCII as charset if the entire type is not specified.
|
60 |
+
|
61 |
+
|
62 |
+
License
|
63 |
+
-------
|
64 |
+
|
65 |
+
(The MIT License)
|
66 |
+
|
67 |
+
Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net>
|
68 |
+
|
69 |
+
Permission is hereby granted, free of charge, to any person obtaining
|
70 |
+
a copy of this software and associated documentation files (the
|
71 |
+
'Software'), to deal in the Software without restriction, including
|
72 |
+
without limitation the rights to use, copy, modify, merge, publish,
|
73 |
+
distribute, sublicense, and/or sell copies of the Software, and to
|
74 |
+
permit persons to whom the Software is furnished to do so, subject to
|
75 |
+
the following conditions:
|
76 |
+
|
77 |
+
The above copyright notice and this permission notice shall be
|
78 |
+
included in all copies or substantial portions of the Software.
|
79 |
+
|
80 |
+
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
81 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
82 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
83 |
+
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
84 |
+
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
85 |
+
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
86 |
+
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
87 |
+
|
88 |
+
[rfc]: http://tools.ietf.org/html/rfc2397
|
node_modules/data-uri-to-buffer/dist/index.d.ts
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/// <reference types="node" />
|
2 |
+
export interface MimeBuffer extends Buffer {
|
3 |
+
type: string;
|
4 |
+
typeFull: string;
|
5 |
+
charset: string;
|
6 |
+
}
|
7 |
+
/**
|
8 |
+
* Returns a `Buffer` instance from the given data URI `uri`.
|
9 |
+
*
|
10 |
+
* @param {String} uri Data URI to turn into a Buffer instance
|
11 |
+
* @returns {Buffer} Buffer instance from Data URI
|
12 |
+
* @api public
|
13 |
+
*/
|
14 |
+
export declare function dataUriToBuffer(uri: string): MimeBuffer;
|
15 |
+
export default dataUriToBuffer;
|
node_modules/data-uri-to-buffer/dist/index.js
ADDED
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/**
|
2 |
+
* Returns a `Buffer` instance from the given data URI `uri`.
|
3 |
+
*
|
4 |
+
* @param {String} uri Data URI to turn into a Buffer instance
|
5 |
+
* @returns {Buffer} Buffer instance from Data URI
|
6 |
+
* @api public
|
7 |
+
*/
|
8 |
+
export function dataUriToBuffer(uri) {
|
9 |
+
if (!/^data:/i.test(uri)) {
|
10 |
+
throw new TypeError('`uri` does not appear to be a Data URI (must begin with "data:")');
|
11 |
+
}
|
12 |
+
// strip newlines
|
13 |
+
uri = uri.replace(/\r?\n/g, '');
|
14 |
+
// split the URI up into the "metadata" and the "data" portions
|
15 |
+
const firstComma = uri.indexOf(',');
|
16 |
+
if (firstComma === -1 || firstComma <= 4) {
|
17 |
+
throw new TypeError('malformed data: URI');
|
18 |
+
}
|
19 |
+
// remove the "data:" scheme and parse the metadata
|
20 |
+
const meta = uri.substring(5, firstComma).split(';');
|
21 |
+
let charset = '';
|
22 |
+
let base64 = false;
|
23 |
+
const type = meta[0] || 'text/plain';
|
24 |
+
let typeFull = type;
|
25 |
+
for (let i = 1; i < meta.length; i++) {
|
26 |
+
if (meta[i] === 'base64') {
|
27 |
+
base64 = true;
|
28 |
+
}
|
29 |
+
else if (meta[i]) {
|
30 |
+
typeFull += `;${meta[i]}`;
|
31 |
+
if (meta[i].indexOf('charset=') === 0) {
|
32 |
+
charset = meta[i].substring(8);
|
33 |
+
}
|
34 |
+
}
|
35 |
+
}
|
36 |
+
// defaults to US-ASCII only if type is not provided
|
37 |
+
if (!meta[0] && !charset.length) {
|
38 |
+
typeFull += ';charset=US-ASCII';
|
39 |
+
charset = 'US-ASCII';
|
40 |
+
}
|
41 |
+
// get the encoded data portion and decode URI-encoded chars
|
42 |
+
const encoding = base64 ? 'base64' : 'ascii';
|
43 |
+
const data = unescape(uri.substring(firstComma + 1));
|
44 |
+
const buffer = Buffer.from(data, encoding);
|
45 |
+
// set `.type` and `.typeFull` properties to MIME type
|
46 |
+
buffer.type = type;
|
47 |
+
buffer.typeFull = typeFull;
|
48 |
+
// set the `.charset` property
|
49 |
+
buffer.charset = charset;
|
50 |
+
return buffer;
|
51 |
+
}
|
52 |
+
export default dataUriToBuffer;
|
53 |
+
//# sourceMappingURL=index.js.map
|
node_modules/data-uri-to-buffer/dist/index.js.map
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAMA;;;;;;GAMG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW;IAC1C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACzB,MAAM,IAAI,SAAS,CAClB,kEAAkE,CAClE,CAAC;KACF;IAED,iBAAiB;IACjB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC;IAEhC,+DAA+D;IAC/D,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACpC,IAAI,UAAU,KAAK,CAAC,CAAC,IAAI,UAAU,IAAI,CAAC,EAAE;QACzC,MAAM,IAAI,SAAS,CAAC,qBAAqB,CAAC,CAAC;KAC3C;IAED,mDAAmD;IACnD,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAErD,IAAI,OAAO,GAAG,EAAE,CAAC;IACjB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,MAAM,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,YAAY,CAAC;IACrC,IAAI,QAAQ,GAAG,IAAI,CAAC;IACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,QAAQ,EAAE;YACzB,MAAM,GAAG,IAAI,CAAC;SACd;aAAM,IAAG,IAAI,CAAC,CAAC,CAAC,EAAE;YAClB,QAAQ,IAAI,IAAM,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;YAC5B,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBACtC,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC/B;SACD;KACD;IACD,oDAAoD;IACpD,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE;QAChC,QAAQ,IAAI,mBAAmB,CAAC;QAChC,OAAO,GAAG,UAAU,CAAC;KACrB;IAED,4DAA4D;IAC5D,MAAM,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;IAC7C,MAAM,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACrD,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAe,CAAC;IAEzD,sDAAsD;IACtD,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC;IACnB,MAAM,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAE3B,8BAA8B;IAC9B,MAAM,CAAC,OAAO,GAAG,OAAO,CAAC;IAEzB,OAAO,MAAM,CAAC;AACf,CAAC;AAED,eAAe,eAAe,CAAC"}
|
node_modules/data-uri-to-buffer/package.json
ADDED
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"name": "data-uri-to-buffer",
|
3 |
+
"version": "4.0.1",
|
4 |
+
"description": "Generate a Buffer instance from a Data URI string",
|
5 |
+
"type": "module",
|
6 |
+
"exports": "./dist/index.js",
|
7 |
+
"main": "./dist/index.js",
|
8 |
+
"types": "./dist/index.d.ts",
|
9 |
+
"files": [
|
10 |
+
"dist",
|
11 |
+
"src"
|
12 |
+
],
|
13 |
+
"scripts": {
|
14 |
+
"build": "tsc",
|
15 |
+
"test": "jest",
|
16 |
+
"prepublishOnly": "npm run build"
|
17 |
+
},
|
18 |
+
"repository": {
|
19 |
+
"type": "git",
|
20 |
+
"url": "git://github.com/TooTallNate/node-data-uri-to-buffer.git"
|
21 |
+
},
|
22 |
+
"engines": {
|
23 |
+
"node": ">= 12"
|
24 |
+
},
|
25 |
+
"keywords": [
|
26 |
+
"data",
|
27 |
+
"uri",
|
28 |
+
"datauri",
|
29 |
+
"data-uri",
|
30 |
+
"buffer",
|
31 |
+
"convert",
|
32 |
+
"rfc2397",
|
33 |
+
"2397"
|
34 |
+
],
|
35 |
+
"author": "Nathan Rajlich <nathan@tootallnate.net> (http://n8.io/)",
|
36 |
+
"license": "MIT",
|
37 |
+
"bugs": {
|
38 |
+
"url": "https://github.com/TooTallNate/node-data-uri-to-buffer/issues"
|
39 |
+
},
|
40 |
+
"homepage": "https://github.com/TooTallNate/node-data-uri-to-buffer",
|
41 |
+
"devDependencies": {
|
42 |
+
"@types/jest": "^27.0.2",
|
43 |
+
"@types/node": "^12.20.36",
|
44 |
+
"jest": "^27.3.1",
|
45 |
+
"ts-jest": "^27.0.7",
|
46 |
+
"typescript": "^4.4.4"
|
47 |
+
},
|
48 |
+
"jest": {
|
49 |
+
"preset": "ts-jest",
|
50 |
+
"globals": {
|
51 |
+
"ts-jest": {
|
52 |
+
"diagnostics": false,
|
53 |
+
"isolatedModules": true
|
54 |
+
}
|
55 |
+
},
|
56 |
+
"verbose": false,
|
57 |
+
"testEnvironment": "node",
|
58 |
+
"testMatch": [
|
59 |
+
"<rootDir>/test/**/*.test.ts"
|
60 |
+
]
|
61 |
+
}
|
62 |
+
}
|
node_modules/data-uri-to-buffer/src/index.ts
ADDED
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
export interface MimeBuffer extends Buffer {
|
2 |
+
type: string;
|
3 |
+
typeFull: string;
|
4 |
+
charset: string;
|
5 |
+
}
|
6 |
+
|
7 |
+
/**
|
8 |
+
* Returns a `Buffer` instance from the given data URI `uri`.
|
9 |
+
*
|
10 |
+
* @param {String} uri Data URI to turn into a Buffer instance
|
11 |
+
* @returns {Buffer} Buffer instance from Data URI
|
12 |
+
* @api public
|
13 |
+
*/
|
14 |
+
export function dataUriToBuffer(uri: string): MimeBuffer {
|
15 |
+
if (!/^data:/i.test(uri)) {
|
16 |
+
throw new TypeError(
|
17 |
+
'`uri` does not appear to be a Data URI (must begin with "data:")'
|
18 |
+
);
|
19 |
+
}
|
20 |
+
|
21 |
+
// strip newlines
|
22 |
+
uri = uri.replace(/\r?\n/g, '');
|
23 |
+
|
24 |
+
// split the URI up into the "metadata" and the "data" portions
|
25 |
+
const firstComma = uri.indexOf(',');
|
26 |
+
if (firstComma === -1 || firstComma <= 4) {
|
27 |
+
throw new TypeError('malformed data: URI');
|
28 |
+
}
|
29 |
+
|
30 |
+
// remove the "data:" scheme and parse the metadata
|
31 |
+
const meta = uri.substring(5, firstComma).split(';');
|
32 |
+
|
33 |
+
let charset = '';
|
34 |
+
let base64 = false;
|
35 |
+
const type = meta[0] || 'text/plain';
|
36 |
+
let typeFull = type;
|
37 |
+
for (let i = 1; i < meta.length; i++) {
|
38 |
+
if (meta[i] === 'base64') {
|
39 |
+
base64 = true;
|
40 |
+
} else if(meta[i]) {
|
41 |
+
typeFull += `;${ meta[i]}`;
|
42 |
+
if (meta[i].indexOf('charset=') === 0) {
|
43 |
+
charset = meta[i].substring(8);
|
44 |
+
}
|
45 |
+
}
|
46 |
+
}
|
47 |
+
// defaults to US-ASCII only if type is not provided
|
48 |
+
if (!meta[0] && !charset.length) {
|
49 |
+
typeFull += ';charset=US-ASCII';
|
50 |
+
charset = 'US-ASCII';
|
51 |
+
}
|
52 |
+
|
53 |
+
// get the encoded data portion and decode URI-encoded chars
|
54 |
+
const encoding = base64 ? 'base64' : 'ascii';
|
55 |
+
const data = unescape(uri.substring(firstComma + 1));
|
56 |
+
const buffer = Buffer.from(data, encoding) as MimeBuffer;
|
57 |
+
|
58 |
+
// set `.type` and `.typeFull` properties to MIME type
|
59 |
+
buffer.type = type;
|
60 |
+
buffer.typeFull = typeFull;
|
61 |
+
|
62 |
+
// set the `.charset` property
|
63 |
+
buffer.charset = charset;
|
64 |
+
|
65 |
+
return buffer;
|
66 |
+
}
|
67 |
+
|
68 |
+
export default dataUriToBuffer;
|
node_modules/delayed-stream/.npmignore
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
test
|
node_modules/delayed-stream/License
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Copyright (c) 2011 Debuggable Limited <felix@debuggable.com>
|
2 |
+
|
3 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
4 |
+
of this software and associated documentation files (the "Software"), to deal
|
5 |
+
in the Software without restriction, including without limitation the rights
|
6 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
7 |
+
copies of the Software, and to permit persons to whom the Software is
|
8 |
+
furnished to do so, subject to the following conditions:
|
9 |
+
|
10 |
+
The above copyright notice and this permission notice shall be included in
|
11 |
+
all copies or substantial portions of the Software.
|
12 |
+
|
13 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
14 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
15 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
16 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
17 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
18 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
19 |
+
THE SOFTWARE.
|
node_modules/delayed-stream/Makefile
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
SHELL := /bin/bash
|
2 |
+
|
3 |
+
test:
|
4 |
+
@./test/run.js
|
5 |
+
|
6 |
+
.PHONY: test
|
7 |
+
|
node_modules/delayed-stream/Readme.md
ADDED
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# delayed-stream
|
2 |
+
|
3 |
+
Buffers events from a stream until you are ready to handle them.
|
4 |
+
|
5 |
+
## Installation
|
6 |
+
|
7 |
+
``` bash
|
8 |
+
npm install delayed-stream
|
9 |
+
```
|
10 |
+
|
11 |
+
## Usage
|
12 |
+
|
13 |
+
The following example shows how to write a http echo server that delays its
|
14 |
+
response by 1000 ms.
|
15 |
+
|
16 |
+
``` javascript
|
17 |
+
var DelayedStream = require('delayed-stream');
|
18 |
+
var http = require('http');
|
19 |
+
|
20 |
+
http.createServer(function(req, res) {
|
21 |
+
var delayed = DelayedStream.create(req);
|
22 |
+
|
23 |
+
setTimeout(function() {
|
24 |
+
res.writeHead(200);
|
25 |
+
delayed.pipe(res);
|
26 |
+
}, 1000);
|
27 |
+
});
|
28 |
+
```
|
29 |
+
|
30 |
+
If you are not using `Stream#pipe`, you can also manually release the buffered
|
31 |
+
events by calling `delayedStream.resume()`:
|
32 |
+
|
33 |
+
``` javascript
|
34 |
+
var delayed = DelayedStream.create(req);
|
35 |
+
|
36 |
+
setTimeout(function() {
|
37 |
+
// Emit all buffered events and resume underlaying source
|
38 |
+
delayed.resume();
|
39 |
+
}, 1000);
|
40 |
+
```
|
41 |
+
|
42 |
+
## Implementation
|
43 |
+
|
44 |
+
In order to use this meta stream properly, here are a few things you should
|
45 |
+
know about the implementation.
|
46 |
+
|
47 |
+
### Event Buffering / Proxying
|
48 |
+
|
49 |
+
All events of the `source` stream are hijacked by overwriting the `source.emit`
|
50 |
+
method. Until node implements a catch-all event listener, this is the only way.
|
51 |
+
|
52 |
+
However, delayed-stream still continues to emit all events it captures on the
|
53 |
+
`source`, regardless of whether you have released the delayed stream yet or
|
54 |
+
not.
|
55 |
+
|
56 |
+
Upon creation, delayed-stream captures all `source` events and stores them in
|
57 |
+
an internal event buffer. Once `delayedStream.release()` is called, all
|
58 |
+
buffered events are emitted on the `delayedStream`, and the event buffer is
|
59 |
+
cleared. After that, delayed-stream merely acts as a proxy for the underlaying
|
60 |
+
source.
|
61 |
+
|
62 |
+
### Error handling
|
63 |
+
|
64 |
+
Error events on `source` are buffered / proxied just like any other events.
|
65 |
+
However, `delayedStream.create` attaches a no-op `'error'` listener to the
|
66 |
+
`source`. This way you only have to handle errors on the `delayedStream`
|
67 |
+
object, rather than in two places.
|
68 |
+
|
69 |
+
### Buffer limits
|
70 |
+
|
71 |
+
delayed-stream provides a `maxDataSize` property that can be used to limit
|
72 |
+
the amount of data being buffered. In order to protect you from bad `source`
|
73 |
+
streams that don't react to `source.pause()`, this feature is enabled by
|
74 |
+
default.
|
75 |
+
|
76 |
+
## API
|
77 |
+
|
78 |
+
### DelayedStream.create(source, [options])
|
79 |
+
|
80 |
+
Returns a new `delayedStream`. Available options are:
|
81 |
+
|
82 |
+
* `pauseStream`
|
83 |
+
* `maxDataSize`
|
84 |
+
|
85 |
+
The description for those properties can be found below.
|
86 |
+
|
87 |
+
### delayedStream.source
|
88 |
+
|
89 |
+
The `source` stream managed by this object. This is useful if you are
|
90 |
+
passing your `delayedStream` around, and you still want to access properties
|
91 |
+
on the `source` object.
|
92 |
+
|
93 |
+
### delayedStream.pauseStream = true
|
94 |
+
|
95 |
+
Whether to pause the underlaying `source` when calling
|
96 |
+
`DelayedStream.create()`. Modifying this property afterwards has no effect.
|
97 |
+
|
98 |
+
### delayedStream.maxDataSize = 1024 * 1024
|
99 |
+
|
100 |
+
The amount of data to buffer before emitting an `error`.
|
101 |
+
|
102 |
+
If the underlaying source is emitting `Buffer` objects, the `maxDataSize`
|
103 |
+
refers to bytes.
|
104 |
+
|
105 |
+
If the underlaying source is emitting JavaScript strings, the size refers to
|
106 |
+
characters.
|
107 |
+
|
108 |
+
If you know what you are doing, you can set this property to `Infinity` to
|
109 |
+
disable this feature. You can also modify this property during runtime.
|
110 |
+
|
111 |
+
### delayedStream.dataSize = 0
|
112 |
+
|
113 |
+
The amount of data buffered so far.
|
114 |
+
|
115 |
+
### delayedStream.readable
|
116 |
+
|
117 |
+
An ECMA5 getter that returns the value of `source.readable`.
|
118 |
+
|
119 |
+
### delayedStream.resume()
|
120 |
+
|
121 |
+
If the `delayedStream` has not been released so far, `delayedStream.release()`
|
122 |
+
is called.
|
123 |
+
|
124 |
+
In either case, `source.resume()` is called.
|
125 |
+
|
126 |
+
### delayedStream.pause()
|
127 |
+
|
128 |
+
Calls `source.pause()`.
|
129 |
+
|
130 |
+
### delayedStream.pipe(dest)
|
131 |
+
|
132 |
+
Calls `delayedStream.resume()` and then proxies the arguments to `source.pipe`.
|
133 |
+
|
134 |
+
### delayedStream.release()
|
135 |
+
|
136 |
+
Emits and clears all events that have been buffered up so far. This does not
|
137 |
+
resume the underlaying source, use `delayedStream.resume()` instead.
|
138 |
+
|
139 |
+
## License
|
140 |
+
|
141 |
+
delayed-stream is licensed under the MIT license.
|
node_modules/delayed-stream/lib/delayed_stream.js
ADDED
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
var Stream = require('stream').Stream;
|
2 |
+
var util = require('util');
|
3 |
+
|
4 |
+
module.exports = DelayedStream;
|
5 |
+
function DelayedStream() {
|
6 |
+
this.source = null;
|
7 |
+
this.dataSize = 0;
|
8 |
+
this.maxDataSize = 1024 * 1024;
|
9 |
+
this.pauseStream = true;
|
10 |
+
|
11 |
+
this._maxDataSizeExceeded = false;
|
12 |
+
this._released = false;
|
13 |
+
this._bufferedEvents = [];
|
14 |
+
}
|
15 |
+
util.inherits(DelayedStream, Stream);
|
16 |
+
|
17 |
+
DelayedStream.create = function(source, options) {
|
18 |
+
var delayedStream = new this();
|
19 |
+
|
20 |
+
options = options || {};
|
21 |
+
for (var option in options) {
|
22 |
+
delayedStream[option] = options[option];
|
23 |
+
}
|
24 |
+
|
25 |
+
delayedStream.source = source;
|
26 |
+
|
27 |
+
var realEmit = source.emit;
|
28 |
+
source.emit = function() {
|
29 |
+
delayedStream._handleEmit(arguments);
|
30 |
+
return realEmit.apply(source, arguments);
|
31 |
+
};
|
32 |
+
|
33 |
+
source.on('error', function() {});
|
34 |
+
if (delayedStream.pauseStream) {
|
35 |
+
source.pause();
|
36 |
+
}
|
37 |
+
|
38 |
+
return delayedStream;
|
39 |
+
};
|
40 |
+
|
41 |
+
Object.defineProperty(DelayedStream.prototype, 'readable', {
|
42 |
+
configurable: true,
|
43 |
+
enumerable: true,
|
44 |
+
get: function() {
|
45 |
+
return this.source.readable;
|
46 |
+
}
|
47 |
+
});
|
48 |
+
|
49 |
+
DelayedStream.prototype.setEncoding = function() {
|
50 |
+
return this.source.setEncoding.apply(this.source, arguments);
|
51 |
+
};
|
52 |
+
|
53 |
+
DelayedStream.prototype.resume = function() {
|
54 |
+
if (!this._released) {
|
55 |
+
this.release();
|
56 |
+
}
|
57 |
+
|
58 |
+
this.source.resume();
|
59 |
+
};
|
60 |
+
|
61 |
+
DelayedStream.prototype.pause = function() {
|
62 |
+
this.source.pause();
|
63 |
+
};
|
64 |
+
|
65 |
+
DelayedStream.prototype.release = function() {
|
66 |
+
this._released = true;
|
67 |
+
|
68 |
+
this._bufferedEvents.forEach(function(args) {
|
69 |
+
this.emit.apply(this, args);
|
70 |
+
}.bind(this));
|
71 |
+
this._bufferedEvents = [];
|
72 |
+
};
|
73 |
+
|
74 |
+
DelayedStream.prototype.pipe = function() {
|
75 |
+
var r = Stream.prototype.pipe.apply(this, arguments);
|
76 |
+
this.resume();
|
77 |
+
return r;
|
78 |
+
};
|
79 |
+
|
80 |
+
DelayedStream.prototype._handleEmit = function(args) {
|
81 |
+
if (this._released) {
|
82 |
+
this.emit.apply(this, args);
|
83 |
+
return;
|
84 |
+
}
|
85 |
+
|
86 |
+
if (args[0] === 'data') {
|
87 |
+
this.dataSize += args[1].length;
|
88 |
+
this._checkIfMaxDataSizeExceeded();
|
89 |
+
}
|
90 |
+
|
91 |
+
this._bufferedEvents.push(args);
|
92 |
+
};
|
93 |
+
|
94 |
+
DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() {
|
95 |
+
if (this._maxDataSizeExceeded) {
|
96 |
+
return;
|
97 |
+
}
|
98 |
+
|
99 |
+
if (this.dataSize <= this.maxDataSize) {
|
100 |
+
return;
|
101 |
+
}
|
102 |
+
|
103 |
+
this._maxDataSizeExceeded = true;
|
104 |
+
var message =
|
105 |
+
'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'
|
106 |
+
this.emit('error', new Error(message));
|
107 |
+
};
|
node_modules/delayed-stream/package.json
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"author": "Felix Geisendörfer <felix@debuggable.com> (http://debuggable.com/)",
|
3 |
+
"contributors": [
|
4 |
+
"Mike Atkins <apeherder@gmail.com>"
|
5 |
+
],
|
6 |
+
"name": "delayed-stream",
|
7 |
+
"description": "Buffers events from a stream until you are ready to handle them.",
|
8 |
+
"license": "MIT",
|
9 |
+
"version": "1.0.0",
|
10 |
+
"homepage": "https://github.com/felixge/node-delayed-stream",
|
11 |
+
"repository": {
|
12 |
+
"type": "git",
|
13 |
+
"url": "git://github.com/felixge/node-delayed-stream.git"
|
14 |
+
},
|
15 |
+
"main": "./lib/delayed_stream",
|
16 |
+
"engines": {
|
17 |
+
"node": ">=0.4.0"
|
18 |
+
},
|
19 |
+
"scripts": {
|
20 |
+
"test": "make test"
|
21 |
+
},
|
22 |
+
"dependencies": {},
|
23 |
+
"devDependencies": {
|
24 |
+
"fake": "0.2.0",
|
25 |
+
"far": "0.0.1"
|
26 |
+
}
|
27 |
+
}
|
node_modules/fetch-blob/LICENSE
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
MIT License
|
2 |
+
|
3 |
+
Copyright (c) 2019 David Frank
|
4 |
+
|
5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6 |
+
of this software and associated documentation files (the "Software"), to deal
|
7 |
+
in the Software without restriction, including without limitation the rights
|
8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9 |
+
copies of the Software, and to permit persons to whom the Software is
|
10 |
+
furnished to do so, subject to the following conditions:
|
11 |
+
|
12 |
+
The above copyright notice and this permission notice shall be included in all
|
13 |
+
copies or substantial portions of the Software.
|
14 |
+
|
15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
21 |
+
SOFTWARE.
|
node_modules/fetch-blob/README.md
ADDED
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# fetch-blob
|
2 |
+
|
3 |
+
[![npm version][npm-image]][npm-url]
|
4 |
+
[![build status][ci-image]][ci-url]
|
5 |
+
[![coverage status][codecov-image]][codecov-url]
|
6 |
+
[![install size][install-size-image]][install-size-url]
|
7 |
+
|
8 |
+
A Blob implementation in Node.js, originally from [node-fetch](https://github.com/node-fetch/node-fetch).
|
9 |
+
|
10 |
+
## Installation
|
11 |
+
|
12 |
+
```sh
|
13 |
+
npm install fetch-blob
|
14 |
+
```
|
15 |
+
|
16 |
+
<details>
|
17 |
+
<summary>Upgrading from 2x to 3x</summary>
|
18 |
+
|
19 |
+
Updating from 2 to 3 should be a breeze since there are not many changes to the blob specification.
|
20 |
+
The major cause of a major release is coding standards.
|
21 |
+
- internal WeakMaps was replaced with private fields
|
22 |
+
- internal Buffer.from was replaced with TextEncoder/Decoder
|
23 |
+
- internal buffers was replaced with Uint8Arrays
|
24 |
+
- CommonJS was replaced with ESM
|
25 |
+
- The node stream returned by calling `blob.stream()` was replaced with whatwg streams
|
26 |
+
- (Read "Differences from other blobs" for more info.)
|
27 |
+
|
28 |
+
</details>
|
29 |
+
|
30 |
+
<details>
|
31 |
+
<summary>Differences from other Blobs</summary>
|
32 |
+
|
33 |
+
- Unlike NodeJS `buffer.Blob` (Added in: v15.7.0) and browser native Blob this polyfilled version can't be sent via PostMessage
|
34 |
+
- This blob version is more arbitrary, it can be constructed with blob parts that aren't an instance of itself
|
35 |
+
it has to look and behave as a blob to be accepted as a blob part.
|
36 |
+
- The benefit of this is that you can create other types of blobs that don't contain any internal data that has to be read in other ways, such as the `BlobDataItem` created in `from.js` that wraps a file path into a blob-like item and read lazily (nodejs plans to [implement this][fs-blobs] as well)
|
37 |
+
- The `blob.stream()` is the most noticeable differences. It returns a WHATWG stream now. to keep it as a node stream you would have to do:
|
38 |
+
|
39 |
+
```js
|
40 |
+
import {Readable} from 'stream'
|
41 |
+
const stream = Readable.from(blob.stream())
|
42 |
+
```
|
43 |
+
</details>
|
44 |
+
|
45 |
+
## Usage
|
46 |
+
|
47 |
+
```js
|
48 |
+
// Ways to import
|
49 |
+
// (PS it's dependency free ESM package so regular http-import from CDN works too)
|
50 |
+
import Blob from 'fetch-blob'
|
51 |
+
import File from 'fetch-blob/file.js'
|
52 |
+
|
53 |
+
import {Blob} from 'fetch-blob'
|
54 |
+
import {File} from 'fetch-blob/file.js'
|
55 |
+
|
56 |
+
const {Blob} = await import('fetch-blob')
|
57 |
+
|
58 |
+
|
59 |
+
// Ways to read the blob:
|
60 |
+
const blob = new Blob(['hello, world'])
|
61 |
+
|
62 |
+
await blob.text()
|
63 |
+
await blob.arrayBuffer()
|
64 |
+
for await (let chunk of blob.stream()) { ... }
|
65 |
+
blob.stream().getReader().read()
|
66 |
+
blob.stream().getReader({mode: 'byob'}).read(view)
|
67 |
+
```
|
68 |
+
|
69 |
+
### Blob part backed up by filesystem
|
70 |
+
|
71 |
+
`fetch-blob/from.js` comes packed with tools to convert any filepath into either a Blob or a File
|
72 |
+
It will not read the content into memory. It will only stat the file for last modified date and file size.
|
73 |
+
|
74 |
+
```js
|
75 |
+
// The default export is sync and use fs.stat to retrieve size & last modified as a blob
|
76 |
+
import blobFromSync from 'fetch-blob/from.js'
|
77 |
+
import {File, Blob, blobFrom, blobFromSync, fileFrom, fileFromSync} from 'fetch-blob/from.js'
|
78 |
+
|
79 |
+
const fsFile = fileFromSync('./2-GiB-file.bin', 'application/octet-stream')
|
80 |
+
const fsBlob = await blobFrom('./2-GiB-file.mp4')
|
81 |
+
|
82 |
+
// Not a 4 GiB memory snapshot, just holds references
|
83 |
+
// points to where data is located on the disk
|
84 |
+
const blob = new Blob([fsFile, fsBlob, 'memory', new Uint8Array(10)])
|
85 |
+
console.log(blob.size) // ~4 GiB
|
86 |
+
```
|
87 |
+
|
88 |
+
`blobFrom|blobFromSync|fileFrom|fileFromSync(path, [mimetype])`
|
89 |
+
|
90 |
+
### Creating Blobs backed up by other async sources
|
91 |
+
Our Blob & File classes are more generic than any other polyfills in the way that they can accept any blob look-a-like item
|
92 |
+
An example of this is that our blob implementation can be constructed with parts coming from [BlobDataItem](https://github.com/node-fetch/fetch-blob/blob/8ef89adad40d255a3bbd55cf38b88597c1cd5480/from.js#L32) (aka a filepath) or from [buffer.Blob](https://nodejs.org/api/buffer.html#buffer_new_buffer_blob_sources_options). It does not have to implement all the methods - just enough that it can be read/understood by our Blob implementation. The minimum requirement is that it has `Symbol.toStringTag`, `size`, `slice()` and either a `stream()` or an `arrayBuffer()` method. If you then wrap it in our Blob or File `new Blob([blobDataItem])` then you get all of the other methods that should be implemented in a blob or file
|
93 |
+
|
94 |
+
An example of this could be to create a file or blob like item coming from a remote HTTP request. Or from a DataBase
|
95 |
+
|
96 |
+
See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Blob) and [tests](https://github.com/node-fetch/fetch-blob/blob/master/test.js) for more details of how to use the Blob.
|
97 |
+
|
98 |
+
[npm-image]: https://flat.badgen.net/npm/v/fetch-blob
|
99 |
+
[npm-url]: https://www.npmjs.com/package/fetch-blob
|
100 |
+
[ci-image]: https://github.com/node-fetch/fetch-blob/workflows/CI/badge.svg
|
101 |
+
[ci-url]: https://github.com/node-fetch/fetch-blob/actions
|
102 |
+
[codecov-image]: https://flat.badgen.net/codecov/c/github/node-fetch/fetch-blob/master
|
103 |
+
[codecov-url]: https://codecov.io/gh/node-fetch/fetch-blob
|
104 |
+
[install-size-image]: https://flat.badgen.net/packagephobia/install/fetch-blob
|
105 |
+
[install-size-url]: https://packagephobia.now.sh/result?p=fetch-blob
|
106 |
+
[fs-blobs]: https://github.com/nodejs/node/issues/37340
|
node_modules/fetch-blob/file.d.ts
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
/** @type {typeof globalThis.File} */ export const File: typeof globalThis.File;
|
2 |
+
export default File;
|
node_modules/fetch-blob/file.js
ADDED
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import Blob from './index.js'

// Minimal W3C File implementation layered on top of our Blob polyfill.
const _File = class File extends Blob {
  #lastModified = 0
  #name = ''

  /**
   * @param {*[]} fileBits
   * @param {string} fileName
   * @param {{lastModified?: number, type?: string}} options
   */// @ts-ignore
  constructor (fileBits, fileName, options = {}) {
    if (arguments.length < 2) {
      throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)
    }
    super(fileBits, options)

    if (options === null) options = {}

    // WebIDL-style coercion: a NaN lastModified keeps the field default (0).
    const mtime = options.lastModified === undefined ? Date.now() : Number(options.lastModified)
    if (!Number.isNaN(mtime)) {
      this.#lastModified = mtime
    }

    this.#name = String(fileName)
  }

  get name () {
    return this.#name
  }

  get lastModified () {
    return this.#lastModified
  }

  get [Symbol.toStringTag] () {
    return 'File'
  }

  // Duck-typed instanceof: any Blob whose string tag is exactly 'File'.
  static [Symbol.hasInstance] (object) {
    return !!object && object instanceof Blob &&
      /^(File)$/.test(object[Symbol.toStringTag])
  }
}

/** @type {typeof globalThis.File} */// @ts-ignore
export const File = _File
export default File
|
node_modules/fetch-blob/from.d.ts
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
export default blobFromSync;
|
2 |
+
/**
|
3 |
+
* @param {string} path filepath on the disk
|
4 |
+
* @param {string} [type] mimetype to use
|
5 |
+
*/
|
6 |
+
export function blobFromSync(path: string, type?: string): Blob;
|
7 |
+
import File from "./file.js";
|
8 |
+
import Blob from "./index.js";
|
9 |
+
/**
|
10 |
+
* @param {string} path filepath on the disk
|
11 |
+
* @param {string} [type] mimetype to use
|
12 |
+
* @returns {Promise<Blob>}
|
13 |
+
*/
|
14 |
+
export function blobFrom(path: string, type?: string): Promise<Blob>;
|
15 |
+
/**
|
16 |
+
* @param {string} path filepath on the disk
|
17 |
+
* @param {string} [type] mimetype to use
|
18 |
+
* @returns {Promise<File>}
|
19 |
+
*/
|
20 |
+
export function fileFrom(path: string, type?: string): Promise<File>;
|
21 |
+
/**
|
22 |
+
* @param {string} path filepath on the disk
|
23 |
+
* @param {string} [type] mimetype to use
|
24 |
+
*/
|
25 |
+
export function fileFromSync(path: string, type?: string): File;
|
26 |
+
export { File, Blob };
|
node_modules/fetch-blob/from.js
ADDED
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { statSync, createReadStream, promises as fs } from 'node:fs'
import { basename } from 'node:path'
import DOMException from 'node-domexception'

import File from './file.js'
import Blob from './index.js'

const { stat } = fs

/**
 * Build a Blob backed by the file at `path` (synchronous stat).
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 */
const blobFromSync = (path, type) => fromBlob(statSync(path), path, type)

/**
 * Async variant of blobFromSync.
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 * @returns {Promise<Blob>}
 */
const blobFrom = (path, type) => stat(path).then(st => fromBlob(st, path, type))

/**
 * Build a File backed by the file at `path`.
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 * @returns {Promise<File>}
 */
const fileFrom = (path, type) => stat(path).then(st => fromFile(st, path, type))

/**
 * Sync variant of fileFrom.
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 */
const fileFromSync = (path, type) => fromFile(statSync(path), path, type)

// @ts-ignore
const fromBlob = (stat, path, type = '') => new Blob([new BlobDataItem({
  path,
  size: stat.size,
  lastModified: stat.mtimeMs,
  start: 0
})], { type })

// @ts-ignore
const fromFile = (stat, path, type = '') => new File([new BlobDataItem({
  path,
  size: stat.size,
  lastModified: stat.mtimeMs,
  start: 0
})], basename(path), { type, lastModified: stat.mtimeMs })

/**
 * A lazy, disk-backed blob part implementing the minimum blob interface.
 * It is wrapped inside a Blob/File as a blobPart, so callers never touch
 * it directly.
 *
 * @private
 */
class BlobDataItem {
  #path
  #start

  constructor (options) {
    this.#path = options.path
    this.#start = options.start
    this.size = options.size
    this.lastModified = options.lastModified
  }

  /**
   * Slice arguments arrive already validated and clamped into range by
   * Blob.prototype.slice, so no bounds checking is repeated here.
   */
  slice (start, end) {
    return new BlobDataItem({
      path: this.#path,
      lastModified: this.lastModified,
      size: end - start,
      start: this.#start + start
    })
  }

  async * stream () {
    // Refuse to read if the file changed after the reference was taken.
    const { mtimeMs } = await stat(this.#path)
    if (mtimeMs > this.lastModified) {
      throw new DOMException('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError')
    }
    yield * createReadStream(this.#path, {
      start: this.#start,
      end: this.#start + this.size - 1
    })
  }

  get [Symbol.toStringTag] () {
    return 'Blob'
  }
}

export default blobFromSync
export { File, Blob, blobFrom, blobFromSync, fileFrom, fileFromSync }
|
node_modules/fetch-blob/index.d.ts
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
/** @type {typeof globalThis.Blob} */
|
2 |
+
export const Blob: typeof globalThis.Blob;
|
3 |
+
export default Blob;
|
node_modules/fetch-blob/index.js
ADDED
@@ -0,0 +1,250 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/*! fetch-blob. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */

// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)
// Node has recently added whatwg stream into core

import './streams.cjs'

// 64 KiB (same size chrome slice theirs blob into Uint8array's)
const POOL_SIZE = 65536

/**
 * Walk a list of blob parts and yield their bytes as Uint8Array chunks.
 * @param {(Blob | Uint8Array)[]} parts
 */
async function * toIterator (parts, clone = true) {
  for (const part of parts) {
    if ('stream' in part) {
      // Blob-like part: defer to its own stream implementation.
      yield * (/** @type {AsyncIterableIterator<Uint8Array>} */ (part.stream()))
    } else if (ArrayBuffer.isView(part)) {
      if (clone) {
        // Copy out in POOL_SIZE slices so consumers never alias our storage.
        let offset = part.byteOffset
        const stop = part.byteOffset + part.byteLength
        while (offset !== stop) {
          const take = Math.min(stop - offset, POOL_SIZE)
          const copy = part.buffer.slice(offset, offset + take)
          offset += copy.byteLength
          yield new Uint8Array(copy)
        }
      } else {
        yield part
      }
    /* c8 ignore next 10 */
    } else {
      // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)
      let read = 0
      const blobPart = (/** @type {Blob} */ (part))
      while (read !== blobPart.size) {
        const piece = blobPart.slice(read, Math.min(blobPart.size, read + POOL_SIZE))
        const buffer = await piece.arrayBuffer()
        read += buffer.byteLength
        yield new Uint8Array(buffer)
      }
    }
  }
}

const _Blob = class Blob {
  /** @type {Array.<(Blob|Uint8Array)>} */
  #parts = []
  #type = ''
  #size = 0
  #endings = 'transparent'

  /**
   * The Blob() constructor returns a new Blob object. The content
   * of the blob consists of the concatenation of the values given
   * in the parameter array.
   *
   * @param {*} blobParts
   * @param {{ type?: string, endings?: string }} [options]
   */
  constructor (blobParts = [], options = {}) {
    if (typeof blobParts !== 'object' || blobParts === null) {
      throw new TypeError('Failed to construct \'Blob\': The provided value cannot be converted to a sequence.')
    }

    if (typeof blobParts[Symbol.iterator] !== 'function') {
      throw new TypeError('Failed to construct \'Blob\': The object must have a callable @@iterator property.')
    }

    if (typeof options !== 'object' && typeof options !== 'function') {
      throw new TypeError('Failed to construct \'Blob\': parameter 2 cannot convert to dictionary.')
    }

    if (options === null) options = {}

    const utf8 = new TextEncoder()
    for (const entry of blobParts) {
      let part
      if (ArrayBuffer.isView(entry)) {
        // Snapshot the view's bytes so later mutation cannot affect us.
        part = new Uint8Array(entry.buffer.slice(entry.byteOffset, entry.byteOffset + entry.byteLength))
      } else if (entry instanceof ArrayBuffer) {
        part = new Uint8Array(entry.slice(0))
      } else if (entry instanceof Blob) {
        part = entry
      } else {
        // Anything else is stringified then UTF-8 encoded, per spec.
        part = utf8.encode(`${entry}`)
      }

      this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size
      this.#parts.push(part)
    }

    this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`
    const type = options.type === undefined ? '' : String(options.type)
    // Only printable-ASCII MIME types are kept; anything else becomes ''.
    this.#type = /^[\x20-\x7E]*$/.test(type) ? type : ''
  }

  /**
   * The Blob interface's size property returns the
   * size of the Blob in bytes.
   */
  get size () {
    return this.#size
  }

  /**
   * The type property of a Blob object returns the MIME type of the file.
   */
  get type () {
    return this.#type
  }

  /**
   * The text() method in the Blob interface returns a Promise
   * that resolves with a string containing the contents of
   * the blob, interpreted as UTF-8.
   *
   * @return {Promise<string>}
   */
  async text () {
    // More optimized than using this.arrayBuffer()
    // that requires twice as much ram
    const decoder = new TextDecoder()
    let str = ''
    for await (const chunk of toIterator(this.#parts, false)) {
      str += decoder.decode(chunk, { stream: true })
    }
    // Remaining
    str += decoder.decode()
    return str
  }

  /**
   * The arrayBuffer() method in the Blob interface returns a
   * Promise that resolves with the contents of the blob as
   * binary data contained in an ArrayBuffer.
   *
   * @return {Promise<ArrayBuffer>}
   */
  async arrayBuffer () {
    // Easier way... Just a unnecessary overhead
    // const view = new Uint8Array(this.size);
    // await this.stream().getReader({mode: 'byob'}).read(view);
    // return view.buffer;

    const data = new Uint8Array(this.size)
    let offset = 0
    for await (const chunk of toIterator(this.#parts, false)) {
      data.set(chunk, offset)
      offset += chunk.length
    }

    return data.buffer
  }

  stream () {
    const it = toIterator(this.#parts, true)

    return new globalThis.ReadableStream({
      // @ts-ignore
      type: 'bytes',
      async pull (ctrl) {
        const chunk = await it.next()
        chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)
      },

      async cancel () {
        await it.return()
      }
    })
  }

  /**
   * The Blob interface's slice() method creates and returns a
   * new Blob object which contains data from a subset of the
   * blob on which it's called.
   *
   * @param {number} [start]
   * @param {number} [end]
   * @param {string} [type]
   */
  slice (start = 0, end = this.size, type = '') {
    const { size } = this

    // Negative offsets count from the end; everything is clamped to [0, size].
    let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)
    let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)

    const span = Math.max(relativeEnd - relativeStart, 0)
    const sourceParts = this.#parts
    const collected = []
    let added = 0

    for (const part of sourceParts) {
      // don't add the overflow to new blobParts
      if (added >= span) {
        break
      }

      const partSize = ArrayBuffer.isView(part) ? part.byteLength : part.size
      if (relativeStart && partSize <= relativeStart) {
        // Skip the beginning and change the relative
        // start & end position as we skip the unwanted parts
        relativeStart -= partSize
        relativeEnd -= partSize
      } else {
        let chunk
        if (ArrayBuffer.isView(part)) {
          chunk = part.subarray(relativeStart, Math.min(partSize, relativeEnd))
          added += chunk.byteLength
        } else {
          chunk = part.slice(relativeStart, Math.min(partSize, relativeEnd))
          added += chunk.size
        }
        relativeEnd -= partSize
        collected.push(chunk)
        relativeStart = 0 // All next sequential parts should start at 0
      }
    }

    const blob = new Blob([], { type: String(type).toLowerCase() })
    blob.#size = span
    blob.#parts = collected

    return blob
  }

  get [Symbol.toStringTag] () {
    return 'Blob'
  }

  // Duck-typed instanceof: accepts foreign Blob implementations that expose
  // stream()/arrayBuffer() and carry a Blob/File string tag.
  static [Symbol.hasInstance] (object) {
    return (
      object &&
      typeof object === 'object' &&
      typeof object.constructor === 'function' &&
      (
        typeof object.stream === 'function' ||
        typeof object.arrayBuffer === 'function'
      ) &&
      /^(Blob|File)$/.test(object[Symbol.toStringTag])
    )
  }
}

Object.defineProperties(_Blob.prototype, {
  size: { enumerable: true },
  type: { enumerable: true },
  slice: { enumerable: true }
})

/** @type {typeof globalThis.Blob} */
export const Blob = _Blob
export default Blob
|