initial commit
This commit is contained in:
445
node_modules/node-fetch-native/dist/chunks/multipart-parser.cjs
generated
vendored
Normal file
445
node_modules/node-fetch-native/dist/chunks/multipart-parser.cjs
generated
vendored
Normal file
@@ -0,0 +1,445 @@
|
||||
'use strict';
|
||||
|
||||
require('node:fs');
|
||||
require('node:path');
|
||||
const abortController = require('../shared/node-fetch-native.8afd3fea.cjs');
|
||||
require('node:http');
|
||||
require('node:https');
|
||||
require('node:zlib');
|
||||
require('node:stream');
|
||||
require('node:buffer');
|
||||
require('node:util');
|
||||
require('node:url');
|
||||
require('node:net');
|
||||
|
||||
// Parser states, numbered sequentially; write() advances one byte at a
// time through these.
let s = 0;
const S = {
  START_BOUNDARY: s++,
  HEADER_FIELD_START: s++,
  HEADER_FIELD: s++,
  HEADER_VALUE_START: s++,
  HEADER_VALUE: s++,
  HEADER_VALUE_ALMOST_DONE: s++,
  HEADERS_ALMOST_DONE: s++,
  PART_DATA_START: s++,
  PART_DATA: s++,
  END: s++
};

// Bit flags recording which kind of boundary is being matched.
let f = 1;
const F = {
  PART_BOUNDARY: f,
  LAST_BOUNDARY: f *= 2
};

// ASCII byte values used by the state machine.
const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;

// Force an ASCII letter byte to lower case by setting bit 0x20.
const lower = c => c | 0x20;

// Default callback for all parser events until the consumer assigns one.
const noop = () => {};
|
||||
|
||||
/**
 * Incremental push parser for multipart/form-data payloads.
 *
 * Feed raw bytes with `write()`; the parser fires the assignable `on*`
 * callbacks (header field/value chunks, part begin/data/end) as it
 * recognizes structure. Call `end()` once the stream is exhausted.
 */
class MultipartParser {
  /**
   * @param {string} boundary - boundary token from the content-type header
   */
  constructor(boundary) {
    this.index = 0; // number of boundary bytes currently matched
    this.flags = 0;

    // Event hooks; consumers overwrite these after construction.
    this.onHeaderEnd = noop;
    this.onHeaderField = noop;
    this.onHeadersEnd = noop;
    this.onHeaderValue = noop;
    this.onPartBegin = noop;
    this.onPartData = noop;
    this.onPartEnd = noop;

    // Set of every byte value occurring in the full delimiter, used in
    // PART_DATA to skip runs of bytes that cannot start a boundary.
    this.boundaryChars = {};

    // The delimiter actually searched for is CRLF + "--" + boundary.
    boundary = '\r\n--' + boundary;
    const ui8a = new Uint8Array(boundary.length);
    for (let i = 0; i < boundary.length; i++) {
      ui8a[i] = boundary.charCodeAt(i);
      this.boundaryChars[ui8a[i]] = true;
    }

    this.boundary = ui8a;
    // Buffer for bytes of a partially matched boundary, replayed as part
    // data when the match turns out to be a false lead.
    this.lookbehind = new Uint8Array(this.boundary.length + 8);
    this.state = S.START_BOUNDARY;
  }

  /**
   * Feed one chunk of the body through the state machine.
   *
   * @param {Uint8Array} data
   */
  write(data) {
    let i = 0;
    const length_ = data.length;
    let previousIndex = this.index;
    let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
    const boundaryLength = this.boundary.length;
    const boundaryEnd = boundaryLength - 1;
    const bufferLength = data.length;
    let c;
    let cl;

    // Record the offset where the named callback's data begins.
    const mark = name => {
      this[name + 'Mark'] = i;
    };

    const clear = name => {
      delete this[name + 'Mark'];
    };

    // Invoke a callback, passing a subarray when a range is given; an
    // empty range (start === end) is suppressed.
    const callback = (callbackSymbol, start, end, ui8a) => {
      if (start === undefined || start !== end) {
        this[callbackSymbol](ui8a && ui8a.subarray(start, end));
      }
    };

    // Flush the data accumulated since the mark. When `clear` is falsy
    // the mark is reset to 0 so the next chunk continues the same span.
    const dataCallback = (name, clear) => {
      const markSymbol = name + 'Mark';
      if (!(markSymbol in this)) {
        return;
      }

      if (clear) {
        callback(name, this[markSymbol], i, data);
        delete this[markSymbol];
      } else {
        callback(name, this[markSymbol], data.length, data);
        this[markSymbol] = 0;
      }
    };

    for (i = 0; i < length_; i++) {
      c = data[i];

      switch (state) {
        case S.START_BOUNDARY:
          // Matching the first boundary line, which has no leading CRLF —
          // hence the +2/-2 offsets against the stored "\r\n--boundary".
          if (index === boundary.length - 2) {
            if (c === HYPHEN) {
              flags |= F.LAST_BOUNDARY;
            } else if (c !== CR) {
              // Malformed preamble: stop parsing silently.
              return;
            }

            index++;
            break;
          } else if (index - 1 === boundary.length - 2) {
            if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
              state = S.END;
              flags = 0;
            } else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
              index = 0;
              callback('onPartBegin');
              state = S.HEADER_FIELD_START;
            } else {
              return;
            }

            break;
          }

          if (c !== boundary[index + 2]) {
            index = -2;
          }

          if (c === boundary[index + 2]) {
            index++;
          }

          break;
        case S.HEADER_FIELD_START:
          state = S.HEADER_FIELD;
          mark('onHeaderField');
          index = 0;
          // falls through
        case S.HEADER_FIELD:
          if (c === CR) {
            // CR before any colon: the blank line terminating the headers.
            clear('onHeaderField');
            state = S.HEADERS_ALMOST_DONE;
            break;
          }

          index++;
          if (c === HYPHEN) {
            break;
          }

          if (c === COLON) {
            if (index === 1) {
              // empty header field
              return;
            }

            dataCallback('onHeaderField', true);
            state = S.HEADER_VALUE_START;
            break;
          }

          // Only letters (case-insensitive) and hyphens are accepted in
          // header names; anything else aborts the parse.
          cl = lower(c);
          if (cl < A || cl > Z) {
            return;
          }

          break;
        case S.HEADER_VALUE_START:
          if (c === SPACE) {
            // Skip leading whitespace after the colon.
            break;
          }

          mark('onHeaderValue');
          state = S.HEADER_VALUE;
          // falls through
        case S.HEADER_VALUE:
          if (c === CR) {
            dataCallback('onHeaderValue', true);
            callback('onHeaderEnd');
            state = S.HEADER_VALUE_ALMOST_DONE;
          }

          break;
        case S.HEADER_VALUE_ALMOST_DONE:
          if (c !== LF) {
            return;
          }

          state = S.HEADER_FIELD_START;
          break;
        case S.HEADERS_ALMOST_DONE:
          if (c !== LF) {
            return;
          }

          callback('onHeadersEnd');
          state = S.PART_DATA_START;
          break;
        case S.PART_DATA_START:
          state = S.PART_DATA;
          mark('onPartData');
          // falls through
        case S.PART_DATA:
          previousIndex = index;

          if (index === 0) {
            // boyer-moore derrived algorithm to safely skip non-boundary data
            i += boundaryEnd;
            while (i < bufferLength && !(data[i] in boundaryChars)) {
              i += boundaryLength;
            }

            i -= boundaryEnd;
            c = data[i];
          }

          if (index < boundary.length) {
            if (boundary[index] === c) {
              if (index === 0) {
                // Boundary may be starting: emit part data seen so far.
                dataCallback('onPartData', true);
              }

              index++;
            } else {
              index = 0;
            }
          } else if (index === boundary.length) {
            index++;
            if (c === CR) {
              // CR = part boundary
              flags |= F.PART_BOUNDARY;
            } else if (c === HYPHEN) {
              // HYPHEN = end boundary
              flags |= F.LAST_BOUNDARY;
            } else {
              index = 0;
            }
          } else if (index - 1 === boundary.length) {
            if (flags & F.PART_BOUNDARY) {
              index = 0;
              if (c === LF) {
                // unset the PART_BOUNDARY flag
                flags &= ~F.PART_BOUNDARY;
                callback('onPartEnd');
                callback('onPartBegin');
                state = S.HEADER_FIELD_START;
                break;
              }
            } else if (flags & F.LAST_BOUNDARY) {
              if (c === HYPHEN) {
                callback('onPartEnd');
                state = S.END;
                flags = 0;
              } else {
                index = 0;
              }
            } else {
              index = 0;
            }
          }

          if (index > 0) {
            // when matching a possible boundary, keep a lookbehind reference
            // in case it turns out to be a false lead
            lookbehind[index - 1] = c;
          } else if (previousIndex > 0) {
            // if our boundary turned out to be rubbish, the captured lookbehind
            // belongs to partData
            const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
            callback('onPartData', 0, previousIndex, _lookbehind);
            previousIndex = 0;
            mark('onPartData');

            // reconsider the current character even so it interrupted the sequence
            // it could be the beginning of a new sequence
            i--;
          }

          break;
        case S.END:
          break;
        default:
          throw new Error(`Unexpected state entered: ${state}`);
      }
    }

    // Flush anything still marked so callbacks see data spanning chunks.
    dataCallback('onHeaderField');
    dataCallback('onHeaderValue');
    dataCallback('onPartData');

    // Update properties for the next call
    this.index = index;
    this.state = state;
    this.flags = flags;
  }

  /**
   * Signal end of input. Fires a final onPartEnd when the stream stops
   * at a clean part edge; throws if it stopped mid-part.
   */
  end() {
    if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
      (this.state === S.PART_DATA && this.index === this.boundary.length)) {
      this.onPartEnd();
    } else if (this.state !== S.END) {
      throw new Error('MultipartParser.end(): stream ended unexpectedly');
    }
  }
}
|
||||
|
||||
/**
 * Extract the filename from a Content-Disposition header value.
 * Returns undefined when no filename parameter is present.
 *
 * @param {string} headerValue
 * @returns {string | undefined}
 */
function _fileName(headerValue) {
  // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
  const found = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
  if (!found) {
    return;
  }

  const raw = found[2] || found[3] || '';
  // Keep only the basename of a Windows-style path, then undo the
  // escaping some user agents apply to quotes and to 4-digit numeric
  // character references.
  return raw
    .slice(raw.lastIndexOf('\\') + 1)
    .replace(/%22/g, '"')
    .replace(/&#(\d{4});/g, (_, code) => String.fromCharCode(code));
}
|
||||
|
||||
/**
 * Parse a multipart body stream into a FormData instance.
 *
 * @param {AsyncIterable<Uint8Array>} Body - async iterable of body chunks
 * @param {string} ct - Content-Type header value; must be multipart and
 *   carry a `boundary` parameter
 * @returns {Promise<FormData>}
 * @throws {TypeError} when `ct` is not multipart or has no boundary
 */
async function toFormData(Body, ct) {
  if (!/multipart/i.test(ct)) {
    throw new TypeError('Failed to fetch');
  }

  const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);

  if (!m) {
    throw new TypeError('no or bad content-type header, no multipart boundary');
  }

  const parser = new MultipartParser(m[1] || m[2]);

  // Per-part accumulators, reset in onPartBegin.
  let headerField;
  let headerValue;
  let entryValue;
  let entryName;
  let contentType;
  let filename;
  const entryChunks = [];
  const formData = new abortController.FormData();

  // Accumulates a text field, decoded incrementally as UTF-8.
  const onPartData = ui8a => {
    entryValue += decoder.decode(ui8a, {stream: true});
  };

  // Accumulates raw file bytes without decoding.
  const appendToFile = ui8a => {
    entryChunks.push(ui8a);
  };

  const appendFileToFormData = () => {
    const file = new abortController.File(entryChunks, filename, {type: contentType});
    formData.append(entryName, file);
  };

  const appendEntryToFormData = () => {
    formData.append(entryName, entryValue);
  };

  const decoder = new TextDecoder('utf-8');
  decoder.decode(); // flush any initial decoder state

  parser.onPartBegin = function () {
    // Assume a text field until a filename shows up in the headers.
    parser.onPartData = onPartData;
    parser.onPartEnd = appendEntryToFormData;

    headerField = '';
    headerValue = '';
    entryValue = '';
    entryName = '';
    contentType = '';
    filename = null;
    entryChunks.length = 0;
  };

  parser.onHeaderField = function (ui8a) {
    headerField += decoder.decode(ui8a, {stream: true});
  };

  parser.onHeaderValue = function (ui8a) {
    headerValue += decoder.decode(ui8a, {stream: true});
  };

  parser.onHeaderEnd = function () {
    headerValue += decoder.decode(); // flush buffered bytes
    headerField = headerField.toLowerCase();

    if (headerField === 'content-disposition') {
      // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
      const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);

      if (m) {
        entryName = m[2] || m[3] || '';
      }

      filename = _fileName(headerValue);

      if (filename) {
        // A filename marks this part as a file: collect raw bytes instead.
        parser.onPartData = appendToFile;
        parser.onPartEnd = appendFileToFormData;
      }
    } else if (headerField === 'content-type') {
      contentType = headerValue;
    }

    headerValue = '';
    headerField = '';
  };

  for await (const chunk of Body) {
    parser.write(chunk);
  }

  parser.end();

  return formData;
}

exports.toFormData = toFormData;
|
||||
443
node_modules/node-fetch-native/dist/chunks/multipart-parser.mjs
generated
vendored
Normal file
443
node_modules/node-fetch-native/dist/chunks/multipart-parser.mjs
generated
vendored
Normal file
@@ -0,0 +1,443 @@
|
||||
import 'node:fs';
|
||||
import 'node:path';
|
||||
import { a as FormData, F as File } from '../shared/node-fetch-native.25a180d0.mjs';
|
||||
import 'node:http';
|
||||
import 'node:https';
|
||||
import 'node:zlib';
|
||||
import 'node:stream';
|
||||
import 'node:buffer';
|
||||
import 'node:util';
|
||||
import 'node:url';
|
||||
import 'node:net';
|
||||
|
||||
// Parser states, numbered sequentially; write() advances one byte at a
// time through these.
let s = 0;
const S = {
  START_BOUNDARY: s++,
  HEADER_FIELD_START: s++,
  HEADER_FIELD: s++,
  HEADER_VALUE_START: s++,
  HEADER_VALUE: s++,
  HEADER_VALUE_ALMOST_DONE: s++,
  HEADERS_ALMOST_DONE: s++,
  PART_DATA_START: s++,
  PART_DATA: s++,
  END: s++
};

// Bit flags recording which kind of boundary is being matched.
let f = 1;
const F = {
  PART_BOUNDARY: f,
  LAST_BOUNDARY: f *= 2
};

// ASCII byte values used by the state machine.
const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;

// Force an ASCII letter byte to lower case by setting bit 0x20.
const lower = c => c | 0x20;

// Default callback for all parser events until the consumer assigns one.
const noop = () => {};
|
||||
|
||||
/**
 * Incremental push parser for multipart/form-data payloads.
 *
 * Feed raw bytes with `write()`; the parser fires the assignable `on*`
 * callbacks (header field/value chunks, part begin/data/end) as it
 * recognizes structure. Call `end()` once the stream is exhausted.
 */
class MultipartParser {
  /**
   * @param {string} boundary - boundary token from the content-type header
   */
  constructor(boundary) {
    this.index = 0; // number of boundary bytes currently matched
    this.flags = 0;

    // Event hooks; consumers overwrite these after construction.
    this.onHeaderEnd = noop;
    this.onHeaderField = noop;
    this.onHeadersEnd = noop;
    this.onHeaderValue = noop;
    this.onPartBegin = noop;
    this.onPartData = noop;
    this.onPartEnd = noop;

    // Set of every byte value occurring in the full delimiter, used in
    // PART_DATA to skip runs of bytes that cannot start a boundary.
    this.boundaryChars = {};

    // The delimiter actually searched for is CRLF + "--" + boundary.
    boundary = '\r\n--' + boundary;
    const ui8a = new Uint8Array(boundary.length);
    for (let i = 0; i < boundary.length; i++) {
      ui8a[i] = boundary.charCodeAt(i);
      this.boundaryChars[ui8a[i]] = true;
    }

    this.boundary = ui8a;
    // Buffer for bytes of a partially matched boundary, replayed as part
    // data when the match turns out to be a false lead.
    this.lookbehind = new Uint8Array(this.boundary.length + 8);
    this.state = S.START_BOUNDARY;
  }

  /**
   * Feed one chunk of the body through the state machine.
   *
   * @param {Uint8Array} data
   */
  write(data) {
    let i = 0;
    const length_ = data.length;
    let previousIndex = this.index;
    let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
    const boundaryLength = this.boundary.length;
    const boundaryEnd = boundaryLength - 1;
    const bufferLength = data.length;
    let c;
    let cl;

    // Record the offset where the named callback's data begins.
    const mark = name => {
      this[name + 'Mark'] = i;
    };

    const clear = name => {
      delete this[name + 'Mark'];
    };

    // Invoke a callback, passing a subarray when a range is given; an
    // empty range (start === end) is suppressed.
    const callback = (callbackSymbol, start, end, ui8a) => {
      if (start === undefined || start !== end) {
        this[callbackSymbol](ui8a && ui8a.subarray(start, end));
      }
    };

    // Flush the data accumulated since the mark. When `clear` is falsy
    // the mark is reset to 0 so the next chunk continues the same span.
    const dataCallback = (name, clear) => {
      const markSymbol = name + 'Mark';
      if (!(markSymbol in this)) {
        return;
      }

      if (clear) {
        callback(name, this[markSymbol], i, data);
        delete this[markSymbol];
      } else {
        callback(name, this[markSymbol], data.length, data);
        this[markSymbol] = 0;
      }
    };

    for (i = 0; i < length_; i++) {
      c = data[i];

      switch (state) {
        case S.START_BOUNDARY:
          // Matching the first boundary line, which has no leading CRLF —
          // hence the +2/-2 offsets against the stored "\r\n--boundary".
          if (index === boundary.length - 2) {
            if (c === HYPHEN) {
              flags |= F.LAST_BOUNDARY;
            } else if (c !== CR) {
              // Malformed preamble: stop parsing silently.
              return;
            }

            index++;
            break;
          } else if (index - 1 === boundary.length - 2) {
            if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
              state = S.END;
              flags = 0;
            } else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
              index = 0;
              callback('onPartBegin');
              state = S.HEADER_FIELD_START;
            } else {
              return;
            }

            break;
          }

          if (c !== boundary[index + 2]) {
            index = -2;
          }

          if (c === boundary[index + 2]) {
            index++;
          }

          break;
        case S.HEADER_FIELD_START:
          state = S.HEADER_FIELD;
          mark('onHeaderField');
          index = 0;
          // falls through
        case S.HEADER_FIELD:
          if (c === CR) {
            // CR before any colon: the blank line terminating the headers.
            clear('onHeaderField');
            state = S.HEADERS_ALMOST_DONE;
            break;
          }

          index++;
          if (c === HYPHEN) {
            break;
          }

          if (c === COLON) {
            if (index === 1) {
              // empty header field
              return;
            }

            dataCallback('onHeaderField', true);
            state = S.HEADER_VALUE_START;
            break;
          }

          // Only letters (case-insensitive) and hyphens are accepted in
          // header names; anything else aborts the parse.
          cl = lower(c);
          if (cl < A || cl > Z) {
            return;
          }

          break;
        case S.HEADER_VALUE_START:
          if (c === SPACE) {
            // Skip leading whitespace after the colon.
            break;
          }

          mark('onHeaderValue');
          state = S.HEADER_VALUE;
          // falls through
        case S.HEADER_VALUE:
          if (c === CR) {
            dataCallback('onHeaderValue', true);
            callback('onHeaderEnd');
            state = S.HEADER_VALUE_ALMOST_DONE;
          }

          break;
        case S.HEADER_VALUE_ALMOST_DONE:
          if (c !== LF) {
            return;
          }

          state = S.HEADER_FIELD_START;
          break;
        case S.HEADERS_ALMOST_DONE:
          if (c !== LF) {
            return;
          }

          callback('onHeadersEnd');
          state = S.PART_DATA_START;
          break;
        case S.PART_DATA_START:
          state = S.PART_DATA;
          mark('onPartData');
          // falls through
        case S.PART_DATA:
          previousIndex = index;

          if (index === 0) {
            // boyer-moore derrived algorithm to safely skip non-boundary data
            i += boundaryEnd;
            while (i < bufferLength && !(data[i] in boundaryChars)) {
              i += boundaryLength;
            }

            i -= boundaryEnd;
            c = data[i];
          }

          if (index < boundary.length) {
            if (boundary[index] === c) {
              if (index === 0) {
                // Boundary may be starting: emit part data seen so far.
                dataCallback('onPartData', true);
              }

              index++;
            } else {
              index = 0;
            }
          } else if (index === boundary.length) {
            index++;
            if (c === CR) {
              // CR = part boundary
              flags |= F.PART_BOUNDARY;
            } else if (c === HYPHEN) {
              // HYPHEN = end boundary
              flags |= F.LAST_BOUNDARY;
            } else {
              index = 0;
            }
          } else if (index - 1 === boundary.length) {
            if (flags & F.PART_BOUNDARY) {
              index = 0;
              if (c === LF) {
                // unset the PART_BOUNDARY flag
                flags &= ~F.PART_BOUNDARY;
                callback('onPartEnd');
                callback('onPartBegin');
                state = S.HEADER_FIELD_START;
                break;
              }
            } else if (flags & F.LAST_BOUNDARY) {
              if (c === HYPHEN) {
                callback('onPartEnd');
                state = S.END;
                flags = 0;
              } else {
                index = 0;
              }
            } else {
              index = 0;
            }
          }

          if (index > 0) {
            // when matching a possible boundary, keep a lookbehind reference
            // in case it turns out to be a false lead
            lookbehind[index - 1] = c;
          } else if (previousIndex > 0) {
            // if our boundary turned out to be rubbish, the captured lookbehind
            // belongs to partData
            const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
            callback('onPartData', 0, previousIndex, _lookbehind);
            previousIndex = 0;
            mark('onPartData');

            // reconsider the current character even so it interrupted the sequence
            // it could be the beginning of a new sequence
            i--;
          }

          break;
        case S.END:
          break;
        default:
          throw new Error(`Unexpected state entered: ${state}`);
      }
    }

    // Flush anything still marked so callbacks see data spanning chunks.
    dataCallback('onHeaderField');
    dataCallback('onHeaderValue');
    dataCallback('onPartData');

    // Update properties for the next call
    this.index = index;
    this.state = state;
    this.flags = flags;
  }

  /**
   * Signal end of input. Fires a final onPartEnd when the stream stops
   * at a clean part edge; throws if it stopped mid-part.
   */
  end() {
    if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
      (this.state === S.PART_DATA && this.index === this.boundary.length)) {
      this.onPartEnd();
    } else if (this.state !== S.END) {
      throw new Error('MultipartParser.end(): stream ended unexpectedly');
    }
  }
}
|
||||
|
||||
/**
 * Extract the filename from a Content-Disposition header value.
 * Returns undefined when no filename parameter is present.
 *
 * @param {string} headerValue
 * @returns {string | undefined}
 */
function _fileName(headerValue) {
  // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
  const found = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
  if (!found) {
    return;
  }

  const raw = found[2] || found[3] || '';
  // Keep only the basename of a Windows-style path, then undo the
  // escaping some user agents apply to quotes and to 4-digit numeric
  // character references.
  return raw
    .slice(raw.lastIndexOf('\\') + 1)
    .replace(/%22/g, '"')
    .replace(/&#(\d{4});/g, (_, code) => String.fromCharCode(code));
}
|
||||
|
||||
/**
 * Parse a multipart body stream into a FormData instance.
 *
 * @param {AsyncIterable<Uint8Array>} Body - async iterable of body chunks
 * @param {string} ct - Content-Type header value; must be multipart and
 *   carry a `boundary` parameter
 * @returns {Promise<FormData>}
 * @throws {TypeError} when `ct` is not multipart or has no boundary
 */
async function toFormData(Body, ct) {
  if (!/multipart/i.test(ct)) {
    throw new TypeError('Failed to fetch');
  }

  const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);

  if (!m) {
    throw new TypeError('no or bad content-type header, no multipart boundary');
  }

  const parser = new MultipartParser(m[1] || m[2]);

  // Per-part accumulators, reset in onPartBegin.
  let headerField;
  let headerValue;
  let entryValue;
  let entryName;
  let contentType;
  let filename;
  const entryChunks = [];
  const formData = new FormData();

  // Accumulates a text field, decoded incrementally as UTF-8.
  const onPartData = ui8a => {
    entryValue += decoder.decode(ui8a, {stream: true});
  };

  // Accumulates raw file bytes without decoding.
  const appendToFile = ui8a => {
    entryChunks.push(ui8a);
  };

  const appendFileToFormData = () => {
    const file = new File(entryChunks, filename, {type: contentType});
    formData.append(entryName, file);
  };

  const appendEntryToFormData = () => {
    formData.append(entryName, entryValue);
  };

  const decoder = new TextDecoder('utf-8');
  decoder.decode(); // flush any initial decoder state

  parser.onPartBegin = function () {
    // Assume a text field until a filename shows up in the headers.
    parser.onPartData = onPartData;
    parser.onPartEnd = appendEntryToFormData;

    headerField = '';
    headerValue = '';
    entryValue = '';
    entryName = '';
    contentType = '';
    filename = null;
    entryChunks.length = 0;
  };

  parser.onHeaderField = function (ui8a) {
    headerField += decoder.decode(ui8a, {stream: true});
  };

  parser.onHeaderValue = function (ui8a) {
    headerValue += decoder.decode(ui8a, {stream: true});
  };

  parser.onHeaderEnd = function () {
    headerValue += decoder.decode(); // flush buffered bytes
    headerField = headerField.toLowerCase();

    if (headerField === 'content-disposition') {
      // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
      const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);

      if (m) {
        entryName = m[2] || m[3] || '';
      }

      filename = _fileName(headerValue);

      if (filename) {
        // A filename marks this part as a file: collect raw bytes instead.
        parser.onPartData = appendToFile;
        parser.onPartEnd = appendFileToFormData;
      }
    } else if (headerField === 'content-type') {
      contentType = headerValue;
    }

    headerValue = '';
    headerField = '';
  };

  for await (const chunk of Body) {
    parser.write(chunk);
  }

  parser.end();

  return formData;
}

export { toFormData };
|
||||
138
node_modules/node-fetch-native/dist/index.cjs
generated
vendored
Normal file
138
node_modules/node-fetch-native/dist/index.cjs
generated
vendored
Normal file
@@ -0,0 +1,138 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
const abortController = require('./shared/node-fetch-native.8afd3fea.cjs');
|
||||
const node_fs = require('node:fs');
|
||||
const node_path = require('node:path');
|
||||
require('node:http');
|
||||
require('node:https');
|
||||
require('node:zlib');
|
||||
require('node:stream');
|
||||
require('node:buffer');
|
||||
require('node:util');
|
||||
require('node:url');
|
||||
require('node:net');
|
||||
|
||||
// Promise-based stat, used alongside the synchronous variant below.
const { stat } = node_fs.promises;

/**
 * Create a Blob backed by the file on disk (synchronous stat).
 *
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 */
const blobFromSync = (path, type) => fromBlob(node_fs.statSync(path), path, type);

/**
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 * @returns {Promise<Blob>}
 */
const blobFrom = (path, type) => stat(path).then(stat => fromBlob(stat, path, type));

/**
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 * @returns {Promise<File>}
 */
const fileFrom = (path, type) => stat(path).then(stat => fromFile(stat, path, type));

/**
 * Create a File backed by the file on disk (synchronous stat).
 *
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 */
const fileFromSync = (path, type) => fromFile(node_fs.statSync(path), path, type);

// Wrap a lazy BlobDataItem file reference in a Blob.
// @ts-ignore
const fromBlob = (stat, path, type = '') => new abortController._Blob([new BlobDataItem({
  path,
  size: stat.size,
  lastModified: stat.mtimeMs,
  start: 0
})], { type });

// Wrap a lazy BlobDataItem file reference in a File named after the
// path's basename.
// @ts-ignore
const fromFile = (stat, path, type = '') => new abortController.File([new BlobDataItem({
  path,
  size: stat.size,
  lastModified: stat.mtimeMs,
  start: 0
})], node_path.basename(path), { type, lastModified: stat.mtimeMs });
|
||||
|
||||
/**
 * This is a blob backed up by a file on the disk
 * with minimum requirements. It's wrapped around a Blob as a blobPart
 * so you have no direct access to this.
 *
 * @private
 */
class BlobDataItem {
  #path
  #start

  constructor (options) {
    this.#path = options.path;
    this.#start = options.start; // byte offset into the file
    this.size = options.size;
    this.lastModified = options.lastModified;
    // Size of the un-sliced source file; stream() compares it against a
    // fresh stat to detect modification after this reference was taken.
    this.originalSize = options.originalSize === undefined
      ? options.size
      : options.originalSize;
  }

  /**
   * Slicing arguments is first validated and formatted
   * to not be out of range by Blob.prototype.slice
   */
  slice (start, end) {
    return new BlobDataItem({
      path: this.#path,
      lastModified: this.lastModified,
      originalSize: this.originalSize,
      size: end - start,
      start: this.#start + start
    })
  }

  /**
   * Yield the referenced byte range from disk, first re-checking that
   * the file was not changed since this Blob was created.
   */
  async * stream () {
    const { mtimeMs, size } = await stat(this.#path);

    if (mtimeMs > this.lastModified || this.originalSize !== size) {
      throw new abortController.nodeDomexception('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError')
    }

    yield * node_fs.createReadStream(this.#path, {
      start: this.#start,
      end: this.#start + this.size - 1
    });
  }

  get [Symbol.toStringTag] () {
    return 'Blob'
  }
}
|
||||
|
||||
// Prefer the platform's native web classes when the runtime provides
// them, falling back to the bundled node-fetch implementations otherwise.
const fetch = globalThis.fetch || abortController.fetch;
const Blob = globalThis.Blob || abortController._Blob;
const File = globalThis.File || abortController.File;
const FormData = globalThis.FormData || abortController.FormData;
const Headers = globalThis.Headers || abortController.Headers;
const Request = globalThis.Request || abortController.Request;
const Response = globalThis.Response || abortController.Response;
const AbortController = globalThis.AbortController || abortController.AbortController;

exports.AbortError = abortController.AbortError;
exports.FetchError = abortController.FetchError;
exports.isRedirect = abortController.isRedirect;
exports.AbortController = AbortController;
exports.Blob = Blob;
exports.File = File;
exports.FormData = FormData;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.blobFrom = blobFrom;
exports.blobFromSync = blobFromSync;
exports.default = fetch;
exports.fetch = fetch;
exports.fileFrom = fileFrom;
exports.fileFromSync = fileFromSync;
|
||||
120
node_modules/node-fetch-native/dist/index.mjs
generated
vendored
Normal file
120
node_modules/node-fetch-native/dist/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
import { _ as _Blob, F as File$1, n as nodeDomexception, a as FormData$1, f as fetch$1, H as Headers$1, R as Request$1, b as Response$1, A as AbortController$1 } from './shared/node-fetch-native.25a180d0.mjs';
|
||||
export { c as AbortError, d as FetchError, i as isRedirect } from './shared/node-fetch-native.25a180d0.mjs';
|
||||
import { statSync, createReadStream, promises } from 'node:fs';
|
||||
import { basename } from 'node:path';
|
||||
import 'node:http';
|
||||
import 'node:https';
|
||||
import 'node:zlib';
|
||||
import 'node:stream';
|
||||
import 'node:buffer';
|
||||
import 'node:util';
|
||||
import 'node:url';
|
||||
import 'node:net';
|
||||
|
||||
// Promise-based stat, used alongside the synchronous variant below.
const { stat } = promises;

/**
 * Create a Blob backed by the file on disk (synchronous stat).
 *
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 */
const blobFromSync = (path, type) => fromBlob(statSync(path), path, type);

/**
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 * @returns {Promise<Blob>}
 */
const blobFrom = (path, type) => stat(path).then(stat => fromBlob(stat, path, type));

/**
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 * @returns {Promise<File>}
 */
const fileFrom = (path, type) => stat(path).then(stat => fromFile(stat, path, type));

/**
 * Create a File backed by the file on disk (synchronous stat).
 *
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 */
const fileFromSync = (path, type) => fromFile(statSync(path), path, type);

// Wrap a lazy BlobDataItem file reference in a Blob.
// @ts-ignore
const fromBlob = (stat, path, type = '') => new _Blob([new BlobDataItem({
  path,
  size: stat.size,
  lastModified: stat.mtimeMs,
  start: 0
})], { type });

// Wrap a lazy BlobDataItem file reference in a File named after the
// path's basename.
// @ts-ignore
const fromFile = (stat, path, type = '') => new File$1([new BlobDataItem({
  path,
  size: stat.size,
  lastModified: stat.mtimeMs,
  start: 0
})], basename(path), { type, lastModified: stat.mtimeMs });
|
||||
|
||||
/**
 * This is a blob backed up by a file on the disk
 * with minimum requirements. It's wrapped around a Blob as a blobPart
 * so you have no direct access to this.
 *
 * @private
 */
class BlobDataItem {
  #path
  #start

  constructor (options) {
    this.#path = options.path;
    this.#start = options.start; // byte offset into the file
    this.size = options.size;
    this.lastModified = options.lastModified;
    // Size of the un-sliced source file; stream() compares it against a
    // fresh stat to detect modification after this reference was taken.
    this.originalSize = options.originalSize === undefined
      ? options.size
      : options.originalSize;
  }

  /**
   * Slicing arguments is first validated and formatted
   * to not be out of range by Blob.prototype.slice
   */
  slice (start, end) {
    return new BlobDataItem({
      path: this.#path,
      lastModified: this.lastModified,
      originalSize: this.originalSize,
      size: end - start,
      start: this.#start + start
    })
  }

  /**
   * Yield the referenced byte range from disk, first re-checking that
   * the file was not changed since this Blob was created.
   */
  async * stream () {
    const { mtimeMs, size } = await stat(this.#path);

    if (mtimeMs > this.lastModified || this.originalSize !== size) {
      throw new nodeDomexception('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError')
    }

    yield * createReadStream(this.#path, {
      start: this.#start,
      end: this.#start + this.size - 1
    });
  }

  get [Symbol.toStringTag] () {
    return 'Blob'
  }
}
|
||||
|
||||
const fetch = globalThis.fetch || fetch$1;
|
||||
const Blob = globalThis.Blob || _Blob;
|
||||
const File = globalThis.File || File$1;
|
||||
const FormData = globalThis.FormData || FormData$1;
|
||||
const Headers = globalThis.Headers || Headers$1;
|
||||
const Request = globalThis.Request || Request$1;
|
||||
const Response = globalThis.Response || Response$1;
|
||||
const AbortController = globalThis.AbortController || AbortController$1;
|
||||
|
||||
export { AbortController, Blob, File, FormData, Headers, Request, Response, blobFrom, blobFromSync, fetch as default, fetch, fileFrom, fileFromSync };
|
||||
24
node_modules/node-fetch-native/dist/native.cjs
generated
vendored
Normal file
24
node_modules/node-fetch-native/dist/native.cjs
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
const Blob = globalThis.Blob;
|
||||
const File = globalThis.File;
|
||||
const FormData = globalThis.FormData;
|
||||
const Headers = globalThis.Headers;
|
||||
const Request = globalThis.Request;
|
||||
const Response = globalThis.Response;
|
||||
const AbortController = globalThis.AbortController;
|
||||
const fetch = globalThis.fetch || (() => {
|
||||
throw new Error("global fetch is not available!");
|
||||
});
|
||||
|
||||
exports.AbortController = AbortController;
|
||||
exports.Blob = Blob;
|
||||
exports.File = File;
|
||||
exports.FormData = FormData;
|
||||
exports.Headers = Headers;
|
||||
exports.Request = Request;
|
||||
exports.Response = Response;
|
||||
exports.default = fetch;
|
||||
exports.fetch = fetch;
|
||||
12
node_modules/node-fetch-native/dist/native.mjs
generated
vendored
Normal file
12
node_modules/node-fetch-native/dist/native.mjs
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
const Blob = globalThis.Blob;
|
||||
const File = globalThis.File;
|
||||
const FormData = globalThis.FormData;
|
||||
const Headers = globalThis.Headers;
|
||||
const Request = globalThis.Request;
|
||||
const Response = globalThis.Response;
|
||||
const AbortController = globalThis.AbortController;
|
||||
const fetch = globalThis.fetch || (() => {
|
||||
throw new Error("global fetch is not available!");
|
||||
});
|
||||
|
||||
export { AbortController, Blob, File, FormData, Headers, Request, Response, fetch as default, fetch };
|
||||
22
node_modules/node-fetch-native/dist/polyfill.cjs
generated
vendored
Normal file
22
node_modules/node-fetch-native/dist/polyfill.cjs
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
'use strict';
|
||||
|
||||
const abortController = require('./shared/node-fetch-native.8afd3fea.cjs');
|
||||
require('node:fs');
|
||||
require('node:path');
|
||||
require('node:http');
|
||||
require('node:https');
|
||||
require('node:zlib');
|
||||
require('node:stream');
|
||||
require('node:buffer');
|
||||
require('node:util');
|
||||
require('node:url');
|
||||
require('node:net');
|
||||
|
||||
globalThis.fetch = globalThis.fetch || abortController.fetch;
|
||||
globalThis.Blob = globalThis.Blob || abortController._Blob;
|
||||
globalThis.File = globalThis.File || abortController.File;
|
||||
globalThis.FormData = globalThis.FormData || abortController.FormData;
|
||||
globalThis.Headers = globalThis.Headers || abortController.Headers;
|
||||
globalThis.Request = globalThis.Request || abortController.Request;
|
||||
globalThis.Response = globalThis.Response || abortController.Response;
|
||||
globalThis.AbortController = globalThis.AbortController || abortController.AbortController;
|
||||
20
node_modules/node-fetch-native/dist/polyfill.mjs
generated
vendored
Normal file
20
node_modules/node-fetch-native/dist/polyfill.mjs
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import { f as fetch, _ as _Blob, F as File, a as FormData, H as Headers, R as Request, b as Response, A as AbortController } from './shared/node-fetch-native.25a180d0.mjs';
|
||||
import 'node:fs';
|
||||
import 'node:path';
|
||||
import 'node:http';
|
||||
import 'node:https';
|
||||
import 'node:zlib';
|
||||
import 'node:stream';
|
||||
import 'node:buffer';
|
||||
import 'node:util';
|
||||
import 'node:url';
|
||||
import 'node:net';
|
||||
|
||||
globalThis.fetch = globalThis.fetch || fetch;
|
||||
globalThis.Blob = globalThis.Blob || _Blob;
|
||||
globalThis.File = globalThis.File || File;
|
||||
globalThis.FormData = globalThis.FormData || FormData;
|
||||
globalThis.Headers = globalThis.Headers || Headers;
|
||||
globalThis.Request = globalThis.Request || Request;
|
||||
globalThis.Response = globalThis.Response || Response;
|
||||
globalThis.AbortController = globalThis.AbortController || AbortController;
|
||||
7655
node_modules/node-fetch-native/dist/shared/node-fetch-native.25a180d0.mjs
generated
vendored
Normal file
7655
node_modules/node-fetch-native/dist/shared/node-fetch-native.25a180d0.mjs
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
7668
node_modules/node-fetch-native/dist/shared/node-fetch-native.8afd3fea.cjs
generated
vendored
Normal file
7668
node_modules/node-fetch-native/dist/shared/node-fetch-native.8afd3fea.cjs
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user