initial commit
This commit is contained in:
68
node_modules/unstorage/dist/drivers/cloudflare-kv-binding.cjs
generated
vendored
Normal file
68
node_modules/unstorage/dist/drivers/cloudflare-kv-binding.cjs
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
module.exports = void 0;
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
|
||||
var _default = (0, _utils.defineDriver)((opts = {}) => {
|
||||
const binding = getBinding(opts.binding);
|
||||
|
||||
async function getKeys(base) {
|
||||
const kvList = await binding.list(base ? {
|
||||
prefix: base
|
||||
} : void 0);
|
||||
return kvList.keys.map(key => key.name);
|
||||
}
|
||||
|
||||
return {
|
||||
async hasItem(key) {
|
||||
return (await binding.get(key)) !== null;
|
||||
},
|
||||
|
||||
getItem(key) {
|
||||
return binding.get(key);
|
||||
},
|
||||
|
||||
setItem(key, value) {
|
||||
return binding.put(key, value);
|
||||
},
|
||||
|
||||
removeItem(key) {
|
||||
return binding.delete(key);
|
||||
},
|
||||
|
||||
getKeys,
|
||||
|
||||
async clear() {
|
||||
const keys = await getKeys();
|
||||
await Promise.all(keys.map(key => binding.delete(key)));
|
||||
}
|
||||
|
||||
};
|
||||
});
|
||||
|
||||
module.exports = _default;
|
||||
|
||||
function getBinding(binding = "STORAGE") {
|
||||
let bindingName = "[binding]";
|
||||
|
||||
if (typeof binding === "string") {
|
||||
bindingName = binding;
|
||||
binding = globalThis[bindingName];
|
||||
}
|
||||
|
||||
if (!binding) {
|
||||
throw new Error(`Invalid Cloudflare KV binding '${bindingName}': ${binding}`);
|
||||
}
|
||||
|
||||
for (const key of ["get", "put", "delete"]) {
|
||||
if (!(key in binding)) {
|
||||
throw new Error(`Invalid Cloudflare KV binding '${bindingName}': '${key}' key is missing`);
|
||||
}
|
||||
}
|
||||
|
||||
return binding;
|
||||
}
|
||||
6
node_modules/unstorage/dist/drivers/cloudflare-kv-binding.d.ts
generated
vendored
Normal file
6
node_modules/unstorage/dist/drivers/cloudflare-kv-binding.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
/// <reference types="@cloudflare/workers-types" />
|
||||
export interface KVOptions {
|
||||
binding?: string | KVNamespace;
|
||||
}
|
||||
declare const _default: (opts?: KVOptions) => import("../types").Driver;
|
||||
export default _default;
|
||||
43
node_modules/unstorage/dist/drivers/cloudflare-kv-binding.mjs
generated
vendored
Normal file
43
node_modules/unstorage/dist/drivers/cloudflare-kv-binding.mjs
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
export default defineDriver((opts = {}) => {
|
||||
const binding = getBinding(opts.binding);
|
||||
async function getKeys(base) {
|
||||
const kvList = await binding.list(base ? { prefix: base } : void 0);
|
||||
return kvList.keys.map((key) => key.name);
|
||||
}
|
||||
return {
|
||||
async hasItem(key) {
|
||||
return await binding.get(key) !== null;
|
||||
},
|
||||
getItem(key) {
|
||||
return binding.get(key);
|
||||
},
|
||||
setItem(key, value) {
|
||||
return binding.put(key, value);
|
||||
},
|
||||
removeItem(key) {
|
||||
return binding.delete(key);
|
||||
},
|
||||
getKeys,
|
||||
async clear() {
|
||||
const keys = await getKeys();
|
||||
await Promise.all(keys.map((key) => binding.delete(key)));
|
||||
}
|
||||
};
|
||||
});
|
||||
function getBinding(binding = "STORAGE") {
|
||||
let bindingName = "[binding]";
|
||||
if (typeof binding === "string") {
|
||||
bindingName = binding;
|
||||
binding = globalThis[bindingName];
|
||||
}
|
||||
if (!binding) {
|
||||
throw new Error(`Invalid Cloudflare KV binding '${bindingName}': ${binding}`);
|
||||
}
|
||||
for (const key of ["get", "put", "delete"]) {
|
||||
if (!(key in binding)) {
|
||||
throw new Error(`Invalid Cloudflare KV binding '${bindingName}': '${key}' key is missing`);
|
||||
}
|
||||
}
|
||||
return binding;
|
||||
}
|
||||
165
node_modules/unstorage/dist/drivers/cloudflare-kv-http.cjs
generated
vendored
Normal file
165
node_modules/unstorage/dist/drivers/cloudflare-kv-http.cjs
generated
vendored
Normal file
@@ -0,0 +1,165 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
module.exports = void 0;
|
||||
|
||||
var _ofetch = require("ofetch");
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
|
||||
const LOG_TAG = "[unstorage] [cloudflare-http] ";
|
||||
|
||||
var _default = (0, _utils.defineDriver)(opts => {
|
||||
if (!opts.accountId) {
|
||||
throw new Error(LOG_TAG + "`accountId` is required.");
|
||||
}
|
||||
|
||||
if (!opts.namespaceId) {
|
||||
throw new Error(LOG_TAG + "`namespaceId` is required.");
|
||||
}
|
||||
|
||||
let headers;
|
||||
|
||||
if ("apiToken" in opts) {
|
||||
headers = {
|
||||
Authorization: `Bearer ${opts.apiToken}`
|
||||
};
|
||||
} else if ("userServiceKey" in opts) {
|
||||
headers = {
|
||||
"X-Auth-User-Service-Key": opts.userServiceKey
|
||||
};
|
||||
} else if (opts.email && opts.apiKey) {
|
||||
headers = {
|
||||
"X-Auth-Email": opts.email,
|
||||
"X-Auth-Key": opts.apiKey
|
||||
};
|
||||
} else {
|
||||
throw new Error(LOG_TAG + "One of the `apiToken`, `userServiceKey`, or a combination of `email` and `apiKey` is required.");
|
||||
}
|
||||
|
||||
const apiURL = opts.apiURL || "https://api.cloudflare.com";
|
||||
const baseURL = `${apiURL}/client/v4/accounts/${opts.accountId}/storage/kv/namespaces/${opts.namespaceId}`;
|
||||
|
||||
const kvFetch = _ofetch.$fetch.create({
|
||||
baseURL,
|
||||
headers
|
||||
});
|
||||
|
||||
const hasItem = async key => {
|
||||
try {
|
||||
const res = await kvFetch(`/metadata/${key}`);
|
||||
return res?.success === true;
|
||||
} catch (err) {
|
||||
if (!err.response) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (err.response.status === 404) {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
const getItem = async key => {
|
||||
try {
|
||||
return await kvFetch(`/values/${key}`).then(r => r.text());
|
||||
} catch (err) {
|
||||
if (!err.response) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (err.response.status === 404) {
|
||||
return null;
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
const setItem = async (key, value) => {
|
||||
return await kvFetch(`/values/${key}`, {
|
||||
method: "PUT",
|
||||
body: value
|
||||
});
|
||||
};
|
||||
|
||||
const removeItem = async key => {
|
||||
return await kvFetch(`/values/${key}`, {
|
||||
method: "DELETE"
|
||||
});
|
||||
};
|
||||
|
||||
const getKeys = async base => {
|
||||
const keys = [];
|
||||
const params = new URLSearchParams();
|
||||
|
||||
if (base) {
|
||||
params.set("prefix", base);
|
||||
}
|
||||
|
||||
const firstPage = await kvFetch("/keys", {
|
||||
params
|
||||
});
|
||||
firstPage.result.forEach(({
|
||||
name
|
||||
}) => keys.push(name));
|
||||
const cursor = firstPage.result_info.cursor;
|
||||
|
||||
if (cursor) {
|
||||
params.set("cursor", cursor);
|
||||
}
|
||||
|
||||
while (params.has("cursor")) {
|
||||
const pageResult = await kvFetch("/keys", {
|
||||
params
|
||||
});
|
||||
pageResult.result.forEach(({
|
||||
name
|
||||
}) => keys.push(name));
|
||||
const pageCursor = pageResult.result_info.cursor;
|
||||
|
||||
if (pageCursor) {
|
||||
params.set("cursor", pageCursor);
|
||||
} else {
|
||||
params.delete("cursor");
|
||||
}
|
||||
}
|
||||
|
||||
return keys;
|
||||
};
|
||||
|
||||
const clear = async () => {
|
||||
const keys = await getKeys();
|
||||
const chunks = keys.reduce((acc, key, i) => {
|
||||
if (i % 1e4 === 0) {
|
||||
acc.push([]);
|
||||
}
|
||||
|
||||
acc[acc.length - 1].push(key);
|
||||
return acc;
|
||||
}, [[]]);
|
||||
await Promise.all(chunks.map(chunk => {
|
||||
return kvFetch("/bulk", {
|
||||
method: "DELETE",
|
||||
body: {
|
||||
keys: chunk
|
||||
}
|
||||
});
|
||||
}));
|
||||
};
|
||||
|
||||
return {
|
||||
hasItem,
|
||||
getItem,
|
||||
setItem,
|
||||
removeItem,
|
||||
getKeys,
|
||||
clear
|
||||
};
|
||||
});
|
||||
|
||||
module.exports = _default;
|
||||
47
node_modules/unstorage/dist/drivers/cloudflare-kv-http.d.ts
generated
vendored
Normal file
47
node_modules/unstorage/dist/drivers/cloudflare-kv-http.d.ts
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
interface KVAuthAPIToken {
|
||||
/**
|
||||
* API Token generated from the [User Profile 'API Tokens' page](https://dash.cloudflare.com/profile/api-tokens)
|
||||
* of the Cloudflare console.
|
||||
* @see https://api.cloudflare.com/#getting-started-requests
|
||||
*/
|
||||
apiToken: string;
|
||||
}
|
||||
interface KVAuthServiceKey {
|
||||
/**
|
||||
* A special Cloudflare API key good for a restricted set of endpoints.
|
||||
* Always begins with "v1.0-", may vary in length.
|
||||
* May be used to authenticate in place of `apiToken` or `apiKey` and `email`.
|
||||
* @see https://api.cloudflare.com/#getting-started-requests
|
||||
*/
|
||||
userServiceKey: string;
|
||||
}
|
||||
interface KVAuthEmailKey {
|
||||
/**
|
||||
* Email address associated with your account.
|
||||
* Should be used along with `apiKey` to authenticate in place of `apiToken`.
|
||||
*/
|
||||
email: string;
|
||||
/**
|
||||
* API key generated on the "My Account" page of the Cloudflare console.
|
||||
* Should be used along with `email` to authenticate in place of `apiToken`.
|
||||
* @see https://api.cloudflare.com/#getting-started-requests
|
||||
*/
|
||||
apiKey: string;
|
||||
}
|
||||
export declare type KVHTTPOptions = {
|
||||
/**
|
||||
* Cloudflare account ID (required)
|
||||
*/
|
||||
accountId: string;
|
||||
/**
|
||||
* The ID of the KV namespace to target (required)
|
||||
*/
|
||||
namespaceId: string;
|
||||
/**
|
||||
* The URL of the Cloudflare API.
|
||||
* @default https://api.cloudflare.com
|
||||
*/
|
||||
apiURL?: string;
|
||||
} & (KVAuthServiceKey | KVAuthAPIToken | KVAuthEmailKey);
|
||||
declare const _default: (opts?: KVHTTPOptions) => import("../types").Driver;
|
||||
export default _default;
|
||||
107
node_modules/unstorage/dist/drivers/cloudflare-kv-http.mjs
generated
vendored
Normal file
107
node_modules/unstorage/dist/drivers/cloudflare-kv-http.mjs
generated
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
import { $fetch } from "ofetch";
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
const LOG_TAG = "[unstorage] [cloudflare-http] ";
|
||||
export default defineDriver((opts) => {
|
||||
if (!opts.accountId) {
|
||||
throw new Error(LOG_TAG + "`accountId` is required.");
|
||||
}
|
||||
if (!opts.namespaceId) {
|
||||
throw new Error(LOG_TAG + "`namespaceId` is required.");
|
||||
}
|
||||
let headers;
|
||||
if ("apiToken" in opts) {
|
||||
headers = { Authorization: `Bearer ${opts.apiToken}` };
|
||||
} else if ("userServiceKey" in opts) {
|
||||
headers = { "X-Auth-User-Service-Key": opts.userServiceKey };
|
||||
} else if (opts.email && opts.apiKey) {
|
||||
headers = { "X-Auth-Email": opts.email, "X-Auth-Key": opts.apiKey };
|
||||
} else {
|
||||
throw new Error(
|
||||
LOG_TAG + "One of the `apiToken`, `userServiceKey`, or a combination of `email` and `apiKey` is required."
|
||||
);
|
||||
}
|
||||
const apiURL = opts.apiURL || "https://api.cloudflare.com";
|
||||
const baseURL = `${apiURL}/client/v4/accounts/${opts.accountId}/storage/kv/namespaces/${opts.namespaceId}`;
|
||||
const kvFetch = $fetch.create({ baseURL, headers });
|
||||
const hasItem = async (key) => {
|
||||
try {
|
||||
const res = await kvFetch(`/metadata/${key}`);
|
||||
return res?.success === true;
|
||||
} catch (err) {
|
||||
if (!err.response) {
|
||||
throw err;
|
||||
}
|
||||
if (err.response.status === 404) {
|
||||
return false;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
const getItem = async (key) => {
|
||||
try {
|
||||
return await kvFetch(`/values/${key}`).then((r) => r.text());
|
||||
} catch (err) {
|
||||
if (!err.response) {
|
||||
throw err;
|
||||
}
|
||||
if (err.response.status === 404) {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
const setItem = async (key, value) => {
|
||||
return await kvFetch(`/values/${key}`, { method: "PUT", body: value });
|
||||
};
|
||||
const removeItem = async (key) => {
|
||||
return await kvFetch(`/values/${key}`, { method: "DELETE" });
|
||||
};
|
||||
const getKeys = async (base) => {
|
||||
const keys = [];
|
||||
const params = new URLSearchParams();
|
||||
if (base) {
|
||||
params.set("prefix", base);
|
||||
}
|
||||
const firstPage = await kvFetch("/keys", { params });
|
||||
firstPage.result.forEach(({ name }) => keys.push(name));
|
||||
const cursor = firstPage.result_info.cursor;
|
||||
if (cursor) {
|
||||
params.set("cursor", cursor);
|
||||
}
|
||||
while (params.has("cursor")) {
|
||||
const pageResult = await kvFetch("/keys", { params });
|
||||
pageResult.result.forEach(({ name }) => keys.push(name));
|
||||
const pageCursor = pageResult.result_info.cursor;
|
||||
if (pageCursor) {
|
||||
params.set("cursor", pageCursor);
|
||||
} else {
|
||||
params.delete("cursor");
|
||||
}
|
||||
}
|
||||
return keys;
|
||||
};
|
||||
const clear = async () => {
|
||||
const keys = await getKeys();
|
||||
const chunks = keys.reduce((acc, key, i) => {
|
||||
if (i % 1e4 === 0) {
|
||||
acc.push([]);
|
||||
}
|
||||
acc[acc.length - 1].push(key);
|
||||
return acc;
|
||||
}, [[]]);
|
||||
await Promise.all(chunks.map((chunk) => {
|
||||
return kvFetch("/bulk", {
|
||||
method: "DELETE",
|
||||
body: { keys: chunk }
|
||||
});
|
||||
}));
|
||||
};
|
||||
return {
|
||||
hasItem,
|
||||
getItem,
|
||||
setItem,
|
||||
removeItem,
|
||||
getKeys,
|
||||
clear
|
||||
};
|
||||
});
|
||||
121
node_modules/unstorage/dist/drivers/fs.cjs
generated
vendored
Normal file
121
node_modules/unstorage/dist/drivers/fs.cjs
generated
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
module.exports = void 0;
|
||||
|
||||
var _fs = require("fs");
|
||||
|
||||
var _path = require("path");
|
||||
|
||||
var _chokidar = require("chokidar");
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
|
||||
var _nodeFs = require("./utils/node-fs.cjs");
|
||||
|
||||
var _anymatch = _interopRequireDefault(require("anymatch"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
const PATH_TRAVERSE_RE = /\.\.\:|\.\.$/;
|
||||
|
||||
var _default = (0, _utils.defineDriver)((opts = {}) => {
|
||||
if (!opts.base) {
|
||||
throw new Error("base is required");
|
||||
}
|
||||
|
||||
if (!opts.ignore) {
|
||||
opts.ignore = ["**/node_modules/**", "**/.git/**"];
|
||||
}
|
||||
|
||||
opts.base = (0, _path.resolve)(opts.base);
|
||||
|
||||
const r = key => {
|
||||
if (PATH_TRAVERSE_RE.test(key)) {
|
||||
throw new Error("[unstorage] [fs] Invalid key. It should not contain `..` segments: " + key);
|
||||
}
|
||||
|
||||
const resolved = (0, _path.join)(opts.base, key.replace(/:/g, "/"));
|
||||
return resolved;
|
||||
};
|
||||
|
||||
let _watcher;
|
||||
|
||||
return {
|
||||
hasItem(key) {
|
||||
return (0, _fs.existsSync)(r(key));
|
||||
},
|
||||
|
||||
getItem(key) {
|
||||
return (0, _nodeFs.readFile)(r(key));
|
||||
},
|
||||
|
||||
async getMeta(key) {
|
||||
const {
|
||||
atime,
|
||||
mtime,
|
||||
size
|
||||
} = await _fs.promises.stat(r(key)).catch(() => ({
|
||||
atime: void 0,
|
||||
mtime: void 0,
|
||||
size: void 0
|
||||
}));
|
||||
return {
|
||||
atime,
|
||||
mtime,
|
||||
size
|
||||
};
|
||||
},
|
||||
|
||||
setItem(key, value) {
|
||||
return (0, _nodeFs.writeFile)(r(key), value);
|
||||
},
|
||||
|
||||
removeItem(key) {
|
||||
return (0, _nodeFs.unlink)(r(key));
|
||||
},
|
||||
|
||||
getKeys() {
|
||||
return (0, _nodeFs.readdirRecursive)(r("."), (0, _anymatch.default)(opts.ignore || []));
|
||||
},
|
||||
|
||||
async clear() {
|
||||
await (0, _nodeFs.rmRecursive)(r("."));
|
||||
},
|
||||
|
||||
async dispose() {
|
||||
if (_watcher) {
|
||||
await _watcher.close();
|
||||
}
|
||||
},
|
||||
|
||||
watch(callback) {
|
||||
if (_watcher) {
|
||||
return;
|
||||
}
|
||||
|
||||
return new Promise((resolve2, reject) => {
|
||||
_watcher = (0, _chokidar.watch)(opts.base, {
|
||||
ignoreInitial: true,
|
||||
ignored: opts.ignore,
|
||||
...opts.watchOptions
|
||||
}).on("ready", () => {
|
||||
resolve2(() => _watcher.close().then(() => _watcher = void 0));
|
||||
}).on("error", reject).on("all", (eventName, path) => {
|
||||
path = (0, _path.relative)(opts.base, path);
|
||||
|
||||
if (eventName === "change" || eventName === "add") {
|
||||
callback("update", path);
|
||||
} else if (eventName === "unlink") {
|
||||
callback("remove", path);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
};
|
||||
});
|
||||
|
||||
module.exports = _default;
|
||||
8
node_modules/unstorage/dist/drivers/fs.d.ts
generated
vendored
Normal file
8
node_modules/unstorage/dist/drivers/fs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import { WatchOptions } from 'chokidar';
|
||||
export interface FSStorageOptions {
|
||||
base?: string;
|
||||
ignore?: string[];
|
||||
watchOptions?: WatchOptions;
|
||||
}
|
||||
declare const _default: (opts?: FSStorageOptions) => import("../types").Driver;
|
||||
export default _default;
|
||||
77
node_modules/unstorage/dist/drivers/fs.mjs
generated
vendored
Normal file
77
node_modules/unstorage/dist/drivers/fs.mjs
generated
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
import { existsSync, promises as fsp } from "fs";
|
||||
import { resolve, relative, join } from "path";
|
||||
import { watch } from "chokidar";
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
import { readFile, writeFile, readdirRecursive, rmRecursive, unlink } from "./utils/node-fs.mjs";
|
||||
import anymatch from "anymatch";
|
||||
const PATH_TRAVERSE_RE = /\.\.\:|\.\.$/;
|
||||
export default defineDriver((opts = {}) => {
|
||||
if (!opts.base) {
|
||||
throw new Error("base is required");
|
||||
}
|
||||
if (!opts.ignore) {
|
||||
opts.ignore = [
|
||||
"**/node_modules/**",
|
||||
"**/.git/**"
|
||||
];
|
||||
}
|
||||
opts.base = resolve(opts.base);
|
||||
const r = (key) => {
|
||||
if (PATH_TRAVERSE_RE.test(key)) {
|
||||
throw new Error("[unstorage] [fs] Invalid key. It should not contain `..` segments: " + key);
|
||||
}
|
||||
const resolved = join(opts.base, key.replace(/:/g, "/"));
|
||||
return resolved;
|
||||
};
|
||||
let _watcher;
|
||||
return {
|
||||
hasItem(key) {
|
||||
return existsSync(r(key));
|
||||
},
|
||||
getItem(key) {
|
||||
return readFile(r(key));
|
||||
},
|
||||
async getMeta(key) {
|
||||
const { atime, mtime, size } = await fsp.stat(r(key)).catch(() => ({ atime: void 0, mtime: void 0, size: void 0 }));
|
||||
return { atime, mtime, size };
|
||||
},
|
||||
setItem(key, value) {
|
||||
return writeFile(r(key), value);
|
||||
},
|
||||
removeItem(key) {
|
||||
return unlink(r(key));
|
||||
},
|
||||
getKeys() {
|
||||
return readdirRecursive(r("."), anymatch(opts.ignore || []));
|
||||
},
|
||||
async clear() {
|
||||
await rmRecursive(r("."));
|
||||
},
|
||||
async dispose() {
|
||||
if (_watcher) {
|
||||
await _watcher.close();
|
||||
}
|
||||
},
|
||||
watch(callback) {
|
||||
if (_watcher) {
|
||||
return;
|
||||
}
|
||||
return new Promise((resolve2, reject) => {
|
||||
_watcher = watch(opts.base, {
|
||||
ignoreInitial: true,
|
||||
ignored: opts.ignore,
|
||||
...opts.watchOptions
|
||||
}).on("ready", () => {
|
||||
resolve2(() => _watcher.close().then(() => _watcher = void 0));
|
||||
}).on("error", reject).on("all", (eventName, path) => {
|
||||
path = relative(opts.base, path);
|
||||
if (eventName === "change" || eventName === "add") {
|
||||
callback("update", path);
|
||||
} else if (eventName === "unlink") {
|
||||
callback("remove", path);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
131
node_modules/unstorage/dist/drivers/github.cjs
generated
vendored
Normal file
131
node_modules/unstorage/dist/drivers/github.cjs
generated
vendored
Normal file
@@ -0,0 +1,131 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
module.exports = void 0;
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
|
||||
var _ofetch = require("ofetch");
|
||||
|
||||
var _ufo = require("ufo");
|
||||
|
||||
const defaultOptions = {
|
||||
repo: null,
|
||||
branch: "main",
|
||||
ttl: 600,
|
||||
dir: "",
|
||||
apiURL: "https://api.github.com",
|
||||
cdnURL: "https://raw.githubusercontent.com"
|
||||
};
|
||||
|
||||
var _default = (0, _utils.defineDriver)(_opts => {
|
||||
const opts = { ...defaultOptions,
|
||||
..._opts
|
||||
};
|
||||
const rawUrl = (0, _ufo.joinURL)(opts.cdnURL, opts.repo, opts.branch, opts.dir);
|
||||
let files = {};
|
||||
let lastCheck = 0;
|
||||
let syncPromise;
|
||||
|
||||
if (!opts.repo) {
|
||||
throw new Error('[unstorage] [github] Missing required option "repo"');
|
||||
}
|
||||
|
||||
const syncFiles = async () => {
|
||||
if (lastCheck + opts.ttl * 1e3 > Date.now()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!syncPromise) {
|
||||
syncPromise = fetchFiles(opts);
|
||||
}
|
||||
|
||||
files = await syncPromise;
|
||||
lastCheck = Date.now();
|
||||
syncPromise = void 0;
|
||||
};
|
||||
|
||||
return {
|
||||
async getKeys() {
|
||||
await syncFiles();
|
||||
return Object.keys(files);
|
||||
},
|
||||
|
||||
async hasItem(key) {
|
||||
await syncFiles();
|
||||
return key in files;
|
||||
},
|
||||
|
||||
async getItem(key) {
|
||||
await syncFiles();
|
||||
const item = files[key];
|
||||
|
||||
if (!item) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!item.body) {
|
||||
try {
|
||||
item.body = await (0, _ofetch.$fetch)(key.replace(/:/g, "/"), {
|
||||
baseURL: rawUrl,
|
||||
headers: {
|
||||
Authorization: opts.token ? `token ${opts.token}` : void 0
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
throw new Error(`[unstorage] [github] Failed to fetch "${key}"`, {
|
||||
cause: err
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return item.body;
|
||||
},
|
||||
|
||||
async getMeta(key) {
|
||||
await syncFiles();
|
||||
const item = files[key];
|
||||
return item ? item.meta : null;
|
||||
}
|
||||
|
||||
};
|
||||
});
|
||||
|
||||
module.exports = _default;
|
||||
|
||||
async function fetchFiles(opts) {
|
||||
const prefix = (0, _ufo.withTrailingSlash)(opts.dir).replace(/^\//, "");
|
||||
const files = {};
|
||||
|
||||
try {
|
||||
const trees = await (0, _ofetch.$fetch)(`/repos/${opts.repo}/git/trees/${opts.branch}?recursive=1`, {
|
||||
baseURL: opts.apiURL,
|
||||
headers: {
|
||||
Authorization: opts.token ? `token ${opts.token}` : void 0
|
||||
}
|
||||
});
|
||||
|
||||
for (const node of trees.tree) {
|
||||
if (node.type !== "blob" || !node.path.startsWith(prefix)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const key = node.path.substring(prefix.length).replace(/\//g, ":");
|
||||
files[key] = {
|
||||
meta: {
|
||||
sha: node.sha,
|
||||
mode: node.mode,
|
||||
size: node.size
|
||||
}
|
||||
};
|
||||
}
|
||||
} catch (err) {
|
||||
throw new Error(`[unstorage] [github] Failed to fetch git tree`, {
|
||||
cause: err
|
||||
});
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
34
node_modules/unstorage/dist/drivers/github.d.ts
generated
vendored
Normal file
34
node_modules/unstorage/dist/drivers/github.d.ts
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
export interface GithubOptions {
|
||||
/**
|
||||
* The name of the repository. (e.g. `username/my-repo`)
|
||||
* Required
|
||||
*/
|
||||
repo: string;
|
||||
/**
|
||||
* The branch to fetch. (e.g. `dev`)
|
||||
* @default "main"
|
||||
*/
|
||||
branch: string;
|
||||
/**
|
||||
* @default ""
|
||||
*/
|
||||
dir: string;
|
||||
/**
|
||||
* @default 600
|
||||
*/
|
||||
ttl: number;
|
||||
/**
|
||||
* Github API token (recommended)
|
||||
*/
|
||||
token?: string;
|
||||
/**
|
||||
* @default "https://api.github.com"
|
||||
*/
|
||||
apiURL?: string;
|
||||
/**
|
||||
* @default "https://raw.githubusercontent.com"
|
||||
*/
|
||||
cdnURL?: string;
|
||||
}
|
||||
declare const _default: (opts?: GithubOptions) => import("../types").Driver;
|
||||
export default _default;
|
||||
95
node_modules/unstorage/dist/drivers/github.mjs
generated
vendored
Normal file
95
node_modules/unstorage/dist/drivers/github.mjs
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
import { $fetch } from "ofetch";
|
||||
import { withTrailingSlash, joinURL } from "ufo";
|
||||
const defaultOptions = {
|
||||
repo: null,
|
||||
branch: "main",
|
||||
ttl: 600,
|
||||
dir: "",
|
||||
apiURL: "https://api.github.com",
|
||||
cdnURL: "https://raw.githubusercontent.com"
|
||||
};
|
||||
export default defineDriver((_opts) => {
|
||||
const opts = { ...defaultOptions, ..._opts };
|
||||
const rawUrl = joinURL(opts.cdnURL, opts.repo, opts.branch, opts.dir);
|
||||
let files = {};
|
||||
let lastCheck = 0;
|
||||
let syncPromise;
|
||||
if (!opts.repo) {
|
||||
throw new Error('[unstorage] [github] Missing required option "repo"');
|
||||
}
|
||||
const syncFiles = async () => {
|
||||
if (lastCheck + opts.ttl * 1e3 > Date.now()) {
|
||||
return;
|
||||
}
|
||||
if (!syncPromise) {
|
||||
syncPromise = fetchFiles(opts);
|
||||
}
|
||||
files = await syncPromise;
|
||||
lastCheck = Date.now();
|
||||
syncPromise = void 0;
|
||||
};
|
||||
return {
|
||||
async getKeys() {
|
||||
await syncFiles();
|
||||
return Object.keys(files);
|
||||
},
|
||||
async hasItem(key) {
|
||||
await syncFiles();
|
||||
return key in files;
|
||||
},
|
||||
async getItem(key) {
|
||||
await syncFiles();
|
||||
const item = files[key];
|
||||
if (!item) {
|
||||
return null;
|
||||
}
|
||||
if (!item.body) {
|
||||
try {
|
||||
item.body = await $fetch(key.replace(/:/g, "/"), {
|
||||
baseURL: rawUrl,
|
||||
headers: {
|
||||
Authorization: opts.token ? `token ${opts.token}` : void 0
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
throw new Error(`[unstorage] [github] Failed to fetch "${key}"`, { cause: err });
|
||||
}
|
||||
}
|
||||
return item.body;
|
||||
},
|
||||
async getMeta(key) {
|
||||
await syncFiles();
|
||||
const item = files[key];
|
||||
return item ? item.meta : null;
|
||||
}
|
||||
};
|
||||
});
|
||||
async function fetchFiles(opts) {
|
||||
const prefix = withTrailingSlash(opts.dir).replace(/^\//, "");
|
||||
const files = {};
|
||||
try {
|
||||
const trees = await $fetch(`/repos/${opts.repo}/git/trees/${opts.branch}?recursive=1`, {
|
||||
baseURL: opts.apiURL,
|
||||
headers: {
|
||||
Authorization: opts.token ? `token ${opts.token}` : void 0
|
||||
}
|
||||
});
|
||||
for (const node of trees.tree) {
|
||||
if (node.type !== "blob" || !node.path.startsWith(prefix)) {
|
||||
continue;
|
||||
}
|
||||
const key = node.path.substring(prefix.length).replace(/\//g, ":");
|
||||
files[key] = {
|
||||
meta: {
|
||||
sha: node.sha,
|
||||
mode: node.mode,
|
||||
size: node.size
|
||||
}
|
||||
};
|
||||
}
|
||||
} catch (err) {
|
||||
throw new Error(`[unstorage] [github] Failed to fetch git tree`, { cause: err });
|
||||
}
|
||||
return files;
|
||||
}
|
||||
70
node_modules/unstorage/dist/drivers/http.cjs
generated
vendored
Normal file
70
node_modules/unstorage/dist/drivers/http.cjs
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
module.exports = void 0;
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
|
||||
var _ofetch = require("ofetch");
|
||||
|
||||
var _ufo = require("ufo");
|
||||
|
||||
var _default = (0, _utils.defineDriver)((opts = {}) => {
|
||||
const r = key => (0, _ufo.joinURL)(opts.base, key.replace(/:/g, "/"));
|
||||
|
||||
return {
|
||||
hasItem(key) {
|
||||
return (0, _ofetch.$fetch)(r(key), {
|
||||
method: "HEAD"
|
||||
}).then(() => true).catch(() => false);
|
||||
},
|
||||
|
||||
async getItem(key) {
|
||||
const value = await (0, _ofetch.$fetch)(r(key));
|
||||
return value;
|
||||
},
|
||||
|
||||
async getMeta(key) {
|
||||
const res = await _ofetch.$fetch.raw(r(key), {
|
||||
method: "HEAD"
|
||||
});
|
||||
let mtime = void 0;
|
||||
|
||||
const _lastModified = res.headers.get("last-modified");
|
||||
|
||||
if (_lastModified) {
|
||||
mtime = new Date(_lastModified);
|
||||
}
|
||||
|
||||
return {
|
||||
status: res.status,
|
||||
mtime
|
||||
};
|
||||
},
|
||||
|
||||
async setItem(key, value) {
|
||||
await (0, _ofetch.$fetch)(r(key), {
|
||||
method: "PUT",
|
||||
body: (0, _utils.stringify)(value)
|
||||
});
|
||||
},
|
||||
|
||||
async removeItem(key) {
|
||||
await (0, _ofetch.$fetch)(r(key), {
|
||||
method: "DELETE"
|
||||
});
|
||||
},
|
||||
|
||||
async getKeys() {
|
||||
const value = await (0, _ofetch.$fetch)(r(""));
|
||||
return Array.isArray(value) ? value : [];
|
||||
},
|
||||
|
||||
clear() {}
|
||||
|
||||
};
|
||||
});
|
||||
|
||||
module.exports = _default;
|
||||
5
node_modules/unstorage/dist/drivers/http.d.ts
generated
vendored
Normal file
5
node_modules/unstorage/dist/drivers/http.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
export interface HTTPOptions {
|
||||
base?: string;
|
||||
}
|
||||
declare const _default: (opts?: HTTPOptions) => import("../types").Driver;
|
||||
export default _default;
|
||||
40
node_modules/unstorage/dist/drivers/http.mjs
generated
vendored
Normal file
40
node_modules/unstorage/dist/drivers/http.mjs
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
import { stringify } from "./utils/index.mjs";
|
||||
import { $fetch } from "ofetch";
|
||||
import { joinURL } from "ufo";
|
||||
export default defineDriver((opts = {}) => {
|
||||
const r = (key) => joinURL(opts.base, key.replace(/:/g, "/"));
|
||||
return {
|
||||
hasItem(key) {
|
||||
return $fetch(r(key), { method: "HEAD" }).then(() => true).catch(() => false);
|
||||
},
|
||||
async getItem(key) {
|
||||
const value = await $fetch(r(key));
|
||||
return value;
|
||||
},
|
||||
async getMeta(key) {
|
||||
const res = await $fetch.raw(r(key), { method: "HEAD" });
|
||||
let mtime = void 0;
|
||||
const _lastModified = res.headers.get("last-modified");
|
||||
if (_lastModified) {
|
||||
mtime = new Date(_lastModified);
|
||||
}
|
||||
return {
|
||||
status: res.status,
|
||||
mtime
|
||||
};
|
||||
},
|
||||
async setItem(key, value) {
|
||||
await $fetch(r(key), { method: "PUT", body: stringify(value) });
|
||||
},
|
||||
async removeItem(key) {
|
||||
await $fetch(r(key), { method: "DELETE" });
|
||||
},
|
||||
async getKeys() {
|
||||
const value = await $fetch(r(""));
|
||||
return Array.isArray(value) ? value : [];
|
||||
},
|
||||
clear() {
|
||||
}
|
||||
};
|
||||
});
|
||||
83
node_modules/unstorage/dist/drivers/localstorage.cjs
generated
vendored
Normal file
83
node_modules/unstorage/dist/drivers/localstorage.cjs
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
module.exports = void 0;
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
|
||||
var _default = (0, _utils.defineDriver)((opts = {}) => {
|
||||
if (!opts.window) {
|
||||
opts.window = typeof window !== "undefined" ? window : void 0;
|
||||
}
|
||||
|
||||
if (!opts.localStorage) {
|
||||
opts.localStorage = opts.window?.localStorage;
|
||||
}
|
||||
|
||||
if (!opts.localStorage) {
|
||||
throw new Error("localStorage not available");
|
||||
}
|
||||
|
||||
const r = key => (opts.base ? opts.base + ":" : "") + key;
|
||||
|
||||
let _storageListener;
|
||||
|
||||
return {
|
||||
hasItem(key) {
|
||||
return Object.prototype.hasOwnProperty.call(opts.localStorage, r(key));
|
||||
},
|
||||
|
||||
getItem(key) {
|
||||
return opts.localStorage.getItem(r(key));
|
||||
},
|
||||
|
||||
setItem(key, value) {
|
||||
return opts.localStorage.setItem(r(key), value);
|
||||
},
|
||||
|
||||
removeItem(key) {
|
||||
return opts.localStorage.removeItem(r(key));
|
||||
},
|
||||
|
||||
getKeys() {
|
||||
return Object.keys(opts.localStorage);
|
||||
},
|
||||
|
||||
clear() {
|
||||
if (!opts.base) {
|
||||
opts.localStorage.clear();
|
||||
} else {
|
||||
for (const key of Object.keys(opts.localStorage)) {
|
||||
opts.localStorage?.removeItem(key);
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.window && _storageListener) {
|
||||
opts.window.removeEventListener("storage", _storageListener);
|
||||
}
|
||||
},
|
||||
|
||||
watch(callback) {
|
||||
if (!opts.window) {
|
||||
return;
|
||||
}
|
||||
|
||||
_storageListener = ev => {
|
||||
if (ev.key) {
|
||||
callback(ev.newValue ? "update" : "remove", ev.key);
|
||||
}
|
||||
};
|
||||
|
||||
opts.window.addEventListener("storage", _storageListener);
|
||||
return () => {
|
||||
opts.window.removeEventListener("storage", _storageListener);
|
||||
_storageListener = void 0;
|
||||
};
|
||||
}
|
||||
|
||||
};
|
||||
});
|
||||
|
||||
module.exports = _default;
|
||||
7
node_modules/unstorage/dist/drivers/localstorage.d.ts
generated
vendored
Normal file
7
node_modules/unstorage/dist/drivers/localstorage.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
/** Options accepted by the localStorage driver factory. */
export interface LocalStorageOptions {
    /** Optional key prefix; stored keys become `${base}:${key}`. */
    base?: string;
    /** Window used for localStorage access and "storage" event listeners. */
    window?: typeof window;
    /** Explicit Storage instance; defaults to `window.localStorage`. */
    localStorage?: typeof window.localStorage;
}
declare const _default: (opts?: LocalStorageOptions) => import("../types").Driver;
export default _default;
|
||||
58
node_modules/unstorage/dist/drivers/localstorage.mjs
generated
vendored
Normal file
58
node_modules/unstorage/dist/drivers/localstorage.mjs
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
import { defineDriver } from "./utils/index.mjs";

// Driver persisting values in (window.)localStorage under an optional
// `base` prefix. ESM build — mirrors localstorage.cjs; keep them in sync.
export default defineDriver((opts = {}) => {
  if (!opts.window) {
    opts.window = typeof window !== "undefined" ? window : void 0;
  }
  if (!opts.localStorage) {
    opts.localStorage = opts.window?.localStorage;
  }
  if (!opts.localStorage) {
    throw new Error("localStorage not available");
  }
  // Resolve a storage key: "<base>:<key>" when a base is configured.
  const r = (key) => (opts.base ? opts.base + ":" : "") + key;
  let _storageListener;
  return {
    hasItem(key) {
      return Object.prototype.hasOwnProperty.call(opts.localStorage, r(key));
    },
    getItem(key) {
      return opts.localStorage.getItem(r(key));
    },
    setItem(key, value) {
      return opts.localStorage.setItem(r(key), value);
    },
    removeItem(key) {
      return opts.localStorage.removeItem(r(key));
    },
    // NOTE(review): returns raw storage keys including the base prefix.
    getKeys() {
      return Object.keys(opts.localStorage);
    },
    clear() {
      if (!opts.base) {
        opts.localStorage.clear();
      } else {
        // Fix: only remove keys under our own prefix. The previous code
        // removed every key in storage, clobbering unrelated entries and
        // defeating the purpose of the base check.
        const prefix = opts.base + ":";
        for (const key of Object.keys(opts.localStorage)) {
          if (key.startsWith(prefix)) {
            opts.localStorage?.removeItem(key);
          }
        }
      }
      // clear() also detaches any active "storage" watcher.
      if (opts.window && _storageListener) {
        opts.window.removeEventListener("storage", _storageListener);
      }
    },
    watch(callback) {
      if (!opts.window) {
        return;
      }
      // "storage" events fire in other tabs/windows of the same origin.
      _storageListener = (ev) => {
        if (ev.key) {
          callback(ev.newValue ? "update" : "remove", ev.key);
        }
      };
      opts.window.addEventListener("storage", _storageListener);
      return () => {
        opts.window.removeEventListener("storage", _storageListener);
        _storageListener = void 0;
      };
    }
  };
});
|
||||
44
node_modules/unstorage/dist/drivers/memory.cjs
generated
vendored
Normal file
44
node_modules/unstorage/dist/drivers/memory.cjs
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
module.exports = void 0;
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
|
||||
var _default = (0, _utils.defineDriver)(() => {
|
||||
const data = /* @__PURE__ */new Map();
|
||||
return {
|
||||
hasItem(key) {
|
||||
return data.has(key);
|
||||
},
|
||||
|
||||
getItem(key) {
|
||||
return data.get(key) || null;
|
||||
},
|
||||
|
||||
setItem(key, value) {
|
||||
data.set(key, value);
|
||||
},
|
||||
|
||||
removeItem(key) {
|
||||
data.delete(key);
|
||||
},
|
||||
|
||||
getKeys() {
|
||||
return Array.from(data.keys());
|
||||
},
|
||||
|
||||
clear() {
|
||||
data.clear();
|
||||
},
|
||||
|
||||
dispose() {
|
||||
data.clear();
|
||||
}
|
||||
|
||||
};
|
||||
});
|
||||
|
||||
module.exports = _default;
|
||||
2
node_modules/unstorage/dist/drivers/memory.d.ts
generated
vendored
Normal file
2
node_modules/unstorage/dist/drivers/memory.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
/** Factory for the in-memory (Map-backed) storage driver; options are unused. */
declare const _default: (opts?: any) => import("../types").Driver;
export default _default;
|
||||
27
node_modules/unstorage/dist/drivers/memory.mjs
generated
vendored
Normal file
27
node_modules/unstorage/dist/drivers/memory.mjs
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
import { defineDriver } from "./utils/index.mjs";

// Volatile in-memory driver backed by a Map; contents live only for the
// process lifetime and are dropped on clear()/dispose().
export default defineDriver(() => {
  const data = /* @__PURE__ */ new Map();
  return {
    hasItem(key) {
      return data.has(key);
    },
    getItem(key) {
      // Fix: `??` instead of `||` so stored falsy values ("", 0, false)
      // round-trip intact; only a missing key maps to null.
      return data.get(key) ?? null;
    },
    setItem(key, value) {
      data.set(key, value);
    },
    removeItem(key) {
      data.delete(key);
    },
    getKeys() {
      return Array.from(data.keys());
    },
    clear() {
      data.clear();
    },
    dispose() {
      data.clear();
    }
  };
});
|
||||
81
node_modules/unstorage/dist/drivers/overlay.cjs
generated
vendored
Normal file
81
node_modules/unstorage/dist/drivers/overlay.cjs
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
module.exports = void 0;
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
|
||||
const OVERLAY_REMOVED = "__OVERLAY_REMOVED__";
|
||||
|
||||
var _default = (0, _utils.defineDriver)(options => {
|
||||
return {
|
||||
async hasItem(key) {
|
||||
for (const layer of options.layers) {
|
||||
if (await layer.hasItem(key)) {
|
||||
if (layer === options.layers[0]) {
|
||||
if ((await options.layers[0]?.getItem(key)) === OVERLAY_REMOVED) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
|
||||
async getItem(key) {
|
||||
for (const layer of options.layers) {
|
||||
const value = await layer.getItem(key);
|
||||
|
||||
if (value === OVERLAY_REMOVED) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (value !== null) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
async setItem(key, value) {
|
||||
await options.layers[0]?.setItem(key, value);
|
||||
},
|
||||
|
||||
async removeItem(key) {
|
||||
await options.layers[0]?.setItem(key, OVERLAY_REMOVED);
|
||||
},
|
||||
|
||||
async getKeys(base) {
|
||||
const allKeys = await Promise.all(options.layers.map(async layer => {
|
||||
const keys = await layer.getKeys(base);
|
||||
return keys.map(key => (0, _utils.normalizeKey)(key));
|
||||
}));
|
||||
const uniqueKeys = Array.from(new Set(allKeys.flat()));
|
||||
const existingKeys = await Promise.all(uniqueKeys.map(async key => {
|
||||
if ((await options.layers[0]?.getItem(key)) === OVERLAY_REMOVED) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return key;
|
||||
}));
|
||||
return existingKeys.filter(Boolean);
|
||||
},
|
||||
|
||||
async dispose() {
|
||||
await Promise.all(options.layers.map(async layer => {
|
||||
if (layer.dispose) {
|
||||
await layer.dispose();
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
};
|
||||
});
|
||||
|
||||
module.exports = _default;
|
||||
6
node_modules/unstorage/dist/drivers/overlay.d.ts
generated
vendored
Normal file
6
node_modules/unstorage/dist/drivers/overlay.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
import type { Driver } from '../types';
/** Options for the overlay driver. */
export interface OverlayStorageOptions {
    /** Ordered layers; index 0 receives all writes, reads fall through in order. */
    layers: Driver[];
}
declare const _default: (opts?: OverlayStorageOptions) => Driver;
export default _default;
|
||||
59
node_modules/unstorage/dist/drivers/overlay.mjs
generated
vendored
Normal file
59
node_modules/unstorage/dist/drivers/overlay.mjs
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
import { normalizeKey } from "./utils/index.mjs";
// Sentinel written to the top layer to mark a key as deleted, so values in
// lower (possibly read-only) layers stop shadowing it.
const OVERLAY_REMOVED = "__OVERLAY_REMOVED__";
// Overlay driver: reads fall through options.layers in order (first hit
// wins); all writes go to layers[0] only.
export default defineDriver((options) => {
  return {
    async hasItem(key) {
      for (const layer of options.layers) {
        if (await layer.hasItem(key)) {
          // A tombstone in the write layer means "deleted", even though
          // lower layers may still hold a value.
          if (layer === options.layers[0]) {
            if (await options.layers[0]?.getItem(key) === OVERLAY_REMOVED) {
              return false;
            }
          }
          return true;
        }
      }
      return false;
    },
    async getItem(key) {
      for (const layer of options.layers) {
        const value = await layer.getItem(key);
        // Tombstoned key: stop the fall-through and report it missing.
        if (value === OVERLAY_REMOVED) {
          return null;
        }
        if (value !== null) {
          return value;
        }
      }
      return null;
    },
    async setItem(key, value) {
      await options.layers[0]?.setItem(key, value);
    },
    // Removal writes a tombstone instead of deleting, so values in lower
    // layers stay masked.
    async removeItem(key) {
      await options.layers[0]?.setItem(key, OVERLAY_REMOVED);
    },
    async getKeys(base) {
      // Union of normalized keys across all layers...
      const allKeys = await Promise.all(options.layers.map(async (layer) => {
        const keys = await layer.getKeys(base);
        return keys.map((key) => normalizeKey(key));
      }));
      const uniqueKeys = Array.from(new Set(allKeys.flat()));
      // ...minus keys tombstoned in the write layer.
      const existingKeys = await Promise.all(uniqueKeys.map(async (key) => {
        if (await options.layers[0]?.getItem(key) === OVERLAY_REMOVED) {
          return false;
        }
        return key;
      }));
      return existingKeys.filter(Boolean);
    },
    async dispose() {
      // dispose() is optional on the Driver interface; call it where present.
      await Promise.all(options.layers.map(async (layer) => {
        if (layer.dispose) {
          await layer.dispose();
        }
      }));
    }
  };
});
|
||||
61
node_modules/unstorage/dist/drivers/redis.cjs
generated
vendored
Normal file
61
node_modules/unstorage/dist/drivers/redis.cjs
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
module.exports = void 0;
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
|
||||
var _ioredis = _interopRequireDefault(require("ioredis"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
var _default = (0, _utils.defineDriver)(_opts => {
|
||||
const opts = {
|
||||
lazyConnect: true,
|
||||
..._opts
|
||||
};
|
||||
const redis = opts.url ? new _ioredis.default(opts?.url, opts) : new _ioredis.default(opts);
|
||||
let base = opts?.base || "";
|
||||
|
||||
if (base && !base.endsWith(":")) {
|
||||
base += ":";
|
||||
}
|
||||
|
||||
const r = key => base + key;
|
||||
|
||||
return {
|
||||
hasItem(key) {
|
||||
return redis.exists(r(key)).then(Boolean);
|
||||
},
|
||||
|
||||
getItem(key) {
|
||||
return redis.get(r(key));
|
||||
},
|
||||
|
||||
setItem(key, value) {
|
||||
return redis.set(r(key), value).then(() => {});
|
||||
},
|
||||
|
||||
removeItem(key) {
|
||||
return redis.del(r(key)).then(() => {});
|
||||
},
|
||||
|
||||
getKeys() {
|
||||
return redis.keys(r("*"));
|
||||
},
|
||||
|
||||
async clear() {
|
||||
const keys = await redis.keys(r("*"));
|
||||
return redis.del(keys.map(key => r(key))).then(() => {});
|
||||
},
|
||||
|
||||
dispose() {
|
||||
return redis.disconnect();
|
||||
}
|
||||
|
||||
};
|
||||
});
|
||||
|
||||
module.exports = _default;
|
||||
7
node_modules/unstorage/dist/drivers/redis.d.ts
generated
vendored
Normal file
7
node_modules/unstorage/dist/drivers/redis.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
import { RedisOptions as _RedisOptions } from 'ioredis';
/** ioredis connection options plus unstorage-specific fields. */
export interface RedisOptions extends _RedisOptions {
    /** Key prefix; a trailing ":" is appended when missing. */
    base: string;
    /** Connection URL; when set it is passed to ioredis alongside the options. */
    url: string;
}
declare const _default: (opts?: RedisOptions) => import("../types").Driver;
export default _default;
|
||||
38
node_modules/unstorage/dist/drivers/redis.mjs
generated
vendored
Normal file
38
node_modules/unstorage/dist/drivers/redis.mjs
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
import Redis from "ioredis";

// Redis driver (ioredis). Keys are stored under an optional "<base>:" prefix.
// ESM build — mirrors redis.cjs; keep them in sync.
export default defineDriver((_opts) => {
  const opts = { lazyConnect: true, ..._opts };
  const redis = opts.url ? new Redis(opts?.url, opts) : new Redis(opts);
  let base = opts?.base || "";
  if (base && !base.endsWith(":")) {
    base += ":";
  }
  const r = (key) => base + key;
  return {
    hasItem(key) {
      return redis.exists(r(key)).then(Boolean);
    },
    getItem(key) {
      return redis.get(r(key));
    },
    setItem(key, value) {
      return redis.set(r(key), value).then(() => {});
    },
    removeItem(key) {
      return redis.del(r(key)).then(() => {});
    },
    getKeys() {
      return redis.keys(r("*"));
    },
    async clear() {
      const keys = await redis.keys(r("*"));
      // Fix: KEYS already returns fully prefixed keys, so re-applying r()
      // (as the previous code did) double-prefixed them and deleted
      // nothing. Also skip an empty DEL, which Redis rejects as a
      // wrong-arity command.
      if (keys.length === 0) {
        return;
      }
      await redis.del(keys);
    },
    dispose() {
      return redis.disconnect();
    }
  };
});
|
||||
30
node_modules/unstorage/dist/drivers/utils/index.cjs
generated
vendored
Normal file
30
node_modules/unstorage/dist/drivers/utils/index.cjs
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.defineDriver = defineDriver;
|
||||
exports.isPrimitive = isPrimitive;
|
||||
exports.normalizeKey = normalizeKey;
|
||||
exports.stringify = stringify;
|
||||
|
||||
function defineDriver(factory) {
|
||||
return factory;
|
||||
}
|
||||
|
||||
function isPrimitive(arg) {
|
||||
const type = typeof arg;
|
||||
return arg === null || type !== "object" && type !== "function";
|
||||
}
|
||||
|
||||
function stringify(arg) {
|
||||
return isPrimitive(arg) ? arg + "" : JSON.stringify(arg);
|
||||
}
|
||||
|
||||
function normalizeKey(key) {
|
||||
if (!key) {
|
||||
return "";
|
||||
}
|
||||
|
||||
return key.replace(/[/\\]/g, ":").replace(/^:|:$/g, "");
|
||||
}
|
||||
7
node_modules/unstorage/dist/drivers/utils/index.d.ts
generated
vendored
Normal file
7
node_modules/unstorage/dist/drivers/utils/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
import type { Driver } from '../../types';
/** Signature shared by every storage driver entry point. */
declare type DriverFactory<T> = (opts?: T) => Driver;
/** Typed identity helper used to declare a storage driver factory. */
export declare function defineDriver<T = any>(factory: DriverFactory<T>): DriverFactory<T>;
/** True for null and any non-object, non-function value. */
export declare function isPrimitive(arg: any): boolean;
/** String-coerces primitives; JSON-encodes everything else. */
export declare function stringify(arg: any): string;
/** Converts "/"- or "\"-separated keys to ":"-separated, trimming edge colons. */
export declare function normalizeKey(key: string | undefined): string;
export {};
|
||||
16
node_modules/unstorage/dist/drivers/utils/index.mjs
generated
vendored
Normal file
16
node_modules/unstorage/dist/drivers/utils/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
// Identity pass-through: a driver factory needs no runtime wrapping.
export function defineDriver(factory) {
  return factory;
}

// A value is primitive when it is null or neither an object nor a function.
export function isPrimitive(arg) {
  if (arg === null) {
    return true;
  }
  const kind = typeof arg;
  return kind !== "object" && kind !== "function";
}

// Primitives are string-coerced; structured values are JSON-encoded.
export function stringify(arg) {
  if (isPrimitive(arg)) {
    return arg + "";
  }
  return JSON.stringify(arg);
}

// Convert path-style keys ("/" or "\") into ":"-separated storage keys,
// stripping a single leading and trailing colon.
export function normalizeKey(key) {
  if (!key) {
    return "";
  }
  return key.replace(/[/\\]/g, ":").replace(/^:|:$/g, "");
}
|
||||
92
node_modules/unstorage/dist/drivers/utils/node-fs.cjs
generated
vendored
Normal file
92
node_modules/unstorage/dist/drivers/utils/node-fs.cjs
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.ensuredir = ensuredir;
|
||||
exports.readFile = readFile;
|
||||
exports.readdir = readdir;
|
||||
exports.readdirRecursive = readdirRecursive;
|
||||
exports.rmRecursive = rmRecursive;
|
||||
exports.stat = stat;
|
||||
exports.unlink = unlink;
|
||||
exports.writeFile = writeFile;
|
||||
|
||||
var _fs = require("fs");
|
||||
|
||||
var _path = require("path");
|
||||
|
||||
function ignoreNotfound(err) {
|
||||
return err.code === "ENOENT" || err.code === "EISDIR" ? null : err;
|
||||
}
|
||||
|
||||
function ignoreExists(err) {
|
||||
return err.code === "EEXIST" ? null : err;
|
||||
}
|
||||
|
||||
async function writeFile(path, data) {
|
||||
await ensuredir((0, _path.dirname)(path));
|
||||
return _fs.promises.writeFile(path, data, "utf8");
|
||||
}
|
||||
|
||||
function readFile(path) {
|
||||
return _fs.promises.readFile(path, "utf8").catch(ignoreNotfound);
|
||||
}
|
||||
|
||||
function stat(path) {
|
||||
return _fs.promises.stat(path).catch(ignoreNotfound);
|
||||
}
|
||||
|
||||
function unlink(path) {
|
||||
return _fs.promises.unlink(path).catch(ignoreNotfound);
|
||||
}
|
||||
|
||||
function readdir(dir) {
|
||||
return _fs.promises.readdir(dir, {
|
||||
withFileTypes: true
|
||||
}).catch(ignoreNotfound).then(r => r || []);
|
||||
}
|
||||
|
||||
async function ensuredir(dir) {
|
||||
if ((0, _fs.existsSync)(dir)) {
|
||||
return;
|
||||
}
|
||||
|
||||
await ensuredir((0, _path.dirname)(dir)).catch(ignoreExists);
|
||||
await _fs.promises.mkdir(dir).catch(ignoreExists);
|
||||
}
|
||||
|
||||
async function readdirRecursive(dir, ignore) {
|
||||
if (ignore && ignore(dir)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const entries = await readdir(dir);
|
||||
const files = [];
|
||||
await Promise.all(entries.map(async entry => {
|
||||
const entryPath = (0, _path.resolve)(dir, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
const dirFiles = await readdirRecursive(entryPath, ignore);
|
||||
files.push(...dirFiles.map(f => entry.name + "/" + f));
|
||||
} else {
|
||||
if (ignore && !ignore(entry.name)) {
|
||||
files.push(entry.name);
|
||||
}
|
||||
}
|
||||
}));
|
||||
return files;
|
||||
}
|
||||
|
||||
async function rmRecursive(dir) {
|
||||
const entries = await readdir(dir);
|
||||
await Promise.all(entries.map(entry => {
|
||||
const entryPath = (0, _path.resolve)(dir, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
return rmRecursive(entryPath).then(() => _fs.promises.rmdir(entryPath));
|
||||
} else {
|
||||
return _fs.promises.unlink(entryPath);
|
||||
}
|
||||
}));
|
||||
}
|
||||
10
node_modules/unstorage/dist/drivers/utils/node-fs.d.ts
generated
vendored
Normal file
10
node_modules/unstorage/dist/drivers/utils/node-fs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
/// <reference types="node" />
import { Dirent } from 'fs';
/** Writes `data` (utf8), creating parent directories as needed. */
export declare function writeFile(path: string, data: string): Promise<void>;
/** Reads a utf8 file; resolves to null when the path is missing or a directory. */
export declare function readFile(path: string): Promise<any>;
/** fs.stat that resolves to null instead of rejecting on a missing path. */
export declare function stat(path: string): Promise<any>;
/** fs.unlink that ignores a missing path. */
export declare function unlink(path: string): Promise<any>;
/** Directory listing as Dirent entries; missing directories yield []. */
export declare function readdir(dir: string): Promise<Dirent[]>;
/** Recursively creates `dir` (mkdir -p semantics). */
export declare function ensuredir(dir: string): Promise<void>;
/** Lists files under `dir` recursively as "sub/path" strings; `ignore` filters entries. */
export declare function readdirRecursive(dir: string, ignore?: (p: string) => boolean): Promise<string[]>;
/** Recursively deletes the contents of `dir` (the directory itself is kept). */
export declare function rmRecursive(dir: string): Promise<void>;
||||
61
node_modules/unstorage/dist/drivers/utils/node-fs.mjs
generated
vendored
Normal file
61
node_modules/unstorage/dist/drivers/utils/node-fs.mjs
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
import { existsSync, promises as fsPromises } from "fs";
import { resolve, dirname } from "path";

// Resolve missing-path (ENOENT) and dir-as-file (EISDIR) errors to null.
// NOTE(review): other errors are *returned* (resolved) rather than rethrown.
function ignoreNotfound(err) {
  return err.code === "ENOENT" || err.code === "EISDIR" ? null : err;
}

// Ignore "already exists" during directory creation.
function ignoreExists(err) {
  return err.code === "EEXIST" ? null : err;
}

// Write `data` as utf8, creating parent directories first.
export async function writeFile(path, data) {
  await ensuredir(dirname(path));
  return fsPromises.writeFile(path, data, "utf8");
}

// Read a utf8 file; resolves to null when missing or a directory.
export function readFile(path) {
  return fsPromises.readFile(path, "utf8").catch(ignoreNotfound);
}

// fs.stat that resolves to null instead of rejecting on a missing path.
export function stat(path) {
  return fsPromises.stat(path).catch(ignoreNotfound);
}

// fs.unlink that ignores a missing path.
export function unlink(path) {
  return fsPromises.unlink(path).catch(ignoreNotfound);
}

// Dirent listing; a missing directory yields [].
export function readdir(dir) {
  return fsPromises.readdir(dir, { withFileTypes: true }).catch(ignoreNotfound).then((r) => r || []);
}

// mkdir -p: create parents recursively, tolerating concurrent creation.
export async function ensuredir(dir) {
  if (existsSync(dir)) {
    return;
  }
  await ensuredir(dirname(dir)).catch(ignoreExists);
  await fsPromises.mkdir(dir).catch(ignoreExists);
}

// List files under `dir` recursively as "sub/path" strings. `ignore` may
// veto directories (called with the full path) and files (called with the
// entry name).
export async function readdirRecursive(dir, ignore) {
  if (ignore && ignore(dir)) {
    return [];
  }
  const entries = await readdir(dir);
  const files = [];
  await Promise.all(entries.map(async (entry) => {
    const entryPath = resolve(dir, entry.name);
    if (entry.isDirectory()) {
      const dirFiles = await readdirRecursive(entryPath, ignore);
      files.push(...dirFiles.map((f) => entry.name + "/" + f));
    } else {
      // Fix: the previous `ignore && !ignore(...)` condition dropped every
      // file whenever no ignore filter was supplied.
      if (!ignore || !ignore(entry.name)) {
        files.push(entry.name);
      }
    }
  }));
  return files;
}

// Remove the contents of `dir` recursively (the directory itself is kept).
export async function rmRecursive(dir) {
  const entries = await readdir(dir);
  await Promise.all(entries.map((entry) => {
    const entryPath = resolve(dir, entry.name);
    if (entry.isDirectory()) {
      return rmRecursive(entryPath).then(() => fsPromises.rmdir(entryPath));
    } else {
      return fsPromises.unlink(entryPath);
    }
  }));
}
|
||||
Reference in New Issue
Block a user