mirror of
https://github.com/Wei-Shaw/claude-relay-service.git
synced 2026-01-23 09:38:02 +00:00
chore: update frontend build for v1.1.82 [skip ci]
This commit is contained in:
21
web/admin-spa/node_modules/unimport/LICENSE
generated
vendored
21
web/admin-spa/node_modules/unimport/LICENSE
generated
vendored
@@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2022 - UnJS
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
450
web/admin-spa/node_modules/unimport/README.md
generated
vendored
450
web/admin-spa/node_modules/unimport/README.md
generated
vendored
@@ -1,450 +0,0 @@
|
||||
# unimport
|
||||
|
||||
[![npm version][npm-version-src]][npm-version-href]
|
||||
[![npm downloads][npm-downloads-src]][npm-downloads-href]
|
||||
[![Codecov][codecov-src]][codecov-href]
|
||||
|
||||
> Unified utils for auto importing APIs in modules, used in [nuxt](https://github.com/nuxt/nuxt) and [unplugin-auto-import](https://github.com/antfu/unplugin-auto-import)
|
||||
|
||||
## Features
|
||||
|
||||
- Auto import register APIs for Vite, Webpack or esbuild powered by [unplugin](https://github.com/unjs/unplugin)
|
||||
- TypeScript declaration file generation
|
||||
- Auto import for custom APIs defined under specific directories
|
||||
- Auto import for Vue template
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
# npm
|
||||
npm install unimport
|
||||
|
||||
# yarn
|
||||
yarn add unimport
|
||||
|
||||
# pnpm
|
||||
pnpm install unimport
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Plugin Usage
|
||||
|
||||
Powered by [unplugin](https://github.com/unjs/unplugin), `unimport` provides a plugin interface for bundlers.
|
||||
|
||||
#### Vite / Rollup
|
||||
|
||||
```ts
|
||||
// vite.config.js / rollup.config.js
|
||||
import Unimport from 'unimport/unplugin'
|
||||
|
||||
export default {
|
||||
plugins: [
|
||||
Unimport.vite({ /* plugin options */ })
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Webpack
|
||||
|
||||
```ts
|
||||
// webpack.config.js
|
||||
import Unimport from 'unimport/unplugin'
|
||||
|
||||
module.exports = {
|
||||
plugins: [
|
||||
Unimport.webpack({ /* plugin options */ })
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Programmatic Usage
|
||||
|
||||
<!-- eslint-skip -->
|
||||
|
||||
```js
|
||||
// ESM
|
||||
import { createUnimport } from 'unimport'
|
||||
|
||||
// CommonJS
|
||||
const { createUnimport } = require('unimport')
|
||||
```
|
||||
|
||||
```js
|
||||
const { injectImports } = createUnimport({
|
||||
imports: [{ name: 'fooBar', from: 'test-id' }]
|
||||
})
|
||||
|
||||
// { code: "import { fooBar } from 'test-id';console.log(fooBar())" }
|
||||
console.log(injectImports('console.log(fooBar())'))
|
||||
```
|
||||
|
||||
## Configurations
|
||||
|
||||
### Imports Item
|
||||
|
||||
###### Named import
|
||||
|
||||
```ts
|
||||
imports: [
|
||||
{ name: 'ref', from: 'vue' },
|
||||
{ name: 'useState', as: 'useSignal', from: 'react' },
|
||||
]
|
||||
```
|
||||
|
||||
Will be injected as:
|
||||
|
||||
```ts
|
||||
import { useState as useSignal } from 'react'
|
||||
import { ref } from 'vue'
|
||||
```
|
||||
|
||||
###### Default import
|
||||
|
||||
```ts
|
||||
imports: [
|
||||
{ name: 'default', as: '_', from: 'lodash' }
|
||||
]
|
||||
```
|
||||
|
||||
Will be injected as:
|
||||
|
||||
```ts
|
||||
import _ from 'lodash'
|
||||
```
|
||||
|
||||
###### Namespace import
|
||||
|
||||
```ts
|
||||
imports: [
|
||||
{ name: '*', as: '_', from: 'lodash' }
|
||||
]
|
||||
```
|
||||
|
||||
Will be injected as:
|
||||
|
||||
```ts
|
||||
import * as _ from 'lodash'
|
||||
```
|
||||
|
||||
###### Export assignment import
|
||||
|
||||
This is a special case for libraries authored with [TypeScript's `export =` syntax](https://www.typescriptlang.org/docs/handbook/modules/reference.html#export--and-import--require). You don't need it the most of the time.
|
||||
|
||||
```ts
|
||||
imports: [
|
||||
{ name: '=', as: 'browser', from: 'webextension-polyfill' }
|
||||
]
|
||||
```
|
||||
|
||||
Will be injected as:
|
||||
|
||||
```ts
|
||||
import browser from 'webextension-polyfill'
|
||||
```
|
||||
|
||||
And the type declaration will be added as:
|
||||
|
||||
```ts
|
||||
const browser: typeof import('webextension-polyfill')
|
||||
```
|
||||
|
||||
###### Custom Presets
|
||||
|
||||
Presets are provided as a shorthand for declaring imports from the same package:
|
||||
|
||||
```ts
|
||||
presets: [
|
||||
{
|
||||
from: 'vue',
|
||||
imports: [
|
||||
'ref',
|
||||
'reactive',
|
||||
// ...
|
||||
]
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
Will be equivalent as:
|
||||
|
||||
```ts
|
||||
imports: [
|
||||
{ name: 'ref', from: 'vue' },
|
||||
{ name: 'reactive', from: 'vue' },
|
||||
// ...
|
||||
]
|
||||
```
|
||||
|
||||
###### Built-in Presets
|
||||
|
||||
`unimport` also provides some builtin presets for common libraries:
|
||||
|
||||
```ts
|
||||
presets: [
|
||||
'vue',
|
||||
'pinia',
|
||||
'vue-i18n',
|
||||
// ...
|
||||
]
|
||||
```
|
||||
|
||||
You can check out [`src/presets`](./src/presets/) for all the options available or refer to the type declaration.
|
||||
|
||||
###### Exports Auto Scan
|
||||
|
||||
Since `unimport` v0.7.0, we also support auto scanning the examples from a local installed package, for example:
|
||||
|
||||
```ts
|
||||
presets: [
|
||||
{
|
||||
package: 'h3',
|
||||
ignore: ['isStream', /^[A-Z]/, /^[a-z]*$/, r => r.length > 8]
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
This will be expanded into:
|
||||
|
||||
```ts
|
||||
imports: [
|
||||
{
|
||||
from: 'h3',
|
||||
name: 'appendHeader',
|
||||
},
|
||||
{
|
||||
from: 'h3',
|
||||
name: 'appendHeaders',
|
||||
},
|
||||
{
|
||||
from: 'h3',
|
||||
name: 'appendResponseHeader',
|
||||
},
|
||||
// ...
|
||||
]
|
||||
```
|
||||
|
||||
The `ignore` option is used to filter out the exports, it can be a string, regex or a function that returns a boolean.
|
||||
|
||||
By default, the result is strongly cached by the version of the package. You can disable this by setting `cache: false`.
|
||||
|
||||
### Type Declarations
|
||||
|
||||
```ts
|
||||
Unimport.vite({
|
||||
dts: true // or a path to generated file
|
||||
})
|
||||
```
|
||||
|
||||
### Directory Auto Import
|
||||
|
||||
```ts
|
||||
Unimport.vite({
|
||||
dirs: [
|
||||
'./composables/*',
|
||||
]
|
||||
})
|
||||
```
|
||||
|
||||
Scan for modules under `./composables` and auto-import the named exports.
|
||||
|
||||
#### Nested Directories
|
||||
|
||||
```ts
|
||||
Unimport.vite({
|
||||
dirs: [
|
||||
'./composables/**/*',
|
||||
{
|
||||
glob: './composables/nested/**/*',
|
||||
types: false // disable scan the type declarations
|
||||
}
|
||||
]
|
||||
})
|
||||
```
|
||||
|
||||
Named exports for modules under `./composables/**/*` will be registered for auto imports, and filter out the types in `./composables/nested/**/*`.
|
||||
|
||||
#### Directory Scan Options
|
||||
|
||||
You can also provide custom options for directory scan, for example:
|
||||
|
||||
```ts
|
||||
Unimport.vite({
|
||||
dirsScanOptions: {
|
||||
filePatterns: ['*.ts'], // optional, default `['*.{ts,js,mjs,cjs,mts,cts}']`, glob patterns for matching files
|
||||
fileFilter: file => file.endsWith('.ts'), // optional, default `() => true`, filter files
|
||||
types: true, // optional, default `true`, enable/disable scan the type declarations
|
||||
cwd: process.cwd(), // optional, default `process.cwd()`, custom cwd for directory scan
|
||||
},
|
||||
dirs: [
|
||||
'./composables/**/*',
|
||||
{
|
||||
glob: './composables/nested/**/*',
|
||||
types: false
|
||||
}
|
||||
]
|
||||
})
|
||||
```
|
||||
|
||||
### Opt-out Auto Import
|
||||
|
||||
You can opt-out auto-import for specific modules by adding a comment:
|
||||
|
||||
```ts
|
||||
// @unimport-disable
|
||||
```
|
||||
|
||||
It can be customized by setting `commentsDisable`:
|
||||
|
||||
```ts
|
||||
Unimport.vite({
|
||||
commentsDisable: [
|
||||
'@unimport-disable',
|
||||
'@custom-imports-disable',
|
||||
]
|
||||
})
|
||||
```
|
||||
|
||||
### Acorn Parser
|
||||
|
||||
By default, `unimport` uses RegExp to detect unimport entries. In some cases, RegExp might not be able to detect all the entries (false positive & false negative).
|
||||
|
||||
We introduced a new AST-based parser powered by [acorn](https://github.com/acornjs/acorn), providing a more accurate result. The limitation is when using Acorn, it assumes all input code are valid and vanilla JavaScript code.
|
||||
|
||||
```ts
|
||||
Unimport.vite({
|
||||
parser: 'acorn'
|
||||
})
|
||||
```
|
||||
|
||||
### Vue Template Auto Import
|
||||
|
||||
In Vue's template, the usage of API is in a different context than plain modules. Thus some custom transformations are required. To enable it, set `addons.vueTemplate` to `true`:
|
||||
|
||||
```ts
|
||||
Unimport.vite({
|
||||
addons: {
|
||||
vueTemplate: true
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
#### Caveats
|
||||
|
||||
When auto-import a ref, inline operations won't be auto-unwrapped.
|
||||
|
||||
```ts
|
||||
export const counter = ref(0)
|
||||
```
|
||||
|
||||
```html
|
||||
<template>
|
||||
<!-- this is ok -->
|
||||
<div>{{ counter }}</div>
|
||||
|
||||
<!-- counter here is a ref, this won't work, volar will throw -->
|
||||
<div>{{ counter + 1 }}</div>
|
||||
|
||||
<!-- use this instead -->
|
||||
<div>{{ counter.value + 1 }}</div>
|
||||
</template>
|
||||
```
|
||||
|
||||
We recommend using [Volar](https://github.com/johnsoncodehk/volar) for type checking, which will help you to identify the misusage.
|
||||
|
||||
### Vue Directives Auto Import and TypeScript Declaration Generation
|
||||
|
||||
In Vue's template, the usage of directives is in a different context than plain modules. Thus some custom transformations are required. To enable it, set `addons.vueDirectives` to `true`:
|
||||
|
||||
```ts
|
||||
Unimport.vite({
|
||||
addons: {
|
||||
vueDirectives: true
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
#### Library Authors
|
||||
|
||||
When including directives in your presets, you should:
|
||||
- provide the corresponding imports with `meta.vueDirective` set to `true`, otherwise, `unimport` will not be able to detect your directives.
|
||||
- use named exports for your directives, or use default export and use `as` in the Import.
|
||||
- set `dtsDisabled` to `true` if you provide a type declaration for your directives.
|
||||
|
||||
```ts
|
||||
import type { InlinePreset } from 'unimport'
|
||||
import { defineUnimportPreset } from 'unimport'
|
||||
|
||||
export const composables = defineUnimportPreset({
|
||||
from: 'my-unimport-library/composables',
|
||||
/* imports and other options */
|
||||
})
|
||||
|
||||
export const directives = defineUnimportPreset({
|
||||
from: 'my-unimport-library/directives',
|
||||
// disable dts generation globally
|
||||
dtsEnabled: false,
|
||||
// you can declare the vue directive globally
|
||||
meta: {
|
||||
vueDirective: true
|
||||
},
|
||||
imports: [{
|
||||
name: 'ClickOutside',
|
||||
// disable dts generation per import
|
||||
dtsEnabled: false,
|
||||
// you can declare the vue directive per import
|
||||
meta: {
|
||||
vueDirective: true
|
||||
}
|
||||
}, {
|
||||
name: 'default',
|
||||
// you should declare `as` for default exports
|
||||
as: 'Focus'
|
||||
}]
|
||||
})
|
||||
```
|
||||
|
||||
#### Using Directory Scan and Local Directives
|
||||
|
||||
If you add a directory scan for your local directives in the project, you need to:
|
||||
- provide `isDirective` in the `vueDirectives`: `unimport` will use it to detect them (will never be called for imports with `meta.vueDirective` set to `true`).
|
||||
- use always named exports for your directives.
|
||||
|
||||
```ts
|
||||
Unimport.vite({
|
||||
dirs: ['./directives/**'],
|
||||
addons: {
|
||||
vueDirectives: {
|
||||
isDirective: (normalizedImportFrom, _importEntry) => {
|
||||
return normalizedImportFrom.includes('/directives/')
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
## 💻 Development
|
||||
|
||||
- Clone this repository
|
||||
- Enable [Corepack](https://github.com/nodejs/corepack) using `corepack enable` (use `npm i -g corepack` for Node.js < 16.10)
|
||||
- Install dependencies using `pnpm install`
|
||||
- Run interactive tests using `pnpm dev`
|
||||
|
||||
## License
|
||||
|
||||
Made with 💛
|
||||
|
||||
Published under [MIT License](./LICENSE).
|
||||
|
||||
<!-- Badges -->
|
||||
[npm-version-src]: https://img.shields.io/npm/v/unimport?style=flat-square
|
||||
[npm-version-href]: https://npmjs.com/package/unimport
|
||||
|
||||
[npm-downloads-src]: https://img.shields.io/npm/dm/unimport?style=flat-square
|
||||
[npm-downloads-href]: https://npmjs.com/package/unimport
|
||||
|
||||
[github-actions-src]: https://img.shields.io/github/workflow/status/unjs/unimport/ci/main?style=flat-square
|
||||
[github-actions-href]: https://github.com/unjs/unimport/actions?query=workflow%3Aci
|
||||
|
||||
[codecov-src]: https://img.shields.io/codecov/c/gh/unjs/unimport/main?style=flat-square
|
||||
[codecov-href]: https://codecov.io/gh/unjs/unimport
|
||||
2
web/admin-spa/node_modules/unimport/addons.d.ts
generated
vendored
2
web/admin-spa/node_modules/unimport/addons.d.ts
generated
vendored
@@ -1,2 +0,0 @@
|
||||
// redirect for TypeScript to pick it up
|
||||
export * from './dist/addons'
|
||||
15
web/admin-spa/node_modules/unimport/dist/addons.cjs
generated
vendored
15
web/admin-spa/node_modules/unimport/dist/addons.cjs
generated
vendored
@@ -1,15 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const vueDirectives = require('./shared/unimport.MMUMmZ45.cjs');
|
||||
require('node:path');
|
||||
require('node:process');
|
||||
require('pathe');
|
||||
require('scule');
|
||||
require('magic-string');
|
||||
require('mlly');
|
||||
require('strip-literal');
|
||||
|
||||
|
||||
|
||||
exports.vueDirectivesAddon = vueDirectives.vueDirectivesAddon;
|
||||
exports.vueTemplateAddon = vueDirectives.vueTemplateAddon;
|
||||
8
web/admin-spa/node_modules/unimport/dist/addons.d.cts
generated
vendored
8
web/admin-spa/node_modules/unimport/dist/addons.d.cts
generated
vendored
@@ -1,8 +0,0 @@
|
||||
import { n as AddonVueDirectivesOptions, r as Addon } from './shared/unimport.CaVRR9SH.cjs';
|
||||
export { v as vueTemplateAddon } from './shared/unimport.D0oAO1c8.cjs';
|
||||
import 'magic-string';
|
||||
import 'mlly';
|
||||
|
||||
declare function vueDirectivesAddon(options?: AddonVueDirectivesOptions): Addon;
|
||||
|
||||
export { vueDirectivesAddon };
|
||||
8
web/admin-spa/node_modules/unimport/dist/addons.d.mts
generated
vendored
8
web/admin-spa/node_modules/unimport/dist/addons.d.mts
generated
vendored
@@ -1,8 +0,0 @@
|
||||
import { n as AddonVueDirectivesOptions, r as Addon } from './shared/unimport.CaVRR9SH.mjs';
|
||||
export { v as vueTemplateAddon } from './shared/unimport.CzOA5cgj.mjs';
|
||||
import 'magic-string';
|
||||
import 'mlly';
|
||||
|
||||
declare function vueDirectivesAddon(options?: AddonVueDirectivesOptions): Addon;
|
||||
|
||||
export { vueDirectivesAddon };
|
||||
8
web/admin-spa/node_modules/unimport/dist/addons.d.ts
generated
vendored
8
web/admin-spa/node_modules/unimport/dist/addons.d.ts
generated
vendored
@@ -1,8 +0,0 @@
|
||||
import { n as AddonVueDirectivesOptions, r as Addon } from './shared/unimport.CaVRR9SH.js';
|
||||
export { v as vueTemplateAddon } from './shared/unimport.BGSZL1Hy.js';
|
||||
import 'magic-string';
|
||||
import 'mlly';
|
||||
|
||||
declare function vueDirectivesAddon(options?: AddonVueDirectivesOptions): Addon;
|
||||
|
||||
export { vueDirectivesAddon };
|
||||
8
web/admin-spa/node_modules/unimport/dist/addons.mjs
generated
vendored
8
web/admin-spa/node_modules/unimport/dist/addons.mjs
generated
vendored
@@ -1,8 +0,0 @@
|
||||
export { v as vueDirectivesAddon, a as vueTemplateAddon } from './shared/unimport.0aitavbJ.mjs';
|
||||
import 'node:path';
|
||||
import 'node:process';
|
||||
import 'pathe';
|
||||
import 'scule';
|
||||
import 'magic-string';
|
||||
import 'mlly';
|
||||
import 'strip-literal';
|
||||
234
web/admin-spa/node_modules/unimport/dist/chunks/detect-acorn.cjs
generated
vendored
234
web/admin-spa/node_modules/unimport/dist/chunks/detect-acorn.cjs
generated
vendored
@@ -1,234 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const acorn = require('acorn');
|
||||
const estreeWalker = require('estree-walker');
|
||||
const vueDirectives = require('../shared/unimport.MMUMmZ45.cjs');
|
||||
require('node:path');
|
||||
require('node:process');
|
||||
require('pathe');
|
||||
require('scule');
|
||||
require('magic-string');
|
||||
require('mlly');
|
||||
require('strip-literal');
|
||||
|
||||
async function detectImportsAcorn(code, ctx, options) {
|
||||
const s = vueDirectives.getMagicString(code);
|
||||
const map = await ctx.getImportMap();
|
||||
let matchedImports = [];
|
||||
const enableAutoImport = options?.autoImport !== false;
|
||||
const enableTransformVirtualImports = options?.transformVirtualImports !== false && ctx.options.virtualImports?.length;
|
||||
if (enableAutoImport || enableTransformVirtualImports) {
|
||||
const ast = acorn.parse(s.original, {
|
||||
sourceType: "module",
|
||||
ecmaVersion: "latest",
|
||||
locations: true
|
||||
});
|
||||
const virtualImports = createVirtualImportsAcronWalker(map, ctx.options.virtualImports);
|
||||
const scopes = traveseScopes(
|
||||
ast,
|
||||
enableTransformVirtualImports ? virtualImports.walk : {}
|
||||
);
|
||||
if (enableAutoImport) {
|
||||
const identifiers = scopes.unmatched;
|
||||
matchedImports.push(
|
||||
...Array.from(identifiers).map((name) => {
|
||||
const item = map.get(name);
|
||||
if (item && !item.disabled)
|
||||
return item;
|
||||
return null;
|
||||
}).filter(Boolean)
|
||||
);
|
||||
for (const addon of ctx.addons)
|
||||
matchedImports = await addon.matchImports?.call(ctx, identifiers, matchedImports) || matchedImports;
|
||||
}
|
||||
virtualImports.ranges.forEach(([start, end]) => {
|
||||
s.remove(start, end);
|
||||
});
|
||||
matchedImports.push(...virtualImports.imports);
|
||||
}
|
||||
return {
|
||||
s,
|
||||
strippedCode: code.toString(),
|
||||
matchedImports,
|
||||
isCJSContext: false,
|
||||
firstOccurrence: 0
|
||||
// TODO:
|
||||
};
|
||||
}
|
||||
function traveseScopes(ast, additionalWalk) {
|
||||
const scopes = [];
|
||||
let scopeCurrent = undefined;
|
||||
const scopesStack = [];
|
||||
function pushScope(node) {
|
||||
scopeCurrent = {
|
||||
node,
|
||||
parent: scopeCurrent,
|
||||
declarations: /* @__PURE__ */ new Set(),
|
||||
references: /* @__PURE__ */ new Set()
|
||||
};
|
||||
scopes.push(scopeCurrent);
|
||||
scopesStack.push(scopeCurrent);
|
||||
}
|
||||
function popScope(node) {
|
||||
const scope = scopesStack.pop();
|
||||
if (scope?.node !== node)
|
||||
throw new Error("Scope mismatch");
|
||||
scopeCurrent = scopesStack[scopesStack.length - 1];
|
||||
}
|
||||
pushScope(undefined);
|
||||
estreeWalker.walk(ast, {
|
||||
enter(node, parent, prop, index) {
|
||||
additionalWalk?.enter?.call(this, node, parent, prop, index);
|
||||
switch (node.type) {
|
||||
// ====== Declaration ======
|
||||
case "ImportSpecifier":
|
||||
case "ImportDefaultSpecifier":
|
||||
case "ImportNamespaceSpecifier":
|
||||
scopeCurrent.declarations.add(node.local.name);
|
||||
return;
|
||||
case "FunctionDeclaration":
|
||||
case "ClassDeclaration":
|
||||
if (node.id)
|
||||
scopeCurrent.declarations.add(node.id.name);
|
||||
return;
|
||||
case "VariableDeclarator":
|
||||
if (node.id.type === "Identifier") {
|
||||
scopeCurrent.declarations.add(node.id.name);
|
||||
} else {
|
||||
estreeWalker.walk(node.id, {
|
||||
enter(node2) {
|
||||
if (node2.type === "ObjectPattern") {
|
||||
node2.properties.forEach((i) => {
|
||||
if (i.type === "Property" && i.value.type === "Identifier")
|
||||
scopeCurrent.declarations.add(i.value.name);
|
||||
else if (i.type === "RestElement" && i.argument.type === "Identifier")
|
||||
scopeCurrent.declarations.add(i.argument.name);
|
||||
});
|
||||
} else if (node2.type === "ArrayPattern") {
|
||||
node2.elements.forEach((i) => {
|
||||
if (i?.type === "Identifier")
|
||||
scopeCurrent.declarations.add(i.name);
|
||||
if (i?.type === "RestElement" && i.argument.type === "Identifier")
|
||||
scopeCurrent.declarations.add(i.argument.name);
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
return;
|
||||
// ====== Scope ======
|
||||
case "BlockStatement":
|
||||
pushScope(node);
|
||||
return;
|
||||
// ====== Reference ======
|
||||
case "Identifier":
|
||||
switch (parent?.type) {
|
||||
case "CallExpression":
|
||||
if (parent.callee === node || parent.arguments.includes(node))
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "MemberExpression":
|
||||
if (parent.object === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "VariableDeclarator":
|
||||
if (parent.init === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "SpreadElement":
|
||||
if (parent.argument === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "ClassDeclaration":
|
||||
if (parent.superClass === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "Property":
|
||||
if (parent.value === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "TemplateLiteral":
|
||||
if (parent.expressions.includes(node))
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "AssignmentExpression":
|
||||
if (parent.right === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "IfStatement":
|
||||
case "WhileStatement":
|
||||
case "DoWhileStatement":
|
||||
if (parent.test === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "SwitchStatement":
|
||||
if (parent.discriminant === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
}
|
||||
if (parent?.type.includes("Expression"))
|
||||
scopeCurrent.references.add(node.name);
|
||||
}
|
||||
},
|
||||
leave(node, parent, prop, index) {
|
||||
additionalWalk?.leave?.call(this, node, parent, prop, index);
|
||||
switch (node.type) {
|
||||
case "BlockStatement":
|
||||
popScope(node);
|
||||
}
|
||||
}
|
||||
});
|
||||
const unmatched = /* @__PURE__ */ new Set();
|
||||
for (const scope of scopes) {
|
||||
for (const name of scope.references) {
|
||||
let defined = false;
|
||||
let parent = scope;
|
||||
while (parent) {
|
||||
if (parent.declarations.has(name)) {
|
||||
defined = true;
|
||||
break;
|
||||
}
|
||||
parent = parent?.parent;
|
||||
}
|
||||
if (!defined)
|
||||
unmatched.add(name);
|
||||
}
|
||||
}
|
||||
return {
|
||||
unmatched,
|
||||
scopes
|
||||
};
|
||||
}
|
||||
function createVirtualImportsAcronWalker(importMap, virtualImports = []) {
|
||||
const imports = [];
|
||||
const ranges = [];
|
||||
return {
|
||||
imports,
|
||||
ranges,
|
||||
walk: {
|
||||
enter(node) {
|
||||
if (node.type === "ImportDeclaration") {
|
||||
if (virtualImports.includes(node.source.value)) {
|
||||
ranges.push([node.start, node.end]);
|
||||
node.specifiers.forEach((i) => {
|
||||
if (i.type === "ImportSpecifier" && i.imported.type === "Identifier") {
|
||||
const original = importMap.get(i.imported.name);
|
||||
if (!original)
|
||||
throw new Error(`[unimport] failed to find "${i.imported.name}" imported from "${node.source.value}"`);
|
||||
imports.push({
|
||||
from: original.from,
|
||||
name: original.name,
|
||||
as: i.local.name
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
exports.createVirtualImportsAcronWalker = createVirtualImportsAcronWalker;
|
||||
exports.detectImportsAcorn = detectImportsAcorn;
|
||||
exports.traveseScopes = traveseScopes;
|
||||
230
web/admin-spa/node_modules/unimport/dist/chunks/detect-acorn.mjs
generated
vendored
230
web/admin-spa/node_modules/unimport/dist/chunks/detect-acorn.mjs
generated
vendored
@@ -1,230 +0,0 @@
|
||||
import { parse } from 'acorn';
|
||||
import { walk } from 'estree-walker';
|
||||
import { n as getMagicString } from '../shared/unimport.0aitavbJ.mjs';
|
||||
import 'node:path';
|
||||
import 'node:process';
|
||||
import 'pathe';
|
||||
import 'scule';
|
||||
import 'magic-string';
|
||||
import 'mlly';
|
||||
import 'strip-literal';
|
||||
|
||||
async function detectImportsAcorn(code, ctx, options) {
|
||||
const s = getMagicString(code);
|
||||
const map = await ctx.getImportMap();
|
||||
let matchedImports = [];
|
||||
const enableAutoImport = options?.autoImport !== false;
|
||||
const enableTransformVirtualImports = options?.transformVirtualImports !== false && ctx.options.virtualImports?.length;
|
||||
if (enableAutoImport || enableTransformVirtualImports) {
|
||||
const ast = parse(s.original, {
|
||||
sourceType: "module",
|
||||
ecmaVersion: "latest",
|
||||
locations: true
|
||||
});
|
||||
const virtualImports = createVirtualImportsAcronWalker(map, ctx.options.virtualImports);
|
||||
const scopes = traveseScopes(
|
||||
ast,
|
||||
enableTransformVirtualImports ? virtualImports.walk : {}
|
||||
);
|
||||
if (enableAutoImport) {
|
||||
const identifiers = scopes.unmatched;
|
||||
matchedImports.push(
|
||||
...Array.from(identifiers).map((name) => {
|
||||
const item = map.get(name);
|
||||
if (item && !item.disabled)
|
||||
return item;
|
||||
return null;
|
||||
}).filter(Boolean)
|
||||
);
|
||||
for (const addon of ctx.addons)
|
||||
matchedImports = await addon.matchImports?.call(ctx, identifiers, matchedImports) || matchedImports;
|
||||
}
|
||||
virtualImports.ranges.forEach(([start, end]) => {
|
||||
s.remove(start, end);
|
||||
});
|
||||
matchedImports.push(...virtualImports.imports);
|
||||
}
|
||||
return {
|
||||
s,
|
||||
strippedCode: code.toString(),
|
||||
matchedImports,
|
||||
isCJSContext: false,
|
||||
firstOccurrence: 0
|
||||
// TODO:
|
||||
};
|
||||
}
|
||||
function traveseScopes(ast, additionalWalk) {
|
||||
const scopes = [];
|
||||
let scopeCurrent = undefined;
|
||||
const scopesStack = [];
|
||||
function pushScope(node) {
|
||||
scopeCurrent = {
|
||||
node,
|
||||
parent: scopeCurrent,
|
||||
declarations: /* @__PURE__ */ new Set(),
|
||||
references: /* @__PURE__ */ new Set()
|
||||
};
|
||||
scopes.push(scopeCurrent);
|
||||
scopesStack.push(scopeCurrent);
|
||||
}
|
||||
function popScope(node) {
|
||||
const scope = scopesStack.pop();
|
||||
if (scope?.node !== node)
|
||||
throw new Error("Scope mismatch");
|
||||
scopeCurrent = scopesStack[scopesStack.length - 1];
|
||||
}
|
||||
pushScope(undefined);
|
||||
walk(ast, {
|
||||
enter(node, parent, prop, index) {
|
||||
additionalWalk?.enter?.call(this, node, parent, prop, index);
|
||||
switch (node.type) {
|
||||
// ====== Declaration ======
|
||||
case "ImportSpecifier":
|
||||
case "ImportDefaultSpecifier":
|
||||
case "ImportNamespaceSpecifier":
|
||||
scopeCurrent.declarations.add(node.local.name);
|
||||
return;
|
||||
case "FunctionDeclaration":
|
||||
case "ClassDeclaration":
|
||||
if (node.id)
|
||||
scopeCurrent.declarations.add(node.id.name);
|
||||
return;
|
||||
case "VariableDeclarator":
|
||||
if (node.id.type === "Identifier") {
|
||||
scopeCurrent.declarations.add(node.id.name);
|
||||
} else {
|
||||
walk(node.id, {
|
||||
enter(node2) {
|
||||
if (node2.type === "ObjectPattern") {
|
||||
node2.properties.forEach((i) => {
|
||||
if (i.type === "Property" && i.value.type === "Identifier")
|
||||
scopeCurrent.declarations.add(i.value.name);
|
||||
else if (i.type === "RestElement" && i.argument.type === "Identifier")
|
||||
scopeCurrent.declarations.add(i.argument.name);
|
||||
});
|
||||
} else if (node2.type === "ArrayPattern") {
|
||||
node2.elements.forEach((i) => {
|
||||
if (i?.type === "Identifier")
|
||||
scopeCurrent.declarations.add(i.name);
|
||||
if (i?.type === "RestElement" && i.argument.type === "Identifier")
|
||||
scopeCurrent.declarations.add(i.argument.name);
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
return;
|
||||
// ====== Scope ======
|
||||
case "BlockStatement":
|
||||
pushScope(node);
|
||||
return;
|
||||
// ====== Reference ======
|
||||
case "Identifier":
|
||||
switch (parent?.type) {
|
||||
case "CallExpression":
|
||||
if (parent.callee === node || parent.arguments.includes(node))
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "MemberExpression":
|
||||
if (parent.object === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "VariableDeclarator":
|
||||
if (parent.init === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "SpreadElement":
|
||||
if (parent.argument === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "ClassDeclaration":
|
||||
if (parent.superClass === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "Property":
|
||||
if (parent.value === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "TemplateLiteral":
|
||||
if (parent.expressions.includes(node))
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "AssignmentExpression":
|
||||
if (parent.right === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "IfStatement":
|
||||
case "WhileStatement":
|
||||
case "DoWhileStatement":
|
||||
if (parent.test === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
case "SwitchStatement":
|
||||
if (parent.discriminant === node)
|
||||
scopeCurrent.references.add(node.name);
|
||||
return;
|
||||
}
|
||||
if (parent?.type.includes("Expression"))
|
||||
scopeCurrent.references.add(node.name);
|
||||
}
|
||||
},
|
||||
leave(node, parent, prop, index) {
|
||||
additionalWalk?.leave?.call(this, node, parent, prop, index);
|
||||
switch (node.type) {
|
||||
case "BlockStatement":
|
||||
popScope(node);
|
||||
}
|
||||
}
|
||||
});
|
||||
const unmatched = /* @__PURE__ */ new Set();
|
||||
for (const scope of scopes) {
|
||||
for (const name of scope.references) {
|
||||
let defined = false;
|
||||
let parent = scope;
|
||||
while (parent) {
|
||||
if (parent.declarations.has(name)) {
|
||||
defined = true;
|
||||
break;
|
||||
}
|
||||
parent = parent?.parent;
|
||||
}
|
||||
if (!defined)
|
||||
unmatched.add(name);
|
||||
}
|
||||
}
|
||||
return {
|
||||
unmatched,
|
||||
scopes
|
||||
};
|
||||
}
|
||||
function createVirtualImportsAcronWalker(importMap, virtualImports = []) {
|
||||
const imports = [];
|
||||
const ranges = [];
|
||||
return {
|
||||
imports,
|
||||
ranges,
|
||||
walk: {
|
||||
enter(node) {
|
||||
if (node.type === "ImportDeclaration") {
|
||||
if (virtualImports.includes(node.source.value)) {
|
||||
ranges.push([node.start, node.end]);
|
||||
node.specifiers.forEach((i) => {
|
||||
if (i.type === "ImportSpecifier" && i.imported.type === "Identifier") {
|
||||
const original = importMap.get(i.imported.name);
|
||||
if (!original)
|
||||
throw new Error(`[unimport] failed to find "${i.imported.name}" imported from "${node.source.value}"`);
|
||||
imports.push({
|
||||
from: original.from,
|
||||
name: original.name,
|
||||
as: i.local.name
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export { createVirtualImportsAcronWalker, detectImportsAcorn, traveseScopes };
|
||||
71
web/admin-spa/node_modules/unimport/dist/index.cjs
generated
vendored
71
web/admin-spa/node_modules/unimport/dist/index.cjs
generated
vendored
@@ -1,71 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const context = require('./shared/unimport.D6_N7ILk.cjs');
|
||||
const vueDirectives = require('./shared/unimport.MMUMmZ45.cjs');
|
||||
require('mlly');
|
||||
require('node:fs');
|
||||
require('node:fs/promises');
|
||||
require('node:process');
|
||||
require('node:url');
|
||||
require('fast-glob');
|
||||
require('pathe');
|
||||
require('picomatch');
|
||||
require('scule');
|
||||
require('node:os');
|
||||
require('pkg-types');
|
||||
require('local-pkg');
|
||||
require('node:path');
|
||||
require('magic-string');
|
||||
require('strip-literal');
|
||||
|
||||
async function installGlobalAutoImports(imports, options = {}) {
|
||||
const {
|
||||
globalObject = globalThis,
|
||||
overrides = false
|
||||
} = options;
|
||||
imports = Array.isArray(imports) ? imports : await imports.getImports();
|
||||
await Promise.all(
|
||||
imports.map(async (i) => {
|
||||
if (i.disabled || i.type)
|
||||
return;
|
||||
const as = i.as || i.name;
|
||||
if (overrides || !(as in globalObject)) {
|
||||
const module = await import(i.from);
|
||||
globalObject[as] = module[i.name];
|
||||
}
|
||||
})
|
||||
);
|
||||
return globalObject;
|
||||
}
|
||||
|
||||
exports.builtinPresets = context.builtinPresets;
|
||||
exports.createUnimport = context.createUnimport;
|
||||
exports.dedupeDtsExports = context.dedupeDtsExports;
|
||||
exports.normalizeScanDirs = context.normalizeScanDirs;
|
||||
exports.resolveBuiltinPresets = context.resolveBuiltinPresets;
|
||||
exports.resolvePreset = context.resolvePreset;
|
||||
exports.scanDirExports = context.scanDirExports;
|
||||
exports.scanExports = context.scanExports;
|
||||
exports.scanFilesFromDir = context.scanFilesFromDir;
|
||||
exports.version = context.version;
|
||||
exports.addImportToCode = vueDirectives.addImportToCode;
|
||||
exports.dedupeImports = vueDirectives.dedupeImports;
|
||||
exports.defineUnimportPreset = vueDirectives.defineUnimportPreset;
|
||||
exports.excludeRE = vueDirectives.excludeRE;
|
||||
exports.getMagicString = vueDirectives.getMagicString;
|
||||
exports.getString = vueDirectives.getString;
|
||||
exports.importAsRE = vueDirectives.importAsRE;
|
||||
exports.matchRE = vueDirectives.matchRE;
|
||||
exports.normalizeImports = vueDirectives.normalizeImports;
|
||||
exports.resolveIdAbsolute = vueDirectives.resolveIdAbsolute;
|
||||
exports.separatorRE = vueDirectives.separatorRE;
|
||||
exports.stringifyImports = vueDirectives.stringifyImports;
|
||||
exports.stripCommentsAndStrings = vueDirectives.stripCommentsAndStrings;
|
||||
exports.stripFileExtension = vueDirectives.stripFileExtension;
|
||||
exports.toExports = vueDirectives.toExports;
|
||||
exports.toImports = vueDirectives.toImports;
|
||||
exports.toTypeDeclarationFile = vueDirectives.toTypeDeclarationFile;
|
||||
exports.toTypeDeclarationItems = vueDirectives.toTypeDeclarationItems;
|
||||
exports.toTypeReExports = vueDirectives.toTypeReExports;
|
||||
exports.vueTemplateAddon = vueDirectives.vueTemplateAddon;
|
||||
exports.installGlobalAutoImports = installGlobalAutoImports;
|
||||
52
web/admin-spa/node_modules/unimport/dist/index.d.cts
generated
vendored
52
web/admin-spa/node_modules/unimport/dist/index.d.cts
generated
vendored
@@ -1,52 +0,0 @@
|
||||
export { v as vueTemplateAddon } from './shared/unimport.D0oAO1c8.cjs';
|
||||
import { U as UnimportOptions, a as Unimport, I as Import, b as InstallGlobalOptions, S as ScanDir, c as ScanDirExportsOptions, P as Preset, B as BuiltinPresetName, d as InlinePreset, T as TypeDeclarationOptions, M as MagicStringResult } from './shared/unimport.CaVRR9SH.cjs';
|
||||
export { r as Addon, n as AddonVueDirectivesOptions, A as AddonsOptions, D as DetectImportResult, h as ImportCommon, s as ImportInjectionResult, g as ImportName, p as InjectImportsOptions, l as InjectionUsageRecord, f as ModuleId, j as PackagePreset, o as PathFromResolver, i as PresetImport, q as Thenable, k as UnimportContext, m as UnimportMeta, e as builtinPresets } from './shared/unimport.CaVRR9SH.cjs';
|
||||
import { StripLiteralOptions } from 'strip-literal';
|
||||
import MagicString from 'magic-string';
|
||||
import 'mlly';
|
||||
|
||||
const version = "3.14.6";
|
||||
|
||||
declare function createUnimport(opts: Partial<UnimportOptions>): Unimport;
|
||||
|
||||
declare function installGlobalAutoImports(imports: Import[] | Unimport, options?: InstallGlobalOptions): Promise<any>;
|
||||
|
||||
declare function normalizeScanDirs(dirs: (string | ScanDir)[], options?: ScanDirExportsOptions): Required<ScanDir>[];
|
||||
declare function scanFilesFromDir(dir: ScanDir | ScanDir[], options?: ScanDirExportsOptions): Promise<string[]>;
|
||||
declare function scanDirExports(dirs: (string | ScanDir)[], options?: ScanDirExportsOptions): Promise<Import[]>;
|
||||
declare function dedupeDtsExports(exports: Import[]): Import[];
|
||||
declare function scanExports(filepath: string, includeTypes: boolean, seen?: Set<string>): Promise<Import[]>;
|
||||
|
||||
declare function resolvePreset(preset: Preset): Promise<Import[]>;
|
||||
declare function resolveBuiltinPresets(presets: (BuiltinPresetName | Preset)[]): Promise<Import[]>;
|
||||
|
||||
declare const excludeRE: RegExp[];
|
||||
declare const importAsRE: RegExp;
|
||||
declare const separatorRE: RegExp;
|
||||
/**
|
||||
* | |
|
||||
* destructing case&ternary non-call inheritance | id |
|
||||
* ↓ ↓ ↓ ↓ | |
|
||||
*/
|
||||
declare const matchRE: RegExp;
|
||||
declare function stripCommentsAndStrings(code: string, options?: StripLiteralOptions): string;
|
||||
|
||||
declare function defineUnimportPreset(preset: InlinePreset): InlinePreset;
|
||||
declare function stringifyImports(imports: Import[], isCJS?: boolean): string;
|
||||
declare function dedupeImports(imports: Import[], warn: (msg: string) => void): Import[];
|
||||
declare function toExports(imports: Import[], fileDir?: string, includeType?: boolean): string;
|
||||
declare function stripFileExtension(path: string): string;
|
||||
declare function toTypeDeclarationItems(imports: Import[], options?: TypeDeclarationOptions): string[];
|
||||
declare function toTypeDeclarationFile(imports: Import[], options?: TypeDeclarationOptions): string;
|
||||
declare function toTypeReExports(imports: Import[], options?: TypeDeclarationOptions): string;
|
||||
declare function getString(code: string | MagicString): string;
|
||||
declare function getMagicString(code: string | MagicString): MagicString;
|
||||
declare function addImportToCode(code: string | MagicString, imports: Import[], isCJS?: boolean, mergeExisting?: boolean, injectAtLast?: boolean, firstOccurrence?: number, onResolved?: (imports: Import[]) => void | Import[], onStringified?: (str: string, imports: Import[]) => void | string): MagicStringResult;
|
||||
declare function normalizeImports(imports: Import[]): Import[];
|
||||
declare function resolveIdAbsolute(id: string, parentId?: string): string;
|
||||
/**
|
||||
* @deprecated renamed to `stringifyImports`
|
||||
*/
|
||||
declare const toImports: typeof stringifyImports;
|
||||
|
||||
export { BuiltinPresetName, Import, InlinePreset, InstallGlobalOptions, MagicStringResult, Preset, ScanDir, ScanDirExportsOptions, TypeDeclarationOptions, Unimport, UnimportOptions, addImportToCode, createUnimport, dedupeDtsExports, dedupeImports, defineUnimportPreset, excludeRE, getMagicString, getString, importAsRE, installGlobalAutoImports, matchRE, normalizeImports, normalizeScanDirs, resolveBuiltinPresets, resolveIdAbsolute, resolvePreset, scanDirExports, scanExports, scanFilesFromDir, separatorRE, stringifyImports, stripCommentsAndStrings, stripFileExtension, toExports, toImports, toTypeDeclarationFile, toTypeDeclarationItems, toTypeReExports, version };
|
||||
52
web/admin-spa/node_modules/unimport/dist/index.d.mts
generated
vendored
52
web/admin-spa/node_modules/unimport/dist/index.d.mts
generated
vendored
@@ -1,52 +0,0 @@
|
||||
export { v as vueTemplateAddon } from './shared/unimport.CzOA5cgj.mjs';
|
||||
import { U as UnimportOptions, a as Unimport, I as Import, b as InstallGlobalOptions, S as ScanDir, c as ScanDirExportsOptions, P as Preset, B as BuiltinPresetName, d as InlinePreset, T as TypeDeclarationOptions, M as MagicStringResult } from './shared/unimport.CaVRR9SH.mjs';
|
||||
export { r as Addon, n as AddonVueDirectivesOptions, A as AddonsOptions, D as DetectImportResult, h as ImportCommon, s as ImportInjectionResult, g as ImportName, p as InjectImportsOptions, l as InjectionUsageRecord, f as ModuleId, j as PackagePreset, o as PathFromResolver, i as PresetImport, q as Thenable, k as UnimportContext, m as UnimportMeta, e as builtinPresets } from './shared/unimport.CaVRR9SH.mjs';
|
||||
import { StripLiteralOptions } from 'strip-literal';
|
||||
import MagicString from 'magic-string';
|
||||
import 'mlly';
|
||||
|
||||
const version = "3.14.6";
|
||||
|
||||
declare function createUnimport(opts: Partial<UnimportOptions>): Unimport;
|
||||
|
||||
declare function installGlobalAutoImports(imports: Import[] | Unimport, options?: InstallGlobalOptions): Promise<any>;
|
||||
|
||||
declare function normalizeScanDirs(dirs: (string | ScanDir)[], options?: ScanDirExportsOptions): Required<ScanDir>[];
|
||||
declare function scanFilesFromDir(dir: ScanDir | ScanDir[], options?: ScanDirExportsOptions): Promise<string[]>;
|
||||
declare function scanDirExports(dirs: (string | ScanDir)[], options?: ScanDirExportsOptions): Promise<Import[]>;
|
||||
declare function dedupeDtsExports(exports: Import[]): Import[];
|
||||
declare function scanExports(filepath: string, includeTypes: boolean, seen?: Set<string>): Promise<Import[]>;
|
||||
|
||||
declare function resolvePreset(preset: Preset): Promise<Import[]>;
|
||||
declare function resolveBuiltinPresets(presets: (BuiltinPresetName | Preset)[]): Promise<Import[]>;
|
||||
|
||||
declare const excludeRE: RegExp[];
|
||||
declare const importAsRE: RegExp;
|
||||
declare const separatorRE: RegExp;
|
||||
/**
|
||||
* | |
|
||||
* destructing case&ternary non-call inheritance | id |
|
||||
* ↓ ↓ ↓ ↓ | |
|
||||
*/
|
||||
declare const matchRE: RegExp;
|
||||
declare function stripCommentsAndStrings(code: string, options?: StripLiteralOptions): string;
|
||||
|
||||
declare function defineUnimportPreset(preset: InlinePreset): InlinePreset;
|
||||
declare function stringifyImports(imports: Import[], isCJS?: boolean): string;
|
||||
declare function dedupeImports(imports: Import[], warn: (msg: string) => void): Import[];
|
||||
declare function toExports(imports: Import[], fileDir?: string, includeType?: boolean): string;
|
||||
declare function stripFileExtension(path: string): string;
|
||||
declare function toTypeDeclarationItems(imports: Import[], options?: TypeDeclarationOptions): string[];
|
||||
declare function toTypeDeclarationFile(imports: Import[], options?: TypeDeclarationOptions): string;
|
||||
declare function toTypeReExports(imports: Import[], options?: TypeDeclarationOptions): string;
|
||||
declare function getString(code: string | MagicString): string;
|
||||
declare function getMagicString(code: string | MagicString): MagicString;
|
||||
declare function addImportToCode(code: string | MagicString, imports: Import[], isCJS?: boolean, mergeExisting?: boolean, injectAtLast?: boolean, firstOccurrence?: number, onResolved?: (imports: Import[]) => void | Import[], onStringified?: (str: string, imports: Import[]) => void | string): MagicStringResult;
|
||||
declare function normalizeImports(imports: Import[]): Import[];
|
||||
declare function resolveIdAbsolute(id: string, parentId?: string): string;
|
||||
/**
|
||||
* @deprecated renamed to `stringifyImports`
|
||||
*/
|
||||
declare const toImports: typeof stringifyImports;
|
||||
|
||||
export { BuiltinPresetName, Import, InlinePreset, InstallGlobalOptions, MagicStringResult, Preset, ScanDir, ScanDirExportsOptions, TypeDeclarationOptions, Unimport, UnimportOptions, addImportToCode, createUnimport, dedupeDtsExports, dedupeImports, defineUnimportPreset, excludeRE, getMagicString, getString, importAsRE, installGlobalAutoImports, matchRE, normalizeImports, normalizeScanDirs, resolveBuiltinPresets, resolveIdAbsolute, resolvePreset, scanDirExports, scanExports, scanFilesFromDir, separatorRE, stringifyImports, stripCommentsAndStrings, stripFileExtension, toExports, toImports, toTypeDeclarationFile, toTypeDeclarationItems, toTypeReExports, version };
|
||||
52
web/admin-spa/node_modules/unimport/dist/index.d.ts
generated
vendored
52
web/admin-spa/node_modules/unimport/dist/index.d.ts
generated
vendored
@@ -1,52 +0,0 @@
|
||||
export { v as vueTemplateAddon } from './shared/unimport.BGSZL1Hy.js';
|
||||
import { U as UnimportOptions, a as Unimport, I as Import, b as InstallGlobalOptions, S as ScanDir, c as ScanDirExportsOptions, P as Preset, B as BuiltinPresetName, d as InlinePreset, T as TypeDeclarationOptions, M as MagicStringResult } from './shared/unimport.CaVRR9SH.js';
|
||||
export { r as Addon, n as AddonVueDirectivesOptions, A as AddonsOptions, D as DetectImportResult, h as ImportCommon, s as ImportInjectionResult, g as ImportName, p as InjectImportsOptions, l as InjectionUsageRecord, f as ModuleId, j as PackagePreset, o as PathFromResolver, i as PresetImport, q as Thenable, k as UnimportContext, m as UnimportMeta, e as builtinPresets } from './shared/unimport.CaVRR9SH.js';
|
||||
import { StripLiteralOptions } from 'strip-literal';
|
||||
import MagicString from 'magic-string';
|
||||
import 'mlly';
|
||||
|
||||
const version = "3.14.6";
|
||||
|
||||
declare function createUnimport(opts: Partial<UnimportOptions>): Unimport;
|
||||
|
||||
declare function installGlobalAutoImports(imports: Import[] | Unimport, options?: InstallGlobalOptions): Promise<any>;
|
||||
|
||||
declare function normalizeScanDirs(dirs: (string | ScanDir)[], options?: ScanDirExportsOptions): Required<ScanDir>[];
|
||||
declare function scanFilesFromDir(dir: ScanDir | ScanDir[], options?: ScanDirExportsOptions): Promise<string[]>;
|
||||
declare function scanDirExports(dirs: (string | ScanDir)[], options?: ScanDirExportsOptions): Promise<Import[]>;
|
||||
declare function dedupeDtsExports(exports: Import[]): Import[];
|
||||
declare function scanExports(filepath: string, includeTypes: boolean, seen?: Set<string>): Promise<Import[]>;
|
||||
|
||||
declare function resolvePreset(preset: Preset): Promise<Import[]>;
|
||||
declare function resolveBuiltinPresets(presets: (BuiltinPresetName | Preset)[]): Promise<Import[]>;
|
||||
|
||||
declare const excludeRE: RegExp[];
|
||||
declare const importAsRE: RegExp;
|
||||
declare const separatorRE: RegExp;
|
||||
/**
|
||||
* | |
|
||||
* destructing case&ternary non-call inheritance | id |
|
||||
* ↓ ↓ ↓ ↓ | |
|
||||
*/
|
||||
declare const matchRE: RegExp;
|
||||
declare function stripCommentsAndStrings(code: string, options?: StripLiteralOptions): string;
|
||||
|
||||
declare function defineUnimportPreset(preset: InlinePreset): InlinePreset;
|
||||
declare function stringifyImports(imports: Import[], isCJS?: boolean): string;
|
||||
declare function dedupeImports(imports: Import[], warn: (msg: string) => void): Import[];
|
||||
declare function toExports(imports: Import[], fileDir?: string, includeType?: boolean): string;
|
||||
declare function stripFileExtension(path: string): string;
|
||||
declare function toTypeDeclarationItems(imports: Import[], options?: TypeDeclarationOptions): string[];
|
||||
declare function toTypeDeclarationFile(imports: Import[], options?: TypeDeclarationOptions): string;
|
||||
declare function toTypeReExports(imports: Import[], options?: TypeDeclarationOptions): string;
|
||||
declare function getString(code: string | MagicString): string;
|
||||
declare function getMagicString(code: string | MagicString): MagicString;
|
||||
declare function addImportToCode(code: string | MagicString, imports: Import[], isCJS?: boolean, mergeExisting?: boolean, injectAtLast?: boolean, firstOccurrence?: number, onResolved?: (imports: Import[]) => void | Import[], onStringified?: (str: string, imports: Import[]) => void | string): MagicStringResult;
|
||||
declare function normalizeImports(imports: Import[]): Import[];
|
||||
declare function resolveIdAbsolute(id: string, parentId?: string): string;
|
||||
/**
|
||||
* @deprecated renamed to `stringifyImports`
|
||||
*/
|
||||
declare const toImports: typeof stringifyImports;
|
||||
|
||||
export { BuiltinPresetName, Import, InlinePreset, InstallGlobalOptions, MagicStringResult, Preset, ScanDir, ScanDirExportsOptions, TypeDeclarationOptions, Unimport, UnimportOptions, addImportToCode, createUnimport, dedupeDtsExports, dedupeImports, defineUnimportPreset, excludeRE, getMagicString, getString, importAsRE, installGlobalAutoImports, matchRE, normalizeImports, normalizeScanDirs, resolveBuiltinPresets, resolveIdAbsolute, resolvePreset, scanDirExports, scanExports, scanFilesFromDir, separatorRE, stringifyImports, stripCommentsAndStrings, stripFileExtension, toExports, toImports, toTypeDeclarationFile, toTypeDeclarationItems, toTypeReExports, version };
|
||||
39
web/admin-spa/node_modules/unimport/dist/index.mjs
generated
vendored
39
web/admin-spa/node_modules/unimport/dist/index.mjs
generated
vendored
@@ -1,39 +0,0 @@
|
||||
export { b as builtinPresets, c as createUnimport, e as dedupeDtsExports, n as normalizeScanDirs, r as resolveBuiltinPresets, a as resolvePreset, d as scanDirExports, f as scanExports, s as scanFilesFromDir, v as version } from './shared/unimport.Ww9aF1N_.mjs';
|
||||
export { o as addImportToCode, f as dedupeImports, d as defineUnimportPreset, e as excludeRE, n as getMagicString, l as getString, i as importAsRE, m as matchRE, p as normalizeImports, r as resolveIdAbsolute, s as separatorRE, c as stringifyImports, b as stripCommentsAndStrings, g as stripFileExtension, t as toExports, q as toImports, j as toTypeDeclarationFile, h as toTypeDeclarationItems, k as toTypeReExports, a as vueTemplateAddon } from './shared/unimport.0aitavbJ.mjs';
|
||||
import 'mlly';
|
||||
import 'node:fs';
|
||||
import 'node:fs/promises';
|
||||
import 'node:process';
|
||||
import 'node:url';
|
||||
import 'fast-glob';
|
||||
import 'pathe';
|
||||
import 'picomatch';
|
||||
import 'scule';
|
||||
import 'node:os';
|
||||
import 'pkg-types';
|
||||
import 'local-pkg';
|
||||
import 'node:path';
|
||||
import 'magic-string';
|
||||
import 'strip-literal';
|
||||
|
||||
async function installGlobalAutoImports(imports, options = {}) {
|
||||
const {
|
||||
globalObject = globalThis,
|
||||
overrides = false
|
||||
} = options;
|
||||
imports = Array.isArray(imports) ? imports : await imports.getImports();
|
||||
await Promise.all(
|
||||
imports.map(async (i) => {
|
||||
if (i.disabled || i.type)
|
||||
return;
|
||||
const as = i.as || i.name;
|
||||
if (overrides || !(as in globalObject)) {
|
||||
const module = await import(i.from);
|
||||
globalObject[as] = module[i.name];
|
||||
}
|
||||
})
|
||||
);
|
||||
return globalObject;
|
||||
}
|
||||
|
||||
export { installGlobalAutoImports };
|
||||
546
web/admin-spa/node_modules/unimport/dist/shared/unimport.0aitavbJ.mjs
generated
vendored
546
web/admin-spa/node_modules/unimport/dist/shared/unimport.0aitavbJ.mjs
generated
vendored
@@ -1,546 +0,0 @@
|
||||
import { basename } from 'node:path';
|
||||
import process from 'node:process';
|
||||
import { isAbsolute, relative, resolve } from 'pathe';
|
||||
import { camelCase, kebabCase } from 'scule';
|
||||
import MagicString from 'magic-string';
|
||||
import { resolvePathSync, findStaticImports, parseStaticImport } from 'mlly';
|
||||
import { stripLiteral } from 'strip-literal';
|
||||
|
||||
const excludeRE = [
|
||||
// imported/exported from other module
|
||||
/\b(import|export)\b([\w$*{},\s]+?)\bfrom\s*["']/g,
|
||||
// defined as function
|
||||
/\bfunction\s*([\w$]+)\s*\(/g,
|
||||
// defined as class
|
||||
/\bclass\s*([\w$]+)\s*\{/g,
|
||||
// defined as local variable
|
||||
// eslint-disable-next-line regexp/no-super-linear-backtracking
|
||||
/\b(?:const|let|var)\s+?(\[.*?\]|\{.*?\}|.+?)\s*?[=;\n]/gs
|
||||
];
|
||||
const importAsRE = /^.*\sas\s+/;
|
||||
const separatorRE = /[,[\]{}\n]|\b(?:import|export)\b/g;
|
||||
const matchRE = /(^|\.\.\.|(?:\bcase|\?)\s+|[^\w$/)]|\bextends\s+)([\w$]+)\s*(?=[.()[\]}:;?+\-*&|`<>,\n]|\b(?:instanceof|in)\b|$|(?<=extends\s+\w+)\s+\{)/g;
|
||||
const regexRE = /\/\S*?(?<!\\)(?<!\[[^\]]*)\/[gimsuy]*/g;
|
||||
function stripCommentsAndStrings(code, options) {
|
||||
  return stripLiteral(code, options).replace(regexRE, 'new RegExp("")');
}

function defineUnimportPreset(preset) {
  return preset;
}
const safePropertyName = /^[a-z$_][\w$]*$/i;
function stringifyWith(withValues) {
  let withDefs = "";
  for (let entries = Object.entries(withValues), l = entries.length, i = 0; i < l; i++) {
    const [prop, value] = entries[i];
    withDefs += safePropertyName.test(prop) ? prop : JSON.stringify(prop);
    withDefs += `: ${JSON.stringify(String(value))}`;
    if (i + 1 !== l)
      withDefs += ", ";
  }
  return `{ ${withDefs} }`;
}
function stringifyImports(imports, isCJS = false) {
  const map = toImportModuleMap(imports);
  return Object.entries(map).flatMap(([name, importSet]) => {
    const entries = [];
    const imports2 = Array.from(importSet).filter((i) => {
      if (!i.name || i.as === "") {
        let importStr;
        if (isCJS) {
          importStr = `require('${name}');`;
        } else {
          importStr = `import '${name}'`;
          if (i.with)
            importStr += ` with ${stringifyWith(i.with)}`;
          importStr += ";";
        }
        entries.push(importStr);
        return false;
      } else if (i.name === "default" || i.name === "=") {
        let importStr;
        if (isCJS) {
          importStr = i.name === "=" ? `const ${i.as} = require('${name}');` : `const { default: ${i.as} } = require('${name}');`;
        } else {
          importStr = `import ${i.as} from '${name}'`;
          if (i.with)
            importStr += ` with ${stringifyWith(i.with)}`;
          importStr += ";";
        }
        entries.push(importStr);
        return false;
      } else if (i.name === "*") {
        let importStr;
        if (isCJS) {
          importStr = `const ${i.as} = require('${name}');`;
        } else {
          importStr = `import * as ${i.as} from '${name}'`;
          if (i.with)
            importStr += ` with ${stringifyWith(i.with)}`;
          importStr += ";";
        }
        entries.push(importStr);
        return false;
      } else if (!isCJS && i.with) {
        entries.push(`import { ${stringifyImportAlias(i)} } from '${name}' with ${stringifyWith(i.with)};`);
        return false;
      }
      return true;
    });
    if (imports2.length) {
      const importsAs = imports2.map((i) => stringifyImportAlias(i, isCJS));
      entries.push(
        isCJS ? `const { ${importsAs.join(", ")} } = require('${name}');` : `import { ${importsAs.join(", ")} } from '${name}';`
      );
    }
    return entries;
  }).join("\n");
}
function dedupeImports(imports, warn) {
  const map = /* @__PURE__ */ new Map();
  const indexToRemove = /* @__PURE__ */ new Set();
  imports.filter((i) => !i.disabled).forEach((i, idx) => {
    if (i.declarationType === "enum" || i.declarationType === "class")
      return;
    const name = i.as ?? i.name;
    if (!map.has(name)) {
      map.set(name, idx);
      return;
    }
    const other = imports[map.get(name)];
    if (other.from === i.from) {
      indexToRemove.add(idx);
      return;
    }
    const diff = (other.priority || 1) - (i.priority || 1);
    if (diff === 0)
      warn(`Duplicated imports "${name}", the one from "${other.from}" has been ignored and "${i.from}" is used`);
    if (diff <= 0) {
      indexToRemove.add(map.get(name));
      map.set(name, idx);
    } else {
      indexToRemove.add(idx);
    }
  });
  return imports.filter((_, idx) => !indexToRemove.has(idx));
}
function toExports(imports, fileDir, includeType = false) {
  const map = toImportModuleMap(imports, includeType);
  return Object.entries(map).flatMap(([name, imports2]) => {
    if (isFilePath(name))
      name = name.replace(/\.[a-z]+$/i, "");
    if (fileDir && isAbsolute(name)) {
      name = relative(fileDir, name);
      if (!name.match(/^[./]/))
        name = `./${name}`;
    }
    const entries = [];
    const filtered = Array.from(imports2).filter((i) => {
      if (i.name === "*") {
        entries.push(`export * as ${i.as} from '${name}';`);
        return false;
      }
      return true;
    });
    if (filtered.length)
      entries.push(`export { ${filtered.map((i) => stringifyImportAlias(i, false)).join(", ")} } from '${name}';`);
    return entries;
  }).join("\n");
}
function stripFileExtension(path) {
  return path.replace(/\.[a-z]+$/i, "");
}
function toTypeDeclarationItems(imports, options) {
  return imports.map((i) => {
    const from = options?.resolvePath?.(i) || stripFileExtension(i.typeFrom || i.from);
    let typeDef = "";
    if (i.with)
      typeDef += `import('${from}', { with: ${stringifyWith(i.with)} })`;
    else
      typeDef += `import('${from}')`;
    if (i.name !== "*" && i.name !== "=")
      typeDef += `['${i.name}']`;
    return `const ${i.as}: typeof ${typeDef}`;
  }).sort();
}
function toTypeDeclarationFile(imports, options) {
  const items = toTypeDeclarationItems(imports, options);
  const {
    exportHelper = true
  } = options || {};
  let declaration = "";
  if (exportHelper)
    declaration += "export {}\n";
  declaration += `declare global {
${items.map((i) => ` ${i}`).join("\n")}
}`;
  return declaration;
}
function makeTypeModulesMap(imports, resolvePath) {
  const modulesMap = /* @__PURE__ */ new Map();
  const resolveImportFrom = typeof resolvePath === "function" ? (i) => {
    return resolvePath(i) || stripFileExtension(i.typeFrom || i.from);
  } : (i) => stripFileExtension(i.typeFrom || i.from);
  for (const import_ of imports) {
    const from = resolveImportFrom(import_);
    let module = modulesMap.get(from);
    if (!module) {
      module = { typeImports: /* @__PURE__ */ new Set(), starTypeImport: undefined };
      modulesMap.set(from, module);
    }
    if (import_.name === "*") {
      if (import_.as)
        module.starTypeImport = import_;
    } else {
      module.typeImports.add(import_);
    }
  }
  return modulesMap;
}
function toTypeReExports(imports, options) {
  const importsMap = makeTypeModulesMap(imports, options?.resolvePath);
  const code = Array.from(importsMap).flatMap(([from, module]) => {
    from = from.replace(/\.d\.([cm]?)ts$/i, ".$1js");
    const { starTypeImport, typeImports } = module;
    const strings = [];
    if (typeImports.size) {
      const typeImportNames = Array.from(typeImports).map(({ name, as }) => {
        if (as && as !== name)
          return `${name} as ${as}`;
        return name;
      });
      strings.push(
        "// @ts-ignore",
        `export type { ${typeImportNames.join(", ")} } from '${from}'`
      );
    }
    if (starTypeImport) {
      strings.push(
        "// @ts-ignore",
        `export type * as ${starTypeImport.as} from '${from}'`
      );
    }
    if (strings.length) {
      strings.push(
        // This is a workaround for a TypeScript issue where type-only re-exports are not properly initialized.
        `import('${from}')`
      );
    }
    return strings;
  });
  return `// for type re-export
declare global {
${code.map((i) => ` ${i}`).join("\n")}
}`;
}
function stringifyImportAlias(item, isCJS = false) {
  return item.as === undefined || item.name === item.as ? item.name : isCJS ? `${item.name}: ${item.as}` : `${item.name} as ${item.as}`;
}
function toImportModuleMap(imports, includeType = false) {
  const map = {};
  for (const _import of imports) {
    if (_import.type && !includeType)
      continue;
    if (!map[_import.from])
      map[_import.from] = /* @__PURE__ */ new Set();
    map[_import.from].add(_import);
  }
  return map;
}
function getString(code) {
  if (typeof code === "string")
    return code;
  return code.toString();
}
function getMagicString(code) {
  if (typeof code === "string")
    return new MagicString(code);
  return code;
}
function addImportToCode(code, imports, isCJS = false, mergeExisting = false, injectAtLast = false, firstOccurrence = Number.POSITIVE_INFINITY, onResolved, onStringified) {
  let newImports = [];
  const s = getMagicString(code);
  let _staticImports;
  const strippedCode = stripCommentsAndStrings(s.original);
  function findStaticImportsLazy() {
    if (!_staticImports) {
      _staticImports = findStaticImports(s.original).filter((i) => Boolean(strippedCode.slice(i.start, i.end).trim())).map((i) => parseStaticImport(i));
    }
    return _staticImports;
  }
  function hasShebang() {
    const shebangRegex = /^#!.+/;
    return shebangRegex.test(s.original);
  }
  if (mergeExisting && !isCJS) {
    const existingImports = findStaticImportsLazy();
    const map = /* @__PURE__ */ new Map();
    imports.forEach((i) => {
      const target = existingImports.find((e) => e.specifier === i.from && e.imports.startsWith("{"));
      if (!target)
        return newImports.push(i);
      if (!map.has(target))
        map.set(target, []);
      map.get(target).push(i);
    });
    for (const [target, items] of map.entries()) {
      const strings = items.map((i) => `${stringifyImportAlias(i)}, `);
      const importLength = target.code.match(/^\s*import\s*\{/)?.[0]?.length;
      if (importLength)
        s.appendLeft(target.start + importLength, ` ${strings.join("").trim()}`);
    }
  } else {
    newImports = imports;
  }
  newImports = onResolved?.(newImports) ?? newImports;
  let newEntries = stringifyImports(newImports, isCJS);
  newEntries = onStringified?.(newEntries, newImports) ?? newEntries;
  if (newEntries) {
    const insertionIndex = injectAtLast ? findStaticImportsLazy().reverse().find((i) => i.end <= firstOccurrence)?.end ?? 0 : 0;
    if (insertionIndex > 0)
      s.appendRight(insertionIndex, `
${newEntries}
`);
    else if (hasShebang())
      s.appendLeft(s.original.indexOf("\n") + 1, `
${newEntries}
`);
    else
      s.prepend(`${newEntries}
`);
  }
  return {
    s,
    get code() {
      return s.toString();
    }
  };
}
function normalizeImports(imports) {
  for (const _import of imports)
    _import.as = _import.as ?? _import.name;
  return imports;
}
function resolveIdAbsolute(id, parentId) {
  return resolvePathSync(id, {
    url: parentId
  });
}
function isFilePath(path) {
  return path.startsWith(".") || isAbsolute(path) || path.includes("://");
}
const toImports = stringifyImports;

const contextRE$1 = /\b_ctx\.([$\w]+)\b/g;
const UNREF_KEY = "__unimport_unref_";
const VUE_TEMPLATE_NAME = "unimport:vue-template";
function vueTemplateAddon() {
  const self = {
    name: VUE_TEMPLATE_NAME,
    async transform(s, id) {
      if (!s.original.includes("_ctx.") || s.original.includes(UNREF_KEY))
        return s;
      const matches = Array.from(s.original.matchAll(contextRE$1));
      const imports = await this.getImports();
      let targets = [];
      for (const match of matches) {
        const name = match[1];
        const item = imports.find((i) => i.as === name);
        if (!item)
          continue;
        const start = match.index;
        const end = start + match[0].length;
        const tempName = `__unimport_${name}`;
        s.overwrite(start, end, `(${JSON.stringify(name)} in _ctx ? _ctx.${name} : ${UNREF_KEY}(${tempName}))`);
        if (!targets.find((i) => i.as === tempName)) {
          targets.push({
            ...item,
            as: tempName
          });
        }
      }
      if (targets.length) {
        targets.push({
          name: "unref",
          from: "vue",
          as: UNREF_KEY
        });
        for (const addon of this.addons) {
          if (addon === self)
            continue;
          targets = await addon.injectImportsResolved?.call(this, targets, s, id) ?? targets;
        }
        let injection = stringifyImports(targets);
        for (const addon of this.addons) {
          if (addon === self)
            continue;
          injection = await addon.injectImportsStringified?.call(this, injection, targets, s, id) ?? injection;
        }
        s.prepend(injection);
      }
      return s;
    },
    async declaration(dts, options) {
      const imports = await this.getImports();
      const items = imports.map((i) => {
        if (i.type || i.dtsDisabled)
          return "";
        const from = options?.resolvePath?.(i) || i.from;
        return `readonly ${i.as}: UnwrapRef<typeof import('${from}')${i.name !== "*" ? `['${i.name}']` : ""}>`;
      }).filter(Boolean).sort();
      const extendItems = items.map((i) => ` ${i}`).join("\n");
      return `${dts}
// for vue template auto import
import { UnwrapRef } from 'vue'
declare module 'vue' {
  interface ComponentCustomProperties {
${extendItems}
  }
}`;
    }
  };
  return self;
}

const contextRE = /resolveDirective as _resolveDirective/;
const contextText = `${contextRE.source}, `;
const directiveRE = /(?:var|const) (\w+) = _resolveDirective\("([\w.-]+)"\);?\s*/g;
const VUE_DIRECTIVES_NAME = "unimport:vue-directives";
function vueDirectivesAddon(options = {}) {
  function isDirective(importEntry) {
    let isDirective2 = importEntry.meta?.vueDirective === true;
    if (isDirective2) {
      return true;
    }
    isDirective2 = options.isDirective?.(normalizePath(process.cwd(), importEntry.from), importEntry) ?? false;
    if (isDirective2) {
      importEntry.meta ??= {};
      importEntry.meta.vueDirective = true;
    }
    return isDirective2;
  }
  const self = {
    name: VUE_DIRECTIVES_NAME,
    async transform(s, id) {
      if (!s.original.match(contextRE))
        return s;
      const matches = Array.from(s.original.matchAll(directiveRE)).sort((a, b) => b.index - a.index);
      if (!matches.length)
        return s;
      let targets = [];
      for await (const [
        begin,
        end,
        importEntry
      ] of findDirectives(
        isDirective,
        matches,
        this.getImports()
      )) {
        s.overwrite(begin, end, "");
        targets.push(importEntry);
      }
      if (!targets.length)
        return s;
      s.replace(contextText, "");
      for (const addon of this.addons) {
        if (addon === self)
          continue;
        targets = await addon.injectImportsResolved?.call(this, targets, s, id) ?? targets;
      }
      let injection = stringifyImports(targets);
      for (const addon of this.addons) {
        if (addon === self)
          continue;
        injection = await addon.injectImportsStringified?.call(this, injection, targets, s, id) ?? injection;
      }
      s.prepend(injection);
      return s;
    },
    async declaration(dts, options2) {
      const directivesMap = await this.getImports().then((imports) => {
        return imports.filter(isDirective).reduce((acc, i) => {
          if (i.type || i.dtsDisabled)
            return acc;
          let name;
          if (i.name === "default" && (i.as === "default" || !i.as)) {
            const file = basename(i.from);
            const idx = file.indexOf(".");
            name = idx > -1 ? file.slice(0, idx) : file;
          } else {
            name = i.as ?? i.name;
          }
          name = name[0] === "v" ? camelCase(name) : camelCase(`v-${name}`);
          if (!acc.has(name)) {
            acc.set(name, i);
          }
          return acc;
        }, /* @__PURE__ */ new Map());
      });
      if (!directivesMap.size)
        return dts;
      const directives = Array.from(directivesMap.entries()).map(([name, i]) => ` ${name}: typeof import('${options2?.resolvePath?.(i) || i.from}')['${i.name}']`).sort().join("\n");
      return `${dts}
// for vue directives auto import
declare module 'vue' {
  interface ComponentCustomProperties {
${directives}
  }
  interface GlobalDirectives {
${directives}
  }
}`;
    }
  };
  return self;
}
function resolvePath(cwd, path) {
  return path[0] === "." ? resolve(cwd, path) : path;
}
function normalizePath(cwd, path) {
  return resolvePath(cwd, path).replace(/\\/g, "/");
}
async function* findDirectives(isDirective, regexArray, importsPromise) {
  const imports = (await importsPromise).filter(isDirective);
  if (!imports.length)
    return;
  const symbols = regexArray.reduce((acc, regex) => {
    const [all, symbol, resolveDirectiveName] = regex;
    if (acc.has(symbol))
      return acc;
    acc.set(symbol, [
      regex.index,
      regex.index + all.length,
      kebabCase(resolveDirectiveName)
    ]);
    return acc;
  }, /* @__PURE__ */ new Map());
  for (const [symbol, data] of symbols.entries()) {
    yield* findDirective(imports, symbol, data);
  }
}
function* findDirective(imports, symbol, [begin, end, importName]) {
  let resolvedName;
  for (const i of imports) {
    if (i.name === "default" && (i.as === "default" || !i.as)) {
      const file = basename(i.from);
      const idx = file.indexOf(".");
      resolvedName = kebabCase(idx > -1 ? file.slice(0, idx) : file);
    } else {
      resolvedName = kebabCase(i.as ?? i.name);
    }
    if (resolvedName[0] === "v") {
      resolvedName = resolvedName.slice(resolvedName[1] === "-" ? 2 : 1);
    }
    if (resolvedName === importName) {
      yield [
        begin,
        end,
        { ...i, name: i.name, as: symbol }
      ];
      return;
    }
  }
}

export { VUE_TEMPLATE_NAME as V, vueTemplateAddon as a, stripCommentsAndStrings as b, stringifyImports as c, defineUnimportPreset as d, excludeRE as e, dedupeImports as f, stripFileExtension as g, toTypeDeclarationItems as h, importAsRE as i, toTypeDeclarationFile as j, toTypeReExports as k, getString as l, matchRE as m, getMagicString as n, addImportToCode as o, normalizeImports as p, toImports as q, resolveIdAbsolute as r, separatorRE as s, toExports as t, VUE_DIRECTIVES_NAME as u, vueDirectivesAddon as v };
5
web/admin-spa/node_modules/unimport/dist/shared/unimport.BGSZL1Hy.d.ts
generated
vendored
@@ -1,5 +0,0 @@
import { r as Addon } from './unimport.CaVRR9SH.js';

declare function vueTemplateAddon(): Addon;

export { vueTemplateAddon as v };
400
web/admin-spa/node_modules/unimport/dist/shared/unimport.CaVRR9SH.d.cts
generated
vendored
@@ -1,400 +0,0 @@
import MagicString from 'magic-string';
import { ESMExport } from 'mlly';

declare const builtinPresets: {
  '@vue/composition-api': InlinePreset;
  '@vueuse/core': () => Preset;
  '@vueuse/head': InlinePreset;
  pinia: InlinePreset;
  preact: InlinePreset;
  quasar: InlinePreset;
  react: InlinePreset;
  'react-router': InlinePreset;
  'react-router-dom': InlinePreset;
  svelte: InlinePreset;
  'svelte/animate': InlinePreset;
  'svelte/easing': InlinePreset;
  'svelte/motion': InlinePreset;
  'svelte/store': InlinePreset;
  'svelte/transition': InlinePreset;
  'vee-validate': InlinePreset;
  vitepress: InlinePreset;
  'vue-demi': InlinePreset;
  'vue-i18n': InlinePreset;
  'vue-router': InlinePreset;
  'vue-router-composables': InlinePreset;
  vue: InlinePreset;
  'vue/macros': InlinePreset;
  vuex: InlinePreset;
  vitest: InlinePreset;
  'uni-app': InlinePreset;
  'solid-js': InlinePreset;
  'solid-app-router': InlinePreset;
  rxjs: InlinePreset;
  'date-fns': InlinePreset;
};
type BuiltinPresetName = keyof typeof builtinPresets;

type ModuleId = string;
type ImportName = string;
interface ImportCommon {
  /** Module specifier to import from */
  from: ModuleId;
  /**
   * Priority of the import; if multiple imports have the same name, the one with the highest priority will be used
   * @default 1
   */
  priority?: number;
  /** If this import is disabled */
  disabled?: boolean;
  /** Won't output import in declaration file if true */
  dtsDisabled?: boolean;
  /** Import declaration type like const / var / enum */
  declarationType?: ESMExport['declarationType'];
  /**
   * Metadata of the import
   */
  meta?: {
    /** Short description of the import */
    description?: string;
    /** URL to the documentation */
    docsUrl?: string;
    /** Additional metadata */
    [key: string]: any;
  };
  /**
   * If this import is a pure type import
   */
  type?: boolean;
  /**
   * Using this as the from when generating type declarations
   */
  typeFrom?: ModuleId;
}
interface Import extends ImportCommon {
  /** Import name to be detected */
  name: ImportName;
  /** Import as this name */
  as?: ImportName;
  /**
   * With properties
   *
   * Ignored for CJS imports.
   */
  with?: Record<string, string>;
}
type PresetImport = Omit<Import, 'from'> | ImportName | [name: ImportName, as?: ImportName, from?: ModuleId];
interface InlinePreset extends ImportCommon {
  imports: (PresetImport | InlinePreset)[];
}
/**
 * Auto extract exports from a package for auto import
 */
interface PackagePreset {
  /**
   * Name of the package
   */
  package: string;
  /**
   * Path of the importer
   * @default process.cwd()
   */
  url?: string;
  /**
   * RegExp, string, or custom function to exclude names of the extracted imports
   */
  ignore?: (string | RegExp | ((name: string) => boolean))[];
  /**
   * Use local cache if it exists
   * @default true
   */
  cache?: boolean;
}
type Preset = InlinePreset | PackagePreset;
interface UnimportContext {
  readonly version: string;
  options: Partial<UnimportOptions>;
  staticImports: Import[];
  dynamicImports: Import[];
  addons: Addon[];
  getImports: () => Promise<Import[]>;
  getImportMap: () => Promise<Map<string, Import>>;
  getMetadata: () => UnimportMeta | undefined;
  modifyDynamicImports: (fn: (imports: Import[]) => Thenable<void | Import[]>) => Promise<void>;
  clearDynamicImports: () => void;
  replaceImports: (imports: UnimportOptions['imports']) => Promise<Import[]>;
  invalidate: () => void;
  resolveId: (id: string, parentId?: string) => Thenable<string | null | undefined | void>;
}
interface DetectImportResult {
  s: MagicString;
  strippedCode: string;
  isCJSContext: boolean;
  matchedImports: Import[];
  firstOccurrence: number;
}
interface Unimport {
  readonly version: string;
  init: () => Promise<void>;
  clearDynamicImports: UnimportContext['clearDynamicImports'];
  getImportMap: UnimportContext['getImportMap'];
  getImports: UnimportContext['getImports'];
  getInternalContext: () => UnimportContext;
  getMetadata: UnimportContext['getMetadata'];
  modifyDynamicImports: UnimportContext['modifyDynamicImports'];
  generateTypeDeclarations: (options?: TypeDeclarationOptions) => Promise<string>;
  /**
   * Get un-imported usages from code
   */
  detectImports: (code: string | MagicString) => Promise<DetectImportResult>;
  /**
   * Insert missing import statements into code
   */
  injectImports: (code: string | MagicString, id?: string, options?: InjectImportsOptions) => Promise<ImportInjectionResult>;
  scanImportsFromDir: (dir?: (string | ScanDir)[], options?: ScanDirExportsOptions) => Promise<Import[]>;
  scanImportsFromFile: (file: string, includeTypes?: boolean) => Promise<Import[]>;
  /**
   * @deprecated
   */
  toExports: (filepath?: string, includeTypes?: boolean) => Promise<string>;
}
interface InjectionUsageRecord {
  import: Import;
  count: number;
  moduleIds: string[];
}
interface UnimportMeta {
  injectionUsage: Record<string, InjectionUsageRecord>;
}
interface AddonsOptions {
  addons?: Addon[];
  /**
   * Enable auto import inside Vue's <template>
   *
   * @default false
   */
  vueTemplate?: boolean;
  /**
   * Enable auto import of directives for Vue SFCs.
   *
   * Library authors should include `meta.vueDirective: true` in the import metadata.
   *
   * When using a local directives folder, provide the `isDirective`
   * callback to check if the import is a Vue directive.
   */
  vueDirectives?: true | AddonVueDirectivesOptions;
}
interface AddonVueDirectivesOptions {
  /**
   * Checks if the import is a Vue directive.
   *
   * **NOTES**:
   * - imports from a library should include `meta.vueDirective: true`.
   * - this callback is only invoked for local directives (only when meta.vueDirective is not set).
   *
   * @param from The normalized path of the import.
   * @param importEntry The import entry.
   */
  isDirective?: (from: string, importEntry: Import) => boolean;
}
interface UnimportOptions extends Pick<InjectImportsOptions, 'injectAtEnd' | 'mergeExisting' | 'parser'> {
  /**
   * Auto import items
   */
  imports: Import[];
  /**
   * Auto import presets
   */
  presets: (Preset | BuiltinPresetName)[];
  /**
   * Custom warning function
   * @default console.warn
   */
  warn: (msg: string) => void;
  /**
   * Custom debug log function
   * @default console.log
   */
  debugLog: (msg: string) => void;
  /**
   * Unimport addons.
   * To use built-in addons, use:
   * ```js
   * addons: {
   *   addons: [<custom-addons-here>] // if you also want to use custom addons
   *   vueTemplate: true,
   *   vueDirectives: [<the-directives-here>]
   * }
   * ```
   *
   * Built-in addons:
   * - vueDirectives: enable auto import of directives for Vue SFCs
   * - vueTemplate: enable auto import inside Vue's <template>
   *
   * @default {}
   */
  addons: AddonsOptions | Addon[];
  /**
   * Names of virtual modules that expose all the registered auto-imports
   * @default []
   */
  virtualImports: string[];
  /**
   * Directories to scan for auto import
   * @default []
   */
  dirs?: (string | ScanDir)[];
  /**
   * Options for scanning directories for auto import
   */
  dirsScanOptions?: ScanDirExportsOptions;
  /**
   * Custom resolver for the auto import id
   */
  resolveId?: (id: string, importee?: string) => Thenable<string | void>;
  /**
   * Custom magic comments to opt out of auto import, per file/module
   *
   * @default ['@unimport-disable', '@imports-disable']
   */
  commentsDisable?: string[];
  /**
   * Custom magic comments to debug auto import, printed to console
   *
   * @default ['@unimport-debug', '@imports-debug']
   */
  commentsDebug?: string[];
  /**
   * Collect metadata for each auto import. Accessible via `ctx.meta`
   */
  collectMeta?: boolean;
}
type PathFromResolver = (_import: Import) => string | undefined;
interface ScanDirExportsOptions {
  /**
   * Glob patterns for matching files
   *
   * @default ['*.{ts,js,mjs,cjs,mts,cts}']
   */
  filePatterns?: string[];
  /**
   * Custom function to filter scanned files
   */
  fileFilter?: (file: string) => boolean;
  /**
   * Register type exports
   *
   * @default true
   */
  types?: boolean;
  /**
   * Current working directory
   *
   * @default process.cwd()
   */
  cwd?: string;
}
interface ScanDir {
  /**
   * Path pattern of the directory
   */
  glob: string;
  /**
   * Register type exports
   *
   * @default true
   */
  types?: boolean;
}
interface TypeDeclarationOptions {
  /**
   * Custom resolver for the path of the import
   */
  resolvePath?: PathFromResolver;
  /**
   * Append `export {}` to the end of the file
   *
   * @default true
   */
  exportHelper?: boolean;
  /**
   * Auto-import for type exports
   *
   * @default true
   */
  typeReExports?: boolean;
}
interface InjectImportsOptions {
  /**
   * Merge the existing imports
   *
   * @default false
   */
  mergeExisting?: boolean;
  /**
   * If the module should be auto imported
   *
   * @default true
   */
  autoImport?: boolean;
  /**
   * If the module should be transformed for virtual modules.
   * Only available when `virtualImports` is set.
   *
   * @default true
   */
  transformVirtualImports?: boolean;
  /**
   * Parser to use for parsing the code
   *
   * Note that `acorn` only accepts valid JS code, so it should usually only be used after transformation and transpilation
   *
   * @default 'regex'
   */
  parser?: 'acorn' | 'regex';
  /**
   * Inject the imports at the end of other imports
   *
   * @default false
   */
  injectAtEnd?: boolean;
}
type Thenable<T> = Promise<T> | T;
interface Addon {
  name?: string;
  transform?: (this: UnimportContext, code: MagicString, id: string | undefined) => Thenable<MagicString>;
  declaration?: (this: UnimportContext, dts: string, options: TypeDeclarationOptions) => Thenable<string>;
  matchImports?: (this: UnimportContext, identifiers: Set<string>, matched: Import[]) => Thenable<Import[] | void>;
  /**
   * Extend or modify the imports list before injecting
   */
  extendImports?: (this: UnimportContext, imports: Import[]) => Import[] | void;
  /**
   * Resolve imports before injecting
   */
  injectImportsResolved?: (this: UnimportContext, imports: Import[], code: MagicString, id?: string) => Import[] | void;
  /**
   * Modify the injection code before injecting
   */
  injectImportsStringified?: (this: UnimportContext, injection: string, imports: Import[], code: MagicString, id?: string) => string | void;
}
interface InstallGlobalOptions {
  /**
   * @default globalThis
   */
  globalObject?: any;
  /**
   * Overrides the existing property
   * @default false
   */
  overrides?: boolean;
}
interface MagicStringResult {
  s: MagicString;
  code: string;
}
interface ImportInjectionResult extends MagicStringResult {
  imports: Import[];
}

export { type AddonsOptions as A, type BuiltinPresetName as B, type DetectImportResult as D, type Import as I, type MagicStringResult as M, type Preset as P, type ScanDir as S, type TypeDeclarationOptions as T, type UnimportOptions as U, type Unimport as a, type InstallGlobalOptions as b, type ScanDirExportsOptions as c, type InlinePreset as d, builtinPresets as e, type ModuleId as f, type ImportName as g, type ImportCommon as h, type PresetImport as i, type PackagePreset as j, type UnimportContext as k, type InjectionUsageRecord as l, type UnimportMeta as m, type AddonVueDirectivesOptions as n, type PathFromResolver as o, type InjectImportsOptions as p, type Thenable as q, type Addon as r, type ImportInjectionResult as s };
400
web/admin-spa/node_modules/unimport/dist/shared/unimport.CaVRR9SH.d.mts
generated
vendored
@@ -1,400 +0,0 @@
400
web/admin-spa/node_modules/unimport/dist/shared/unimport.CaVRR9SH.d.ts
generated
vendored
@@ -1,400 +0,0 @@
5
web/admin-spa/node_modules/unimport/dist/shared/unimport.CzOA5cgj.d.mts
generated
vendored
@@ -1,5 +0,0 @@
import { r as Addon } from './unimport.CaVRR9SH.mjs';

declare function vueTemplateAddon(): Addon;

export { vueTemplateAddon as v };
5
web/admin-spa/node_modules/unimport/dist/shared/unimport.D0oAO1c8.d.cts
generated
vendored
5
web/admin-spa/node_modules/unimport/dist/shared/unimport.D0oAO1c8.d.cts
generated
vendored
@@ -1,5 +0,0 @@
|
||||
import { r as Addon } from './unimport.CaVRR9SH.cjs';
|
||||
|
||||
declare function vueTemplateAddon(): Addon;
|
||||
|
||||
export { vueTemplateAddon as v };
|
||||
1473
web/admin-spa/node_modules/unimport/dist/shared/unimport.D6_N7ILk.cjs
generated
vendored
1473
web/admin-spa/node_modules/unimport/dist/shared/unimport.D6_N7ILk.cjs
generated
vendored
File diff suppressed because it is too large
Load Diff
575
web/admin-spa/node_modules/unimport/dist/shared/unimport.MMUMmZ45.cjs
generated
vendored
575
web/admin-spa/node_modules/unimport/dist/shared/unimport.MMUMmZ45.cjs
generated
vendored
@@ -1,575 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const node_path = require('node:path');
|
||||
const process = require('node:process');
|
||||
const pathe = require('pathe');
|
||||
const scule = require('scule');
|
||||
const MagicString = require('magic-string');
|
||||
const mlly = require('mlly');
|
||||
const stripLiteral = require('strip-literal');
|
||||
|
||||
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e.default : e; }
|
||||
|
||||
const process__default = /*#__PURE__*/_interopDefaultCompat(process);
|
||||
const MagicString__default = /*#__PURE__*/_interopDefaultCompat(MagicString);
|
||||
|
||||
const excludeRE = [
|
||||
// imported/exported from other module
|
||||
/\b(import|export)\b([\w$*{},\s]+?)\bfrom\s*["']/g,
|
||||
// defined as function
|
||||
/\bfunction\s*([\w$]+)\s*\(/g,
|
||||
// defined as class
|
||||
/\bclass\s*([\w$]+)\s*\{/g,
|
||||
// defined as local variable
|
||||
// eslint-disable-next-line regexp/no-super-linear-backtracking
|
||||
/\b(?:const|let|var)\s+?(\[.*?\]|\{.*?\}|.+?)\s*?[=;\n]/gs
|
||||
];
|
||||
const importAsRE = /^.*\sas\s+/;
|
||||
const separatorRE = /[,[\]{}\n]|\b(?:import|export)\b/g;
|
||||
const matchRE = /(^|\.\.\.|(?:\bcase|\?)\s+|[^\w$/)]|\bextends\s+)([\w$]+)\s*(?=[.()[\]}:;?+\-*&|`<>,\n]|\b(?:instanceof|in)\b|$|(?<=extends\s+\w+)\s+\{)/g;
|
||||
const regexRE = /\/\S*?(?<!\\)(?<!\[[^\]]*)\/[gimsuy]*/g;
|
||||
function stripCommentsAndStrings(code, options) {
|
||||
return stripLiteral.stripLiteral(code, options).replace(regexRE, 'new RegExp("")');
|
||||
}
|
||||
|
||||
function defineUnimportPreset(preset) {
|
||||
return preset;
|
||||
}
|
||||
const safePropertyName = /^[a-z$_][\w$]*$/i;
|
||||
function stringifyWith(withValues) {
|
||||
let withDefs = "";
|
||||
for (let entries = Object.entries(withValues), l = entries.length, i = 0; i < l; i++) {
|
||||
const [prop, value] = entries[i];
|
||||
withDefs += safePropertyName.test(prop) ? prop : JSON.stringify(prop);
|
||||
withDefs += `: ${JSON.stringify(String(value))}`;
|
||||
if (i + 1 !== l)
|
||||
withDefs += ", ";
|
||||
}
|
||||
return `{ ${withDefs} }`;
|
||||
}
|
||||
function stringifyImports(imports, isCJS = false) {
|
||||
const map = toImportModuleMap(imports);
|
||||
return Object.entries(map).flatMap(([name, importSet]) => {
|
||||
const entries = [];
|
||||
const imports2 = Array.from(importSet).filter((i) => {
|
||||
if (!i.name || i.as === "") {
|
||||
let importStr;
|
||||
if (isCJS) {
|
||||
importStr = `require('${name}');`;
|
||||
} else {
|
||||
importStr = `import '${name}'`;
|
||||
if (i.with)
|
||||
importStr += ` with ${stringifyWith(i.with)}`;
|
||||
importStr += ";";
|
||||
}
|
||||
entries.push(importStr);
|
||||
return false;
|
||||
} else if (i.name === "default" || i.name === "=") {
|
||||
let importStr;
|
||||
if (isCJS) {
|
||||
importStr = i.name === "=" ? `const ${i.as} = require('${name}');` : `const { default: ${i.as} } = require('${name}');`;
|
||||
} else {
|
||||
importStr = `import ${i.as} from '${name}'`;
|
||||
if (i.with)
|
||||
importStr += ` with ${stringifyWith(i.with)}`;
|
||||
importStr += ";";
|
||||
}
|
||||
entries.push(importStr);
|
||||
return false;
|
||||
} else if (i.name === "*") {
|
||||
let importStr;
|
||||
if (isCJS) {
|
||||
importStr = `const ${i.as} = require('${name}');`;
|
||||
} else {
|
||||
importStr = `import * as ${i.as} from '${name}'`;
|
||||
if (i.with)
|
||||
importStr += ` with ${stringifyWith(i.with)}`;
|
||||
importStr += ";";
|
||||
}
|
||||
entries.push(importStr);
|
||||
return false;
|
||||
} else if (!isCJS && i.with) {
|
||||
entries.push(`import { ${stringifyImportAlias(i)} } from '${name}' with ${stringifyWith(i.with)};`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
if (imports2.length) {
|
||||
const importsAs = imports2.map((i) => stringifyImportAlias(i, isCJS));
|
||||
entries.push(
|
||||
isCJS ? `const { ${importsAs.join(", ")} } = require('${name}');` : `import { ${importsAs.join(", ")} } from '${name}';`
|
||||
);
|
||||
}
|
||||
return entries;
|
||||
}).join("\n");
|
||||
}
|
||||
function dedupeImports(imports, warn) {
|
||||
const map = /* @__PURE__ */ new Map();
|
||||
const indexToRemove = /* @__PURE__ */ new Set();
|
||||
imports.filter((i) => !i.disabled).forEach((i, idx) => {
|
||||
if (i.declarationType === "enum" || i.declarationType === "class")
|
||||
return;
|
||||
const name = i.as ?? i.name;
|
||||
if (!map.has(name)) {
|
||||
map.set(name, idx);
|
||||
return;
|
||||
}
|
||||
const other = imports[map.get(name)];
|
||||
if (other.from === i.from) {
|
||||
indexToRemove.add(idx);
|
||||
return;
|
||||
}
|
||||
const diff = (other.priority || 1) - (i.priority || 1);
|
||||
if (diff === 0)
|
||||
warn(`Duplicated imports "${name}", the one from "${other.from}" has been ignored and "${i.from}" is used`);
|
||||
if (diff <= 0) {
|
||||
indexToRemove.add(map.get(name));
|
||||
map.set(name, idx);
|
||||
} else {
|
||||
indexToRemove.add(idx);
|
||||
}
|
||||
});
|
||||
return imports.filter((_, idx) => !indexToRemove.has(idx));
|
||||
}
|
||||
function toExports(imports, fileDir, includeType = false) {
|
||||
const map = toImportModuleMap(imports, includeType);
|
||||
return Object.entries(map).flatMap(([name, imports2]) => {
|
||||
if (isFilePath(name))
|
||||
name = name.replace(/\.[a-z]+$/i, "");
|
||||
if (fileDir && pathe.isAbsolute(name)) {
|
||||
name = pathe.relative(fileDir, name);
|
||||
if (!name.match(/^[./]/))
|
||||
name = `./${name}`;
|
||||
}
|
||||
const entries = [];
|
||||
const filtered = Array.from(imports2).filter((i) => {
|
||||
if (i.name === "*") {
|
||||
entries.push(`export * as ${i.as} from '${name}';`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
if (filtered.length)
|
||||
entries.push(`export { ${filtered.map((i) => stringifyImportAlias(i, false)).join(", ")} } from '${name}';`);
|
||||
return entries;
|
||||
}).join("\n");
|
||||
}
|
||||
function stripFileExtension(path) {
|
||||
return path.replace(/\.[a-z]+$/i, "");
|
||||
}
|
||||
function toTypeDeclarationItems(imports, options) {
|
||||
return imports.map((i) => {
|
||||
const from = options?.resolvePath?.(i) || stripFileExtension(i.typeFrom || i.from);
|
||||
let typeDef = "";
|
||||
if (i.with)
|
||||
typeDef += `import('${from}', { with: ${stringifyWith(i.with)} })`;
|
||||
else
|
||||
typeDef += `import('${from}')`;
|
||||
if (i.name !== "*" && i.name !== "=")
|
||||
typeDef += `['${i.name}']`;
|
||||
return `const ${i.as}: typeof ${typeDef}`;
|
||||
}).sort();
|
||||
}
|
||||
function toTypeDeclarationFile(imports, options) {
|
||||
const items = toTypeDeclarationItems(imports, options);
|
||||
const {
|
||||
exportHelper = true
|
||||
} = options || {};
|
||||
let declaration = "";
|
||||
if (exportHelper)
|
||||
declaration += "export {}\n";
|
||||
declaration += `declare global {
|
||||
${items.map((i) => ` ${i}`).join("\n")}
|
||||
}`;
|
||||
return declaration;
|
||||
}
|
||||
function makeTypeModulesMap(imports, resolvePath) {
|
||||
const modulesMap = /* @__PURE__ */ new Map();
|
||||
const resolveImportFrom = typeof resolvePath === "function" ? (i) => {
|
||||
return resolvePath(i) || stripFileExtension(i.typeFrom || i.from);
|
||||
} : (i) => stripFileExtension(i.typeFrom || i.from);
|
||||
for (const import_ of imports) {
|
||||
const from = resolveImportFrom(import_);
|
||||
let module = modulesMap.get(from);
|
||||
if (!module) {
|
||||
module = { typeImports: /* @__PURE__ */ new Set(), starTypeImport: undefined };
|
||||
modulesMap.set(from, module);
|
||||
}
|
||||
if (import_.name === "*") {
|
||||
if (import_.as)
|
||||
module.starTypeImport = import_;
|
||||
} else {
|
||||
module.typeImports.add(import_);
|
||||
}
|
||||
}
|
||||
return modulesMap;
|
||||
}
|
||||
function toTypeReExports(imports, options) {
|
||||
const importsMap = makeTypeModulesMap(imports, options?.resolvePath);
|
||||
const code = Array.from(importsMap).flatMap(([from, module]) => {
|
||||
from = from.replace(/\.d\.([cm]?)ts$/i, ".$1js");
|
||||
const { starTypeImport, typeImports } = module;
|
||||
const strings = [];
|
||||
if (typeImports.size) {
|
||||
const typeImportNames = Array.from(typeImports).map(({ name, as }) => {
|
||||
if (as && as !== name)
|
||||
return `${name} as ${as}`;
|
||||
return name;
|
||||
});
|
||||
strings.push(
|
||||
"// @ts-ignore",
|
||||
`export type { ${typeImportNames.join(", ")} } from '${from}'`
|
||||
);
|
||||
}
|
||||
if (starTypeImport) {
|
||||
strings.push(
|
||||
"// @ts-ignore",
|
||||
`export type * as ${starTypeImport.as} from '${from}'`
|
||||
);
|
||||
}
|
||||
if (strings.length) {
|
||||
strings.push(
|
||||
// This is a workaround for a TypeScript issue where type-only re-exports are not properly initialized.
|
||||
`import('${from}')`
|
||||
);
|
||||
}
|
||||
return strings;
|
||||
});
|
||||
return `// for type re-export
|
||||
declare global {
|
||||
${code.map((i) => ` ${i}`).join("\n")}
|
||||
}`;
|
||||
}
|
||||
function stringifyImportAlias(item, isCJS = false) {
|
||||
return item.as === undefined || item.name === item.as ? item.name : isCJS ? `${item.name}: ${item.as}` : `${item.name} as ${item.as}`;
|
||||
}
|
||||
function toImportModuleMap(imports, includeType = false) {
|
||||
const map = {};
|
||||
for (const _import of imports) {
|
||||
if (_import.type && !includeType)
|
||||
continue;
|
||||
if (!map[_import.from])
|
||||
map[_import.from] = /* @__PURE__ */ new Set();
|
||||
map[_import.from].add(_import);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
function getString(code) {
|
||||
if (typeof code === "string")
|
||||
return code;
|
||||
return code.toString();
|
||||
}
|
||||
function getMagicString(code) {
|
||||
if (typeof code === "string")
|
||||
return new MagicString__default(code);
|
||||
return code;
|
||||
}
|
||||
function addImportToCode(code, imports, isCJS = false, mergeExisting = false, injectAtLast = false, firstOccurrence = Number.POSITIVE_INFINITY, onResolved, onStringified) {
|
||||
let newImports = [];
|
||||
const s = getMagicString(code);
|
||||
let _staticImports;
|
||||
const strippedCode = stripCommentsAndStrings(s.original);
|
||||
function findStaticImportsLazy() {
|
||||
if (!_staticImports) {
|
||||
_staticImports = mlly.findStaticImports(s.original).filter((i) => Boolean(strippedCode.slice(i.start, i.end).trim())).map((i) => mlly.parseStaticImport(i));
|
||||
}
|
||||
return _staticImports;
|
||||
}
|
||||
function hasShebang() {
|
||||
const shebangRegex = /^#!.+/;
|
||||
return shebangRegex.test(s.original);
|
||||
}
|
||||
if (mergeExisting && !isCJS) {
|
||||
const existingImports = findStaticImportsLazy();
|
||||
const map = /* @__PURE__ */ new Map();
|
||||
imports.forEach((i) => {
|
||||
const target = existingImports.find((e) => e.specifier === i.from && e.imports.startsWith("{"));
|
||||
if (!target)
|
||||
return newImports.push(i);
|
||||
if (!map.has(target))
|
||||
map.set(target, []);
|
||||
map.get(target).push(i);
|
||||
});
|
||||
for (const [target, items] of map.entries()) {
|
||||
const strings = items.map((i) => `${stringifyImportAlias(i)}, `);
|
||||
const importLength = target.code.match(/^\s*import\s*\{/)?.[0]?.length;
|
||||
if (importLength)
|
||||
s.appendLeft(target.start + importLength, ` ${strings.join("").trim()}`);
|
||||
}
|
||||
} else {
|
||||
newImports = imports;
|
||||
}
|
||||
newImports = onResolved?.(newImports) ?? newImports;
|
||||
let newEntries = stringifyImports(newImports, isCJS);
|
||||
newEntries = onStringified?.(newEntries, newImports) ?? newEntries;
|
||||
if (newEntries) {
|
||||
const insertionIndex = injectAtLast ? findStaticImportsLazy().reverse().find((i) => i.end <= firstOccurrence)?.end ?? 0 : 0;
|
||||
if (insertionIndex > 0)
|
||||
s.appendRight(insertionIndex, `
|
||||
${newEntries}
|
||||
`);
|
||||
else if (hasShebang())
|
||||
s.appendLeft(s.original.indexOf("\n") + 1, `
|
||||
${newEntries}
|
||||
`);
|
||||
else
|
||||
s.prepend(`${newEntries}
|
||||
`);
|
||||
}
|
||||
return {
|
||||
s,
|
||||
get code() {
|
||||
return s.toString();
|
||||
}
|
||||
};
|
||||
}
|
||||
function normalizeImports(imports) {
|
||||
for (const _import of imports)
|
||||
_import.as = _import.as ?? _import.name;
|
||||
return imports;
|
||||
}
|
||||
function resolveIdAbsolute(id, parentId) {
|
||||
return mlly.resolvePathSync(id, {
|
||||
url: parentId
|
||||
});
|
||||
}
|
||||
function isFilePath(path) {
|
||||
return path.startsWith(".") || pathe.isAbsolute(path) || path.includes("://");
|
||||
}
|
||||
const toImports = stringifyImports;
|
||||
|
||||
const contextRE$1 = /\b_ctx\.([$\w]+)\b/g;
|
||||
const UNREF_KEY = "__unimport_unref_";
|
||||
const VUE_TEMPLATE_NAME = "unimport:vue-template";
|
||||
function vueTemplateAddon() {
|
||||
const self = {
|
||||
name: VUE_TEMPLATE_NAME,
|
||||
async transform(s, id) {
|
||||
if (!s.original.includes("_ctx.") || s.original.includes(UNREF_KEY))
|
||||
return s;
|
||||
const matches = Array.from(s.original.matchAll(contextRE$1));
|
||||
const imports = await this.getImports();
|
||||
let targets = [];
|
||||
for (const match of matches) {
|
||||
const name = match[1];
|
||||
const item = imports.find((i) => i.as === name);
|
||||
if (!item)
|
||||
continue;
|
||||
const start = match.index;
|
||||
const end = start + match[0].length;
|
||||
const tempName = `__unimport_${name}`;
|
||||
s.overwrite(start, end, `(${JSON.stringify(name)} in _ctx ? _ctx.${name} : ${UNREF_KEY}(${tempName}))`);
|
||||
if (!targets.find((i) => i.as === tempName)) {
|
||||
targets.push({
|
||||
...item,
|
||||
as: tempName
|
||||
});
|
||||
}
|
||||
}
|
||||
if (targets.length) {
|
||||
targets.push({
|
||||
name: "unref",
|
||||
from: "vue",
|
||||
as: UNREF_KEY
|
||||
});
|
||||
for (const addon of this.addons) {
|
||||
if (addon === self)
|
||||
continue;
|
||||
targets = await addon.injectImportsResolved?.call(this, targets, s, id) ?? targets;
|
||||
}
|
||||
let injection = stringifyImports(targets);
|
||||
for (const addon of this.addons) {
|
||||
if (addon === self)
|
||||
continue;
|
||||
injection = await addon.injectImportsStringified?.call(this, injection, targets, s, id) ?? injection;
|
||||
}
|
||||
s.prepend(injection);
|
||||
}
|
||||
return s;
|
||||
},
|
||||
async declaration(dts, options) {
|
||||
const imports = await this.getImports();
|
||||
const items = imports.map((i) => {
|
||||
if (i.type || i.dtsDisabled)
|
||||
return "";
|
||||
const from = options?.resolvePath?.(i) || i.from;
|
||||
return `readonly ${i.as}: UnwrapRef<typeof import('${from}')${i.name !== "*" ? `['${i.name}']` : ""}>`;
|
||||
}).filter(Boolean).sort();
|
||||
const extendItems = items.map((i) => ` ${i}`).join("\n");
|
||||
return `${dts}
|
||||
// for vue template auto import
|
||||
import { UnwrapRef } from 'vue'
|
||||
declare module 'vue' {
|
||||
interface ComponentCustomProperties {
|
||||
${extendItems}
|
||||
}
|
||||
}`;
|
||||
}
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
const contextRE = /resolveDirective as _resolveDirective/;
|
||||
const contextText = `${contextRE.source}, `;
|
||||
const directiveRE = /(?:var|const) (\w+) = _resolveDirective\("([\w.-]+)"\);?\s*/g;
|
||||
const VUE_DIRECTIVES_NAME = "unimport:vue-directives";
|
||||
function vueDirectivesAddon(options = {}) {
|
||||
function isDirective(importEntry) {
|
||||
let isDirective2 = importEntry.meta?.vueDirective === true;
|
||||
if (isDirective2) {
|
||||
return true;
|
||||
}
|
||||
isDirective2 = options.isDirective?.(normalizePath(process__default.cwd(), importEntry.from), importEntry) ?? false;
|
||||
if (isDirective2) {
|
||||
importEntry.meta ??= {};
|
||||
importEntry.meta.vueDirective = true;
|
||||
}
|
||||
return isDirective2;
|
||||
}
|
||||
const self = {
|
||||
name: VUE_DIRECTIVES_NAME,
|
||||
async transform(s, id) {
|
||||
if (!s.original.match(contextRE))
|
||||
return s;
|
||||
const matches = Array.from(s.original.matchAll(directiveRE)).sort((a, b) => b.index - a.index);
|
||||
if (!matches.length)
|
||||
return s;
|
||||
let targets = [];
|
||||
for await (const [
|
||||
begin,
|
||||
end,
|
||||
importEntry
|
||||
] of findDirectives(
|
||||
isDirective,
|
||||
matches,
|
||||
this.getImports()
|
||||
)) {
|
||||
s.overwrite(begin, end, "");
|
||||
targets.push(importEntry);
|
||||
}
|
||||
if (!targets.length)
|
||||
return s;
|
||||
s.replace(contextText, "");
|
||||
for (const addon of this.addons) {
|
||||
if (addon === self)
|
||||
continue;
|
||||
targets = await addon.injectImportsResolved?.call(this, targets, s, id) ?? targets;
|
||||
}
|
||||
let injection = stringifyImports(targets);
|
||||
for (const addon of this.addons) {
|
||||
if (addon === self)
|
||||
continue;
|
||||
injection = await addon.injectImportsStringified?.call(this, injection, targets, s, id) ?? injection;
|
||||
}
|
||||
s.prepend(injection);
|
||||
return s;
|
||||
},
|
||||
async declaration(dts, options2) {
|
||||
const directivesMap = await this.getImports().then((imports) => {
|
||||
return imports.filter(isDirective).reduce((acc, i) => {
|
||||
if (i.type || i.dtsDisabled)
|
||||
return acc;
|
||||
let name;
|
||||
if (i.name === "default" && (i.as === "default" || !i.as)) {
|
||||
const file = node_path.basename(i.from);
|
||||
const idx = file.indexOf(".");
|
||||
name = idx > -1 ? file.slice(0, idx) : file;
|
||||
} else {
|
||||
name = i.as ?? i.name;
|
||||
}
|
||||
name = name[0] === "v" ? scule.camelCase(name) : scule.camelCase(`v-${name}`);
|
||||
if (!acc.has(name)) {
|
||||
acc.set(name, i);
|
||||
}
|
||||
return acc;
|
||||
}, /* @__PURE__ */ new Map());
|
||||
});
|
||||
if (!directivesMap.size)
|
||||
return dts;
|
||||
const directives = Array.from(directivesMap.entries()).map(([name, i]) => ` ${name}: typeof import('${options2?.resolvePath?.(i) || i.from}')['${i.name}']`).sort().join("\n");
|
||||
return `${dts}
|
||||
// for vue directives auto import
|
||||
declare module 'vue' {
|
||||
interface ComponentCustomProperties {
|
||||
${directives}
|
||||
}
|
||||
interface GlobalDirectives {
|
||||
${directives}
|
||||
}
|
||||
}`;
|
||||
}
|
||||
};
|
||||
return self;
|
||||
}
|
||||
function resolvePath(cwd, path) {
|
||||
return path[0] === "." ? pathe.resolve(cwd, path) : path;
|
||||
}
|
||||
function normalizePath(cwd, path) {
|
||||
return resolvePath(cwd, path).replace(/\\/g, "/");
|
||||
}
|
||||
async function* findDirectives(isDirective, regexArray, importsPromise) {
|
||||
const imports = (await importsPromise).filter(isDirective);
|
||||
if (!imports.length)
|
||||
return;
|
||||
const symbols = regexArray.reduce((acc, regex) => {
|
||||
const [all, symbol, resolveDirectiveName] = regex;
|
||||
if (acc.has(symbol))
|
||||
return acc;
|
||||
acc.set(symbol, [
|
||||
regex.index,
|
||||
regex.index + all.length,
|
||||
scule.kebabCase(resolveDirectiveName)
|
||||
]);
|
||||
return acc;
|
||||
}, /* @__PURE__ */ new Map());
|
||||
for (const [symbol, data] of symbols.entries()) {
|
||||
yield* findDirective(imports, symbol, data);
|
||||
}
|
||||
}
|
||||
function* findDirective(imports, symbol, [begin, end, importName]) {
|
||||
let resolvedName;
|
||||
for (const i of imports) {
|
||||
if (i.name === "default" && (i.as === "default" || !i.as)) {
|
||||
const file = node_path.basename(i.from);
|
||||
const idx = file.indexOf(".");
|
||||
resolvedName = scule.kebabCase(idx > -1 ? file.slice(0, idx) : file);
|
||||
} else {
|
||||
resolvedName = scule.kebabCase(i.as ?? i.name);
|
||||
}
|
||||
if (resolvedName[0] === "v") {
|
||||
resolvedName = resolvedName.slice(resolvedName[1] === "-" ? 2 : 1);
|
||||
}
|
||||
if (resolvedName === importName) {
|
||||
yield [
|
||||
begin,
|
||||
end,
|
||||
{ ...i, name: i.name, as: symbol }
|
||||
];
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.VUE_DIRECTIVES_NAME = VUE_DIRECTIVES_NAME;
|
||||
exports.VUE_TEMPLATE_NAME = VUE_TEMPLATE_NAME;
|
||||
exports.addImportToCode = addImportToCode;
|
||||
exports.dedupeImports = dedupeImports;
|
||||
exports.defineUnimportPreset = defineUnimportPreset;
|
||||
exports.excludeRE = excludeRE;
|
||||
exports.getMagicString = getMagicString;
|
||||
exports.getString = getString;
|
||||
exports.importAsRE = importAsRE;
|
||||
exports.matchRE = matchRE;
|
||||
exports.normalizeImports = normalizeImports;
|
||||
exports.resolveIdAbsolute = resolveIdAbsolute;
|
||||
exports.separatorRE = separatorRE;
|
||||
exports.stringifyImports = stringifyImports;
|
||||
exports.stripCommentsAndStrings = stripCommentsAndStrings;
|
||||
exports.stripFileExtension = stripFileExtension;
|
||||
exports.toExports = toExports;
|
||||
exports.toImports = toImports;
|
||||
exports.toTypeDeclarationFile = toTypeDeclarationFile;
|
||||
exports.toTypeDeclarationItems = toTypeDeclarationItems;
|
||||
exports.toTypeReExports = toTypeReExports;
|
||||
exports.vueDirectivesAddon = vueDirectivesAddon;
|
||||
exports.vueTemplateAddon = vueTemplateAddon;
|
||||
1455
web/admin-spa/node_modules/unimport/dist/shared/unimport.Ww9aF1N_.mjs
generated
vendored
1455
web/admin-spa/node_modules/unimport/dist/shared/unimport.Ww9aF1N_.mjs
generated
vendored
File diff suppressed because it is too large
Load Diff
72
web/admin-spa/node_modules/unimport/dist/unplugin.cjs
generated
vendored
72
web/admin-spa/node_modules/unimport/dist/unplugin.cjs
generated
vendored
@@ -1,72 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
const node_fs = require('node:fs');
|
||||
const pluginutils = require('@rollup/pluginutils');
|
||||
const MagicString = require('magic-string');
|
||||
const unplugin$1 = require('unplugin');
|
||||
const context = require('./shared/unimport.D6_N7ILk.cjs');
|
||||
require('./shared/unimport.MMUMmZ45.cjs');
|
||||
require('node:path');
|
||||
require('node:process');
|
||||
require('pathe');
|
||||
require('scule');
|
||||
require('mlly');
|
||||
require('strip-literal');
|
||||
require('node:fs/promises');
|
||||
require('node:url');
|
||||
require('fast-glob');
|
||||
require('picomatch');
|
||||
require('node:os');
|
||||
require('pkg-types');
|
||||
require('local-pkg');
|
||||
|
||||
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e.default : e; }
|
||||
|
||||
const MagicString__default = /*#__PURE__*/_interopDefaultCompat(MagicString);
|
||||
|
||||
const defaultIncludes = [/\.[jt]sx?$/, /\.vue$/, /\.vue\?vue/, /\.svelte$/];
|
||||
const defaultExcludes = [/[\\/]node_modules[\\/]/, /[\\/]\.git[\\/]/];
|
||||
function toArray(x) {
|
||||
return x == null ? [] : Array.isArray(x) ? x : [x];
|
||||
}
|
||||
const unplugin = unplugin$1.createUnplugin((options = {}) => {
|
||||
const ctx = context.createUnimport(options);
|
||||
const filter = pluginutils.createFilter(
|
||||
toArray(options.include || []).length ? options.include : defaultIncludes,
|
||||
options.exclude || defaultExcludes
|
||||
);
|
||||
const dts = options.dts === true ? "unimport.d.ts" : options.dts;
|
||||
const {
|
||||
autoImport = true
|
||||
} = options;
|
||||
return {
|
||||
name: "unimport",
|
||||
enforce: "post",
|
||||
transformInclude(id) {
|
||||
return filter(id);
|
||||
},
|
||||
async transform(code, id) {
|
||||
const s = new MagicString__default(code);
|
||||
await ctx.injectImports(s, id, {
|
||||
autoImport
|
||||
});
|
||||
if (!s.hasChanged())
|
||||
return;
|
||||
return {
|
||||
code: s.toString(),
|
||||
map: s.generateMap()
|
||||
};
|
||||
},
|
||||
async buildStart() {
|
||||
await ctx.init();
|
||||
if (dts)
|
||||
return node_fs.promises.writeFile(dts, await ctx.generateTypeDeclarations(), "utf-8");
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
exports.default = unplugin;
|
||||
exports.defaultExcludes = defaultExcludes;
|
||||
exports.defaultIncludes = defaultIncludes;
|
||||
23
web/admin-spa/node_modules/unimport/dist/unplugin.d.cts
generated
vendored
23
web/admin-spa/node_modules/unimport/dist/unplugin.d.cts
generated
vendored
@@ -1,23 +0,0 @@
|
||||
import * as unplugin from 'unplugin';
|
||||
import { FilterPattern } from '@rollup/pluginutils';
|
||||
import { U as UnimportOptions } from './shared/unimport.CaVRR9SH.cjs';
|
||||
import 'magic-string';
|
||||
import 'mlly';
|
||||
|
||||
interface UnimportPluginOptions extends UnimportOptions {
|
||||
include: FilterPattern;
|
||||
exclude: FilterPattern;
|
||||
dts: boolean | string;
|
||||
/**
|
||||
* Enable implicit auto import.
|
||||
* Generate global TypeScript definitions.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
autoImport?: boolean;
|
||||
}
|
||||
declare const defaultIncludes: RegExp[];
|
||||
declare const defaultExcludes: RegExp[];
|
||||
declare const _default: unplugin.UnpluginInstance<Partial<UnimportPluginOptions>, boolean>;
|
||||
|
||||
export { type UnimportPluginOptions, _default as default, defaultExcludes, defaultIncludes };
|
||||
23
web/admin-spa/node_modules/unimport/dist/unplugin.d.mts
generated
vendored
23
web/admin-spa/node_modules/unimport/dist/unplugin.d.mts
generated
vendored
@@ -1,23 +0,0 @@
|
||||
import * as unplugin from 'unplugin';
|
||||
import { FilterPattern } from '@rollup/pluginutils';
|
||||
import { U as UnimportOptions } from './shared/unimport.CaVRR9SH.mjs';
|
||||
import 'magic-string';
|
||||
import 'mlly';
|
||||
|
||||
interface UnimportPluginOptions extends UnimportOptions {
|
||||
include: FilterPattern;
|
||||
exclude: FilterPattern;
|
||||
dts: boolean | string;
|
||||
/**
|
||||
* Enable implicit auto import.
|
||||
* Generate global TypeScript definitions.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
autoImport?: boolean;
|
||||
}
|
||||
declare const defaultIncludes: RegExp[];
|
||||
declare const defaultExcludes: RegExp[];
|
||||
declare const _default: unplugin.UnpluginInstance<Partial<UnimportPluginOptions>, boolean>;
|
||||
|
||||
export { type UnimportPluginOptions, _default as default, defaultExcludes, defaultIncludes };
|
||||
23
web/admin-spa/node_modules/unimport/dist/unplugin.d.ts
generated
vendored
23
web/admin-spa/node_modules/unimport/dist/unplugin.d.ts
generated
vendored
@@ -1,23 +0,0 @@
|
||||
import * as unplugin from 'unplugin';
|
||||
import { FilterPattern } from '@rollup/pluginutils';
|
||||
import { U as UnimportOptions } from './shared/unimport.CaVRR9SH.js';
|
||||
import 'magic-string';
|
||||
import 'mlly';
|
||||
|
||||
interface UnimportPluginOptions extends UnimportOptions {
|
||||
include: FilterPattern;
|
||||
exclude: FilterPattern;
|
||||
dts: boolean | string;
|
||||
/**
|
||||
* Enable implicit auto import.
|
||||
* Generate global TypeScript definitions.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
autoImport?: boolean;
|
||||
}
|
||||
declare const defaultIncludes: RegExp[];
|
||||
declare const defaultExcludes: RegExp[];
|
||||
declare const _default: unplugin.UnpluginInstance<Partial<UnimportPluginOptions>, boolean>;
|
||||
|
||||
export { type UnimportPluginOptions, _default as default, defaultExcludes, defaultIncludes };
|
||||
62
web/admin-spa/node_modules/unimport/dist/unplugin.mjs
generated
vendored
62
web/admin-spa/node_modules/unimport/dist/unplugin.mjs
generated
vendored
@@ -1,62 +0,0 @@
|
||||
import { promises } from 'node:fs';
|
||||
import { createFilter } from '@rollup/pluginutils';
|
||||
import MagicString from 'magic-string';
|
||||
import { createUnplugin } from 'unplugin';
|
||||
import { c as createUnimport } from './shared/unimport.Ww9aF1N_.mjs';
|
||||
import './shared/unimport.0aitavbJ.mjs';
|
||||
import 'node:path';
|
||||
import 'node:process';
|
||||
import 'pathe';
|
||||
import 'scule';
|
||||
import 'mlly';
|
||||
import 'strip-literal';
|
||||
import 'node:fs/promises';
|
||||
import 'node:url';
|
||||
import 'fast-glob';
|
||||
import 'picomatch';
|
||||
import 'node:os';
|
||||
import 'pkg-types';
|
||||
import 'local-pkg';
|
||||
|
||||
const defaultIncludes = [/\.[jt]sx?$/, /\.vue$/, /\.vue\?vue/, /\.svelte$/];
|
||||
const defaultExcludes = [/[\\/]node_modules[\\/]/, /[\\/]\.git[\\/]/];
|
||||
function toArray(x) {
|
||||
return x == null ? [] : Array.isArray(x) ? x : [x];
|
||||
}
|
||||
const unplugin = createUnplugin((options = {}) => {
|
||||
const ctx = createUnimport(options);
|
||||
const filter = createFilter(
|
||||
toArray(options.include || []).length ? options.include : defaultIncludes,
|
||||
options.exclude || defaultExcludes
|
||||
);
|
||||
const dts = options.dts === true ? "unimport.d.ts" : options.dts;
|
||||
const {
|
||||
autoImport = true
|
||||
} = options;
|
||||
return {
|
||||
name: "unimport",
|
||||
enforce: "post",
|
||||
transformInclude(id) {
|
||||
return filter(id);
|
||||
},
|
||||
async transform(code, id) {
|
||||
const s = new MagicString(code);
|
||||
await ctx.injectImports(s, id, {
|
||||
autoImport
|
||||
});
|
||||
if (!s.hasChanged())
|
||||
return;
|
||||
return {
|
||||
code: s.toString(),
|
||||
map: s.generateMap()
|
||||
};
|
||||
},
|
||||
async buildStart() {
|
||||
await ctx.init();
|
||||
if (dts)
|
||||
return promises.writeFile(dts, await ctx.generateTypeDeclarations(), "utf-8");
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
export { unplugin as default, defaultExcludes, defaultIncludes };
|
||||
138
web/admin-spa/node_modules/unimport/node_modules/confbox/LICENSE
generated
vendored
138
web/admin-spa/node_modules/unimport/node_modules/confbox/LICENSE
generated
vendored
@@ -1,138 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Pooya Parsa <pooya@pi0.io>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
js-yaml: https://github.com/nodeca/js-yaml/tree/master
|
||||
|
||||
(The MIT License)
|
||||
|
||||
Copyright (C) 2011-2015 by Vitaly Puzrin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
smol-toml: https://github.com/squirrelchat/smol-toml/blob/mistress/LICENSE
|
||||
|
||||
Copyright (c) Squirrel Chat et al., All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
3. Neither the name of the copyright holder nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software without
|
||||
specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
---
|
||||
|
||||
jsonc-parser: https://github.com/microsoft/node-jsonc-parser/blob/main/LICENSE.md
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Microsoft
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
json5: https://github.com/json5/json5/blob/main/LICENSE.md
|
||||
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2012-2018 Aseem Kishore, and others (https://github.com/json5/json5/graphs/contributors)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
detect-indent: https://github.com/sindresorhus/detect-indent/blob/main/license
|
||||
|
||||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
ini: https://github.com/npm/ini
|
||||
|
||||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
191
web/admin-spa/node_modules/unimport/node_modules/confbox/README.md
generated
vendored
191
web/admin-spa/node_modules/unimport/node_modules/confbox/README.md
generated
vendored
@@ -1,191 +0,0 @@
|
||||
# confbox
|
||||
|
||||
<!-- automd:badges color=yellow bundlephobia packagephobia -->
|
||||
|
||||
[](https://npmjs.com/package/confbox)
|
||||
[](https://npm.chart.dev/confbox)
|
||||
[](https://bundlephobia.com/package/confbox)
|
||||
[](https://packagephobia.com/result?p=confbox)
|
||||
|
||||
<!-- /automd -->
|
||||
|
||||
Parsing and serialization utils for [YAML](https://yaml.org/) ([js-yaml](https://github.com/nodeca/js-yaml)), [TOML](https://toml.io/) ([smol-toml](https://github.com/squirrelchat/smol-toml)), [JSONC](https://github.com/microsoft/node-jsonc-parser) ([jsonc-parser](https://github.com/microsoft/node-jsonc-parser)), [JSON5](https://json5.org/) ([json5](https://github.com/json5/json5)), [INI](https://en.wikipedia.org/wiki/INI_file) ([ini](https://www.npmjs.com/package/ini)) and [JSON](https://www.json.org/json-en.html).
|
||||
|
||||
✨ Zero dependency and tree-shakable
|
||||
|
||||
✨ Types exported out of the box
|
||||
|
||||
✨ Preserves code style (indentation and whitespace)
|
||||
|
||||
> [!TIP]
|
||||
> Use [unjs/c12](https://github.com/unjs/c12) for a full featured configuration loader!
|
||||
|
||||
## Usage
|
||||
|
||||
Install package:
|
||||
|
||||
<!-- automd:pm-i no-version -->
|
||||
|
||||
```sh
|
||||
# ✨ Auto-detect
|
||||
npx nypm install confbox
|
||||
|
||||
# npm
|
||||
npm install confbox
|
||||
|
||||
# yarn
|
||||
yarn add confbox
|
||||
|
||||
# pnpm
|
||||
pnpm install confbox
|
||||
|
||||
# bun
|
||||
bun install confbox
|
||||
|
||||
# deno
|
||||
deno install confbox
|
||||
```
|
||||
|
||||
<!-- /automd -->
|
||||
|
||||
Import:
|
||||
|
||||
<!-- automd:jsimport cdn src="./src/index.ts" -->
|
||||
|
||||
**ESM** (Node.js, Bun, Deno)
|
||||
|
||||
```js
|
||||
import {
|
||||
parseJSON5,
|
||||
stringifyJSON5,
|
||||
parseJSONC,
|
||||
stringifyJSONC,
|
||||
parseYAML,
|
||||
stringifyYAML,
|
||||
parseJSON,
|
||||
stringifyJSON,
|
||||
parseTOML,
|
||||
stringifyTOML,
|
||||
parseINI,
|
||||
stringifyINI,
|
||||
} from "confbox";
|
||||
```
|
||||
|
||||
**CDN** (Deno, Bun and Browsers)
|
||||
|
||||
```js
|
||||
import {
|
||||
parseJSON5,
|
||||
stringifyJSON5,
|
||||
parseJSONC,
|
||||
stringifyJSONC,
|
||||
parseYAML,
|
||||
stringifyYAML,
|
||||
parseJSON,
|
||||
stringifyJSON,
|
||||
parseTOML,
|
||||
stringifyTOML,
|
||||
parseINI,
|
||||
stringifyINI,
|
||||
} from "https://esm.sh/confbox";
|
||||
```
|
||||
|
||||
<!-- /automd -->
|
||||
|
||||
<!-- automd:jsdocs src="./src/index" -->
|
||||
|
||||
### `parseINI(text, options?)`
|
||||
|
||||
Converts an [INI](https://www.ini.org/ini-en.html) string into an object.
|
||||
|
||||
**Note:** Style and indentation are not preserved currently.
|
||||
|
||||
### `parseJSON(text, options?)`
|
||||
|
||||
Converts a [JSON](https://www.json.org/json-en.html) string into an object.
|
||||
|
||||
Indentation status is auto-detected and preserved when stringifying back using `stringifyJSON`
|
||||
|
||||
### `parseJSON5(text, options?)`
|
||||
|
||||
Converts a [JSON5](https://json5.org/) string into an object.
|
||||
|
||||
### `parseJSONC(text, options?)`
|
||||
|
||||
Converts a [JSONC](https://github.com/microsoft/node-jsonc-parser) string into an object.
|
||||
|
||||
### `parseTOML(text)`
|
||||
|
||||
Converts a [TOML](https://toml.io/) string into an object.
|
||||
|
||||
### `parseYAML(text, options?)`
|
||||
|
||||
Converts a [YAML](https://yaml.org/) string into an object.
|
||||
|
||||
### `stringifyINI(value, options?)`
|
||||
|
||||
Converts a JavaScript value to an [INI](https://www.ini.org/ini-en.html) string.
|
||||
|
||||
**Note:** Style and indentation are not preserved currently.
|
||||
|
||||
### `stringifyJSON(value, options?)`
|
||||
|
||||
Converts a JavaScript value to a [JSON](https://www.json.org/json-en.html) string.
|
||||
|
||||
Indentation status is auto detected and preserved when using value from parseJSON.
|
||||
|
||||
### `stringifyJSON5(value, options?)`
|
||||
|
||||
Converts a JavaScript value to a [JSON5](https://json5.org/) string.
|
||||
|
||||
### `stringifyJSONC(value, options?)`
|
||||
|
||||
Converts a JavaScript value to a [JSONC](https://github.com/microsoft/node-jsonc-parser) string.
|
||||
|
||||
### `stringifyTOML(value)`
|
||||
|
||||
Converts a JavaScript value to a [TOML](https://toml.io/) string.
|
||||
|
||||
### `stringifyYAML(value, options?)`
|
||||
|
||||
Converts a JavaScript value to a [YAML](https://yaml.org/) string.
|
||||
|
||||
<!-- /automd -->
|
||||
|
||||
<!-- automd:fetch url="gh:unjs/.github/main/snippets/readme-contrib-node-pnpm.md" -->
|
||||
|
||||
## Contribution
|
||||
|
||||
<details>
|
||||
<summary>Local development</summary>
|
||||
|
||||
- Clone this repository
|
||||
- Install the latest LTS version of [Node.js](https://nodejs.org/en/)
|
||||
- Enable [Corepack](https://github.com/nodejs/corepack) using `corepack enable`
|
||||
- Install dependencies using `pnpm install`
|
||||
- Run tests using `pnpm dev` or `pnpm test`
|
||||
|
||||
</details>
|
||||
|
||||
<!-- /automd -->
|
||||
|
||||
## License
|
||||
|
||||
<!-- automd:contributors license=MIT author=pi0 -->
|
||||
|
||||
Published under the [MIT](https://github.com/unjs/confbox/blob/main/LICENSE) license.
|
||||
Made by [@pi0](https://github.com/pi0) and [community](https://github.com/unjs/confbox/graphs/contributors) 💛
|
||||
<br><br>
|
||||
<a href="https://github.com/unjs/confbox/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=unjs/confbox" />
|
||||
</a>
|
||||
|
||||
<!-- /automd -->
|
||||
|
||||
<!-- automd:with-automd -->
|
||||
|
||||
---
|
||||
|
||||
_🤖 auto updated with [automd](https://automd.unjs.io)_
|
||||
|
||||
<!-- /automd -->
|
||||
34
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/index.d.mts
generated
vendored
34
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/index.d.mts
generated
vendored
@@ -1,34 +0,0 @@
|
||||
export { JSON5ParseOptions, JSON5StringifyOptions, parseJSON5, stringifyJSON5 } from './json5.mjs';
|
||||
export { JSONCParseError, JSONCParseOptions, parseJSONC, stringifyJSONC } from './jsonc.mjs';
|
||||
export { YAMLParseOptions, YAMLStringifyOptions, parseYAML, stringifyYAML } from './yaml.mjs';
|
||||
import { F as FormatOptions } from './shared/confbox.B202Uz6F.mjs';
|
||||
export { parseTOML, stringifyTOML } from './toml.mjs';
|
||||
export { INIParseOptions, INIStringifyOptions, parseINI, stringifyINI } from './ini.mjs';
|
||||
|
||||
/**
|
||||
* Converts a [JSON](https://www.json.org/json-en.html) string into an object.
|
||||
*
|
||||
* Indentation status is auto-detected and preserved when stringifying back using `stringifyJSON`
|
||||
*/
|
||||
declare function parseJSON<T = unknown>(text: string, options?: JSONParseOptions): T;
|
||||
/**
|
||||
* Converts a JavaScript value to a [JSON](https://www.json.org/json-en.html) string.
|
||||
*
|
||||
* Indentation status is auto detected and preserved when using value from parseJSON.
|
||||
*/
|
||||
declare function stringifyJSON(value: any, options?: JSONStringifyOptions): string;
|
||||
interface JSONParseOptions extends FormatOptions {
|
||||
/**
|
||||
* A function that transforms the results. This function is called for each member of the object.
|
||||
*/
|
||||
reviver?: (this: any, key: string, value: any) => any;
|
||||
}
|
||||
interface JSONStringifyOptions extends FormatOptions {
|
||||
/**
|
||||
* A function that transforms the results. This function is called for each member of the object.
|
||||
*/
|
||||
replacer?: (this: any, key: string, value: any) => any;
|
||||
}
|
||||
|
||||
export { parseJSON, stringifyJSON };
|
||||
export type { JSONParseOptions, JSONStringifyOptions };
|
||||
1
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/index.mjs
generated
vendored
1
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/index.mjs
generated
vendored
@@ -1 +0,0 @@
|
||||
export{parseJSON5,stringifyJSON5}from"./json5.mjs";export{a as parseJSON,p as parseJSONC,b as stringifyJSON,s as stringifyJSONC}from"./shared/confbox.DnMsyigM.mjs";export{parseYAML,stringifyYAML}from"./yaml.mjs";export{parseTOML,stringifyTOML}from"./toml.mjs";export{parseINI,stringifyINI}from"./ini.mjs";import"./shared/confbox.DA7CpUDY.mjs";
|
||||
62
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/ini.d.mts
generated
vendored
62
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/ini.d.mts
generated
vendored
@@ -1,62 +0,0 @@
|
||||
/**
|
||||
* Converts an [INI](https://www.ini.org/ini-en.html) string into an object.
|
||||
*
|
||||
* **Note:** Style and indentation are not preserved currently.
|
||||
*/
|
||||
declare function parseINI<T = unknown>(text: string, options?: INIParseOptions): T;
|
||||
/**
|
||||
* Converts a JavaScript value to an [INI](https://www.ini.org/ini-en.html) string.
|
||||
*
|
||||
* **Note:** Style and indentation are not preserved currently.
|
||||
*/
|
||||
declare function stringifyINI(value: any, options?: INIStringifyOptions): string;
|
||||
interface INIParseOptions {
|
||||
/**
|
||||
* Whether to append `[]` to array keys.
|
||||
*
|
||||
* Some parsers treat duplicate names by themselves as arrays.
|
||||
*/
|
||||
bracketedArray?: boolean;
|
||||
}
|
||||
interface INIStringifyOptions {
|
||||
/**
|
||||
* Whether to insert spaces before & after `=` character.
|
||||
* Enabled by default.
|
||||
*/
|
||||
whitespace?: boolean;
|
||||
/**
|
||||
* Whether to align the `=` character for each section.
|
||||
*/
|
||||
align?: boolean;
|
||||
/**
|
||||
* Identifier to use for global items
|
||||
* and to prepend to all other sections.
|
||||
*/
|
||||
section?: string;
|
||||
/**
|
||||
* Whether to sort all sections & their keys alphabetically.
|
||||
*/
|
||||
sort?: boolean;
|
||||
/**
|
||||
* Whether to insert a newline after each section header.
|
||||
*/
|
||||
newline?: boolean;
|
||||
/**
|
||||
* Which platforms line-endings should be used.
|
||||
*
|
||||
* win32 -> CR+LF
|
||||
* other -> LF
|
||||
*
|
||||
* Default is the current platform
|
||||
*/
|
||||
platform?: string;
|
||||
/**
|
||||
* Whether to append `[]` to array keys.
|
||||
*
|
||||
* Some parsers treat duplicate names by themselves as arrays
|
||||
*/
|
||||
bracketedArray?: boolean;
|
||||
}
|
||||
|
||||
export { parseINI, stringifyINI };
|
||||
export type { INIParseOptions, INIStringifyOptions };
|
||||
3
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/ini.mjs
generated
vendored
3
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/ini.mjs
generated
vendored
@@ -1,3 +0,0 @@
|
||||
var O,x;function j(){if(x)return O;x=1;const{hasOwnProperty:y}=Object.prototype,d=(e,t={})=>{typeof t=="string"&&(t={section:t}),t.align=t.align===!0,t.newline=t.newline===!0,t.sort=t.sort===!0,t.whitespace=t.whitespace===!0||t.align===!0,t.platform=t.platform||typeof process<"u"&&process.platform,t.bracketedArray=t.bracketedArray!==!1;const s=t.platform==="win32"?`\r
|
||||
`:`
|
||||
`,r=t.whitespace?" = ":"=",c=[],o=t.sort?Object.keys(e).sort():Object.keys(e);let g=0;t.align&&(g=h(o.filter(n=>e[n]===null||Array.isArray(e[n])||typeof e[n]!="object").map(n=>Array.isArray(e[n])?`${n}[]`:n).concat([""]).reduce((n,i)=>h(n).length>=h(i).length?n:i)).length);let l="";const m=t.bracketedArray?"[]":"";for(const n of o){const i=e[n];if(i&&Array.isArray(i))for(const f of i)l+=h(`${n}${m}`).padEnd(g," ")+r+h(f)+s;else i&&typeof i=="object"?c.push(n):l+=h(n).padEnd(g," ")+r+h(i)+s}t.section&&l.length&&(l="["+h(t.section)+"]"+(t.newline?s+s:s)+l);for(const n of c){const i=k(n,".").join("\\."),f=(t.section?t.section+".":"")+i,u=d(e[n],{...t,section:f});l.length&&u.length&&(l+=s),l+=u}return l};function k(e,t){var s=0,r=0,c=0,o=[];do if(c=e.indexOf(t,s),c!==-1){if(s=c+t.length,c>0&&e[c-1]==="\\")continue;o.push(e.slice(r,c)),r=c+t.length}while(c!==-1);return o.push(e.slice(r)),o}const w=(e,t={})=>{t.bracketedArray=t.bracketedArray!==!1;const s=Object.create(null);let r=s,c=null;const o=/^\[([^\]]*)\]\s*$|^([^=]+)(=(.*))?$/i,g=e.split(/[\r\n]+/g),l={};for(const n of g){if(!n||n.match(/^\s*[;#]/)||n.match(/^\s*$/))continue;const i=n.match(o);if(!i)continue;if(i[1]!==void 0){if(c=A(i[1]),c==="__proto__"){r=Object.create(null);continue}r=s[c]=s[c]||Object.create(null);continue}const f=A(i[2]);let u;t.bracketedArray?u=f.length>2&&f.slice(-2)==="[]":(l[f]=(l?.[f]||0)+1,u=l[f]>1);const a=u&&f.endsWith("[]")?f.slice(0,-2):f;if(a==="__proto__")continue;const p=i[3]?A(i[4]):!0,b=p==="true"||p==="false"||p==="null"?JSON.parse(p):p;u&&(y.call(r,a)?Array.isArray(r[a])||(r[a]=[r[a]]):r[a]=[]),Array.isArray(r[a])?r[a].push(b):r[a]=b}const m=[];for(const n of Object.keys(s)){if(!y.call(s,n)||typeof s[n]!="object"||Array.isArray(s[n]))continue;const i=k(n,".");r=s;const f=i.pop(),u=f.replace(/\\\./g,".");for(const a of i)a!=="__proto__"&&((!y.call(r,a)||typeof r[a]!="object")&&(r[a]=Object.create(null)),r=r[a]);r===s&&u===f||(r[u]=s[n],m.push(n))}for(const n of m)delete s[n];return s},_=e=>e.startsWith('"')&&e.endsWith('"')||e.startsWith("'")&&e.endsWith("'"),h=e=>typeof e!="string"||e.match(/[=\r\n]/)||e.match(/^\[/)||e.length>1&&_(e)||e!==e.trim()?JSON.stringify(e):e.split(";").join("\\;").split("#").join("\\#"),A=e=>{if(e=(e||"").trim(),_(e)){e.charAt(0)==="'"&&(e=e.slice(1,-1));try{e=JSON.parse(e)}catch{}}else{let t=!1,s="";for(let r=0,c=e.length;r<c;r++){const o=e.charAt(r);if(t)"\\;#".indexOf(o)!==-1?s+=o:s+="\\"+o,t=!1;else{if(";#".indexOf(o)!==-1)break;o==="\\"?t=!0:s+=o}}return t&&(s+="\\"),s.trim()}return e};return O={parse:w,decode:w,stringify:d,encode:d,safe:h,unsafe:A},O}var I=j();function S(y,d){return I.parse(y,d)}function $(y,d){return I.stringify(y,{whitespace:!0,...d})}export{S as parseINI,$ as stringifyINI};
|
||||
59
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/json5.d.mts
generated
vendored
59
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/json5.d.mts
generated
vendored
@@ -1,59 +0,0 @@
|
||||
import { F as FormatOptions } from './shared/confbox.B202Uz6F.mjs';
|
||||
|
||||
/**
|
||||
* Converts a [JSON5](https://json5.org/) string into an object.
|
||||
*
|
||||
* @template T The type of the return value.
|
||||
* @param text The string to parse as JSON5.
|
||||
* @param options Parsing options.
|
||||
* @returns The JavaScript value converted from the JSON5 string.
|
||||
*/
|
||||
declare function parseJSON5<T = unknown>(text: string, options?: JSON5ParseOptions): T;
|
||||
/**
|
||||
* Converts a JavaScript value to a [JSON5](https://json5.org/) string.
|
||||
*
|
||||
* @param value
|
||||
* @param options
|
||||
* @returns The JSON string converted from the JavaScript value.
|
||||
*/
|
||||
declare function stringifyJSON5(value: any, options?: JSON5StringifyOptions): string;
|
||||
interface JSON5ParseOptions extends FormatOptions {
|
||||
/**
|
||||
* A function that alters the behavior of the parsing process, or an array of
|
||||
* String and Number objects that serve as a allowlist for selecting/filtering
|
||||
* the properties of the value object to be included in the resulting
|
||||
* JavaScript object. If this value is null or not provided, all properties of
|
||||
* the object are included in the resulting JavaScript object.
|
||||
*/
|
||||
reviver?: (this: any, key: string, value: any) => any;
|
||||
}
|
||||
interface JSON5StringifyOptions extends FormatOptions {
|
||||
/**
|
||||
* A function that alters the behavior of the stringification process, or an
|
||||
* array of String and Number objects that serve as a allowlist for
|
||||
* selecting/filtering the properties of the value object to be included in
|
||||
* the JSON5 string. If this value is null or not provided, all properties
|
||||
* of the object are included in the resulting JSON5 string.
|
||||
*/
|
||||
replacer?: ((this: any, key: string, value: any) => any) | null;
|
||||
/**
|
||||
* A String or Number object that's used to insert white space into the
|
||||
* output JSON5 string for readability purposes. If this is a Number, it
|
||||
* indicates the number of space characters to use as white space; this
|
||||
* number is capped at 10 (if it is greater, the value is just 10). Values
|
||||
* less than 1 indicate that no space should be used. If this is a String,
|
||||
* the string (or the first 10 characters of the string, if it's longer than
|
||||
* that) is used as white space. If this parameter is not provided (or is
|
||||
* null), no white space is used. If white space is used, trailing commas
|
||||
* will be used in objects and arrays.
|
||||
*/
|
||||
space?: string | number | null;
|
||||
/**
|
||||
* A String representing the quote character to use when serializing
|
||||
* strings.
|
||||
*/
|
||||
quote?: string | null;
|
||||
}
|
||||
|
||||
export { parseJSON5, stringifyJSON5 };
|
||||
export type { JSON5ParseOptions, JSON5StringifyOptions };
|
||||
14
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/json5.mjs
generated
vendored
14
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/json5.mjs
generated
vendored
File diff suppressed because one or more lines are too long
42
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/jsonc.d.mts
generated
vendored
@@ -1,42 +0,0 @@
import { F as FormatOptions } from './shared/confbox.B202Uz6F.mjs';

/**
 *
 * Converts a [JSONC](https://github.com/microsoft/node-jsonc-parser) string into an object.
 *
 * @NOTE On invalid input, the parser tries to be as fault tolerant as possible, but still return a result.
 *
 * @NOTE Comments and trailing commas are not preserved after parsing.
 *
 * @template T The type of the return value.
 * @param text The string to parse as JSONC.
 * @param options Parsing options.
 * @returns The JavaScript value converted from the JSONC string.
 */
declare function parseJSONC<T = unknown>(text: string, options?: JSONCParseOptions): T;
/**
 * Converts a JavaScript value to a [JSONC](https://github.com/microsoft/node-jsonc-parser) string.
 *
 * @NOTE Comments and trailing commas are not preserved in the output.
 *
 * @param value
 * @param options
 * @returns The JSON string converted from the JavaScript value.
 */
declare function stringifyJSONC(value: any, options?: JSONCStringifyOptions): string;
interface JSONCParseOptions extends FormatOptions {
  disallowComments?: boolean;
  allowTrailingComma?: boolean;
  allowEmptyContent?: boolean;
  errors?: JSONCParseError[];
}
interface JSONCStringifyOptions extends FormatOptions {
}
interface JSONCParseError {
  error: number;
  offset: number;
  length: number;
}

export { parseJSONC, stringifyJSONC };
export type { JSONCParseError, JSONCParseOptions, JSONCStringifyOptions };
1
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/jsonc.mjs
generated
vendored
@@ -1 +0,0 @@
import"./shared/confbox.DA7CpUDY.mjs";export{p as parseJSONC,s as stringifyJSONC}from"./shared/confbox.DnMsyigM.mjs";
@@ -1,24 +0,0 @@
interface FormatOptions {
  /**
   * A String or Number object that's used to insert white space into the output JSON string for readability purposes.
   *
   * When provided, identation won't be auto detected anymore.
   */
  indent?: string | number;
  /**
   * Set to `false` to skip indentation preservation.
   */
  preserveIndentation?: boolean;
  /**
   * Set to `false` to skip whitespace preservation.
   */
  preserveWhitespace?: boolean;
  /**
   * The number of characters to sample from the start of the text.
   *
   * Default: 1024
   */
  sampleSize?: number;
}

export type { FormatOptions as F };
@@ -1 +0,0 @@
const b=/^(?:( )+|\t+)/,d="space",h="tab";function g(e,t){const n=new Map;let s=0,o,i;for(const c of e.split(/\n/g)){if(!c)continue;let f,a,l,p,r;const y=c.match(b);if(y===null)s=0,o="";else{if(f=y[0].length,a=y[1]?d:h,t&&a===d&&f===1)continue;a!==o&&(s=0),o=a,l=1,p=0;const u=f-s;if(s=f,u===0)l=0,p=1;else{const I=u>0?u:-u;i=T(a,I)}r=n.get(i),r=r===void 0?[1,0]:[r[0]+l,r[1]+p],n.set(i,r)}}return n}function T(e,t){return(e===d?"s":"t")+String(t)}function w(e){const n=e[0]==="s"?d:h,s=Number(e.slice(1));return{type:n,amount:s}}function E(e){let t,n=0,s=0;for(const[o,[i,c]]of e)(i>n||i===n&&c>s)&&(n=i,s=c,t=o);return t}function S(e,t){return(e===d?" ":" ").repeat(t)}function _(e){if(typeof e!="string")throw new TypeError("Expected a string");let t=g(e,!0);t.size===0&&(t=g(e,!1));const n=E(t);let s,o=0,i="";return n!==void 0&&({type:s,amount:o}=w(n),i=S(s,o)),{amount:o,type:s,indent:i}}const m=Symbol.for("__confbox_fmt__"),k=/^(\s+)/,v=/(\s+)$/;function x(e,t={}){const n=t.indent===void 0&&t.preserveIndentation!==!1&&e.slice(0,t?.sampleSize||1024),s=t.preserveWhitespace===!1?void 0:{start:k.exec(e)?.[0]||"",end:v.exec(e)?.[0]||""};return{sample:n,whiteSpace:s}}function N(e,t,n){!t||typeof t!="object"||Object.defineProperty(t,m,{enumerable:!1,configurable:!0,writable:!0,value:x(e,n)})}function C(e,t){if(!e||typeof e!="object"||!(m in e))return{indent:t?.indent??2,whitespace:{start:"",end:""}};const n=e[m];return{indent:t?.indent||_(n.sample||"").indent,whitespace:n.whiteSpace||{start:"",end:""}}}export{C as g,N as s};
File diff suppressed because one or more lines are too long
22
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/toml.d.mts
generated
vendored
@@ -1,22 +0,0 @@
/**
 * Converts a [TOML](https://toml.io/) string into an object.
 *
 * @NOTE Comments and indentation is not preserved after parsing.
 *
 * @template T The type of the return value.
 * @param text The TOML string to parse.
 * @returns The JavaScript value converted from the TOML string.
 */
declare function parseTOML<T = unknown>(text: string): T;
/**
 * Converts a JavaScript value to a [TOML](https://toml.io/) string.
 *
 * @NOTE Comments and indentation is not preserved in the output.
 *
 * @param value
 * @param options
 * @returns The YAML string converted from the JavaScript value.
 */
declare function stringifyTOML(value: any): string;

export { parseTOML, stringifyTOML };
239
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/toml.mjs
generated
vendored
@@ -1,239 +0,0 @@
|
||||
import{s as L,g as P}from"./shared/confbox.DA7CpUDY.mjs";/*!
|
||||
* Copyright (c) Squirrel Chat et al., All rights reserved.
|
||||
* SPDX-License-Identifier: BSD-3-Clause
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice, this
|
||||
* list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
* may be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/function v(e,n){let t=e.slice(0,n).split(/\r\n|\n|\r/g);return[t.length,t.pop().length+1]}function C(e,n,t){let l=e.split(/\r\n|\n|\r/g),r="",i=(Math.log10(n+1)|0)+1;for(let o=n-1;o<=n+1;o++){let f=l[o-1];f&&(r+=o.toString().padEnd(i," "),r+=": ",r+=f,r+=`
|
||||
`,o===n&&(r+=" ".repeat(i+t+2),r+=`^
|
||||
`))}return r}class c extends Error{line;column;codeblock;constructor(n,t){const[l,r]=v(t.toml,t.ptr),i=C(t.toml,l,r);super(`Invalid TOML document: ${n}
|
||||
|
||||
${i}`,t),this.line=l,this.column=r,this.codeblock=i}}/*!
|
||||
* Copyright (c) Squirrel Chat et al., All rights reserved.
|
||||
* SPDX-License-Identifier: BSD-3-Clause
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice, this
|
||||
* list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
* may be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/function g(e,n=0,t=e.length){let l=e.indexOf(`
|
||||
`,n);return e[l-1]==="\r"&&l--,l<=t?l:-1}function y(e,n){for(let t=n;t<e.length;t++){let l=e[t];if(l===`
|
||||
`)return t;if(l==="\r"&&e[t+1]===`
|
||||
`)return t+1;if(l<" "&&l!==" "||l==="\x7F")throw new c("control characters are not allowed in comments",{toml:e,ptr:n})}return e.length}function s(e,n,t,l){let r;for(;(r=e[n])===" "||r===" "||!t&&(r===`
|
||||
`||r==="\r"&&e[n+1]===`
|
||||
`);)n++;return l||r!=="#"?n:s(e,y(e,n),t)}function A(e,n,t,l,r=!1){if(!l)return n=g(e,n),n<0?e.length:n;for(let i=n;i<e.length;i++){let o=e[i];if(o==="#")i=g(e,i);else{if(o===t)return i+1;if(o===l)return i;if(r&&(o===`
|
||||
`||o==="\r"&&e[i+1]===`
|
||||
`))return i}}throw new c("cannot find end of structure",{toml:e,ptr:n})}function S(e,n){let t=e[n],l=t===e[n+1]&&e[n+1]===e[n+2]?e.slice(n,n+3):t;n+=l.length-1;do n=e.indexOf(l,++n);while(n>-1&&t!=="'"&&e[n-1]==="\\"&&e[n-2]!=="\\");return n>-1&&(n+=l.length,l.length>1&&(e[n]===t&&n++,e[n]===t&&n++)),n}/*!
|
||||
* Copyright (c) Squirrel Chat et al., All rights reserved.
|
||||
* SPDX-License-Identifier: BSD-3-Clause
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice, this
|
||||
* list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
* may be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/let R=/^(\d{4}-\d{2}-\d{2})?[T ]?(?:(\d{2}):\d{2}:\d{2}(?:\.\d+)?)?(Z|[-+]\d{2}:\d{2})?$/i;class w extends Date{#n=!1;#t=!1;#e=null;constructor(n){let t=!0,l=!0,r="Z";if(typeof n=="string"){let i=n.match(R);i?(i[1]||(t=!1,n=`0000-01-01T${n}`),l=!!i[2],i[2]&&+i[2]>23?n="":(r=i[3]||null,n=n.toUpperCase(),!r&&l&&(n+="Z"))):n=""}super(n),isNaN(this.getTime())||(this.#n=t,this.#t=l,this.#e=r)}isDateTime(){return this.#n&&this.#t}isLocal(){return!this.#n||!this.#t||!this.#e}isDate(){return this.#n&&!this.#t}isTime(){return this.#t&&!this.#n}isValid(){return this.#n||this.#t}toISOString(){let n=super.toISOString();if(this.isDate())return n.slice(0,10);if(this.isTime())return n.slice(11,23);if(this.#e===null)return n.slice(0,-1);if(this.#e==="Z")return n;let t=+this.#e.slice(1,3)*60+ +this.#e.slice(4,6);return t=this.#e[0]==="-"?t:-t,new Date(this.getTime()-t*6e4).toISOString().slice(0,-1)+this.#e}static wrapAsOffsetDateTime(n,t="Z"){let l=new w(n);return l.#e=t,l}static wrapAsLocalDateTime(n){let t=new w(n);return t.#e=null,t}static wrapAsLocalDate(n){let t=new w(n);return t.#t=!1,t.#e=null,t}static wrapAsLocalTime(n){let t=new w(n);return t.#n=!1,t.#e=null,t}}/*!
|
||||
* Copyright (c) Squirrel Chat et al., All rights reserved.
|
||||
* SPDX-License-Identifier: BSD-3-Clause
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice, this
|
||||
* list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
* may be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/let M=/^((0x[0-9a-fA-F](_?[0-9a-fA-F])*)|(([+-]|0[ob])?\d(_?\d)*))$/,Z=/^[+-]?\d(_?\d)*(\.\d(_?\d)*)?([eE][+-]?\d(_?\d)*)?$/,j=/^[+-]?0[0-9_]/,z=/^[0-9a-f]{4,8}$/i,I={b:"\b",t:" ",n:`
|
||||
`,f:"\f",r:"\r",'"':'"',"\\":"\\"};function $(e,n=0,t=e.length){let l=e[n]==="'",r=e[n++]===e[n]&&e[n]===e[n+1];r&&(t-=2,e[n+=2]==="\r"&&n++,e[n]===`
|
||||
`&&n++);let i=0,o,f="",a=n;for(;n<t-1;){let u=e[n++];if(u===`
|
||||
`||u==="\r"&&e[n]===`
|
||||
`){if(!r)throw new c("newlines are not allowed in strings",{toml:e,ptr:n-1})}else if(u<" "&&u!==" "||u==="\x7F")throw new c("control characters are not allowed in strings",{toml:e,ptr:n-1});if(o){if(o=!1,u==="u"||u==="U"){let d=e.slice(n,n+=u==="u"?4:8);if(!z.test(d))throw new c("invalid unicode escape",{toml:e,ptr:i});try{f+=String.fromCodePoint(parseInt(d,16))}catch{throw new c("invalid unicode escape",{toml:e,ptr:i})}}else if(r&&(u===`
|
||||
`||u===" "||u===" "||u==="\r")){if(n=s(e,n-1,!0),e[n]!==`
|
||||
`&&e[n]!=="\r")throw new c("invalid escape: only line-ending whitespace may be escaped",{toml:e,ptr:i});n=s(e,n)}else if(u in I)f+=I[u];else throw new c("unrecognized escape sequence",{toml:e,ptr:i});a=n}else!l&&u==="\\"&&(i=n-1,o=!0,f+=e.slice(a,i))}return f+e.slice(a,t-1)}function F(e,n,t){if(e==="true")return!0;if(e==="false")return!1;if(e==="-inf")return-1/0;if(e==="inf"||e==="+inf")return 1/0;if(e==="nan"||e==="+nan"||e==="-nan")return NaN;if(e==="-0")return 0;let l;if((l=M.test(e))||Z.test(e)){if(j.test(e))throw new c("leading zeroes are not allowed",{toml:n,ptr:t});let i=+e.replace(/_/g,"");if(isNaN(i))throw new c("invalid number",{toml:n,ptr:t});if(l&&!Number.isSafeInteger(i))throw new c("integer value cannot be represented losslessly",{toml:n,ptr:t});return i}let r=new w(e);if(!r.isValid())throw new c("invalid value",{toml:n,ptr:t});return r}/*!
|
||||
* Copyright (c) Squirrel Chat et al., All rights reserved.
|
||||
* SPDX-License-Identifier: BSD-3-Clause
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice, this
|
||||
* list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
* may be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/function V(e,n,t,l){let r=e.slice(n,t),i=r.indexOf("#");i>-1&&(y(e,i),r=r.slice(0,i));let o=r.trimEnd();if(!l){let f=r.indexOf(`
|
||||
`,o.length);if(f>-1)throw new c("newlines are not allowed in inline tables",{toml:e,ptr:n+f})}return[o,i]}function b(e,n,t,l){if(l===0)throw new c("document contains excessively nested structures. aborting.",{toml:e,ptr:n});let r=e[n];if(r==="["||r==="{"){let[f,a]=r==="["?U(e,n,l):K(e,n,l),u=A(e,a,",",t);if(t==="}"){let d=g(e,a,u);if(d>-1)throw new c("newlines are not allowed in inline tables",{toml:e,ptr:d})}return[f,u]}let i;if(r==='"'||r==="'"){i=S(e,n);let f=$(e,n,i);if(t){if(i=s(e,i,t!=="]"),e[i]&&e[i]!==","&&e[i]!==t&&e[i]!==`
|
||||
`&&e[i]!=="\r")throw new c("unexpected character encountered",{toml:e,ptr:i});i+=+(e[i]===",")}return[f,i]}i=A(e,n,",",t);let o=V(e,n,i-+(e[i-1]===","),t==="]");if(!o[0])throw new c("incomplete key-value declaration: no value specified",{toml:e,ptr:n});return t&&o[1]>-1&&(i=s(e,n+o[1]),i+=+(e[i]===",")),[F(o[0],e,n),i]}/*!
|
||||
* Copyright (c) Squirrel Chat et al., All rights reserved.
|
||||
* SPDX-License-Identifier: BSD-3-Clause
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice, this
|
||||
* list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
* may be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/let G=/^[a-zA-Z0-9-_]+[ \t]*$/;function x(e,n,t="="){let l=n-1,r=[],i=e.indexOf(t,n);if(i<0)throw new c("incomplete key-value: cannot find end of key",{toml:e,ptr:n});do{let o=e[n=++l];if(o!==" "&&o!==" ")if(o==='"'||o==="'"){if(o===e[n+1]&&o===e[n+2])throw new c("multiline strings are not allowed in keys",{toml:e,ptr:n});let f=S(e,n);if(f<0)throw new c("unfinished string encountered",{toml:e,ptr:n});l=e.indexOf(".",f);let a=e.slice(f,l<0||l>i?i:l),u=g(a);if(u>-1)throw new c("newlines are not allowed in keys",{toml:e,ptr:n+l+u});if(a.trimStart())throw new c("found extra tokens after the string part",{toml:e,ptr:f});if(i<f&&(i=e.indexOf(t,f),i<0))throw new c("incomplete key-value: cannot find end of key",{toml:e,ptr:n});r.push($(e,n,f))}else{l=e.indexOf(".",n);let f=e.slice(n,l<0||l>i?i:l);if(!G.test(f))throw new c("only letter, numbers, dashes and underscores are allowed in keys",{toml:e,ptr:n});r.push(f.trimEnd())}}while(l+1&&l<i);return[r,s(e,i+1,!0,!0)]}function K(e,n,t){let l={},r=new Set,i,o=0;for(n++;(i=e[n++])!=="}"&&i;){if(i===`
|
||||
`)throw new c("newlines are not allowed in inline tables",{toml:e,ptr:n-1});if(i==="#")throw new c("inline tables cannot contain comments",{toml:e,ptr:n-1});if(i===",")throw new c("expected key-value, found comma",{toml:e,ptr:n-1});if(i!==" "&&i!==" "){let f,a=l,u=!1,[d,N]=x(e,n-1);for(let m=0;m<d.length;m++){if(m&&(a=u?a[f]:a[f]={}),f=d[m],(u=Object.hasOwn(a,f))&&(typeof a[f]!="object"||r.has(a[f])))throw new c("trying to redefine an already defined value",{toml:e,ptr:n});!u&&f==="__proto__"&&Object.defineProperty(a,f,{enumerable:!0,configurable:!0,writable:!0})}if(u)throw new c("trying to redefine an already defined value",{toml:e,ptr:n});let[_,k]=b(e,N,"}",t-1);r.add(_),a[f]=_,n=k,o=e[n-1]===","?n-1:0}}if(o)throw new c("trailing commas are not allowed in inline tables",{toml:e,ptr:o});if(!i)throw new c("unfinished table encountered",{toml:e,ptr:n});return[l,n]}function U(e,n,t){let l=[],r;for(n++;(r=e[n++])!=="]"&&r;){if(r===",")throw new c("expected value, found comma",{toml:e,ptr:n-1});if(r==="#")n=y(e,n);else if(r!==" "&&r!==" "&&r!==`
|
||||
`&&r!=="\r"){let i=b(e,n-1,"]",t-1);l.push(i[0]),n=i[1]}}if(!r)throw new c("unfinished array encountered",{toml:e,ptr:n});return[l,n]}/*!
|
||||
* Copyright (c) Squirrel Chat et al., All rights reserved.
|
||||
* SPDX-License-Identifier: BSD-3-Clause
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice, this
|
||||
* list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
* may be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/function p(e,n,t,l){let r=n,i=t,o,f=!1,a;for(let u=0;u<e.length;u++){if(u){if(r=f?r[o]:r[o]={},i=(a=i[o]).c,l===0&&(a.t===1||a.t===2))return null;if(a.t===2){let d=r.length-1;r=r[d],i=i[d].c}}if(o=e[u],(f=Object.hasOwn(r,o))&&i[o]?.t===0&&i[o]?.d)return null;f||(o==="__proto__"&&(Object.defineProperty(r,o,{enumerable:!0,configurable:!0,writable:!0}),Object.defineProperty(i,o,{enumerable:!0,configurable:!0,writable:!0})),i[o]={t:u<e.length-1&&l===2?3:l,d:!1,i:0,c:{}})}if(a=i[o],a.t!==l&&!(l===1&&a.t===3)||(l===2&&(a.d||(a.d=!0,r[o]=[]),r[o].push(r={}),a.c[a.i++]=a={t:1,d:!1,i:0,c:{}}),a.d))return null;if(a.d=!0,l===1)r=f?r[o]:r[o]={};else if(l===0&&f)return null;return[o,r,a.c]}function X(e,n){let t=n?.maxDepth??1e3,l={},r={},i=l,o=r;for(let f=s(e,0);f<e.length;){if(e[f]==="["){let a=e[++f]==="[",u=x(e,f+=+a,"]");if(a){if(e[u[1]-1]!=="]")throw new c("expected end of table declaration",{toml:e,ptr:u[1]-1});u[1]++}let d=p(u[0],l,r,a?2:1);if(!d)throw new c("trying to redefine an already defined table or value",{toml:e,ptr:f});o=d[2],i=d[1],f=u[1]}else{let a=x(e,f),u=p(a[0],i,o,0);if(!u)throw new c("trying to redefine an already defined table or value",{toml:e,ptr:f});let d=b(e,a[1],void 0,t);u[1][u[0]]=d[0],f=d[1]}if(f=s(e,f,!0),e[f]&&e[f]!==`
|
||||
`&&e[f]!=="\r")throw new c("each key-value declaration must be followed by an end-of-line",{toml:e,ptr:f});f=s(e,f)}return l}/*!
|
||||
* Copyright (c) Squirrel Chat et al., All rights reserved.
|
||||
* SPDX-License-Identifier: BSD-3-Clause
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice, this
|
||||
* list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
* may be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/let D=/^[a-z0-9-_]+$/i;function h(e){let n=typeof e;if(n==="object"){if(Array.isArray(e))return"array";if(e instanceof Date)return"date"}return n}function B(e){for(let n=0;n<e.length;n++)if(h(e[n])!=="object")return!1;return e.length!=0}function E(e){return JSON.stringify(e).replace(/\x7f/g,"\\u007f")}function O(e,n,t){if(t===0)throw new Error("Could not stringify the object: maximum object depth exceeded");if(n==="number")return isNaN(e)?"nan":e===1/0?"inf":e===-1/0?"-inf":e.toString();if(n==="bigint"||n==="boolean")return e.toString();if(n==="string")return E(e);if(n==="date"){if(isNaN(e.getTime()))throw new TypeError("cannot serialize invalid date");return e.toISOString()}if(n==="object")return Y(e,t);if(n==="array")return q(e,t)}function Y(e,n){let t=Object.keys(e);if(t.length===0)return"{}";let l="{ ";for(let r=0;r<t.length;r++){let i=t[r];r&&(l+=", "),l+=D.test(i)?i:E(i),l+=" = ",l+=O(e[i],h(e[i]),n-1)}return l+" }"}function q(e,n){if(e.length===0)return"[]";let t="[ ";for(let l=0;l<e.length;l++){if(l&&(t+=", "),e[l]===null||e[l]===void 0)throw new TypeError("arrays cannot contain null or undefined values");t+=O(e[l],h(e[l]),n-1)}return t+" ]"}function J(e,n,t){if(t===0)throw new Error("Could not stringify the object: maximum object depth exceeded");let l="";for(let r=0;r<e.length;r++)l+=`[[${n}]]
|
||||
`,l+=T(e[r],n,t),l+=`
|
||||
|
||||
`;return l}function T(e,n,t){if(t===0)throw new Error("Could not stringify the object: maximum object depth exceeded");let l="",r="",i=Object.keys(e);for(let o=0;o<i.length;o++){let f=i[o];if(e[f]!==null&&e[f]!==void 0){let a=h(e[f]);if(a==="symbol"||a==="function")throw new TypeError(`cannot serialize values of type '${a}'`);let u=D.test(f)?f:E(f);if(a==="array"&&B(e[f]))r+=J(e[f],n?`${n}.${u}`:u,t-1);else if(a==="object"){let d=n?`${n}.${u}`:u;r+=`[${d}]
|
||||
`,r+=T(e[f],d,t-1),r+=`
|
||||
|
||||
`}else l+=u,l+=" = ",l+=O(e[f],a,t),l+=`
|
||||
`}}return`${l}
|
||||
${r}`.trim()}function H(e,n){if(h(e)!=="object")throw new TypeError("stringify can only be called with an object");let t=n?.maxDepth??1e3;return T(e,"",t)}function Q(e){const n=X(e);return L(e,n,{preserveIndentation:!1}),n}function W(e){const n=P(e,{}),t=H(e);return n.whitespace.start+t+n.whitespace.end}export{Q as parseTOML,W as stringifyTOML};
96
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/yaml.d.mts
generated
vendored
@@ -1,96 +0,0 @@
|
||||
import { F as FormatOptions } from './shared/confbox.B202Uz6F.mjs';
|
||||
|
||||
/**
|
||||
* Converts a [YAML](https://yaml.org/) string into an object.
|
||||
*
|
||||
* @NOTE This function does **not** understand multi-document sources, it throws exception on those.
|
||||
*
|
||||
* @NOTE Comments are not preserved after parsing.
|
||||
*
|
||||
* @NOTE This function does **not** support schema-specific tag resolution restrictions.
|
||||
* So, the JSON schema is not as strictly defined in the YAML specification.
|
||||
* It allows numbers in any notation, use `Null` and `NULL` as `null`, etc.
|
||||
* The core schema also has no such restrictions. It allows binary notation for integers.
|
||||
*
|
||||
* @template T The type of the return value.
|
||||
* @param text The YAML string to parse.
|
||||
* @param options Parsing options.
|
||||
* @returns The JavaScript value converted from the YAML string.
|
||||
*/
|
||||
declare function parseYAML<T = unknown>(text: string, options?: YAMLParseOptions): T;
|
||||
/**
|
||||
* Converts a JavaScript value to a [YAML](https://yaml.org/) string.
|
||||
*
|
||||
* @NOTE Comments are not preserved in the output.
|
||||
*
|
||||
* @param value
|
||||
* @param options
|
||||
* @returns The YAML string converted from the JavaScript value.
|
||||
*/
|
||||
declare function stringifyYAML(value: any, options?: YAMLStringifyOptions): string;
|
||||
interface YAMLParseOptions extends FormatOptions {
|
||||
/** string to be used as a file path in error/warning messages. */
|
||||
filename?: string | undefined;
|
||||
/** function to call on warning messages. */
|
||||
onWarning?(this: null, e: YAMLException): void;
|
||||
/** specifies a schema to use. */
|
||||
schema?: any | undefined;
|
||||
/** compatibility with JSON.parse behaviour. */
|
||||
json?: boolean | undefined;
|
||||
/** listener for parse events */
|
||||
listener?(this: any, eventType: any, state: any): void;
|
||||
}
|
||||
interface YAMLStringifyOptions extends FormatOptions {
|
||||
/** indentation width to use (in spaces). */
|
||||
indent?: number | undefined;
|
||||
/** when true, will not add an indentation level to array elements */
|
||||
noArrayIndent?: boolean | undefined;
|
||||
/** do not throw on invalid types (like function in the safe schema) and skip pairs and single values with such types. */
|
||||
skipInvalid?: boolean | undefined;
|
||||
/** specifies level of nesting, when to switch from block to flow style for collections. -1 means block style everwhere */
|
||||
flowLevel?: number | undefined;
|
||||
/** Each tag may have own set of styles. - "tag" => "style" map. */
|
||||
styles?: {
|
||||
[x: string]: any;
|
||||
} | undefined;
|
||||
/** specifies a schema to use. */
|
||||
schema?: any | undefined;
|
||||
/** if true, sort keys when dumping YAML. If a function, use the function to sort the keys. (default: false) */
|
||||
sortKeys?: boolean | ((a: any, b: any) => number) | undefined;
|
||||
/** set max line width. (default: 80) */
|
||||
lineWidth?: number | undefined;
|
||||
/** if true, don't convert duplicate objects into references (default: false) */
|
||||
noRefs?: boolean | undefined;
|
||||
/** if true don't try to be compatible with older yaml versions. Currently: don't quote "yes", "no" and so on, as required for YAML 1.1 (default: false) */
|
||||
noCompatMode?: boolean | undefined;
|
||||
/**
|
||||
* if true flow sequences will be condensed, omitting the space between `key: value` or `a, b`. Eg. `'[a,b]'` or `{a:{b:c}}`.
|
||||
* Can be useful when using yaml for pretty URL query params as spaces are %-encoded. (default: false).
|
||||
*/
|
||||
condenseFlow?: boolean | undefined;
|
||||
/** strings will be quoted using this quoting style. If you specify single quotes, double quotes will still be used for non-printable characters. (default: `'`) */
|
||||
quotingType?: "'" | '"' | undefined;
|
||||
/** if true, all non-key strings will be quoted even if they normally don't need to. (default: false) */
|
||||
forceQuotes?: boolean | undefined;
|
||||
/** callback `function (key, value)` called recursively on each key/value in source object (see `replacer` docs for `JSON.stringify`). */
|
||||
replacer?: ((key: string, value: any) => any) | undefined;
|
||||
}
|
||||
interface Mark {
|
||||
buffer: string;
|
||||
column: number;
|
||||
line: number;
|
||||
name: string;
|
||||
position: number;
|
||||
snippet: string;
|
||||
}
|
||||
declare class YAMLException extends Error {
|
||||
constructor(reason?: string, mark?: Mark);
|
||||
toString(compact?: boolean): string;
|
||||
name: string;
|
||||
reason: string;
|
||||
message: string;
|
||||
mark: Mark;
|
||||
}
|
||||
|
||||
export { parseYAML, stringifyYAML };
|
||||
export type { YAMLParseOptions, YAMLStringifyOptions };
32
web/admin-spa/node_modules/unimport/node_modules/confbox/dist/yaml.mjs
generated
vendored
File diff suppressed because one or more lines are too long
85
web/admin-spa/node_modules/unimport/node_modules/confbox/package.json
generated
vendored
@@ -1,85 +0,0 @@
|
||||
{
|
||||
"name": "confbox",
|
||||
"version": "0.2.2",
|
||||
"description": "Compact YAML, TOML, JSONC, JSON5 and INI parser and serializer",
|
||||
"keywords": [
|
||||
"yaml",
|
||||
"toml",
|
||||
"jsonc",
|
||||
"json5",
|
||||
"unjs",
|
||||
"config",
|
||||
"ini"
|
||||
],
|
||||
"repository": "unjs/confbox",
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.mts",
|
||||
"default": "./dist/index.mjs"
|
||||
},
|
||||
"./json5": {
|
||||
"types": "./dist/json5.d.mts",
|
||||
"default": "./dist/json5.mjs"
|
||||
},
|
||||
"./jsonc": {
|
||||
"types": "./dist/jsonc.d.mts",
|
||||
"default": "./dist/jsonc.mjs"
|
||||
},
|
||||
"./toml": {
|
||||
"types": "./dist/toml.d.mts",
|
||||
"default": "./dist/toml.mjs"
|
||||
},
|
||||
"./yaml": {
|
||||
"types": "./dist/yaml.d.mts",
|
||||
"default": "./dist/yaml.mjs"
|
||||
},
|
||||
"./ini": {
|
||||
"types": "./dist/ini.d.mts",
|
||||
"default": "./dist/ini.mjs"
|
||||
}
|
||||
},
|
||||
"types": "./dist/index.d.mts",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "unbuild",
|
||||
"dev": "vitest dev --coverage",
|
||||
"bench": "pnpm build && node test/bench.mjs",
|
||||
"lint": "eslint && prettier -c src test",
|
||||
"lint:fix": "eslint --fix && prettier -c src test -w",
|
||||
"prepack": "pnpm build",
|
||||
"release": "pnpm test && changelogen --release && npm publish && git push --follow-tags",
|
||||
"test": "pnpm lint && pnpm test:types && vitest run --coverage",
|
||||
"test:types": "tsc --noEmit --skipLibCheck"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/ini": "^4.1.1",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/node": "^22.14.0",
|
||||
"@vitest/coverage-v8": "^3.1.1",
|
||||
"automd": "^0.4.0",
|
||||
"changelogen": "^0.6.1",
|
||||
"detect-indent": "^7.0.1",
|
||||
"eslint": "^9.23.0",
|
||||
"eslint-config-unjs": "^0.4.2",
|
||||
"ini": "^5.0.0",
|
||||
"jiti": "^2.4.2",
|
||||
"js-toml": "^1.0.1",
|
||||
"js-yaml": "^4.1.0",
|
||||
"json5": "^2.2.3",
|
||||
"jsonc-parser": "^3.3.1",
|
||||
"mitata": "^1.0.34",
|
||||
"prettier": "^3.5.3",
|
||||
"smol-toml": "^1.3.1",
|
||||
"toml": "^3.0.0",
|
||||
"typescript": "^5.8.2",
|
||||
"unbuild": "^3.5.0",
|
||||
"vitest": "^3.1.1",
|
||||
"yaml": "^2.7.1"
|
||||
},
|
||||
"packageManager": "pnpm@10.7.1"
|
||||
}
16
web/admin-spa/node_modules/unimport/node_modules/escape-string-regexp/index.d.ts
generated
vendored
@@ -1,16 +0,0 @@
/**
Escape RegExp special characters.

You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class.

@example
```
import escapeStringRegexp from 'escape-string-regexp';

const escapedString = escapeStringRegexp('How much $ for a 🦄?');
//=> 'How much \\$ for a 🦄\\?'

new RegExp(escapedString);
```
*/
export default function escapeStringRegexp(string: string): string;
11
web/admin-spa/node_modules/unimport/node_modules/escape-string-regexp/index.js
generated
vendored
@@ -1,11 +0,0 @@
export default function escapeStringRegexp(string) {
	if (typeof string !== 'string') {
		throw new TypeError('Expected a string');
	}

	// Escape characters with special meaning either inside or outside character sets.
	// Use a simple backslash escape when it’s always valid, and a `\xnn` escape when the simpler form would be disallowed by Unicode patterns’ stricter grammar.
	return string
		.replace(/[|\\{}()[\]^$+*?.]/g, '\\$&')
		.replace(/-/g, '\\x2d');
}
9
web/admin-spa/node_modules/unimport/node_modules/escape-string-regexp/license
generated
vendored
@@ -1,9 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40
web/admin-spa/node_modules/unimport/node_modules/escape-string-regexp/package.json
generated
vendored
@@ -1,40 +0,0 @@
|
||||
{
|
||||
"name": "escape-string-regexp",
|
||||
"version": "5.0.0",
|
||||
"description": "Escape RegExp special characters",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/escape-string-regexp",
|
||||
"funding": "https://github.com/sponsors/sindresorhus",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "https://sindresorhus.com"
|
||||
},
|
||||
"type": "module",
|
||||
"exports": "./index.js",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava && tsd"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"keywords": [
|
||||
"escape",
|
||||
"regex",
|
||||
"regexp",
|
||||
"regular",
|
||||
"expression",
|
||||
"string",
|
||||
"special",
|
||||
"characters"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "^3.15.0",
|
||||
"tsd": "^0.14.0",
|
||||
"xo": "^0.38.2"
|
||||
}
|
||||
}
34
web/admin-spa/node_modules/unimport/node_modules/escape-string-regexp/readme.md
generated
vendored
@@ -1,34 +0,0 @@
# escape-string-regexp

> Escape RegExp special characters

## Install

```
$ npm install escape-string-regexp
```

## Usage

```js
import escapeStringRegexp from 'escape-string-regexp';

const escapedString = escapeStringRegexp('How much $ for a 🦄?');
//=> 'How much \\$ for a 🦄\\?'

new RegExp(escapedString);
```

You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class.

---

<div align="center">
	<b>
		<a href="https://tidelift.com/subscription/pkg/npm-escape-string-regexp?utm_source=npm-escape-string-regexp&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
	</b>
	<br>
	<sub>
		Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
	</sub>
</div>
7
web/admin-spa/node_modules/unimport/node_modules/estree-walker/LICENSE
generated
vendored
@@ -1,7 +0,0 @@
|
||||
Copyright (c) 2015-20 [these people](https://github.com/Rich-Harris/estree-walker/graphs/contributors)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
48
web/admin-spa/node_modules/unimport/node_modules/estree-walker/README.md
generated
vendored
@@ -1,48 +0,0 @@
# estree-walker

Simple utility for walking an [ESTree](https://github.com/estree/estree)-compliant AST, such as one generated by [acorn](https://github.com/marijnh/acorn).


## Installation

```bash
npm i estree-walker
```


## Usage

```js
var walk = require('estree-walker').walk;
var acorn = require('acorn');

ast = acorn.parse(sourceCode, options); // https://github.com/acornjs/acorn

walk(ast, {
	enter(node, parent, prop, index) {
		// some code happens
	},
	leave(node, parent, prop, index) {
		// some code happens
	}
});
```

Inside the `enter` function, calling `this.skip()` will prevent the node's children being walked, or the `leave` function (which is optional) being called.

Call `this.replace(new_node)` in either `enter` or `leave` to replace the current node with a new one.

Call `this.remove()` in either `enter` or `leave` to remove the current node.

## Why not use estraverse?

The ESTree spec is evolving to accommodate ES6/7. I've had a couple of experiences where [estraverse](https://github.com/estools/estraverse) was unable to handle an AST generated by recent versions of acorn, because it hard-codes visitor keys.

estree-walker, by contrast, simply enumerates a node's properties to find child nodes (and child lists of nodes), and is therefore resistant to spec changes. It's also much smaller. (The performance, if you're wondering, is basically identical.)

None of which should be taken as criticism of estraverse, which has more features and has been battle-tested in many more situations, and for which I'm very grateful.


## License

MIT
38
web/admin-spa/node_modules/unimport/node_modules/estree-walker/package.json
generated
vendored
@@ -1,38 +0,0 @@
|
||||
{
|
||||
"name": "estree-walker",
|
||||
"description": "Traverse an ESTree-compliant AST",
|
||||
"version": "3.0.3",
|
||||
"private": false,
|
||||
"author": "Rich Harris",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Rich-Harris/estree-walker"
|
||||
},
|
||||
"type": "module",
|
||||
"module": "./src/index.js",
|
||||
"exports": {
|
||||
"./package.json": "./package.json",
|
||||
".": {
|
||||
"types": "./types/index.d.ts",
|
||||
"import": "./src/index.js"
|
||||
}
|
||||
},
|
||||
"types": "types/index.d.ts",
|
||||
"scripts": {
|
||||
"prepublishOnly": "tsc && npm test",
|
||||
"test": "uvu test"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/estree": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "^4.9.0",
|
||||
"uvu": "^0.5.1"
|
||||
},
|
||||
"files": [
|
||||
"src",
|
||||
"types",
|
||||
"README.md"
|
||||
]
|
||||
}
152
web/admin-spa/node_modules/unimport/node_modules/estree-walker/src/async.js
generated
vendored
@@ -1,152 +0,0 @@
|
||||
import { WalkerBase } from './walker.js';
|
||||
|
||||
/**
|
||||
* @typedef { import('estree').Node} Node
|
||||
* @typedef { import('./walker.js').WalkerContext} WalkerContext
|
||||
* @typedef {(
|
||||
* this: WalkerContext,
|
||||
* node: Node,
|
||||
* parent: Node | null,
|
||||
* key: string | number | symbol | null | undefined,
|
||||
* index: number | null | undefined
|
||||
* ) => Promise<void>} AsyncHandler
|
||||
*/
|
||||
|
||||
export class AsyncWalker extends WalkerBase {
|
||||
/**
|
||||
*
|
||||
* @param {AsyncHandler} [enter]
|
||||
* @param {AsyncHandler} [leave]
|
||||
*/
|
||||
constructor(enter, leave) {
|
||||
super();
|
||||
|
||||
/** @type {boolean} */
|
||||
this.should_skip = false;
|
||||
|
||||
/** @type {boolean} */
|
||||
this.should_remove = false;
|
||||
|
||||
/** @type {Node | null} */
|
||||
this.replacement = null;
|
||||
|
||||
/** @type {WalkerContext} */
|
||||
this.context = {
|
||||
skip: () => (this.should_skip = true),
|
||||
remove: () => (this.should_remove = true),
|
||||
replace: (node) => (this.replacement = node)
|
||||
};
|
||||
|
||||
/** @type {AsyncHandler | undefined} */
|
||||
this.enter = enter;
|
||||
|
||||
/** @type {AsyncHandler | undefined} */
|
||||
this.leave = leave;
|
||||
}
|
||||
|
||||
/**
|
||||
* @template {Node} Parent
|
||||
* @param {Node} node
|
||||
* @param {Parent | null} parent
|
||||
* @param {keyof Parent} [prop]
|
||||
* @param {number | null} [index]
|
||||
* @returns {Promise<Node | null>}
|
||||
*/
|
||||
async visit(node, parent, prop, index) {
|
||||
if (node) {
|
||||
if (this.enter) {
|
||||
const _should_skip = this.should_skip;
|
||||
const _should_remove = this.should_remove;
|
||||
const _replacement = this.replacement;
|
||||
this.should_skip = false;
|
||||
this.should_remove = false;
|
||||
this.replacement = null;
|
||||
|
||||
await this.enter.call(this.context, node, parent, prop, index);
|
||||
|
||||
if (this.replacement) {
|
||||
node = this.replacement;
|
||||
this.replace(parent, prop, index, node);
|
||||
}
|
||||
|
||||
if (this.should_remove) {
|
||||
this.remove(parent, prop, index);
|
||||
}
|
||||
|
||||
const skipped = this.should_skip;
|
||||
const removed = this.should_remove;
|
||||
|
||||
this.should_skip = _should_skip;
|
||||
this.should_remove = _should_remove;
|
||||
this.replacement = _replacement;
|
||||
|
||||
if (skipped) return node;
|
||||
if (removed) return null;
|
||||
}
|
||||
|
||||
/** @type {keyof Node} */
|
||||
let key;
|
||||
|
||||
for (key in node) {
|
||||
/** @type {unknown} */
|
||||
const value = node[key];
|
||||
|
||||
if (value && typeof value === 'object') {
|
||||
if (Array.isArray(value)) {
|
||||
const nodes = /** @type {Array<unknown>} */ (value);
|
||||
for (let i = 0; i < nodes.length; i += 1) {
|
||||
const item = nodes[i];
|
||||
if (isNode(item)) {
|
||||
if (!(await this.visit(item, node, key, i))) {
|
||||
// removed
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (isNode(value)) {
|
||||
await this.visit(value, node, key, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.leave) {
|
||||
const _replacement = this.replacement;
|
||||
const _should_remove = this.should_remove;
|
||||
this.replacement = null;
|
||||
this.should_remove = false;
|
||||
|
||||
await this.leave.call(this.context, node, parent, prop, index);
|
||||
|
||||
if (this.replacement) {
|
||||
node = this.replacement;
|
||||
this.replace(parent, prop, index, node);
|
||||
}
|
||||
|
||||
if (this.should_remove) {
|
||||
this.remove(parent, prop, index);
|
||||
}
|
||||
|
||||
const removed = this.should_remove;
|
||||
|
||||
this.replacement = _replacement;
|
||||
this.should_remove = _should_remove;
|
||||
|
||||
if (removed) return null;
|
||||
}
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ducktype a node.
|
||||
*
|
||||
* @param {unknown} value
|
||||
* @returns {value is Node}
|
||||
*/
|
||||
function isNode(value) {
|
||||
return (
|
||||
value !== null && typeof value === 'object' && 'type' in value && typeof value.type === 'string'
|
||||
);
|
||||
}
34
web/admin-spa/node_modules/unimport/node_modules/estree-walker/src/index.js
generated
vendored
@@ -1,34 +0,0 @@
import { SyncWalker } from './sync.js';
import { AsyncWalker } from './async.js';

/**
 * @typedef {import('estree').Node} Node
 * @typedef {import('./sync.js').SyncHandler} SyncHandler
 * @typedef {import('./async.js').AsyncHandler} AsyncHandler
 */

/**
 * @param {Node} ast
 * @param {{
 *   enter?: SyncHandler
 *   leave?: SyncHandler
 * }} walker
 * @returns {Node | null}
 */
export function walk(ast, { enter, leave }) {
	const instance = new SyncWalker(enter, leave);
	return instance.visit(ast, null);
}

/**
 * @param {Node} ast
 * @param {{
 *   enter?: AsyncHandler
 *   leave?: AsyncHandler
 * }} walker
 * @returns {Promise<Node | null>}
 */
export async function asyncWalk(ast, { enter, leave }) {
	const instance = new AsyncWalker(enter, leave);
	return await instance.visit(ast, null);
}
152
web/admin-spa/node_modules/unimport/node_modules/estree-walker/src/sync.js
generated
vendored
@@ -1,152 +0,0 @@
|
||||
import { WalkerBase } from './walker.js';
|
||||
|
||||
/**
|
||||
* @typedef { import('estree').Node} Node
|
||||
* @typedef { import('./walker.js').WalkerContext} WalkerContext
|
||||
* @typedef {(
|
||||
* this: WalkerContext,
|
||||
* node: Node,
|
||||
* parent: Node | null,
|
||||
* key: string | number | symbol | null | undefined,
|
||||
* index: number | null | undefined
|
||||
* ) => void} SyncHandler
|
||||
*/
|
||||
|
||||
export class SyncWalker extends WalkerBase {
|
||||
/**
|
||||
*
|
||||
* @param {SyncHandler} [enter]
|
||||
* @param {SyncHandler} [leave]
|
||||
*/
|
||||
constructor(enter, leave) {
|
||||
super();
|
||||
|
||||
/** @type {boolean} */
|
||||
this.should_skip = false;
|
||||
|
||||
/** @type {boolean} */
|
||||
this.should_remove = false;
|
||||
|
||||
/** @type {Node | null} */
|
||||
this.replacement = null;
|
||||
|
||||
/** @type {WalkerContext} */
|
||||
this.context = {
|
||||
skip: () => (this.should_skip = true),
|
||||
remove: () => (this.should_remove = true),
|
||||
replace: (node) => (this.replacement = node)
|
||||
};
|
||||
|
||||
/** @type {SyncHandler | undefined} */
|
||||
this.enter = enter;
|
||||
|
||||
/** @type {SyncHandler | undefined} */
|
||||
this.leave = leave;
|
||||
}
|
||||
|
||||
/**
|
||||
* @template {Node} Parent
|
||||
* @param {Node} node
|
||||
* @param {Parent | null} parent
|
||||
* @param {keyof Parent} [prop]
|
||||
* @param {number | null} [index]
|
||||
* @returns {Node | null}
|
||||
*/
|
||||
visit(node, parent, prop, index) {
|
||||
if (node) {
|
||||
if (this.enter) {
|
||||
const _should_skip = this.should_skip;
|
||||
const _should_remove = this.should_remove;
|
||||
const _replacement = this.replacement;
|
||||
this.should_skip = false;
|
||||
this.should_remove = false;
|
||||
this.replacement = null;
|
||||
|
||||
this.enter.call(this.context, node, parent, prop, index);
|
||||
|
||||
if (this.replacement) {
|
||||
node = this.replacement;
|
||||
this.replace(parent, prop, index, node);
|
||||
}
|
||||
|
||||
if (this.should_remove) {
|
||||
this.remove(parent, prop, index);
|
||||
}
|
||||
|
||||
const skipped = this.should_skip;
|
||||
const removed = this.should_remove;
|
||||
|
||||
this.should_skip = _should_skip;
|
||||
this.should_remove = _should_remove;
|
||||
this.replacement = _replacement;
|
||||
|
||||
if (skipped) return node;
|
||||
if (removed) return null;
|
||||
}
|
||||
|
||||
/** @type {keyof Node} */
|
||||
let key;
|
||||
|
||||
for (key in node) {
|
||||
/** @type {unknown} */
|
||||
const value = node[key];
|
||||
|
||||
if (value && typeof value === 'object') {
|
||||
if (Array.isArray(value)) {
|
||||
const nodes = /** @type {Array<unknown>} */ (value);
|
||||
for (let i = 0; i < nodes.length; i += 1) {
|
||||
const item = nodes[i];
|
||||
if (isNode(item)) {
|
||||
if (!this.visit(item, node, key, i)) {
|
||||
// removed
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (isNode(value)) {
|
||||
this.visit(value, node, key, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.leave) {
|
||||
const _replacement = this.replacement;
|
||||
const _should_remove = this.should_remove;
|
||||
this.replacement = null;
|
||||
this.should_remove = false;
|
||||
|
||||
this.leave.call(this.context, node, parent, prop, index);
|
||||
|
||||
if (this.replacement) {
|
||||
node = this.replacement;
|
||||
this.replace(parent, prop, index, node);
|
||||
}
|
||||
|
||||
if (this.should_remove) {
|
||||
this.remove(parent, prop, index);
|
||||
}
|
||||
|
||||
const removed = this.should_remove;
|
||||
|
||||
this.replacement = _replacement;
|
||||
this.should_remove = _should_remove;
|
||||
|
||||
if (removed) return null;
|
||||
}
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ducktype a node.
|
||||
*
|
||||
* @param {unknown} value
|
||||
* @returns {value is Node}
|
||||
*/
|
||||
function isNode(value) {
|
||||
return (
|
||||
value !== null && typeof value === 'object' && 'type' in value && typeof value.type === 'string'
|
||||
);
|
||||
}
|
||||
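
Note the `i--` in the array branch of `visit` above: after `this.remove()` splices a child out of its parent, the loop re-checks the same index, so the removed node's next sibling is not skipped. A short sketch of how that behaviour surfaces through the public `walk` API; the input program is illustrative:

```ts
import { walk } from 'estree-walker'
import type { Node } from 'estree'

// Illustrative program: the debugger statement is dropped, its sibling survives.
const ast = {
  type: 'Program',
  sourceType: 'script',
  body: [
    { type: 'DebuggerStatement' },
    { type: 'EmptyStatement' }
  ]
} as unknown as Node

walk(ast, {
  enter(node) {
    if (node.type === 'DebuggerStatement') {
      // Marks the node for removal; WalkerBase.remove() splices it from parent.body,
      // and the loop's i-- makes the index now holding EmptyStatement be revisited.
      this.remove()
    }
  }
})
```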
61
web/admin-spa/node_modules/unimport/node_modules/estree-walker/src/walker.js
generated
vendored
@@ -1,61 +0,0 @@
|
||||
/**
|
||||
* @typedef { import('estree').Node} Node
|
||||
* @typedef {{
|
||||
* skip: () => void;
|
||||
* remove: () => void;
|
||||
* replace: (node: Node) => void;
|
||||
* }} WalkerContext
|
||||
*/
|
||||
|
||||
export class WalkerBase {
|
||||
constructor() {
|
||||
/** @type {boolean} */
|
||||
this.should_skip = false;
|
||||
|
||||
/** @type {boolean} */
|
||||
this.should_remove = false;
|
||||
|
||||
/** @type {Node | null} */
|
||||
this.replacement = null;
|
||||
|
||||
/** @type {WalkerContext} */
|
||||
this.context = {
|
||||
skip: () => (this.should_skip = true),
|
||||
remove: () => (this.should_remove = true),
|
||||
replace: (node) => (this.replacement = node)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @template {Node} Parent
|
||||
* @param {Parent | null | undefined} parent
|
||||
* @param {keyof Parent | null | undefined} prop
|
||||
* @param {number | null | undefined} index
|
||||
* @param {Node} node
|
||||
*/
|
||||
replace(parent, prop, index, node) {
|
||||
if (parent && prop) {
|
||||
if (index != null) {
|
||||
/** @type {Array<Node>} */ (parent[prop])[index] = node;
|
||||
} else {
|
||||
/** @type {Node} */ (parent[prop]) = node;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @template {Node} Parent
|
||||
* @param {Parent | null | undefined} parent
|
||||
* @param {keyof Parent | null | undefined} prop
|
||||
* @param {number | null | undefined} index
|
||||
*/
|
||||
remove(parent, prop, index) {
|
||||
if (parent && prop) {
|
||||
if (index !== null && index !== undefined) {
|
||||
/** @type {Array<Node>} */ (parent[prop]).splice(index, 1);
|
||||
} else {
|
||||
delete parent[prop];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
36
web/admin-spa/node_modules/unimport/node_modules/estree-walker/types/async.d.ts
generated
vendored
@@ -1,36 +0,0 @@
|
||||
/**
|
||||
* @typedef { import('estree').Node} Node
|
||||
* @typedef { import('./walker.js').WalkerContext} WalkerContext
|
||||
* @typedef {(
|
||||
* this: WalkerContext,
|
||||
* node: Node,
|
||||
* parent: Node | null,
|
||||
* key: string | number | symbol | null | undefined,
|
||||
* index: number | null | undefined
|
||||
* ) => Promise<void>} AsyncHandler
|
||||
*/
|
||||
export class AsyncWalker extends WalkerBase {
|
||||
/**
|
||||
*
|
||||
* @param {AsyncHandler} [enter]
|
||||
* @param {AsyncHandler} [leave]
|
||||
*/
|
||||
constructor(enter?: AsyncHandler | undefined, leave?: AsyncHandler | undefined);
|
||||
/** @type {AsyncHandler | undefined} */
|
||||
enter: AsyncHandler | undefined;
|
||||
/** @type {AsyncHandler | undefined} */
|
||||
leave: AsyncHandler | undefined;
|
||||
/**
|
||||
* @template {Node} Parent
|
||||
* @param {Node} node
|
||||
* @param {Parent | null} parent
|
||||
* @param {keyof Parent} [prop]
|
||||
* @param {number | null} [index]
|
||||
* @returns {Promise<Node | null>}
|
||||
*/
|
||||
visit<Parent extends import("estree").Node>(node: Node, parent: Parent | null, prop?: keyof Parent | undefined, index?: number | null | undefined): Promise<Node | null>;
|
||||
}
|
||||
export type Node = import('estree').Node;
|
||||
export type WalkerContext = import('./walker.js').WalkerContext;
|
||||
export type AsyncHandler = (this: WalkerContext, node: Node, parent: Node | null, key: string | number | symbol | null | undefined, index: number | null | undefined) => Promise<void>;
|
||||
import { WalkerBase } from "./walker.js";
|
||||
32
web/admin-spa/node_modules/unimport/node_modules/estree-walker/types/index.d.ts
generated
vendored
@@ -1,32 +0,0 @@
/**
 * @typedef {import('estree').Node} Node
 * @typedef {import('./sync.js').SyncHandler} SyncHandler
 * @typedef {import('./async.js').AsyncHandler} AsyncHandler
 */
/**
 * @param {Node} ast
 * @param {{
 *   enter?: SyncHandler
 *   leave?: SyncHandler
 * }} walker
 * @returns {Node | null}
 */
export function walk(ast: Node, { enter, leave }: {
    enter?: SyncHandler;
    leave?: SyncHandler;
}): Node | null;
/**
 * @param {Node} ast
 * @param {{
 *   enter?: AsyncHandler
 *   leave?: AsyncHandler
 * }} walker
 * @returns {Promise<Node | null>}
 */
export function asyncWalk(ast: Node, { enter, leave }: {
    enter?: AsyncHandler;
    leave?: AsyncHandler;
}): Promise<Node | null>;
export type Node = import('estree').Node;
export type SyncHandler = import('./sync.js').SyncHandler;
export type AsyncHandler = import('./async.js').AsyncHandler;
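
The declaration file above also types `asyncWalk`, the promise-returning counterpart whose handlers may await. A hedged sketch; the helper name `collectIdentifiers` is illustrative:

```ts
import { asyncWalk } from 'estree-walker'
import type { Node } from 'estree'

// Awaits completion of the whole traversal before returning the collected names.
async function collectIdentifiers(ast: Node): Promise<string[]> {
  const names: string[] = []
  await asyncWalk(ast, {
    async enter(node) {
      if (node.type === 'Identifier') names.push(node.name)
    }
  })
  return names
}
```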
36
web/admin-spa/node_modules/unimport/node_modules/estree-walker/types/sync.d.ts
generated
vendored
@@ -1,36 +0,0 @@
|
||||
/**
|
||||
* @typedef { import('estree').Node} Node
|
||||
* @typedef { import('./walker.js').WalkerContext} WalkerContext
|
||||
* @typedef {(
|
||||
* this: WalkerContext,
|
||||
* node: Node,
|
||||
* parent: Node | null,
|
||||
* key: string | number | symbol | null | undefined,
|
||||
* index: number | null | undefined
|
||||
* ) => void} SyncHandler
|
||||
*/
|
||||
export class SyncWalker extends WalkerBase {
|
||||
/**
|
||||
*
|
||||
* @param {SyncHandler} [enter]
|
||||
* @param {SyncHandler} [leave]
|
||||
*/
|
||||
constructor(enter?: SyncHandler | undefined, leave?: SyncHandler | undefined);
|
||||
/** @type {SyncHandler | undefined} */
|
||||
enter: SyncHandler | undefined;
|
||||
/** @type {SyncHandler | undefined} */
|
||||
leave: SyncHandler | undefined;
|
||||
/**
|
||||
* @template {Node} Parent
|
||||
* @param {Node} node
|
||||
* @param {Parent | null} parent
|
||||
* @param {keyof Parent} [prop]
|
||||
* @param {number | null} [index]
|
||||
* @returns {Node | null}
|
||||
*/
|
||||
visit<Parent extends import("estree").Node>(node: Node, parent: Parent | null, prop?: keyof Parent | undefined, index?: number | null | undefined): Node | null;
|
||||
}
|
||||
export type Node = import('estree').Node;
|
||||
export type WalkerContext = import('./walker.js').WalkerContext;
|
||||
export type SyncHandler = (this: WalkerContext, node: Node, parent: Node | null, key: string | number | symbol | null | undefined, index: number | null | undefined) => void;
|
||||
import { WalkerBase } from "./walker.js";
|
||||
39
web/admin-spa/node_modules/unimport/node_modules/estree-walker/types/walker.d.ts
generated
vendored
@@ -1,39 +0,0 @@
|
||||
/**
|
||||
* @typedef { import('estree').Node} Node
|
||||
* @typedef {{
|
||||
* skip: () => void;
|
||||
* remove: () => void;
|
||||
* replace: (node: Node) => void;
|
||||
* }} WalkerContext
|
||||
*/
|
||||
export class WalkerBase {
|
||||
/** @type {boolean} */
|
||||
should_skip: boolean;
|
||||
/** @type {boolean} */
|
||||
should_remove: boolean;
|
||||
/** @type {Node | null} */
|
||||
replacement: Node | null;
|
||||
/** @type {WalkerContext} */
|
||||
context: WalkerContext;
|
||||
/**
|
||||
* @template {Node} Parent
|
||||
* @param {Parent | null | undefined} parent
|
||||
* @param {keyof Parent | null | undefined} prop
|
||||
* @param {number | null | undefined} index
|
||||
* @param {Node} node
|
||||
*/
|
||||
replace<Parent extends import("estree").Node>(parent: Parent | null | undefined, prop: keyof Parent | null | undefined, index: number | null | undefined, node: Node): void;
|
||||
/**
|
||||
* @template {Node} Parent
|
||||
* @param {Parent | null | undefined} parent
|
||||
* @param {keyof Parent | null | undefined} prop
|
||||
* @param {number | null | undefined} index
|
||||
*/
|
||||
remove<Parent_1 extends import("estree").Node>(parent: Parent_1 | null | undefined, prop: keyof Parent_1 | null | undefined, index: number | null | undefined): void;
|
||||
}
|
||||
export type Node = import('estree').Node;
|
||||
export type WalkerContext = {
|
||||
skip: () => void;
|
||||
remove: () => void;
|
||||
replace: (node: Node) => void;
|
||||
};
|
||||
21
web/admin-spa/node_modules/unimport/node_modules/local-pkg/LICENSE
generated
vendored
@@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2021 Anthony Fu <https://github.com/antfu>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
55
web/admin-spa/node_modules/unimport/node_modules/local-pkg/README.md
generated
vendored
@@ -1,55 +0,0 @@
# local-pkg

[](https://www.npmjs.com/package/local-pkg)

Get information on local packages. Works on both CJS and ESM.

## Install

```bash
npm i local-pkg
```

## Usage

```ts
import {
  getPackageInfo,
  importModule,
  isPackageExists,
  resolveModule,
} from 'local-pkg'

isPackageExists('local-pkg') // true
isPackageExists('foo') // false

await getPackageInfo('local-pkg')
/* {
 *   name: "local-pkg",
 *   version: "0.1.0",
 *   rootPath: "/path/to/node_modules/local-pkg",
 *   packageJson: {
 *     ...
 *   }
 * }
 */

// similar to `require.resolve` but works also in ESM
resolveModule('local-pkg')
// '/path/to/node_modules/local-pkg/dist/index.cjs'

// similar to `await import()` but works also in CJS
const { importModule } = await importModule('local-pkg')
```
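
The build being removed here also exposes `getPackageInfoSync`, `isPackageListed` and `loadPackageJSON` (see `dist/index.cjs` further down in this diff). A hedged sketch of those calls; the package name `vite` is only an example:

```ts
import {
  getPackageInfoSync,
  isPackageListed,
  loadPackageJSON,
} from 'local-pkg'

// Synchronous variant of getPackageInfo; returns undefined if the package is missing.
const info = getPackageInfoSync('vite')
console.log(info?.version)

// True if the name appears in dependencies or devDependencies of the nearest package.json.
console.log(await isPackageListed('vite'))

// Nearest package.json as an object, or null if none is found.
const pkg = await loadPackageJSON()
console.log(pkg?.name)
```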

## Sponsors

<p align="center">
  <a href="https://cdn.jsdelivr.net/gh/antfu/static/sponsors.svg">
    <img src='https://cdn.jsdelivr.net/gh/antfu/static/sponsors.svg'/>
  </a>
</p>

## License

[MIT](./LICENSE) License © 2021 [Anthony Fu](https://github.com/antfu)
190
web/admin-spa/node_modules/unimport/node_modules/local-pkg/dist/index.cjs
generated
vendored
@@ -1,190 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('node:fs');
|
||||
const node_module = require('node:module');
|
||||
const path = require('node:path');
|
||||
const process = require('node:process');
|
||||
const fsPromises = require('node:fs/promises');
|
||||
const node_url = require('node:url');
|
||||
const mlly = require('mlly');
|
||||
const macro = require('quansync/macro');
|
||||
|
||||
var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
|
||||
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e.default : e; }
|
||||
|
||||
const fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
|
||||
const path__default = /*#__PURE__*/_interopDefaultCompat(path);
|
||||
const process__default = /*#__PURE__*/_interopDefaultCompat(process);
|
||||
const fsPromises__default = /*#__PURE__*/_interopDefaultCompat(fsPromises);
|
||||
|
||||
const toPath = urlOrPath => urlOrPath instanceof URL ? node_url.fileURLToPath(urlOrPath) : urlOrPath;
|
||||
|
||||
async function findUp$1(name, {
|
||||
cwd = process__default.cwd(),
|
||||
type = 'file',
|
||||
stopAt,
|
||||
} = {}) {
|
||||
let directory = path__default.resolve(toPath(cwd) ?? '');
|
||||
const {root} = path__default.parse(directory);
|
||||
stopAt = path__default.resolve(directory, toPath(stopAt ?? root));
|
||||
const isAbsoluteName = path__default.isAbsolute(name);
|
||||
|
||||
while (directory) {
|
||||
const filePath = isAbsoluteName ? name : path__default.join(directory, name);
|
||||
try {
|
||||
const stats = await fsPromises__default.stat(filePath); // eslint-disable-line no-await-in-loop
|
||||
if ((type === 'file' && stats.isFile()) || (type === 'directory' && stats.isDirectory())) {
|
||||
return filePath;
|
||||
}
|
||||
} catch {}
|
||||
|
||||
if (directory === stopAt || directory === root) {
|
||||
break;
|
||||
}
|
||||
|
||||
directory = path__default.dirname(directory);
|
||||
}
|
||||
}
|
||||
|
||||
function findUpSync(name, {
|
||||
cwd = process__default.cwd(),
|
||||
type = 'file',
|
||||
stopAt,
|
||||
} = {}) {
|
||||
let directory = path__default.resolve(toPath(cwd) ?? '');
|
||||
const {root} = path__default.parse(directory);
|
||||
stopAt = path__default.resolve(directory, toPath(stopAt) ?? root);
|
||||
const isAbsoluteName = path__default.isAbsolute(name);
|
||||
|
||||
while (directory) {
|
||||
const filePath = isAbsoluteName ? name : path__default.join(directory, name);
|
||||
|
||||
try {
|
||||
const stats = fs__default.statSync(filePath, {throwIfNoEntry: false});
|
||||
if ((type === 'file' && stats?.isFile()) || (type === 'directory' && stats?.isDirectory())) {
|
||||
return filePath;
|
||||
}
|
||||
} catch {}
|
||||
|
||||
if (directory === stopAt || directory === root) {
|
||||
break;
|
||||
}
|
||||
|
||||
directory = path__default.dirname(directory);
|
||||
}
|
||||
}
|
||||
|
||||
function _resolve(path$1, options = {}) {
|
||||
if (options.platform === "auto" || !options.platform)
|
||||
options.platform = process__default.platform === "win32" ? "win32" : "posix";
|
||||
if (process__default.versions.pnp) {
|
||||
const paths = options.paths || [];
|
||||
if (paths.length === 0)
|
||||
paths.push(process__default.cwd());
|
||||
const targetRequire = node_module.createRequire((typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.tagName.toUpperCase() === 'SCRIPT' && _documentCurrentScript.src || new URL('index.cjs', document.baseURI).href)));
|
||||
try {
|
||||
return targetRequire.resolve(path$1, { paths });
|
||||
} catch {
|
||||
}
|
||||
}
|
||||
const modulePath = mlly.resolvePathSync(path$1, {
|
||||
url: options.paths
|
||||
});
|
||||
if (options.platform === "win32")
|
||||
return path.win32.normalize(modulePath);
|
||||
return modulePath;
|
||||
}
|
||||
function resolveModule(name, options = {}) {
|
||||
try {
|
||||
return _resolve(name, options);
|
||||
} catch {
|
||||
return void 0;
|
||||
}
|
||||
}
|
||||
async function importModule(path) {
|
||||
const i = await import(path);
|
||||
if (i)
|
||||
return mlly.interopDefault(i);
|
||||
return i;
|
||||
}
|
||||
function isPackageExists(name, options = {}) {
|
||||
return !!resolvePackage(name, options);
|
||||
}
|
||||
function getPackageJsonPath(name, options = {}) {
|
||||
const entry = resolvePackage(name, options);
|
||||
if (!entry)
|
||||
return;
|
||||
return searchPackageJSON(entry);
|
||||
}
|
||||
const readFile = macro.quansync({
|
||||
async: (id) => fs__default.promises.readFile(id, "utf8"),
|
||||
sync: (id) => fs__default.readFileSync(id, "utf8")
|
||||
});
|
||||
const getPackageInfo = macro.quansync(function* (name, options = {}) {
|
||||
const packageJsonPath = getPackageJsonPath(name, options);
|
||||
if (!packageJsonPath)
|
||||
return;
|
||||
const packageJson = JSON.parse(yield readFile(packageJsonPath));
|
||||
return {
|
||||
name,
|
||||
version: packageJson.version,
|
||||
rootPath: path.dirname(packageJsonPath),
|
||||
packageJsonPath,
|
||||
packageJson
|
||||
};
|
||||
});
|
||||
const getPackageInfoSync = getPackageInfo.sync;
|
||||
function resolvePackage(name, options = {}) {
|
||||
try {
|
||||
return _resolve(`${name}/package.json`, options);
|
||||
} catch {
|
||||
}
|
||||
try {
|
||||
return _resolve(name, options);
|
||||
} catch (e) {
|
||||
if (e.code !== "MODULE_NOT_FOUND" && e.code !== "ERR_MODULE_NOT_FOUND")
|
||||
console.error(e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
function searchPackageJSON(dir) {
|
||||
let packageJsonPath;
|
||||
while (true) {
|
||||
if (!dir)
|
||||
return;
|
||||
const newDir = path.dirname(dir);
|
||||
if (newDir === dir)
|
||||
return;
|
||||
dir = newDir;
|
||||
packageJsonPath = path.join(dir, "package.json");
|
||||
if (fs__default.existsSync(packageJsonPath))
|
||||
break;
|
||||
}
|
||||
return packageJsonPath;
|
||||
}
|
||||
const findUp = macro.quansync({
|
||||
sync: findUpSync,
|
||||
async: findUp$1
|
||||
});
|
||||
const loadPackageJSON = macro.quansync(function* (cwd = process__default.cwd()) {
|
||||
const path = yield findUp("package.json", { cwd });
|
||||
if (!path || !fs__default.existsSync(path))
|
||||
return null;
|
||||
return JSON.parse(yield readFile(path));
|
||||
});
|
||||
const loadPackageJSONSync = loadPackageJSON.sync;
|
||||
const isPackageListed = macro.quansync(function* (name, cwd) {
|
||||
const pkg = (yield loadPackageJSON(cwd)) || {};
|
||||
return name in (pkg.dependencies || {}) || name in (pkg.devDependencies || {});
|
||||
});
|
||||
const isPackageListedSync = isPackageListed.sync;
|
||||
|
||||
exports.getPackageInfo = getPackageInfo;
|
||||
exports.getPackageInfoSync = getPackageInfoSync;
|
||||
exports.importModule = importModule;
|
||||
exports.isPackageExists = isPackageExists;
|
||||
exports.isPackageListed = isPackageListed;
|
||||
exports.isPackageListedSync = isPackageListedSync;
|
||||
exports.loadPackageJSON = loadPackageJSON;
|
||||
exports.loadPackageJSONSync = loadPackageJSONSync;
|
||||
exports.resolveModule = resolveModule;
|
||||
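
Each helper above is defined once with `quansync` and then exported twice: awaiting the call takes the async branch, while the `.sync` property (e.g. `getPackageInfoSync = getPackageInfo.sync`) runs the same logic synchronously. A sketch of the same pattern, assuming quansync's dual sync/async calling convention; `readTextFile` is an illustrative name, not a local-pkg export:

```ts
import fs from 'node:fs'
import { quansync } from 'quansync/macro'

// One definition, two calling conventions, mirroring `readFile` in the file above.
const readTextFile = quansync({
  async: (id: string) => fs.promises.readFile(id, 'utf8'),
  sync: (id: string) => fs.readFileSync(id, 'utf8')
})

const viaAwait = await readTextFile('package.json') // async branch
const viaSync = readTextFile.sync('package.json') // sync branch
console.log(viaAwait === viaSync)
```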
41
web/admin-spa/node_modules/unimport/node_modules/local-pkg/dist/index.d.cts
generated
vendored
@@ -1,41 +0,0 @@
|
||||
import * as quansync_types from 'quansync/types';
|
||||
import { PackageJson } from 'pkg-types';
|
||||
|
||||
interface PackageInfo {
|
||||
name: string;
|
||||
rootPath: string;
|
||||
packageJsonPath: string;
|
||||
version: string;
|
||||
packageJson: PackageJson;
|
||||
}
|
||||
interface PackageResolvingOptions {
|
||||
paths?: string[];
|
||||
/**
|
||||
* @default 'auto'
|
||||
* Resolve path as posix or win32
|
||||
*/
|
||||
platform?: 'posix' | 'win32' | 'auto';
|
||||
}
|
||||
declare function resolveModule(name: string, options?: PackageResolvingOptions): string | undefined;
|
||||
declare function importModule<T = any>(path: string): Promise<T>;
|
||||
declare function isPackageExists(name: string, options?: PackageResolvingOptions): boolean;
|
||||
declare const getPackageInfo: quansync_types.QuansyncFn<{
|
||||
name: string;
|
||||
version: string | undefined;
|
||||
rootPath: string;
|
||||
packageJsonPath: string;
|
||||
packageJson: PackageJson;
|
||||
} | undefined, [name: string, options?: PackageResolvingOptions | undefined]>;
|
||||
declare const getPackageInfoSync: (name: string, options?: PackageResolvingOptions | undefined) => {
|
||||
name: string;
|
||||
version: string | undefined;
|
||||
rootPath: string;
|
||||
packageJsonPath: string;
|
||||
packageJson: PackageJson;
|
||||
} | undefined;
|
||||
declare const loadPackageJSON: quansync_types.QuansyncFn<PackageJson | null, [cwd?: Args[0] | undefined]>;
|
||||
declare const loadPackageJSONSync: (cwd?: Args[0] | undefined) => PackageJson | null;
|
||||
declare const isPackageListed: quansync_types.QuansyncFn<boolean, [name: string, cwd?: string | undefined]>;
|
||||
declare const isPackageListedSync: (name: string, cwd?: string | undefined) => boolean;
|
||||
|
||||
export { type PackageInfo, type PackageResolvingOptions, getPackageInfo, getPackageInfoSync, importModule, isPackageExists, isPackageListed, isPackageListedSync, loadPackageJSON, loadPackageJSONSync, resolveModule };
|
||||
41
web/admin-spa/node_modules/unimport/node_modules/local-pkg/dist/index.d.mts
generated
vendored
@@ -1,41 +0,0 @@
|
||||
import * as quansync_types from 'quansync/types';
|
||||
import { PackageJson } from 'pkg-types';
|
||||
|
||||
interface PackageInfo {
|
||||
name: string;
|
||||
rootPath: string;
|
||||
packageJsonPath: string;
|
||||
version: string;
|
||||
packageJson: PackageJson;
|
||||
}
|
||||
interface PackageResolvingOptions {
|
||||
paths?: string[];
|
||||
/**
|
||||
* @default 'auto'
|
||||
* Resolve path as posix or win32
|
||||
*/
|
||||
platform?: 'posix' | 'win32' | 'auto';
|
||||
}
|
||||
declare function resolveModule(name: string, options?: PackageResolvingOptions): string | undefined;
|
||||
declare function importModule<T = any>(path: string): Promise<T>;
|
||||
declare function isPackageExists(name: string, options?: PackageResolvingOptions): boolean;
|
||||
declare const getPackageInfo: quansync_types.QuansyncFn<{
|
||||
name: string;
|
||||
version: string | undefined;
|
||||
rootPath: string;
|
||||
packageJsonPath: string;
|
||||
packageJson: PackageJson;
|
||||
} | undefined, [name: string, options?: PackageResolvingOptions | undefined]>;
|
||||
declare const getPackageInfoSync: (name: string, options?: PackageResolvingOptions | undefined) => {
|
||||
name: string;
|
||||
version: string | undefined;
|
||||
rootPath: string;
|
||||
packageJsonPath: string;
|
||||
packageJson: PackageJson;
|
||||
} | undefined;
|
||||
declare const loadPackageJSON: quansync_types.QuansyncFn<PackageJson | null, [cwd?: Args[0] | undefined]>;
|
||||
declare const loadPackageJSONSync: (cwd?: Args[0] | undefined) => PackageJson | null;
|
||||
declare const isPackageListed: quansync_types.QuansyncFn<boolean, [name: string, cwd?: string | undefined]>;
|
||||
declare const isPackageListedSync: (name: string, cwd?: string | undefined) => boolean;
|
||||
|
||||
export { type PackageInfo, type PackageResolvingOptions, getPackageInfo, getPackageInfoSync, importModule, isPackageExists, isPackageListed, isPackageListedSync, loadPackageJSON, loadPackageJSONSync, resolveModule };
|
||||
41
web/admin-spa/node_modules/unimport/node_modules/local-pkg/dist/index.d.ts
generated
vendored
@@ -1,41 +0,0 @@
|
||||
import * as quansync_types from 'quansync/types';
|
||||
import { PackageJson } from 'pkg-types';
|
||||
|
||||
interface PackageInfo {
|
||||
name: string;
|
||||
rootPath: string;
|
||||
packageJsonPath: string;
|
||||
version: string;
|
||||
packageJson: PackageJson;
|
||||
}
|
||||
interface PackageResolvingOptions {
|
||||
paths?: string[];
|
||||
/**
|
||||
* @default 'auto'
|
||||
* Resolve path as posix or win32
|
||||
*/
|
||||
platform?: 'posix' | 'win32' | 'auto';
|
||||
}
|
||||
declare function resolveModule(name: string, options?: PackageResolvingOptions): string | undefined;
|
||||
declare function importModule<T = any>(path: string): Promise<T>;
|
||||
declare function isPackageExists(name: string, options?: PackageResolvingOptions): boolean;
|
||||
declare const getPackageInfo: quansync_types.QuansyncFn<{
|
||||
name: string;
|
||||
version: string | undefined;
|
||||
rootPath: string;
|
||||
packageJsonPath: string;
|
||||
packageJson: PackageJson;
|
||||
} | undefined, [name: string, options?: PackageResolvingOptions | undefined]>;
|
||||
declare const getPackageInfoSync: (name: string, options?: PackageResolvingOptions | undefined) => {
|
||||
name: string;
|
||||
version: string | undefined;
|
||||
rootPath: string;
|
||||
packageJsonPath: string;
|
||||
packageJson: PackageJson;
|
||||
} | undefined;
|
||||
declare const loadPackageJSON: quansync_types.QuansyncFn<PackageJson | null, [cwd?: Args[0] | undefined]>;
|
||||
declare const loadPackageJSONSync: (cwd?: Args[0] | undefined) => PackageJson | null;
|
||||
declare const isPackageListed: quansync_types.QuansyncFn<boolean, [name: string, cwd?: string | undefined]>;
|
||||
declare const isPackageListedSync: (name: string, cwd?: string | undefined) => boolean;
|
||||
|
||||
export { type PackageInfo, type PackageResolvingOptions, getPackageInfo, getPackageInfoSync, importModule, isPackageExists, isPackageListed, isPackageListedSync, loadPackageJSON, loadPackageJSONSync, resolveModule };
|
||||
172
web/admin-spa/node_modules/unimport/node_modules/local-pkg/dist/index.mjs
generated
vendored
@@ -1,172 +0,0 @@
|
||||
import fs from 'node:fs';
|
||||
import { createRequire } from 'node:module';
|
||||
import path, { dirname, join, win32 } from 'node:path';
|
||||
import process from 'node:process';
|
||||
import fsPromises from 'node:fs/promises';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { resolvePathSync, interopDefault } from 'mlly';
|
||||
import { quansync } from 'quansync/macro';
|
||||
|
||||
const toPath = urlOrPath => urlOrPath instanceof URL ? fileURLToPath(urlOrPath) : urlOrPath;
|
||||
|
||||
async function findUp$1(name, {
|
||||
cwd = process.cwd(),
|
||||
type = 'file',
|
||||
stopAt,
|
||||
} = {}) {
|
||||
let directory = path.resolve(toPath(cwd) ?? '');
|
||||
const {root} = path.parse(directory);
|
||||
stopAt = path.resolve(directory, toPath(stopAt ?? root));
|
||||
const isAbsoluteName = path.isAbsolute(name);
|
||||
|
||||
while (directory) {
|
||||
const filePath = isAbsoluteName ? name : path.join(directory, name);
|
||||
try {
|
||||
const stats = await fsPromises.stat(filePath); // eslint-disable-line no-await-in-loop
|
||||
if ((type === 'file' && stats.isFile()) || (type === 'directory' && stats.isDirectory())) {
|
||||
return filePath;
|
||||
}
|
||||
} catch {}
|
||||
|
||||
if (directory === stopAt || directory === root) {
|
||||
break;
|
||||
}
|
||||
|
||||
directory = path.dirname(directory);
|
||||
}
|
||||
}
|
||||
|
||||
function findUpSync(name, {
|
||||
cwd = process.cwd(),
|
||||
type = 'file',
|
||||
stopAt,
|
||||
} = {}) {
|
||||
let directory = path.resolve(toPath(cwd) ?? '');
|
||||
const {root} = path.parse(directory);
|
||||
stopAt = path.resolve(directory, toPath(stopAt) ?? root);
|
||||
const isAbsoluteName = path.isAbsolute(name);
|
||||
|
||||
while (directory) {
|
||||
const filePath = isAbsoluteName ? name : path.join(directory, name);
|
||||
|
||||
try {
|
||||
const stats = fs.statSync(filePath, {throwIfNoEntry: false});
|
||||
if ((type === 'file' && stats?.isFile()) || (type === 'directory' && stats?.isDirectory())) {
|
||||
return filePath;
|
||||
}
|
||||
} catch {}
|
||||
|
||||
if (directory === stopAt || directory === root) {
|
||||
break;
|
||||
}
|
||||
|
||||
directory = path.dirname(directory);
|
||||
}
|
||||
}
|
||||
|
||||
function _resolve(path, options = {}) {
|
||||
if (options.platform === "auto" || !options.platform)
|
||||
options.platform = process.platform === "win32" ? "win32" : "posix";
|
||||
if (process.versions.pnp) {
|
||||
const paths = options.paths || [];
|
||||
if (paths.length === 0)
|
||||
paths.push(process.cwd());
|
||||
const targetRequire = createRequire(import.meta.url);
|
||||
try {
|
||||
return targetRequire.resolve(path, { paths });
|
||||
} catch {
|
||||
}
|
||||
}
|
||||
const modulePath = resolvePathSync(path, {
|
||||
url: options.paths
|
||||
});
|
||||
if (options.platform === "win32")
|
||||
return win32.normalize(modulePath);
|
||||
return modulePath;
|
||||
}
|
||||
function resolveModule(name, options = {}) {
|
||||
try {
|
||||
return _resolve(name, options);
|
||||
} catch {
|
||||
return void 0;
|
||||
}
|
||||
}
|
||||
async function importModule(path) {
|
||||
const i = await import(path);
|
||||
if (i)
|
||||
return interopDefault(i);
|
||||
return i;
|
||||
}
|
||||
function isPackageExists(name, options = {}) {
|
||||
return !!resolvePackage(name, options);
|
||||
}
|
||||
function getPackageJsonPath(name, options = {}) {
|
||||
const entry = resolvePackage(name, options);
|
||||
if (!entry)
|
||||
return;
|
||||
return searchPackageJSON(entry);
|
||||
}
|
||||
const readFile = quansync({
|
||||
async: (id) => fs.promises.readFile(id, "utf8"),
|
||||
sync: (id) => fs.readFileSync(id, "utf8")
|
||||
});
|
||||
const getPackageInfo = quansync(function* (name, options = {}) {
|
||||
const packageJsonPath = getPackageJsonPath(name, options);
|
||||
if (!packageJsonPath)
|
||||
return;
|
||||
const packageJson = JSON.parse(yield readFile(packageJsonPath));
|
||||
return {
|
||||
name,
|
||||
version: packageJson.version,
|
||||
rootPath: dirname(packageJsonPath),
|
||||
packageJsonPath,
|
||||
packageJson
|
||||
};
|
||||
});
|
||||
const getPackageInfoSync = getPackageInfo.sync;
|
||||
function resolvePackage(name, options = {}) {
|
||||
try {
|
||||
return _resolve(`${name}/package.json`, options);
|
||||
} catch {
|
||||
}
|
||||
try {
|
||||
return _resolve(name, options);
|
||||
} catch (e) {
|
||||
if (e.code !== "MODULE_NOT_FOUND" && e.code !== "ERR_MODULE_NOT_FOUND")
|
||||
console.error(e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
function searchPackageJSON(dir) {
|
||||
let packageJsonPath;
|
||||
while (true) {
|
||||
if (!dir)
|
||||
return;
|
||||
const newDir = dirname(dir);
|
||||
if (newDir === dir)
|
||||
return;
|
||||
dir = newDir;
|
||||
packageJsonPath = join(dir, "package.json");
|
||||
if (fs.existsSync(packageJsonPath))
|
||||
break;
|
||||
}
|
||||
return packageJsonPath;
|
||||
}
|
||||
const findUp = quansync({
|
||||
sync: findUpSync,
|
||||
async: findUp$1
|
||||
});
|
||||
const loadPackageJSON = quansync(function* (cwd = process.cwd()) {
|
||||
const path = yield findUp("package.json", { cwd });
|
||||
if (!path || !fs.existsSync(path))
|
||||
return null;
|
||||
return JSON.parse(yield readFile(path));
|
||||
});
|
||||
const loadPackageJSONSync = loadPackageJSON.sync;
|
||||
const isPackageListed = quansync(function* (name, cwd) {
|
||||
const pkg = (yield loadPackageJSON(cwd)) || {};
|
||||
return name in (pkg.dependencies || {}) || name in (pkg.devDependencies || {});
|
||||
});
|
||||
const isPackageListedSync = isPackageListed.sync;
|
||||
|
||||
export { getPackageInfo, getPackageInfoSync, importModule, isPackageExists, isPackageListed, isPackageListedSync, loadPackageJSON, loadPackageJSONSync, resolveModule };
|
||||
@@ -1,44 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Pooya Parsa <pooya@pi0.io> - Daniel Roe <daniel@roe.dev>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
Copyright Joyent, Inc. and other Node contributors.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
persons to whom the Software is furnished to do so, subject to the
|
||||
following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
224
web/admin-spa/node_modules/unimport/node_modules/local-pkg/node_modules/pkg-types/README.md
generated
vendored
@@ -1,224 +0,0 @@
|
||||
# pkg-types
|
||||
|
||||
<!-- automd:badges color=yellow codecov -->
|
||||
|
||||
[](https://npmjs.com/package/pkg-types)
|
||||
[](https://npm.chart.dev/pkg-types)
|
||||
[](https://codecov.io/gh/unjs/pkg-types)
|
||||
|
||||
<!-- /automd -->
|
||||
|
||||
Node.js utilities and TypeScript definitions for `package.json` and `tsconfig.json`.
|
||||
|
||||
## Install
|
||||
|
||||
<!-- automd:pm-i -->
|
||||
|
||||
```sh
|
||||
# ✨ Auto-detect
|
||||
npx nypm install pkg-types
|
||||
|
||||
# npm
|
||||
npm install pkg-types
|
||||
|
||||
# yarn
|
||||
yarn add pkg-types
|
||||
|
||||
# pnpm
|
||||
pnpm install pkg-types
|
||||
|
||||
# bun
|
||||
bun install pkg-types
|
||||
|
||||
# deno
|
||||
deno install pkg-types
|
||||
```
|
||||
|
||||
<!-- /automd -->
|
||||
|
||||
## Usage
|
||||
|
||||
### `readPackageJSON`
|
||||
|
||||
```js
|
||||
import { readPackageJSON } from "pkg-types";
|
||||
const localPackageJson = await readPackageJSON();
|
||||
// or
|
||||
const packageJson = await readPackageJSON("/fully/resolved/path/to/folder");
|
||||
```
|
||||
|
||||
### `writePackageJSON`
|
||||
|
||||
```js
|
||||
import { writePackageJSON } from "pkg-types";
|
||||
|
||||
await writePackageJSON("path/to/package.json", pkg);
|
||||
```
|
||||
|
||||
### `resolvePackageJSON`
|
||||
|
||||
```js
|
||||
import { resolvePackageJSON } from "pkg-types";
|
||||
const filename = await resolvePackageJSON();
|
||||
// or
|
||||
const packageJson = await resolvePackageJSON("/fully/resolved/path/to/folder");
|
||||
```
|
||||
|
||||
### `readTSConfig`
|
||||
|
||||
```js
|
||||
import { readTSConfig } from "pkg-types";
|
||||
const tsconfig = await readTSConfig();
|
||||
// or
|
||||
const tsconfig2 = await readTSConfig("/fully/resolved/path/to/folder");
|
||||
```
|
||||
|
||||
### `writeTSConfig`
|
||||
|
||||
```js
|
||||
import { writeTSConfig } from "pkg-types";
|
||||
|
||||
await writeTSConfig("path/to/tsconfig.json", tsconfig);
|
||||
```
|
||||
|
||||
### `resolveTSConfig`
|
||||
|
||||
```js
|
||||
import { resolveTSConfig } from "pkg-types";
|
||||
const filename = await resolveTSConfig();
|
||||
// or
|
||||
const tsconfig = await resolveTSConfig("/fully/resolved/path/to/folder");
|
||||
```
|
||||
|
||||
### `resolveFile`
|
||||
|
||||
```js
|
||||
import { resolveFile } from "pkg-types";
|
||||
const filename = await resolveFile("README.md", {
|
||||
startingFrom: id,
|
||||
rootPattern: /^node_modules$/,
|
||||
matcher: (filename) => filename.endsWith(".md"),
|
||||
});
|
||||
```
|
||||
|
||||
### `resolveLockFile`
|
||||
|
||||
Find path to the lock file (`yarn.lock`, `package-lock.json`, `pnpm-lock.yaml`, `npm-shrinkwrap.json`, `bun.lockb`, `bun.lock`) or throws an error.
|
||||
|
||||
```js
|
||||
import { resolveLockFile } from "pkg-types";
|
||||
const lockfile = await resolveLockFile(".");
|
||||
```
|
||||
|
||||
### `findWorkspaceDir`
|
||||
|
||||
Try to detect the workspace dir, in order:
|
||||
|
||||
1. Farthest workspace file (`pnpm-workspace.yaml`, `lerna.json`, `turbo.json`, `rush.json`, `deno.json`, `deno.jsonc`)
|
||||
2. Closest `.git/config` file
|
||||
3. Farthest lockfile
|
||||
4. Farthest `package.json` file
|
||||
|
||||
If fails, throws an error.
|
||||
|
||||
```js
|
||||
import { findWorkspaceDir } from "pkg-types";
|
||||
const workspaceDir = await findWorkspaceDir(".");
|
||||
```
|
||||
|
||||
### `resolveGitConfig`
|
||||
|
||||
Finds closest `.git/config` file.
|
||||
|
||||
```js
|
||||
import { resolveGitConfig } from "pkg-types";
|
||||
|
||||
const gitConfig = await resolveGitConfig(".")
|
||||
```
|
||||
|
||||
### `readGitConfig`
|
||||
|
||||
Finds and reads closest `.git/config` file into a JS object.
|
||||
|
||||
```js
|
||||
import { readGitConfig } from "pkg-types";
|
||||
|
||||
const gitConfigObj = await readGitConfig(".")
|
||||
```
|
||||
|
||||
### `writeGitConfig`
|
||||
|
||||
Stringifies git config object into INI text format and writes it to a file.
|
||||
|
||||
```js
|
||||
import { writeGitConfig } from "pkg-types";
|
||||
|
||||
await writeGitConfig(".git/config", gitConfigObj)
|
||||
```
|
||||
|
||||
### `parseGitConfig`
|
||||
|
||||
Parses a git config file in INI text format into a JavaScript object.
|
||||
|
||||
```js
|
||||
import { parseGitConfig } from "pkg-types";
|
||||
|
||||
const gitConfigObj = parseGitConfig(gitConfigINI)
|
||||
```
|
||||
|
||||
### `stringifyGitConfig`
|
||||
|
||||
Stringifies a git config object into a git config file INI text format.
|
||||
|
||||
```js
|
||||
import { stringifyGitConfig } from "pkg-types";
|
||||
|
||||
const gitConfigINI = stringifyGitConfig(gitConfigObj)
|
||||
```
|
||||
|
||||
## Types
|
||||
|
||||
**Note:** In order to make types working, you need to install `typescript` as a devDependency.
|
||||
|
||||
You can directly use typed interfaces:
|
||||
|
||||
```ts
|
||||
import type { TSConfig, PackageJSON, GitConfig } from "pkg-types";
|
||||
```
|
||||
|
||||
You can also use define utils for type support for using in plain `.js` files and auto-complete in IDE.
|
||||
|
||||
```js
|
||||
import type { definePackageJSON } from 'pkg-types'
|
||||
|
||||
const pkg = definePackageJSON({})
|
||||
```
|
||||
|
||||
```js
|
||||
import type { defineTSConfig } from 'pkg-types'
|
||||
|
||||
const pkg = defineTSConfig({})
|
||||
```
|
||||
|
||||
```js
|
||||
import type { defineGitConfig } from 'pkg-types'
|
||||
|
||||
const gitConfig = defineGitConfig({})
|
||||
```
|
||||
|
||||
## Alternatives
|
||||
|
||||
- [dominikg/tsconfck](https://github.com/dominikg/tsconfck)
|
||||
|
||||
## License
|
||||
|
||||
<!-- automd:contributors license=MIT author="pi0,danielroe" -->
|
||||
|
||||
Published under the [MIT](https://github.com/unjs/pkg-types/blob/main/LICENSE) license.
|
||||
Made by [@pi0](https://github.com/pi0), [@danielroe](https://github.com/danielroe) and [community](https://github.com/unjs/pkg-types/graphs/contributors) 💛
|
||||
<br><br>
|
||||
<a href="https://github.com/unjs/pkg-types/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=unjs/pkg-types" />
|
||||
</a>
|
||||
|
||||
<!-- /automd -->
|
||||
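
The declaration file that follows also exposes the lower-level `findNearestFile` / `findFarthestFile` helpers that the resolvers documented above are built on. A hedged usage sketch; the file names and the `startingFrom` value are illustrative:

```ts
import process from 'node:process'
import { findNearestFile, findFarthestFile } from 'pkg-types'

// Closest package.json walking up from the current directory.
const nearestPkg = await findNearestFile('package.json', {
  startingFrom: process.cwd()
})

// Farthest match, i.e. the one nearest the workspace root rather than the cwd.
const rootTsconfig = await findFarthestFile('tsconfig.json')

console.log(nearestPkg, rootTsconfig)
```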
@@ -1,502 +0,0 @@
|
||||
import { ResolveOptions as ResolveOptions$1 } from 'exsolve';
|
||||
import { CompilerOptions, TypeAcquisition } from 'typescript';
|
||||
|
||||
/**
|
||||
* Represents the options for resolving paths with additional file finding capabilities.
|
||||
*/
|
||||
type ResolveOptions = Omit<FindFileOptions, "startingFrom"> & ResolveOptions$1 & {
|
||||
/** @deprecated: use `from` */
|
||||
url?: string;
|
||||
/** @deprecated: use `from` */
|
||||
parent?: string;
|
||||
};
|
||||
/**
|
||||
* Options for reading files with optional caching.
|
||||
*/
|
||||
type ReadOptions = {
|
||||
/**
|
||||
* Specifies whether the read results should be cached.
|
||||
* Can be a boolean or a map to hold the cached data.
|
||||
*/
|
||||
cache?: boolean | Map<string, Record<string, any>>;
|
||||
};
|
||||
interface FindFileOptions {
|
||||
/**
|
||||
* The starting directory for the search.
|
||||
* @default . (same as `process.cwd()`)
|
||||
*/
|
||||
startingFrom?: string;
|
||||
/**
|
||||
* A pattern to match a path segment above which you don't want to ascend
|
||||
* @default /^node_modules$/
|
||||
*/
|
||||
rootPattern?: RegExp;
|
||||
/**
|
||||
* If true, search starts from root level descending into subdirectories
|
||||
*/
|
||||
reverse?: boolean;
|
||||
/**
|
||||
* A matcher that can evaluate whether the given path is a valid file (for example,
|
||||
* by testing whether the file path exists.
|
||||
*
|
||||
* @default fs.statSync(path).isFile()
|
||||
*/
|
||||
test?: (filePath: string) => boolean | undefined | Promise<boolean | undefined>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously finds a file by name, starting from the specified directory and traversing up (or down if reverse).
|
||||
* @param filename - The name of the file to find.
|
||||
* @param _options - Options to customise the search behaviour.
|
||||
* @returns a promise that resolves to the path of the file found.
|
||||
* @throws Will throw an error if the file cannot be found.
|
||||
*/
|
||||
declare function findFile(filename: string | string[], _options?: FindFileOptions): Promise<string>;
|
||||
/**
|
||||
* Asynchronously finds the next file with the given name, starting in the given directory and moving up.
|
||||
* Alias for findFile without reversing the search.
|
||||
* @param filename - The name of the file to find.
|
||||
* @param options - Options to customise the search behaviour.
|
||||
* @returns A promise that resolves to the path of the next file found.
|
||||
*/
|
||||
declare function findNearestFile(filename: string | string[], options?: FindFileOptions): Promise<string>;
|
||||
/**
|
||||
* Asynchronously finds the furthest file with the given name, starting from the root directory and moving downwards.
|
||||
* This is essentially the reverse of `findNearestFile`.
|
||||
* @param filename - The name of the file to find.
|
||||
* @param options - Options to customise the search behaviour, with reverse set to true.
|
||||
* @returns A promise that resolves to the path of the farthest file found.
|
||||
*/
|
||||
declare function findFarthestFile(filename: string | string[], options?: FindFileOptions): Promise<string>;
|
||||
|
||||
type StripEnums<T extends Record<string, any>> = {
|
||||
[K in keyof T]: T[K] extends boolean ? T[K] : T[K] extends string ? T[K] : T[K] extends object ? T[K] : T[K] extends Array<any> ? T[K] : T[K] extends undefined ? undefined : any;
|
||||
};
|
||||
interface TSConfig {
|
||||
compilerOptions?: StripEnums<CompilerOptions>;
|
||||
exclude?: string[];
|
||||
compileOnSave?: boolean;
|
||||
extends?: string | string[];
|
||||
files?: string[];
|
||||
include?: string[];
|
||||
typeAcquisition?: TypeAcquisition;
|
||||
references?: {
|
||||
path: string;
|
||||
}[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a TSConfig structure.
|
||||
* @param tsconfig - The contents of `tsconfig.json` as an object. See {@link TSConfig}.
|
||||
* @returns the same `tsconfig.json` object.
|
||||
*/
|
||||
declare function defineTSConfig(tsconfig: TSConfig): TSConfig;
|
||||
/**
|
||||
* Asynchronously reads a `tsconfig.json` file.
|
||||
* @param id - The path to the `tsconfig.json` file, defaults to the current working directory.
|
||||
* @param options - The options for resolving and reading the file. See {@link ResolveOptions}.
|
||||
* @returns a promise resolving to the parsed `tsconfig.json` object.
|
||||
*/
|
||||
declare function readTSConfig(id?: string, options?: ResolveOptions & ReadOptions): Promise<TSConfig>;
|
||||
/**
|
||||
* Asynchronously writes data to a `tsconfig.json` file.
|
||||
* @param path - The path to the file where the `tsconfig.json` is written.
|
||||
* @param tsconfig - The `tsconfig.json` object to write. See {@link TSConfig}.
|
||||
*/
|
||||
declare function writeTSConfig(path: string, tsconfig: TSConfig): Promise<void>;
|
||||
/**
|
||||
* Resolves the path to the nearest `tsconfig.json` file from a given directory.
|
||||
* @param id - The base path for the search, defaults to the current working directory.
|
||||
* @param options - Options to modify the search behaviour. See {@link ResolveOptions}.
|
||||
* @returns A promise resolving to the path of the nearest `tsconfig.json` file.
|
||||
*/
|
||||
declare function resolveTSConfig(id?: string, options?: ResolveOptions): Promise<string>;
|
||||
|
||||
interface PackageJson {
|
||||
/**
|
||||
* The name is what your thing is called.
|
||||
* Some rules:
|
||||
* - The name must be less than or equal to 214 characters. This includes the scope for scoped packages.
|
||||
* - The name can’t start with a dot or an underscore.
|
||||
* - New packages must not have uppercase letters in the name.
|
||||
* - The name ends up being part of a URL, an argument on the command line, and a folder name. Therefore, the name can’t contain any non-URL-safe characters.
|
||||
*/
|
||||
name?: string;
|
||||
/**
|
||||
* Version must be parseable by `node-semver`, which is bundled with npm as a dependency. (`npm install semver` to use it yourself.)
|
||||
*/
|
||||
version?: string;
|
||||
/**
|
||||
* Put a description in it. It’s a string. This helps people discover your package, as it’s listed in `npm search`.
|
||||
*/
|
||||
description?: string;
|
||||
/**
|
||||
* Put keywords in it. It’s an array of strings. This helps people discover your package as it’s listed in `npm search`.
|
||||
*/
|
||||
keywords?: string[];
|
||||
/**
|
||||
* The url to the project homepage.
|
||||
*/
|
||||
homepage?: string;
|
||||
/**
|
||||
* The url to your project’s issue tracker and / or the email address to which issues should be reported. These are helpful for people who encounter issues with your package.
|
||||
*/
|
||||
bugs?: string | {
|
||||
url?: string;
|
||||
email?: string;
|
||||
};
|
||||
/**
|
||||
* You should specify a license for your package so that people know how they are permitted to use it, and any restrictions you’re placing on it.
|
||||
*/
|
||||
license?: string;
|
||||
/**
|
||||
* Specify the place where your code lives. This is helpful for people who want to contribute. If the git repo is on GitHub, then the `npm docs` command will be able to find you.
|
||||
* For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the same shortcut syntax you use for npm install:
|
||||
*/
|
||||
repository?: string | {
|
||||
type: string;
|
||||
url: string;
|
||||
/**
|
||||
* If the `package.json` for your package is not in the root directory (for example if it is part of a monorepo), you can specify the directory in which it lives:
|
||||
*/
|
||||
directory?: string;
|
||||
};
|
||||
/**
|
||||
* The `scripts` field is a dictionary containing script commands that are run at various times in the lifecycle of your package.
|
||||
*/
|
||||
scripts?: PackageJsonScripts;
|
||||
/**
|
||||
* If you set `"private": true` in your package.json, then npm will refuse to publish it.
|
||||
*/
|
||||
private?: boolean;
|
||||
/**
|
||||
* The “author” is one person.
|
||||
*/
|
||||
author?: PackageJsonPerson;
|
||||
/**
|
||||
* “contributors” is an array of people.
|
||||
*/
|
||||
contributors?: PackageJsonPerson[];
|
||||
/**
|
||||
* An object containing a URL that provides up-to-date information
|
||||
* about ways to help fund development of your package,
|
||||
* a string URL, or an array of objects and string URLs
|
||||
*/
|
||||
funding?: PackageJsonFunding | PackageJsonFunding[];
|
||||
/**
|
||||
* The optional `files` field is an array of file patterns that describes the entries to be included when your package is installed as a dependency. File patterns follow a similar syntax to `.gitignore`, but reversed: including a file, directory, or glob pattern (`*`, `**\/*`, and such) will make it so that file is included in the tarball when it’s packed. Omitting the field will make it default to `["*"]`, which means it will include all files.
|
||||
*/
|
||||
files?: string[];
|
||||
/**
|
||||
* The main field is a module ID that is the primary entry point to your program. That is, if your package is named `foo`, and a user installs it, and then does `require("foo")`, then your main module’s exports object will be returned.
|
||||
* This should be a module ID relative to the root of your package folder.
|
||||
* For most modules, it makes the most sense to have a main script and often not much else.
|
||||
*/
|
||||
main?: string;
|
||||
/**
|
||||
* If your module is meant to be used client-side the browser field should be used instead of the main field. This is helpful to hint users that it might rely on primitives that aren’t available in Node.js modules. (e.g. window)
|
||||
*/
|
||||
browser?: string | Record<string, string | false>;
|
||||
/**
|
||||
* The `unpkg` field is used to specify the URL to a UMD module for your package. This is used by default in the unpkg.com CDN service.
|
||||
*/
|
||||
unpkg?: string;
|
||||
/**
|
||||
* A map of command name to local file name. On install, npm will symlink that file into `prefix/bin` for global installs, or `./node_modules/.bin/` for local installs.
|
||||
*/
|
||||
bin?: string | Record<string, string>;
|
||||
/**
|
||||
* Specify either a single file or an array of filenames to put in place for the `man` program to find.
|
||||
*/
|
||||
man?: string | string[];
|
||||
/**
|
||||
* Dependencies are specified in a simple object that maps a package name to a version range. The version range is a string which has one or more space-separated descriptors. Dependencies can also be identified with a tarball or git URL.
|
||||
*/
|
||||
dependencies?: Record<string, string>;
|
||||
/**
|
||||
* If someone is planning on downloading and using your module in their program, then they probably don’t want or need to download and build the external test or documentation framework that you use.
|
||||
* In this case, it’s best to map these additional items in a `devDependencies` object.
|
||||
*/
|
||||
devDependencies?: Record<string, string>;
|
||||
/**
|
||||
* If a dependency can be used, but you would like npm to proceed if it cannot be found or fails to install, then you may put it in the `optionalDependencies` object. This is a map of package name to version or url, just like the `dependencies` object. The difference is that build failures do not cause installation to fail.
|
||||
*/
|
||||
optionalDependencies?: Record<string, string>;
|
||||
/**
|
||||
* In some cases, you want to express the compatibility of your package with a host tool or library, while not necessarily doing a `require` of this host. This is usually referred to as a plugin. Notably, your module may be exposing a specific interface, expected and specified by the host documentation.
|
||||
*/
|
||||
peerDependencies?: Record<string, string>;
|
||||
/**
|
||||
* TypeScript typings, typically ending by `.d.ts`.
|
||||
*/
|
||||
types?: string;
|
||||
/**
|
||||
* This field is synonymous with `types`.
|
||||
*/
|
||||
typings?: string;
|
||||
/**
|
||||
* Non-Standard Node.js alternate entry-point to main.
|
||||
* An initial implementation for supporting CJS packages (from main), and use module for ESM modules.
|
||||
*/
|
||||
module?: string;
|
||||
/**
|
||||
* Make main entry-point be loaded as an ESM module, support "export" syntax instead of "require"
|
||||
*
|
||||
* Docs:
|
||||
* - https://nodejs.org/docs/latest-v14.x/api/esm.html#esm_package_json_type_field
|
||||
*
|
||||
* @default 'commonjs'
|
||||
* @since Node.js v14
|
||||
*/
|
||||
type?: "module" | "commonjs";
|
||||
/**
|
||||
* Alternate and extensible alternative to "main" entry point.
|
||||
*
|
||||
* When using `{type: "module"}`, any ESM module file MUST end with `.mjs` extension.
|
||||
*
|
||||
* Docs:
|
||||
* - https://nodejs.org/docs/latest-v14.x/api/esm.html#esm_exports_sugar
|
||||
*
|
||||
* @since Node.js v12.7
|
||||
*/
|
||||
exports?: PackageJsonExports;
|
||||
/**
|
||||
* Docs:
|
||||
* - https://nodejs.org/api/packages.html#imports
|
||||
*/
|
||||
imports?: Record<string, string | Record<string, string>>;
|
||||
/**
|
||||
* The field is used to define a set of sub-packages (or workspaces) within a monorepo.
|
||||
*
|
||||
* This field is an array of glob patterns or an object with specific configurations for managing
|
||||
* multiple packages in a single repository.
|
||||
*/
|
||||
workspaces?: string[];
|
||||
/**
|
||||
* The field is used to specify different TypeScript declaration files for
|
||||
* different versions of TypeScript, allowing for version-specific type definitions.
|
||||
*/
|
||||
typesVersions?: Record<string, Record<string, string[]>>;
|
||||
/**
|
||||
* You can specify which operating systems your module will run on:
|
||||
* ```json
|
||||
* {
|
||||
* "os": ["darwin", "linux"]
|
||||
* }
|
||||
* ```
|
||||
* You can also block instead of allowing operating systems, just prepend the blocked os with a '!':
|
||||
* ```json
|
||||
* {
|
||||
* "os": ["!win32"]
|
||||
* }
|
||||
* ```
|
||||
* The host operating system is determined by `process.platform`
|
||||
* It is allowed to both block and allow an item, although there isn't any good reason to do this.
|
||||
*/
|
||||
os?: string[];
|
||||
/**
|
||||
* If your code only runs on certain cpu architectures, you can specify which ones.
|
||||
* ```json
|
||||
* {
|
||||
* "cpu": ["x64", "ia32"]
|
||||
* }
|
||||
* ```
|
||||
* Like the `os` option, you can also block architectures:
|
||||
* ```json
|
||||
* {
|
||||
* "cpu": ["!arm", "!mips"]
|
||||
* }
|
||||
* ```
|
||||
* The host architecture is determined by `process.arch`
|
||||
*/
|
||||
cpu?: string[];
|
||||
/**
|
||||
* This is a set of config values that will be used at publish-time.
|
||||
*/
|
||||
publishConfig?: {
|
||||
/**
|
||||
* The registry that will be used if the package is published.
|
||||
*/
|
||||
registry?: string;
|
||||
/**
|
||||
* The tag that will be used if the package is published.
|
||||
*/
|
||||
tag?: string;
|
||||
/**
|
||||
* The access level that will be used if the package is published.
|
||||
*/
|
||||
access?: "public" | "restricted";
|
||||
/**
|
||||
* **pnpm-only**
|
||||
*
|
||||
* By default, for portability reasons, no files except those listed in
|
||||
* the bin field will be marked as executable in the resulting package
|
||||
* archive. The executableFiles field lets you declare additional files
|
||||
* that must have the executable flag (+x) set even if
|
||||
* they aren't directly accessible through the bin field.
|
||||
*/
|
||||
executableFiles?: string[];
|
||||
/**
|
||||
* **pnpm-only**
|
||||
*
|
||||
* You also can use the field `publishConfig.directory` to customize
|
||||
* the published subdirectory relative to the current `package.json`.
|
||||
*
|
||||
* It is expected to have a modified version of the current package in
|
||||
* the specified directory (usually using third party build tools).
|
||||
*/
|
||||
directory?: string;
|
||||
/**
|
||||
* **pnpm-only**
|
||||
*
|
||||
* When set to `true`, the project will be symlinked from the
|
||||
* `publishConfig.directory` location during local development.
|
||||
* @default true
|
||||
*/
|
||||
linkDirectory?: boolean;
|
||||
} & Pick<PackageJson, "bin" | "main" | "exports" | "types" | "typings" | "module" | "browser" | "unpkg" | "typesVersions" | "os" | "cpu">;
|
||||
/**
|
||||
* See: https://nodejs.org/api/packages.html#packagemanager
|
||||
* This field defines which package manager is expected to be used when working on the current project.
|
||||
* Should be of the format: `<name>@<version>[#hash]`
|
||||
*/
|
||||
packageManager?: string;
|
||||
[key: string]: any;
|
||||
}
|
||||
/**
|
||||
* See: https://docs.npmjs.com/cli/v11/using-npm/scripts#pre--post-scripts
|
||||
*/
|
||||
type PackageJsonScriptWithPreAndPost<S extends string> = S | `${"pre" | "post"}${S}`;
|
||||
/**
|
||||
* See: https://docs.npmjs.com/cli/v11/using-npm/scripts#life-cycle-operation-order
|
||||
*/
|
||||
type PackageJsonNpmLifeCycleScripts = "dependencies" | "prepublishOnly" | PackageJsonScriptWithPreAndPost<"install" | "pack" | "prepare" | "publish" | "restart" | "start" | "stop" | "test" | "version">;
|
||||
/**
|
||||
* See: https://pnpm.io/scripts#lifecycle-scripts
|
||||
*/
|
||||
type PackageJsonPnpmLifeCycleScripts = "pnpm:devPreinstall";
|
||||
type PackageJsonCommonScripts = "build" | "coverage" | "deploy" | "dev" | "format" | "lint" | "preview" | "release" | "typecheck" | "watch";
|
||||
type PackageJsonScriptName = PackageJsonCommonScripts | PackageJsonNpmLifeCycleScripts | PackageJsonPnpmLifeCycleScripts | (string & {});
|
||||
type PackageJsonScripts = {
|
||||
[P in PackageJsonScriptName]?: string;
|
||||
};
|
||||
/**
|
||||
* A “person” is an object with a “name” field and optionally “url” and “email”. Or you can shorten that all into a single string, and npm will parse it for you.
|
||||
*/
|
||||
type PackageJsonPerson = string | {
|
||||
name: string;
|
||||
email?: string;
|
||||
url?: string;
|
||||
};
|
||||
type PackageJsonFunding = string | {
|
||||
url: string;
|
||||
type?: string;
|
||||
};
|
||||
type PackageJsonExportKey = "." | "import" | "require" | "types" | "node" | "browser" | "default" | (string & {});
|
||||
type PackageJsonExportsObject = {
|
||||
[P in PackageJsonExportKey]?: string | PackageJsonExportsObject | Array<string | PackageJsonExportsObject>;
|
||||
};
|
||||
type PackageJsonExports = string | PackageJsonExportsObject | Array<string | PackageJsonExportsObject>;
|
||||
|
||||
/**
|
||||
* Defines a PackageJson structure.
|
||||
* @param pkg - The `package.json` content as an object. See {@link PackageJson}.
|
||||
* @returns the same `package.json` object.
|
||||
*/
|
||||
declare function definePackageJSON(pkg: PackageJson): PackageJson;
|
||||
/**
|
||||
* Asynchronously reads a `package.json` file.
|
||||
* @param id - The path identifier for the package.json, defaults to the current working directory.
|
||||
* @param options - The options for resolving and reading the file. See {@link ResolveOptions}.
|
||||
* @returns a promise resolving to the parsed `package.json` object.
|
||||
*/
|
||||
declare function readPackageJSON(id?: string, options?: ResolveOptions & ReadOptions): Promise<PackageJson>;
|
||||
/**
|
||||
* Asynchronously writes data to a `package.json` file.
|
||||
* @param path - The path to the file where the `package.json` is written.
|
||||
* @param pkg - The `package.json` object to write. See {@link PackageJson}.
|
||||
*/
|
||||
declare function writePackageJSON(path: string, pkg: PackageJson): Promise<void>;
|
||||
/**
|
||||
* Resolves the path to the nearest `package.json` file from a given directory.
|
||||
* @param id - The base path for the search, defaults to the current working directory.
|
||||
* @param options - Options to modify the search behaviour. See {@link ResolveOptions}.
|
||||
* @returns A promise resolving to the path of the nearest `package.json` file.
|
||||
*/
|
||||
declare function resolvePackageJSON(id?: string, options?: ResolveOptions): Promise<string>;
|
||||
/**
|
||||
* Resolves the path to the nearest lockfile from a given directory.
|
||||
* @param id - The base path for the search, defaults to the current working directory.
|
||||
* @param options - Options to modify the search behaviour. See {@link ResolveOptions}.
|
||||
* @returns A promise resolving to the path of the nearest lockfile.
|
||||
*/
|
||||
declare function resolveLockfile(id?: string, options?: ResolveOptions): Promise<string>;
|
||||
type WorkspaceTestName = "workspaceFile" | "gitConfig" | "lockFile" | "packageJson";
|
||||
/**
|
||||
* Detects the workspace directory based on common project markers.
|
||||
* Throws an error if the workspace root cannot be detected.
|
||||
*
|
||||
* @param id - The base path to search, defaults to the current working directory.
|
||||
* @param options - Options to modify the search behaviour. See {@link ResolveOptions}.
|
||||
* @returns a promise resolving to the path of the detected workspace directory.
|
||||
*/
|
||||
declare function findWorkspaceDir(id?: string, options?: ResolveOptions & Partial<Record<WorkspaceTestName, boolean | "closest" | "furthest">> & {
|
||||
tests?: WorkspaceTestName[];
|
||||
}): Promise<string>;
|
||||
|
||||
interface GitRemote {
|
||||
[key: string]: unknown;
|
||||
name?: string;
|
||||
url?: string;
|
||||
fetch?: string;
|
||||
}
|
||||
interface GitBranch {
|
||||
[key: string]: unknown;
|
||||
remote?: string;
|
||||
merge?: string;
|
||||
description?: string;
|
||||
rebase?: boolean;
|
||||
}
|
||||
interface GitCoreConfig {
|
||||
[key: string]: unknown;
|
||||
}
|
||||
interface GitConfigUser {
|
||||
[key: string]: unknown;
|
||||
name?: string;
|
||||
email?: string;
|
||||
}
|
||||
interface GitConfig {
|
||||
[key: string]: unknown;
|
||||
core?: GitCoreConfig;
|
||||
user?: GitConfigUser;
|
||||
remote?: Record<string, GitRemote>;
|
||||
branch?: Record<string, GitBranch>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a git config object.
|
||||
*/
|
||||
declare function defineGitConfig(config: GitConfig): GitConfig;
|
||||
/**
|
||||
* Finds closest `.git/config` file.
|
||||
*/
|
||||
declare function resolveGitConfig(dir: string, opts?: ResolveOptions): Promise<string>;
|
||||
/**
|
||||
* Finds and reads closest `.git/config` file into a JS object.
|
||||
*/
|
||||
declare function readGitConfig(dir: string, opts?: ResolveOptions): Promise<GitConfig>;
|
||||
/**
|
||||
* Stringifies git config object into INI text format and writes it to a file.
|
||||
*/
|
||||
declare function writeGitConfig(path: string, config: GitConfig): Promise<void>;
|
||||
/**
|
||||
* Parses a git config file in INI text format into a JavaScript object.
|
||||
*/
|
||||
declare function parseGitConfig(ini: string): GitConfig;
|
||||
/**
|
||||
* Stringifies a git config object into a git config file INI text format.
|
||||
*/
|
||||
declare function stringifyGitConfig(config: GitConfig): string;
|
||||
|
||||
export { defineGitConfig, definePackageJSON, defineTSConfig, findFarthestFile, findFile, findNearestFile, findWorkspaceDir, parseGitConfig, readGitConfig, readPackageJSON, readTSConfig, resolveGitConfig, resolveLockfile, resolvePackageJSON, resolveTSConfig, stringifyGitConfig, writeGitConfig, writePackageJSON, writeTSConfig };
|
||||
export type { FindFileOptions, GitBranch, GitConfig, GitConfigUser, GitCoreConfig, GitRemote, PackageJson, PackageJsonExports, PackageJsonPerson, ReadOptions, ResolveOptions, TSConfig };
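
Taken together, these declarations describe a small promise-based API. A minimal usage sketch (an illustrative ESM module with top-level await; the paths shown are hypothetical):

```js
import {
  readPackageJSON,
  resolvePackageJSON,
  resolveLockfile,
  findWorkspaceDir
} from "pkg-types";

// Read the nearest package.json, starting from process.cwd() by default.
const pkg = await readPackageJSON();
console.log(pkg.name, pkg.version);

// Resolve paths rather than contents.
console.log(await resolvePackageJSON("./src")); // nearest package.json above ./src
console.log(await resolveLockfile());           // e.g. a pnpm-lock.yaml or package-lock.json path
console.log(await findWorkspaceDir());          // workspace root; throws if it cannot be detected
```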
|
||||
@@ -1,206 +0,0 @@
|
||||
import { statSync, promises } from 'node:fs';
|
||||
import { resolve, join, normalize, isAbsolute, dirname } from 'pathe';
|
||||
import { parseJSONC, stringifyJSONC, parseJSON, stringifyJSON } from 'confbox';
|
||||
import { resolveModulePath } from 'exsolve';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { parseINI, stringifyINI } from 'confbox/ini';
|
||||
|
||||
const defaultFindOptions = {
|
||||
startingFrom: ".",
|
||||
rootPattern: /^node_modules$/,
|
||||
reverse: false,
|
||||
test: (filePath) => {
|
||||
try {
|
||||
if (statSync(filePath).isFile()) {
|
||||
return true;
|
||||
}
|
||||
} catch {
|
||||
}
|
||||
}
|
||||
};
|
||||
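// findFile resolves `startingFrom`, splits it into path segments, and probes each
// candidate directory for one of the requested filenames. By default it searches from
// the deepest directory upward (nearest match wins); with `reverse: true` it searches
// from the root downward (farthest match wins), which is how findFarthestFile is built.
// A segment matching `rootPattern` (default: /^node_modules$/) bounds how far the walk goes.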
async function findFile(filename, _options = {}) {
|
||||
const filenames = Array.isArray(filename) ? filename : [filename];
|
||||
const options = { ...defaultFindOptions, ..._options };
|
||||
const basePath = resolve(options.startingFrom);
|
||||
const leadingSlash = basePath[0] === "/";
|
||||
const segments = basePath.split("/").filter(Boolean);
|
||||
if (filenames.includes(segments.at(-1)) && await options.test(basePath)) {
|
||||
return basePath;
|
||||
}
|
||||
if (leadingSlash) {
|
||||
segments[0] = "/" + segments[0];
|
||||
}
|
||||
let root = segments.findIndex((r) => r.match(options.rootPattern));
|
||||
if (root === -1) {
|
||||
root = 0;
|
||||
}
|
||||
if (options.reverse) {
|
||||
for (let index = root + 1; index <= segments.length; index++) {
|
||||
for (const filename2 of filenames) {
|
||||
const filePath = join(...segments.slice(0, index), filename2);
|
||||
if (await options.test(filePath)) {
|
||||
return filePath;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (let index = segments.length; index > root; index--) {
|
||||
for (const filename2 of filenames) {
|
||||
const filePath = join(...segments.slice(0, index), filename2);
|
||||
if (await options.test(filePath)) {
|
||||
return filePath;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new Error(
|
||||
`Cannot find matching ${filename} in ${options.startingFrom} or parent directories`
|
||||
);
|
||||
}
|
||||
function findNearestFile(filename, options = {}) {
|
||||
return findFile(filename, options);
|
||||
}
|
||||
function findFarthestFile(filename, options = {}) {
|
||||
return findFile(filename, { ...options, reverse: true });
|
||||
}
|
||||
|
||||
function _resolvePath(id, opts = {}) {
|
||||
if (id instanceof URL || id.startsWith("file://")) {
|
||||
return normalize(fileURLToPath(id));
|
||||
}
|
||||
if (isAbsolute(id)) {
|
||||
return normalize(id);
|
||||
}
|
||||
return resolveModulePath(id, {
|
||||
...opts,
|
||||
from: opts.from || opts.parent || opts.url
|
||||
});
|
||||
}
|
||||
|
||||
const FileCache$1 = /* @__PURE__ */ new Map();
|
||||
function defineTSConfig(tsconfig) {
|
||||
return tsconfig;
|
||||
}
|
||||
async function readTSConfig(id, options = {}) {
|
||||
const resolvedPath = await resolveTSConfig(id, options);
|
||||
const cache = options.cache && typeof options.cache !== "boolean" ? options.cache : FileCache$1;
|
||||
if (options.cache && cache.has(resolvedPath)) {
|
||||
return cache.get(resolvedPath);
|
||||
}
|
||||
const text = await promises.readFile(resolvedPath, "utf8");
|
||||
const parsed = parseJSONC(text);
|
||||
cache.set(resolvedPath, parsed);
|
||||
return parsed;
|
||||
}
|
||||
async function writeTSConfig(path, tsconfig) {
|
||||
await promises.writeFile(path, stringifyJSONC(tsconfig));
|
||||
}
|
||||
async function resolveTSConfig(id = process.cwd(), options = {}) {
|
||||
return findNearestFile("tsconfig.json", {
|
||||
...options,
|
||||
startingFrom: _resolvePath(id, options)
|
||||
});
|
||||
}
|
||||
|
||||
const lockFiles = [
|
||||
"yarn.lock",
|
||||
"package-lock.json",
|
||||
"pnpm-lock.yaml",
|
||||
"npm-shrinkwrap.json",
|
||||
"bun.lockb",
|
||||
"bun.lock"
|
||||
];
|
||||
const workspaceFiles = [
|
||||
"pnpm-workspace.yaml",
|
||||
"lerna.json",
|
||||
"turbo.json",
|
||||
"rush.json",
|
||||
"deno.json",
|
||||
"deno.jsonc"
|
||||
];
|
||||
const FileCache = /* @__PURE__ */ new Map();
|
||||
function definePackageJSON(pkg) {
|
||||
return pkg;
|
||||
}
|
||||
async function readPackageJSON(id, options = {}) {
|
||||
const resolvedPath = await resolvePackageJSON(id, options);
|
||||
const cache = options.cache && typeof options.cache !== "boolean" ? options.cache : FileCache;
|
||||
if (options.cache && cache.has(resolvedPath)) {
|
||||
return cache.get(resolvedPath);
|
||||
}
|
||||
const blob = await promises.readFile(resolvedPath, "utf8");
|
||||
let parsed;
|
||||
try {
|
||||
parsed = parseJSON(blob);
|
||||
} catch {
|
||||
parsed = parseJSONC(blob);
|
||||
}
|
||||
cache.set(resolvedPath, parsed);
|
||||
return parsed;
|
||||
}
|
||||
async function writePackageJSON(path, pkg) {
|
||||
await promises.writeFile(path, stringifyJSON(pkg));
|
||||
}
|
||||
async function resolvePackageJSON(id = process.cwd(), options = {}) {
|
||||
return findNearestFile("package.json", {
|
||||
...options,
|
||||
startingFrom: _resolvePath(id, options)
|
||||
});
|
||||
}
|
||||
async function resolveLockfile(id = process.cwd(), options = {}) {
|
||||
return findNearestFile(lockFiles, {
|
||||
...options,
|
||||
startingFrom: _resolvePath(id, options)
|
||||
});
|
||||
}
|
||||
const workspaceTests = {
|
||||
workspaceFile: (opts) => findFile(workspaceFiles, opts).then((r) => dirname(r)),
|
||||
gitConfig: (opts) => findFile(".git/config", opts).then((r) => resolve(r, "../..")),
|
||||
lockFile: (opts) => findFile(lockFiles, opts).then((r) => dirname(r)),
|
||||
packageJson: (opts) => findFile("package.json", opts).then((r) => dirname(r))
|
||||
};
|
||||
async function findWorkspaceDir(id = process.cwd(), options = {}) {
|
||||
const startingFrom = _resolvePath(id, options);
|
||||
const tests = options.tests || ["workspaceFile", "gitConfig", "lockFile", "packageJson"];
|
||||
for (const testName of tests) {
|
||||
const test = workspaceTests[testName];
|
||||
if (options[testName] === false || !test) {
|
||||
continue;
|
||||
}
|
||||
const direction = options[testName] || (testName === "gitConfig" ? "closest" : "furthest");
|
||||
const detected = await test({
|
||||
...options,
|
||||
startingFrom,
|
||||
reverse: direction === "furthest"
|
||||
}).catch(() => {
|
||||
});
|
||||
if (detected) {
|
||||
return detected;
|
||||
}
|
||||
}
|
||||
throw new Error(`Cannot detect workspace root from ${id}`);
|
||||
}
|
||||
|
||||
function defineGitConfig(config) {
|
||||
return config;
|
||||
}
|
||||
async function resolveGitConfig(dir, opts) {
|
||||
return findNearestFile(".git/config", { ...opts, startingFrom: dir });
|
||||
}
|
||||
async function readGitConfig(dir, opts) {
|
||||
const path = await resolveGitConfig(dir, opts);
|
||||
const ini = await readFile(path, "utf8");
|
||||
return parseGitConfig(ini);
|
||||
}
|
||||
async function writeGitConfig(path, config) {
|
||||
await writeFile(path, stringifyGitConfig(config));
|
||||
}
|
||||
function parseGitConfig(ini) {
|
||||
return parseINI(ini.replaceAll(/^\[(\w+) "(.+)"\]$/gm, "[$1.$2]"));
|
||||
}
|
||||
function stringifyGitConfig(config) {
|
||||
return stringifyINI(config).replaceAll(/^\[(\w+)\.(\w+)\]$/gm, '[$1 "$2"]');
|
||||
}
|
||||
|
||||
export { defineGitConfig, definePackageJSON, defineTSConfig, findFarthestFile, findFile, findNearestFile, findWorkspaceDir, parseGitConfig, readGitConfig, readPackageJSON, readTSConfig, resolveGitConfig, resolveLockfile, resolvePackageJSON, resolveTSConfig, stringifyGitConfig, writeGitConfig, writePackageJSON, writeTSConfig };
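
The `parseGitConfig`/`stringifyGitConfig` pair above works by rewriting section headers such as `[remote "origin"]` into dotted INI sections (`[remote.origin]`) before delegating to confbox, and reversing the rewrite on output. A small round-trip sketch (the repository URL is just an example):

```js
import { parseGitConfig, stringifyGitConfig } from "pkg-types";

const ini = `[remote "origin"]
	url = https://github.com/unjs/pkg-types.git
	fetch = +refs/heads/*:refs/remotes/origin/*
`;

const config = parseGitConfig(ini);
console.log(config);                     // nested object, e.g. { remote: { origin: { url, fetch } } }
console.log(stringifyGitConfig(config)); // serialized back with `[remote "origin"]`-style headers
```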
|
||||
@@ -1,45 +0,0 @@
|
||||
{
|
||||
"name": "pkg-types",
|
||||
"version": "2.2.0",
|
||||
"description": "Node.js utilities and TypeScript definitions for `package.json` and `tsconfig.json`",
|
||||
"repository": "unjs/pkg-types",
|
||||
"license": "MIT",
|
||||
"sideEffects": false,
|
||||
"exports": {
|
||||
"types": "./dist/index.d.mts",
|
||||
"default": "./dist/index.mjs"
|
||||
},
|
||||
"types": "./dist/index.d.mts",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "unbuild",
|
||||
"dev": "vitest --typecheck",
|
||||
"lint": "eslint && prettier -c src test",
|
||||
"lint:fix": "automd && eslint --fix . && prettier -w src test",
|
||||
"prepack": "pnpm build",
|
||||
"release": "pnpm test && changelogen --release && npm publish && git push --follow-tags",
|
||||
"test": "vitest run --typecheck --coverage"
|
||||
},
|
||||
"dependencies": {
|
||||
"confbox": "^0.2.2",
|
||||
"exsolve": "^1.0.7",
|
||||
"pathe": "^2.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^24.0.7",
|
||||
"@vitest/coverage-v8": "^3.2.4",
|
||||
"automd": "^0.4.0",
|
||||
"changelogen": "^0.6.1",
|
||||
"eslint": "^9.30.0",
|
||||
"eslint-config-unjs": "^0.5.0",
|
||||
"expect-type": "^1.2.1",
|
||||
"jiti": "^2.4.2",
|
||||
"prettier": "^3.6.2",
|
||||
"typescript": "^5.8.3",
|
||||
"unbuild": "^3.5.0",
|
||||
"vitest": "^3.2.4"
|
||||
},
|
||||
"packageManager": "pnpm@10.12.4"
|
||||
}
|
||||
72
web/admin-spa/node_modules/unimport/node_modules/local-pkg/package.json
generated
vendored
72
web/admin-spa/node_modules/unimport/node_modules/local-pkg/package.json
generated
vendored
@@ -1,72 +0,0 @@
|
||||
{
|
||||
"name": "local-pkg",
|
||||
"type": "module",
|
||||
"version": "1.1.1",
|
||||
"packageManager": "pnpm@10.5.2",
|
||||
"description": "Get information on local packages.",
|
||||
"author": "Anthony Fu <anthonyfu117@hotmail.com>",
|
||||
"license": "MIT",
|
||||
"funding": "https://github.com/sponsors/antfu",
|
||||
"homepage": "https://github.com/antfu/local-pkg#readme",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/antfu/local-pkg.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/antfu/local-pkg/issues"
|
||||
},
|
||||
"keywords": [
|
||||
"package"
|
||||
],
|
||||
"sideEffects": false,
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"import": "./dist/index.mjs",
|
||||
"require": "./dist/index.cjs"
|
||||
}
|
||||
},
|
||||
"main": "dist/index.cjs",
|
||||
"module": "dist/index.mjs",
|
||||
"types": "dist/index.d.ts",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"scripts": {
|
||||
"prepublishOnly": "nr build",
|
||||
"build": "unbuild",
|
||||
"lint": "eslint .",
|
||||
"release": "bumpp && npm publish",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"test": "vitest run && node ./test/cjs.cjs && node ./test/esm.mjs"
|
||||
},
|
||||
"dependencies": {
|
||||
"mlly": "^1.7.4",
|
||||
"pkg-types": "^2.0.1",
|
||||
"quansync": "^0.2.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@antfu/eslint-config": "^4.4.0",
|
||||
"@antfu/ni": "^23.3.1",
|
||||
"@antfu/utils": "^9.1.0",
|
||||
"@types/chai": "^5.0.1",
|
||||
"@types/node": "^22.13.8",
|
||||
"bumpp": "^10.0.3",
|
||||
"chai": "^5.2.0",
|
||||
"eslint": "^9.21.0",
|
||||
"esno": "^4.8.0",
|
||||
"find-up-simple": "^1.0.1",
|
||||
"typescript": "^5.8.2",
|
||||
"unbuild": "^3.5.0",
|
||||
"unplugin-quansync": "^0.3.3",
|
||||
"vitest": "^3.0.7"
|
||||
},
|
||||
"pnpm": {
|
||||
"onlyBuiltDependencies": [
|
||||
"esbuild"
|
||||
]
|
||||
}
|
||||
}
|
||||
21
web/admin-spa/node_modules/unimport/node_modules/picomatch/LICENSE
generated
vendored
21
web/admin-spa/node_modules/unimport/node_modules/picomatch/LICENSE
generated
vendored
@@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2017-present, Jon Schlinkert.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
738
web/admin-spa/node_modules/unimport/node_modules/picomatch/README.md
generated
vendored
738
web/admin-spa/node_modules/unimport/node_modules/picomatch/README.md
generated
vendored
@@ -1,738 +0,0 @@
|
||||
<h1 align="center">Picomatch</h1>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://npmjs.org/package/picomatch">
|
||||
<img src="https://img.shields.io/npm/v/picomatch.svg" alt="version">
|
||||
</a>
|
||||
<a href="https://github.com/micromatch/picomatch/actions?workflow=Tests">
|
||||
<img src="https://github.com/micromatch/picomatch/workflows/Tests/badge.svg" alt="test status">
|
||||
</a>
|
||||
<a href="https://coveralls.io/github/micromatch/picomatch">
|
||||
<img src="https://img.shields.io/coveralls/github/micromatch/picomatch/master.svg" alt="coverage status">
|
||||
</a>
|
||||
<a href="https://npmjs.org/package/picomatch">
|
||||
<img src="https://img.shields.io/npm/dm/picomatch.svg" alt="downloads">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
<p align="center">
|
||||
<strong>Blazing fast and accurate glob matcher written in JavaScript.</strong><br>
|
||||
<em>No dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.</em>
|
||||
</p>
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
## Why picomatch?
|
||||
|
||||
* **Lightweight** - No dependencies
|
||||
* **Minimal** - Tiny API surface. Main export is a function that takes a glob pattern and returns a matcher function.
|
||||
* **Fast** - Loads in about 2ms (that's several times faster than a [single frame of a HD movie](http://www.endmemo.com/sconvert/framespersecondframespermillisecond.php) at 60fps)
|
||||
* **Performant** - Use the returned matcher function to speed up repeat matching (like when watching files)
|
||||
* **Accurate matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories, [advanced globbing](#advanced-globbing) with extglobs, braces, and POSIX brackets, and support for escaping special characters with `\` or quotes.
|
||||
* **Well tested** - Thousands of unit tests
|
||||
|
||||
See the [library comparison](#library-comparisons) to other libraries.
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
## Table of Contents
|
||||
|
||||
<details><summary> Click to expand </summary>
|
||||
|
||||
- [Install](#install)
|
||||
- [Usage](#usage)
|
||||
- [API](#api)
|
||||
* [picomatch](#picomatch)
|
||||
* [.test](#test)
|
||||
* [.matchBase](#matchbase)
|
||||
* [.isMatch](#ismatch)
|
||||
* [.parse](#parse)
|
||||
* [.scan](#scan)
|
||||
* [.compileRe](#compilere)
|
||||
* [.makeRe](#makere)
|
||||
* [.toRegex](#toregex)
|
||||
- [Options](#options)
|
||||
* [Picomatch options](#picomatch-options)
|
||||
* [Scan Options](#scan-options)
|
||||
* [Options Examples](#options-examples)
|
||||
- [Globbing features](#globbing-features)
|
||||
* [Basic globbing](#basic-globbing)
|
||||
* [Advanced globbing](#advanced-globbing)
|
||||
* [Braces](#braces)
|
||||
* [Matching special characters as literals](#matching-special-characters-as-literals)
|
||||
- [Library Comparisons](#library-comparisons)
|
||||
- [Benchmarks](#benchmarks)
|
||||
- [Philosophies](#philosophies)
|
||||
- [About](#about)
|
||||
* [Author](#author)
|
||||
* [License](#license)
|
||||
|
||||
_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_
|
||||
|
||||
</details>
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
## Install
|
||||
|
||||
Install with [npm](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
npm install --save picomatch
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## Usage
|
||||
|
||||
The main export is a function that takes a glob pattern and an options object and returns a function for matching strings.
|
||||
|
||||
```js
|
||||
const pm = require('picomatch');
|
||||
const isMatch = pm('*.js');
|
||||
|
||||
console.log(isMatch('abcd')); //=> false
|
||||
console.log(isMatch('a.js')); //=> true
|
||||
console.log(isMatch('a.md')); //=> false
|
||||
console.log(isMatch('a/b.js')); //=> false
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## API
|
||||
|
||||
### [picomatch](lib/picomatch.js#L31)
|
||||
|
||||
Creates a matcher function from one or more glob patterns. The returned function takes a string to match as its first argument, and returns true if the string is a match. The returned matcher function also takes a boolean as the second argument that, when true, returns an object with additional information.
|
||||
|
||||
**Params**
|
||||
|
||||
* `globs` **{String|Array}**: One or more glob patterns.
|
||||
* `options` **{Object=}**
|
||||
* `returns` **{Function=}**: Returns a matcher function.
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch');
|
||||
// picomatch(glob[, options]);
|
||||
|
||||
const isMatch = picomatch('*.!(*a)');
|
||||
console.log(isMatch('a.a')); //=> false
|
||||
console.log(isMatch('a.b')); //=> true
|
||||
```
|
||||
|
||||
**Example without node.js**
|
||||
|
||||
For environments without `node.js`, `picomatch/posix` provides a dependency-free matcher, without automatic OS detection.
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch/posix');
|
||||
// the same API, defaulting to posix paths
|
||||
const isMatch = picomatch('a/*');
|
||||
console.log(isMatch('a\\b')); //=> false
|
||||
console.log(isMatch('a/b')); //=> true
|
||||
|
||||
// you can still configure the matcher function to accept windows paths
const isWinMatch = picomatch('a/*', { windows: true });
console.log(isWinMatch('a\\b')); //=> true
console.log(isWinMatch('a/b')); //=> true
|
||||
```
|
||||
|
||||
### [.test](lib/picomatch.js#L116)
|
||||
|
||||
Test `input` with the given `regex`. This is used by the main `picomatch()` function to test the input string.
|
||||
|
||||
**Params**
|
||||
|
||||
* `input` **{String}**: String to test.
|
||||
* `regex` **{RegExp}**
|
||||
* `returns` **{Object}**: Returns an object with matching info.
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch');
|
||||
// picomatch.test(input, regex[, options]);
|
||||
|
||||
console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
|
||||
// { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }
|
||||
```
|
||||
|
||||
### [.matchBase](lib/picomatch.js#L160)
|
||||
|
||||
Match the basename of a filepath.
|
||||
|
||||
**Params**
|
||||
|
||||
* `input` **{String}**: String to test.
|
||||
* `glob` **{RegExp|String}**: Glob pattern or regex created by [.makeRe](#makeRe).
|
||||
* `returns` **{Boolean}**
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch');
|
||||
// picomatch.matchBase(input, glob[, options]);
|
||||
console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true
|
||||
```
|
||||
|
||||
### [.isMatch](lib/picomatch.js#L182)
|
||||
|
||||
Returns true if **any** of the given glob `patterns` match the specified `string`.
|
||||
|
||||
**Params**
|
||||
|
||||
* **{String|Array}**: str The string to test.
|
||||
* **{String|Array}**: patterns One or more glob patterns to use for matching.
|
||||
* **{Object}**: See available [options](#options).
|
||||
* `returns` **{Boolean}**: Returns true if any patterns match `str`
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch');
|
||||
// picomatch.isMatch(string, patterns[, options]);
|
||||
|
||||
console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
|
||||
console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
|
||||
```
|
||||
|
||||
### [.parse](lib/picomatch.js#L198)
|
||||
|
||||
Parse a glob pattern to create the source string for a regular expression.
|
||||
|
||||
**Params**
|
||||
|
||||
* `pattern` **{String}**
|
||||
* `options` **{Object}**
|
||||
* `returns` **{Object}**: Returns an object with useful properties and output to be used as a regex source string.
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch');
|
||||
// picomatch.parse(pattern[, options]);
const result = picomatch.parse('*.js');
|
||||
```
|
||||
|
||||
### [.scan](lib/picomatch.js#L230)
|
||||
|
||||
Scan a glob pattern to separate the pattern into segments.
|
||||
|
||||
**Params**
|
||||
|
||||
* `input` **{String}**: Glob pattern to scan.
|
||||
* `options` **{Object}**
|
||||
* `returns` **{Object}**: Returns an object with details about the scanned pattern (see the example below).
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch');
|
||||
// picomatch.scan(input[, options]);
|
||||
|
||||
const result = picomatch.scan('!./foo/*.js');
|
||||
console.log(result);
|
||||
{ prefix: '!./',
|
||||
input: '!./foo/*.js',
|
||||
start: 3,
|
||||
base: 'foo',
|
||||
glob: '*.js',
|
||||
isBrace: false,
|
||||
isBracket: false,
|
||||
isGlob: true,
|
||||
isExtglob: false,
|
||||
isGlobstar: false,
|
||||
negated: true }
|
||||
```
|
||||
|
||||
### [.compileRe](lib/picomatch.js#L244)
|
||||
|
||||
Compile a regular expression from the `state` object returned by the
|
||||
[parse()](#parse) method.
|
||||
|
||||
**Params**
|
||||
|
||||
* `state` **{Object}**
|
||||
* `options` **{Object}**
|
||||
* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser.
|
||||
* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging.
|
||||
* `returns` **{RegExp}**
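
For example, compiling the state returned by `parse()`; the expected results follow the regex output shown under [.makeRe](#makere):

```js
const picomatch = require('picomatch');

const state = picomatch.parse('*.md');
const re = picomatch.compileRe(state);

console.log(re.test('notes.md'));  //=> true
console.log(re.test('notes.txt')); //=> false
```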
|
||||
|
||||
### [.makeRe](lib/picomatch.js#L285)
|
||||
|
||||
Create a regular expression from a parsed glob pattern.
|
||||
|
||||
**Params**
|
||||
|
||||
* `state` **{String}**: The object returned from the `.parse` method.
|
||||
* `options` **{Object}**
|
||||
* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result.
|
||||
* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression.
|
||||
* `returns` **{RegExp}**: Returns a regex created from the given pattern.
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch');
|
||||
const state = picomatch.parse('*.js');
|
||||
// picomatch.compileRe(state[, options]);
|
||||
|
||||
console.log(picomatch.compileRe(state));
|
||||
//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
|
||||
```
|
||||
|
||||
### [.toRegex](lib/picomatch.js#L320)
|
||||
|
||||
Create a regular expression from the given regex source string.
|
||||
|
||||
**Params**
|
||||
|
||||
* `source` **{String}**: Regular expression source string.
|
||||
* `options` **{Object}**
|
||||
* `returns` **{RegExp}**
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch');
|
||||
// picomatch.toRegex(source[, options]);
|
||||
|
||||
const { output } = picomatch.parse('*.js');
|
||||
console.log(picomatch.toRegex(output));
|
||||
//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## Options
|
||||
|
||||
### Picomatch options
|
||||
|
||||
The following options may be used with the main `picomatch()` function or any of the methods on the picomatch API.
|
||||
|
||||
| **Option** | **Type** | **Default value** | **Description** |
|
||||
| --- | --- | --- | --- |
|
||||
| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. |
|
||||
| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). |
|
||||
| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. |
|
||||
| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). |
|
||||
| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` |
|
||||
| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. |
|
||||
| `dot` | `boolean` | `false` | Enable dotfile matching. By default, dotfiles are ignored unless a `.` is explicitly defined in the pattern, or `options.dot` is true |
|
||||
| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. |
|
||||
| `failglob` | `boolean` | `false` | Throws an error if no matches are found. Based on the bash option of the same name. |
|
||||
| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful of common glob patterns. Disable this behavior by setting this option to `false`. |
|
||||
| `flags` | `string` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. |
|
||||
| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. |
|
||||
| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. |
|
||||
| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. |
|
||||
| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. |
|
||||
| `matchBase` | `boolean` | `false` | Alias for `basename` |
|
||||
| `maxLength` | `number` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. |
|
||||
| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. |
|
||||
| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. |
|
||||
| `nocase` | `boolean` | `false` | Make matching case-insensitive. Equivalent to the regex `i` flag. Note that this option is overridden by the `flags` option. |
|
||||
| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. |
|
||||
| `noext` | `boolean` | `false` | Alias for `noextglob` |
|
||||
| `noextglob` | `boolean` | `false` | Disable support for matching with extglobs (like `+(a\|b)`) |
|
||||
| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) |
|
||||
| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` |
|
||||
| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. |
|
||||
| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. |
|
||||
| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. |
|
||||
| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. |
|
||||
| `posix` | `boolean` | `false` | Support POSIX character classes ("posix brackets"). |
|
||||
| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself |
|
||||
| `prepend` | `boolean` | `undefined` | String to prepend to the generated regex used for matching. |
|
||||
| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). |
|
||||
| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. |
|
||||
| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. |
|
||||
| `unescape` | `boolean` | `undefined` | Remove backslashes preceding escaped characters in the glob pattern. By default, backslashes are retained. |
|
||||
| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. |
|
||||
| `windows` | `boolean` | `false` | Also accept backslashes as the path separator. |
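
A short sketch exercising a few of these options; the expected results follow the behavior described in the table above:

```js
const picomatch = require('picomatch');

// dot: also match dotfiles
const matchDot = picomatch('*', { dot: true });
console.log(matchDot('.env')); //=> true

// nocase: case-insensitive matching
const matchAnyCase = picomatch('*.MD', { nocase: true });
console.log(matchAnyCase('readme.md')); //=> true

// ignore: exclude strings that would otherwise match
const matchSource = picomatch('*.js', { ignore: 'vendor*' });
console.log(matchSource('app.js'));    //=> true
console.log(matchSource('vendor.js')); //=> false
```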
|
||||
|
||||
### Scan Options
|
||||
|
||||
In addition to the main [picomatch options](#picomatch-options), the following options may also be used with the [.scan](#scan) method.
|
||||
|
||||
| **Option** | **Type** | **Default value** | **Description** |
|
||||
| --- | --- | --- | --- |
|
||||
| `tokens` | `boolean` | `false` | When `true`, the returned object will include an array of tokens (objects), representing each path "segment" in the scanned glob pattern |
|
||||
| `parts` | `boolean` | `false` | When `true`, the returned object will include an array of strings representing each path "segment" in the scanned glob pattern. This is automatically enabled when `options.tokens` is true |
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
const picomatch = require('picomatch');
|
||||
const result = picomatch.scan('!./foo/*.js', { tokens: true });
|
||||
console.log(result);
|
||||
// {
|
||||
// prefix: '!./',
|
||||
// input: '!./foo/*.js',
|
||||
// start: 3,
|
||||
// base: 'foo',
|
||||
// glob: '*.js',
|
||||
// isBrace: false,
|
||||
// isBracket: false,
|
||||
// isGlob: true,
|
||||
// isExtglob: false,
|
||||
// isGlobstar: false,
|
||||
// negated: true,
|
||||
// maxDepth: 2,
|
||||
// tokens: [
|
||||
// { value: '!./', depth: 0, isGlob: false, negated: true, isPrefix: true },
|
||||
// { value: 'foo', depth: 1, isGlob: false },
|
||||
// { value: '*.js', depth: 1, isGlob: true }
|
||||
// ],
|
||||
// slashes: [ 2, 6 ],
|
||||
// parts: [ 'foo', '*.js' ]
|
||||
// }
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
### Options Examples
|
||||
|
||||
#### options.expandRange
|
||||
|
||||
**Type**: `function`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need.
|
||||
|
||||
**Example**
|
||||
|
||||
The following example shows how to create a glob that matches numbered folders from `01` through `25`, with leading zeros:
|
||||
|
||||
```js
|
||||
const fill = require('fill-range');
|
||||
const regex = pm.makeRe('foo/{01..25}/bar', {
|
||||
expandRange(a, b) {
|
||||
return `(${fill(a, b, { toRegex: true })})`;
|
||||
}
|
||||
});
|
||||
|
||||
console.log(regex);
|
||||
//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/
|
||||
|
||||
console.log(regex.test('foo/00/bar')) // false
|
||||
console.log(regex.test('foo/01/bar')) // true
|
||||
console.log(regex.test('foo/10/bar')) // true
|
||||
console.log(regex.test('foo/22/bar')) // true
|
||||
console.log(regex.test('foo/25/bar')) // true
|
||||
console.log(regex.test('foo/26/bar')) // false
|
||||
```
|
||||
|
||||
#### options.format
|
||||
|
||||
**Type**: `function`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
Custom function for formatting strings before they're matched.
|
||||
|
||||
**Example**
|
||||
|
||||
```js
|
||||
// strip leading './' from strings
|
||||
const format = str => str.replace(/^\.\//, '');
|
||||
const isMatch = picomatch('foo/*.js', { format });
|
||||
console.log(isMatch('./foo/bar.js')); //=> true
|
||||
```
|
||||
|
||||
#### options.onMatch
|
||||
|
||||
```js
|
||||
const onMatch = ({ glob, regex, input, output }) => {
|
||||
console.log({ glob, regex, input, output });
|
||||
};
|
||||
|
||||
const isMatch = picomatch('*', { onMatch });
|
||||
isMatch('foo');
|
||||
isMatch('bar');
|
||||
isMatch('baz');
|
||||
```
|
||||
|
||||
#### options.onIgnore
|
||||
|
||||
```js
|
||||
const onIgnore = ({ glob, regex, input, output }) => {
|
||||
console.log({ glob, regex, input, output });
|
||||
};
|
||||
|
||||
const isMatch = picomatch('*', { onIgnore, ignore: 'f*' });
|
||||
isMatch('foo');
|
||||
isMatch('bar');
|
||||
isMatch('baz');
|
||||
```
|
||||
|
||||
#### options.onResult
|
||||
|
||||
```js
|
||||
const onResult = ({ glob, regex, input, output }) => {
|
||||
console.log({ glob, regex, input, output });
|
||||
};
|
||||
|
||||
const isMatch = picomatch('*', { onResult, ignore: 'f*' });
|
||||
isMatch('foo');
|
||||
isMatch('bar');
|
||||
isMatch('baz');
|
||||
```
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
## Globbing features
|
||||
|
||||
* [Basic globbing](#basic-globbing) (Wildcard matching)
|
||||
* [Advanced globbing](#advanced-globbing) (extglobs, posix brackets, brace matching)
|
||||
|
||||
### Basic globbing
|
||||
|
||||
| **Character** | **Description** |
|
||||
| --- | --- |
|
||||
| `*` | Matches any character zero or more times, excluding path separators. Does _not match_ path separators or hidden files or directories ("dotfiles"), unless explicitly enabled by setting the `dot` option to `true`. |
|
||||
| `**` | Matches any character zero or more times, including path separators. Note that `**` will only match path separators (`/`, and `\\` with the `windows` option) when they are the only characters in a path segment. Thus, `foo**/bar` is equivalent to `foo*/bar`, and `foo/a**b/bar` is equivalent to `foo/a*b/bar`, and _more than two_ consecutive stars in a glob path segment are regarded as _a single star_. Thus, `foo/***/bar` is equivalent to `foo/*/bar`. |
|
||||
| `?` | Matches any character excluding path separators one time. Does _not match_ path separators or leading dots. |
|
||||
| `[abc]` | Matches any characters inside the brackets. For example, `[abc]` would match the characters `a`, `b` or `c`, and nothing else. |
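
For example (expected results follow the wildcard rules in the table above):

```js
const pm = require('picomatch');

console.log(pm.isMatch('foo/bar.js', '*.js'));    //=> false (`*` does not cross path separators)
console.log(pm.isMatch('foo/bar.js', '**/*.js')); //=> true
console.log(pm.isMatch('a1b', 'a?b'));            //=> true
console.log(pm.isMatch('a', '[abc]'));            //=> true
console.log(pm.isMatch('abc', '[abc]'));          //=> false (brackets match a single character)
```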
|
||||
|
||||
#### Matching behavior vs. Bash
|
||||
|
||||
Picomatch's matching features and expected results in unit tests are based on Bash's unit tests and the Bash 4.3 specification, with the following exceptions:
|
||||
|
||||
* Bash will match `foo/bar/baz` with `*`. Picomatch only matches nested directories with `**`.
|
||||
* Bash greedily matches with negated extglobs. For example, Bash 4.3 says that `!(foo)*` should match `foo` and `foobar`, since the trailing `*` backtracks to match the preceding pattern. This is very memory-inefficient, and IMHO, also incorrect. Picomatch would return `false` for both `foo` and `foobar`.
|
||||
|
||||
<br>
|
||||
|
||||
### Advanced globbing
|
||||
|
||||
* [extglobs](#extglobs)
|
||||
* [POSIX brackets](#posix-brackets)
|
||||
* [Braces](#brace-expansion)
|
||||
|
||||
#### Extglobs
|
||||
|
||||
| **Pattern** | **Description** |
|
||||
| --- | --- |
|
||||
| `@(pattern)` | Match _only one_ consecutive occurrence of `pattern` |
|
||||
| `*(pattern)` | Match _zero or more_ consecutive occurrences of `pattern` |
|
||||
| `+(pattern)` | Match _one or more_ consecutive occurrences of `pattern` |
|
||||
| `?(pattern)` | Match _zero or **one**_ consecutive occurrences of `pattern` |
|
||||
| `!(pattern)` | Match _anything but_ `pattern` |
|
||||
|
||||
**Examples**
|
||||
|
||||
```js
|
||||
const pm = require('picomatch');
|
||||
|
||||
// *(pattern) matches ZERO or more of "pattern"
|
||||
console.log(pm.isMatch('a', 'a*(z)')); // true
|
||||
console.log(pm.isMatch('az', 'a*(z)')); // true
|
||||
console.log(pm.isMatch('azzz', 'a*(z)')); // true
|
||||
|
||||
// +(pattern) matches ONE or more of "pattern"
|
||||
console.log(pm.isMatch('a', 'a+(z)')); // false
|
||||
console.log(pm.isMatch('az', 'a+(z)')); // true
|
||||
console.log(pm.isMatch('azzz', 'a+(z)')); // true
|
||||
|
||||
// supports multiple extglobs
|
||||
console.log(pm.isMatch('foo.bar', '!(foo).!(bar)')); // false
|
||||
|
||||
// supports nested extglobs
|
||||
console.log(pm.isMatch('foo.bar', '!(!(foo)).!(!(bar))')); // true
|
||||
```
|
||||
|
||||
#### POSIX brackets
|
||||
|
||||
POSIX classes are disabled by default. Enable this feature by setting the `posix` option to true.
|
||||
|
||||
**Enable POSIX bracket support**
|
||||
|
||||
```js
|
||||
console.log(pm.makeRe('[[:word:]]+', { posix: true }));
|
||||
//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/
|
||||
```
|
||||
|
||||
**Supported POSIX classes**
|
||||
|
||||
The following named POSIX bracket expressions are supported:
|
||||
|
||||
* `[:alnum:]` - Alphanumeric characters, equivalent to `[a-zA-Z0-9]`.
|
||||
* `[:alpha:]` - Alphabetical characters, equivalent to `[a-zA-Z]`.
|
||||
* `[:ascii:]` - ASCII characters, equivalent to `[\\x00-\\x7F]`.
|
||||
* `[:blank:]` - Space and tab characters, equivalent to `[ \\t]`.
|
||||
* `[:cntrl:]` - Control characters, equivalent to `[\\x00-\\x1F\\x7F]`.
|
||||
* `[:digit:]` - Numerical digits, equivalent to `[0-9]`.
|
||||
* `[:graph:]` - Graph characters, equivalent to `[\\x21-\\x7E]`.
|
||||
* `[:lower:]` - Lowercase letters, equivalent to `[a-z]`.
|
||||
* `[:print:]` - Print characters, equivalent to `[\\x20-\\x7E ]`.
|
||||
* `[:punct:]` - Punctuation and symbols, equivalent to `[\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~]`.
|
||||
* `[:space:]` - Extended space characters, equivalent to `[ \\t\\r\\n\\v\\f]`.
|
||||
* `[:upper:]` - Uppercase letters, equivalent to `[A-Z]`.
|
||||
* `[:word:]` - Word characters (letters, numbers and underscores), equivalent to `[A-Za-z0-9_]`.
|
||||
* `[:xdigit:]` - Hexadecimal digits, equivalent to `[A-Fa-f0-9]`.
|
||||
|
||||
See the [Bash Reference Manual](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) for more information.
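
Using the regex produced in the example above (the expected results follow directly from that pattern):

```js
const pm = require('picomatch');

const re = pm.makeRe('[[:word:]]+', { posix: true });
//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/

console.log(re.test('file_1')); //=> true
console.log(re.test('file-1')); //=> false ('-' is not a word character)
```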
|
||||
|
||||
### Braces
|
||||
|
||||
Picomatch does not do brace expansion. For [brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) and advanced matching with braces, use [micromatch](https://github.com/micromatch/micromatch) instead. Picomatch has very basic support for braces.
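
A small sketch of the brace matching picomatch does support (expansion into multiple patterns is left to micromatch):

```js
const pm = require('picomatch');

console.log(pm.isMatch('a.js', '{a,b}.js'));    //=> true
console.log(pm.isMatch('c.js', '{a,b}.js'));    //=> false
// simple ranges are matched as character classes rather than expanded
console.log(pm.isMatch('a2.js', 'a{1..3}.js')); //=> true
```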
|
||||
|
||||
### Matching special characters as literals
|
||||
|
||||
If you wish to match special characters in a filepath, and you want to use those characters in your glob pattern, they must be escaped with backslashes or quotes.

**Special Characters**

Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms.

The following characters must be escaped to be matched as literals: `$^*+?()[]`
|
||||
|
||||
Examples:
|
||||
|
||||
```js
|
||||
console.log(pm.makeRe('foo/bar \\(1\\)'));
|
||||
```
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
## Library Comparisons
|
||||
|
||||
The following table shows which features are supported by [minimatch](https://github.com/isaacs/minimatch), [micromatch](https://github.com/micromatch/micromatch), [picomatch](https://github.com/micromatch/picomatch), [nanomatch](https://github.com/micromatch/nanomatch), [extglob](https://github.com/micromatch/extglob), [braces](https://github.com/micromatch/braces), and [expand-brackets](https://github.com/micromatch/expand-brackets).
|
||||
|
||||
| **Feature** | `minimatch` | `micromatch` | `picomatch` | `nanomatch` | `extglob` | `braces` | `expand-brackets` |
|
||||
| --- | --- | --- | --- | --- | --- | --- | --- |
|
||||
| Wildcard matching (`*?+`) | ✔ | ✔ | ✔ | ✔ | - | - | - |
|
||||
| Advanced globbing | ✔ | ✔ | ✔ | - | - | - | - |
|
||||
| Brace _matching_ | ✔ | ✔ | ✔ | - | - | ✔ | - |
|
||||
| Brace _expansion_ | ✔ | ✔ | - | - | - | ✔ | - |
|
||||
| Extglobs | partial | ✔ | ✔ | - | ✔ | - | - |
|
||||
| Posix brackets | - | ✔ | ✔ | - | - | - | ✔ |
|
||||
| Regular expression syntax | - | ✔ | ✔ | ✔ | ✔ | - | ✔ |
|
||||
| File system operations | - | - | - | - | - | - | - |
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
## Benchmarks
|
||||
|
||||
Performance comparison of picomatch and minimatch.
|
||||
|
||||
_(Pay special attention to the last three benchmarks. Minimatch freezes on long ranges.)_
|
||||
|
||||
```
|
||||
# .makeRe star (*)
|
||||
picomatch x 4,449,159 ops/sec ±0.24% (97 runs sampled)
|
||||
minimatch x 632,772 ops/sec ±0.14% (98 runs sampled)
|
||||
|
||||
# .makeRe star; dot=true (*)
|
||||
picomatch x 3,500,079 ops/sec ±0.26% (99 runs sampled)
|
||||
minimatch x 564,916 ops/sec ±0.23% (96 runs sampled)
|
||||
|
||||
# .makeRe globstar (**)
|
||||
picomatch x 3,261,000 ops/sec ±0.27% (98 runs sampled)
|
||||
minimatch x 1,664,766 ops/sec ±0.20% (100 runs sampled)
|
||||
|
||||
# .makeRe globstars (**/**/**)
|
||||
picomatch x 3,284,469 ops/sec ±0.18% (97 runs sampled)
|
||||
minimatch x 1,435,880 ops/sec ±0.34% (95 runs sampled)
|
||||
|
||||
# .makeRe with leading star (*.txt)
|
||||
picomatch x 3,100,197 ops/sec ±0.35% (99 runs sampled)
|
||||
minimatch x 428,347 ops/sec ±0.42% (94 runs sampled)
|
||||
|
||||
# .makeRe - basic braces ({a,b,c}*.txt)
|
||||
picomatch x 443,578 ops/sec ±1.33% (89 runs sampled)
|
||||
minimatch x 107,143 ops/sec ±0.35% (94 runs sampled)
|
||||
|
||||
# .makeRe - short ranges ({a..z}*.txt)
|
||||
picomatch x 415,484 ops/sec ±0.76% (96 runs sampled)
|
||||
minimatch x 14,299 ops/sec ±0.26% (96 runs sampled)
|
||||
|
||||
# .makeRe - medium ranges ({1..100000}*.txt)
|
||||
picomatch x 395,020 ops/sec ±0.87% (89 runs sampled)
|
||||
minimatch x 2 ops/sec ±4.59% (10 runs sampled)
|
||||
|
||||
# .makeRe - long ranges ({1..10000000}*.txt)
|
||||
picomatch x 400,036 ops/sec ±0.83% (90 runs sampled)
|
||||
minimatch (FATAL ERROR: Ineffective mark-compacts near heap limit Allocation failed - JavaScript heap out of memory)
|
||||
```
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
## Philosophies
|
||||
|
||||
The goal of this library is to be blazing fast, without compromising on accuracy.
|
||||
|
||||
**Accuracy**
|
||||
|
||||
The number one goal of this library is accuracy. However, it's not unusual for different glob implementations to have different rules for matching behavior, even with simple wildcard matching. It gets increasingly complicated when different features are combined, like when extglobs are combined with globstars, braces, slashes, and so on: `!(**/{a,b,*/c})`.
|
||||
|
||||
Thus, given that there is no canonical glob specification to use as a single source of truth when differences of opinion arise regarding behavior, sometimes we have to use our best judgement and rely on feedback from users to make improvements.
|
||||
|
||||
**Performance**
|
||||
|
||||
Although this library performs well in benchmarks, and in most cases it's faster than other popular libraries we benchmarked against, we will always choose accuracy over performance. It's not helpful to anyone if our library is faster at returning the wrong answer.
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
## About
|
||||
|
||||
<details>
|
||||
<summary><strong>Contributing</strong></summary>
|
||||
|
||||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
|
||||
|
||||
Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Running Tests</strong></summary>
|
||||
|
||||
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
|
||||
|
||||
```sh
|
||||
npm install && npm test
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Building docs</strong></summary>
|
||||
|
||||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
|
||||
|
||||
To generate the readme, run the following command:
|
||||
|
||||
```sh
|
||||
npm install -g verbose/verb#dev verb-generate-readme && verb
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Author
|
||||
|
||||
**Jon Schlinkert**
|
||||
|
||||
* [GitHub Profile](https://github.com/jonschlinkert)
|
||||
* [Twitter Profile](https://twitter.com/jonschlinkert)
|
||||
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
|
||||
|
||||
### License
|
||||
|
||||
Copyright © 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert).
|
||||
Released under the [MIT License](LICENSE).
|
||||
17
web/admin-spa/node_modules/unimport/node_modules/picomatch/index.js
generated
vendored
17
web/admin-spa/node_modules/unimport/node_modules/picomatch/index.js
generated
vendored
@@ -1,17 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const pico = require('./lib/picomatch');
|
||||
const utils = require('./lib/utils');
|
||||
|
||||
function picomatch(glob, options, returnState = false) {
|
||||
// default to os.platform()
|
||||
if (options && (options.windows === null || options.windows === undefined)) {
|
||||
// don't mutate the original options object
|
||||
options = { ...options, windows: utils.isWindows() };
|
||||
}
|
||||
|
||||
return pico(glob, options, returnState);
|
||||
}
|
||||
|
||||
Object.assign(picomatch, pico);
|
||||
module.exports = picomatch;
|
||||
180
web/admin-spa/node_modules/unimport/node_modules/picomatch/lib/constants.js
generated
vendored
@@ -1,180 +0,0 @@
'use strict';

const WIN_SLASH = '\\\\/';
const WIN_NO_SLASH = `[^${WIN_SLASH}]`;

/**
 * Posix glob regex
 */

const DOT_LITERAL = '\\.';
const PLUS_LITERAL = '\\+';
const QMARK_LITERAL = '\\?';
const SLASH_LITERAL = '\\/';
const ONE_CHAR = '(?=.)';
const QMARK = '[^/]';
const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
const NO_DOT = `(?!${DOT_LITERAL})`;
const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
const STAR = `${QMARK}*?`;
const SEP = '/';

const POSIX_CHARS = {
  DOT_LITERAL,
  PLUS_LITERAL,
  QMARK_LITERAL,
  SLASH_LITERAL,
  ONE_CHAR,
  QMARK,
  END_ANCHOR,
  DOTS_SLASH,
  NO_DOT,
  NO_DOTS,
  NO_DOT_SLASH,
  NO_DOTS_SLASH,
  QMARK_NO_DOT,
  STAR,
  START_ANCHOR,
  SEP
};

/**
 * Windows glob regex
 */

const WINDOWS_CHARS = {
  ...POSIX_CHARS,

  SLASH_LITERAL: `[${WIN_SLASH}]`,
  QMARK: WIN_NO_SLASH,
  STAR: `${WIN_NO_SLASH}*?`,
  DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
  NO_DOT: `(?!${DOT_LITERAL})`,
  NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
  NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
  NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
  QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
  START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
  END_ANCHOR: `(?:[${WIN_SLASH}]|$)`,
  SEP: '\\'
};

/**
 * POSIX Bracket Regex
 */

const POSIX_REGEX_SOURCE = {
  alnum: 'a-zA-Z0-9',
  alpha: 'a-zA-Z',
  ascii: '\\x00-\\x7F',
  blank: ' \\t',
  cntrl: '\\x00-\\x1F\\x7F',
  digit: '0-9',
  graph: '\\x21-\\x7E',
  lower: 'a-z',
  print: '\\x20-\\x7E ',
  punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
  space: ' \\t\\r\\n\\v\\f',
  upper: 'A-Z',
  word: 'A-Za-z0-9_',
  xdigit: 'A-Fa-f0-9'
};

module.exports = {
  MAX_LENGTH: 1024 * 64,
  POSIX_REGEX_SOURCE,

  // regular expressions
  REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
  REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
  REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
  REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
  REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
  REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,

  // Replace globs with equivalent patterns to reduce parsing time.
  REPLACEMENTS: {
    __proto__: null,
    '***': '*',
    '**/**': '**',
    '**/**/**': '**'
  },

  // Digits
  CHAR_0: 48, /* 0 */
  CHAR_9: 57, /* 9 */

  // Alphabet chars.
  CHAR_UPPERCASE_A: 65, /* A */
  CHAR_LOWERCASE_A: 97, /* a */
  CHAR_UPPERCASE_Z: 90, /* Z */
  CHAR_LOWERCASE_Z: 122, /* z */

  CHAR_LEFT_PARENTHESES: 40, /* ( */
  CHAR_RIGHT_PARENTHESES: 41, /* ) */

  CHAR_ASTERISK: 42, /* * */

  // Non-alphabetic chars.
  CHAR_AMPERSAND: 38, /* & */
  CHAR_AT: 64, /* @ */
  CHAR_BACKWARD_SLASH: 92, /* \ */
  CHAR_CARRIAGE_RETURN: 13, /* \r */
  CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
  CHAR_COLON: 58, /* : */
  CHAR_COMMA: 44, /* , */
  CHAR_DOT: 46, /* . */
  CHAR_DOUBLE_QUOTE: 34, /* " */
  CHAR_EQUAL: 61, /* = */
  CHAR_EXCLAMATION_MARK: 33, /* ! */
  CHAR_FORM_FEED: 12, /* \f */
  CHAR_FORWARD_SLASH: 47, /* / */
  CHAR_GRAVE_ACCENT: 96, /* ` */
  CHAR_HASH: 35, /* # */
  CHAR_HYPHEN_MINUS: 45, /* - */
  CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
  CHAR_LEFT_CURLY_BRACE: 123, /* { */
  CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
  CHAR_LINE_FEED: 10, /* \n */
  CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
  CHAR_PERCENT: 37, /* % */
  CHAR_PLUS: 43, /* + */
  CHAR_QUESTION_MARK: 63, /* ? */
  CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
  CHAR_RIGHT_CURLY_BRACE: 125, /* } */
  CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
  CHAR_SEMICOLON: 59, /* ; */
  CHAR_SINGLE_QUOTE: 39, /* ' */
  CHAR_SPACE: 32, /* */
  CHAR_TAB: 9, /* \t */
  CHAR_UNDERSCORE: 95, /* _ */
  CHAR_VERTICAL_LINE: 124, /* | */
  CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */

  /**
   * Create EXTGLOB_CHARS
   */

  extglobChars(chars) {
    return {
      '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
      '?': { type: 'qmark', open: '(?:', close: ')?' },
      '+': { type: 'plus', open: '(?:', close: ')+' },
      '*': { type: 'star', open: '(?:', close: ')*' },
      '@': { type: 'at', open: '(?:', close: ')' }
    };
  },

  /**
   * Create GLOB_CHARS
   */

  globChars(win32) {
    return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
  }
};
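These tables are plain data that the parser interpolates into regex source. The sketch below shows one way they could be consumed directly; the deep `require('picomatch/lib/constants')` path is an assumption based on `lib` being listed in the package's `files` field, and the logged values are described rather than quoted as exact escape sequences:

```js
const constants = require('picomatch/lib/constants');

// Pick the character table for the target platform: the Windows table's
// SLASH_LITERAL / STAR / QMARK sources also accept '\' as a separator.
const chars = constants.globChars(process.platform === 'win32');
console.log(chars.STAR); // regex source for "rest of one path segment"

// POSIX bracket classes are stored as raw character-class source strings.
const alpha = new RegExp(`^[${constants.POSIX_REGEX_SOURCE.alpha}]+$`);
console.log(alpha.test('Picomatch')); // true
console.log(alpha.test('v4.0.3'));    // false
```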
1085
web/admin-spa/node_modules/unimport/node_modules/picomatch/lib/parse.js
generated
vendored
File diff suppressed because it is too large
341
web/admin-spa/node_modules/unimport/node_modules/picomatch/lib/picomatch.js
generated
vendored
@@ -1,341 +0,0 @@
'use strict';

const scan = require('./scan');
const parse = require('./parse');
const utils = require('./utils');
const constants = require('./constants');
const isObject = val => val && typeof val === 'object' && !Array.isArray(val);

/**
 * Creates a matcher function from one or more glob patterns. The
 * returned function takes a string to match as its first argument,
 * and returns true if the string is a match. The returned matcher
 * function also takes a boolean as the second argument that, when true,
 * returns an object with additional information.
 *
 * ```js
 * const picomatch = require('picomatch');
 * // picomatch(glob[, options]);
 *
 * const isMatch = picomatch('*.!(*a)');
 * console.log(isMatch('a.a')); //=> false
 * console.log(isMatch('a.b')); //=> true
 * ```
 * @name picomatch
 * @param {String|Array} `globs` One or more glob patterns.
 * @param {Object=} `options`
 * @return {Function=} Returns a matcher function.
 * @api public
 */

const picomatch = (glob, options, returnState = false) => {
  if (Array.isArray(glob)) {
    const fns = glob.map(input => picomatch(input, options, returnState));
    const arrayMatcher = str => {
      for (const isMatch of fns) {
        const state = isMatch(str);
        if (state) return state;
      }
      return false;
    };
    return arrayMatcher;
  }

  const isState = isObject(glob) && glob.tokens && glob.input;

  if (glob === '' || (typeof glob !== 'string' && !isState)) {
    throw new TypeError('Expected pattern to be a non-empty string');
  }

  const opts = options || {};
  const posix = opts.windows;
  const regex = isState
    ? picomatch.compileRe(glob, options)
    : picomatch.makeRe(glob, options, false, true);

  const state = regex.state;
  delete regex.state;

  let isIgnored = () => false;
  if (opts.ignore) {
    const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
    isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);
  }

  const matcher = (input, returnObject = false) => {
    const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });
    const result = { glob, state, regex, posix, input, output, match, isMatch };

    if (typeof opts.onResult === 'function') {
      opts.onResult(result);
    }

    if (isMatch === false) {
      result.isMatch = false;
      return returnObject ? result : false;
    }

    if (isIgnored(input)) {
      if (typeof opts.onIgnore === 'function') {
        opts.onIgnore(result);
      }
      result.isMatch = false;
      return returnObject ? result : false;
    }

    if (typeof opts.onMatch === 'function') {
      opts.onMatch(result);
    }
    return returnObject ? result : true;
  };

  if (returnState) {
    matcher.state = state;
  }

  return matcher;
};

/**
 * Test `input` with the given `regex`. This is used by the main
 * `picomatch()` function to test the input string.
 *
 * ```js
 * const picomatch = require('picomatch');
 * // picomatch.test(input, regex[, options]);
 *
 * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
 * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }
 * ```
 * @param {String} `input` String to test.
 * @param {RegExp} `regex`
 * @return {Object} Returns an object with matching info.
 * @api public
 */

picomatch.test = (input, regex, options, { glob, posix } = {}) => {
  if (typeof input !== 'string') {
    throw new TypeError('Expected input to be a string');
  }

  if (input === '') {
    return { isMatch: false, output: '' };
  }

  const opts = options || {};
  const format = opts.format || (posix ? utils.toPosixSlashes : null);
  let match = input === glob;
  let output = (match && format) ? format(input) : input;

  if (match === false) {
    output = format ? format(input) : input;
    match = output === glob;
  }

  if (match === false || opts.capture === true) {
    if (opts.matchBase === true || opts.basename === true) {
      match = picomatch.matchBase(input, regex, options, posix);
    } else {
      match = regex.exec(output);
    }
  }

  return { isMatch: Boolean(match), match, output };
};

/**
 * Match the basename of a filepath.
 *
 * ```js
 * const picomatch = require('picomatch');
 * // picomatch.matchBase(input, glob[, options]);
 * console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true
 * ```
 * @param {String} `input` String to test.
 * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).
 * @return {Boolean}
 * @api public
 */

picomatch.matchBase = (input, glob, options) => {
  const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);
  return regex.test(utils.basename(input));
};

/**
 * Returns true if **any** of the given glob `patterns` match the specified `string`.
 *
 * ```js
 * const picomatch = require('picomatch');
 * // picomatch.isMatch(string, patterns[, options]);
 *
 * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
 * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
 * ```
 * @param {String|Array} str The string to test.
 * @param {String|Array} patterns One or more glob patterns to use for matching.
 * @param {Object} [options] See available [options](#options).
 * @return {Boolean} Returns true if any patterns match `str`
 * @api public
 */

picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);

/**
 * Parse a glob pattern to create the source string for a regular
 * expression.
 *
 * ```js
 * const picomatch = require('picomatch');
 * const result = picomatch.parse(pattern[, options]);
 * ```
 * @param {String} `pattern`
 * @param {Object} `options`
 * @return {Object} Returns an object with useful properties and output to be used as a regex source string.
 * @api public
 */

picomatch.parse = (pattern, options) => {
  if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));
  return parse(pattern, { ...options, fastpaths: false });
};

/**
 * Scan a glob pattern to separate the pattern into segments.
 *
 * ```js
 * const picomatch = require('picomatch');
 * // picomatch.scan(input[, options]);
 *
 * const result = picomatch.scan('!./foo/*.js');
 * console.log(result);
 * // { prefix: '!./',
 * //   input: '!./foo/*.js',
 * //   start: 3,
 * //   base: 'foo',
 * //   glob: '*.js',
 * //   isBrace: false,
 * //   isBracket: false,
 * //   isGlob: true,
 * //   isExtglob: false,
 * //   isGlobstar: false,
 * //   negated: true }
 * ```
 * @param {String} `input` Glob pattern to scan.
 * @param {Object} `options`
 * @return {Object} Returns an object with
 * @api public
 */

picomatch.scan = (input, options) => scan(input, options);

/**
 * Compile a regular expression from the `state` object returned by the
 * [parse()](#parse) method.
 *
 * @param {Object} `state`
 * @param {Object} `options`
 * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser.
 * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging.
 * @return {RegExp}
 * @api public
 */

picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => {
  if (returnOutput === true) {
    return state.output;
  }

  const opts = options || {};
  const prepend = opts.contains ? '' : '^';
  const append = opts.contains ? '' : '$';

  let source = `${prepend}(?:${state.output})${append}`;
  if (state && state.negated === true) {
    source = `^(?!${source}).*$`;
  }

  const regex = picomatch.toRegex(source, options);
  if (returnState === true) {
    regex.state = state;
  }

  return regex;
};

/**
 * Create a regular expression from a parsed glob pattern.
 *
 * ```js
 * const picomatch = require('picomatch');
 * const state = picomatch.parse('*.js');
 * // picomatch.compileRe(state[, options]);
 *
 * console.log(picomatch.compileRe(state));
 * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
 * ```
 * @param {String} `state` The object returned from the `.parse` method.
 * @param {Object} `options`
 * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result.
 * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression.
 * @return {RegExp} Returns a regex created from the given pattern.
 * @api public
 */

picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
  if (!input || typeof input !== 'string') {
    throw new TypeError('Expected a non-empty string');
  }

  let parsed = { negated: false, fastpaths: true };

  if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {
    parsed.output = parse.fastpaths(input, options);
  }

  if (!parsed.output) {
    parsed = parse(input, options);
  }

  return picomatch.compileRe(parsed, options, returnOutput, returnState);
};

/**
 * Create a regular expression from the given regex source string.
 *
 * ```js
 * const picomatch = require('picomatch');
 * // picomatch.toRegex(source[, options]);
 *
 * const { output } = picomatch.parse('*.js');
 * console.log(picomatch.toRegex(output));
 * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
 * ```
 * @param {String} `source` Regular expression source string.
 * @param {Object} `options`
 * @return {RegExp}
 * @api public
 */

picomatch.toRegex = (source, options) => {
  try {
    const opts = options || {};
    return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));
  } catch (err) {
    if (options && options.debug === true) throw err;
    return /$^/;
  }
};

/**
 * Picomatch constants.
 * @return {Object}
 */

picomatch.constants = constants;

/**
 * Expose "picomatch"
 */

module.exports = picomatch;
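Pulling the pieces above together: a caller compiles a matcher once and reuses it, `ignore` patterns are routed through the internal `isIgnored` matcher, and `onResult` fires for every tested input. A short sketch; the pattern and file names are illustrative assumptions, while the `isMatch` and `makeRe` outputs mirror the doc comments above:

```js
const picomatch = require('picomatch');

// Compile once, reuse for many inputs.
const isSource = picomatch('src/**/*.js', {
  ignore: ['**/*.test.js'],
  onResult: res => console.log(res.input, res.isMatch)
});

console.log(isSource('src/app/main.js'));      // true
console.log(isSource('src/app/main.test.js')); // false: matched, then rejected by `ignore`
console.log(isSource('docs/readme.md'));       // false

// One-off checks and raw regexes are also exposed:
console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); // true
console.log(picomatch.makeRe('*.js'));                 // /^(?:(?!\.)(?=.)[^/]*?\.js)$/
```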
391
web/admin-spa/node_modules/unimport/node_modules/picomatch/lib/scan.js
generated
vendored
@@ -1,391 +0,0 @@
'use strict';

const utils = require('./utils');
const {
  CHAR_ASTERISK,             /* * */
  CHAR_AT,                   /* @ */
  CHAR_BACKWARD_SLASH,       /* \ */
  CHAR_COMMA,                /* , */
  CHAR_DOT,                  /* . */
  CHAR_EXCLAMATION_MARK,     /* ! */
  CHAR_FORWARD_SLASH,        /* / */
  CHAR_LEFT_CURLY_BRACE,     /* { */
  CHAR_LEFT_PARENTHESES,     /* ( */
  CHAR_LEFT_SQUARE_BRACKET,  /* [ */
  CHAR_PLUS,                 /* + */
  CHAR_QUESTION_MARK,        /* ? */
  CHAR_RIGHT_CURLY_BRACE,    /* } */
  CHAR_RIGHT_PARENTHESES,    /* ) */
  CHAR_RIGHT_SQUARE_BRACKET  /* ] */
} = require('./constants');

const isPathSeparator = code => {
  return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
};

const depth = token => {
  if (token.isPrefix !== true) {
    token.depth = token.isGlobstar ? Infinity : 1;
  }
};

/**
 * Quickly scans a glob pattern and returns an object with a handful of
 * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),
 * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not
 * with `!(`) and `negatedExtglob` (true if the path starts with `!(`).
 *
 * ```js
 * const pm = require('picomatch');
 * console.log(pm.scan('foo/bar/*.js'));
 * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }
 * ```
 * @param {String} `str`
 * @param {Object} `options`
 * @return {Object} Returns an object with tokens and regex source string.
 * @api public
 */

const scan = (input, options) => {
  const opts = options || {};

  const length = input.length - 1;
  const scanToEnd = opts.parts === true || opts.scanToEnd === true;
  const slashes = [];
  const tokens = [];
  const parts = [];

  let str = input;
  let index = -1;
  let start = 0;
  let lastIndex = 0;
  let isBrace = false;
  let isBracket = false;
  let isGlob = false;
  let isExtglob = false;
  let isGlobstar = false;
  let braceEscaped = false;
  let backslashes = false;
  let negated = false;
  let negatedExtglob = false;
  let finished = false;
  let braces = 0;
  let prev;
  let code;
  let token = { value: '', depth: 0, isGlob: false };

  const eos = () => index >= length;
  const peek = () => str.charCodeAt(index + 1);
  const advance = () => {
    prev = code;
    return str.charCodeAt(++index);
  };

  while (index < length) {
    code = advance();
    let next;

    if (code === CHAR_BACKWARD_SLASH) {
      backslashes = token.backslashes = true;
      code = advance();

      if (code === CHAR_LEFT_CURLY_BRACE) {
        braceEscaped = true;
      }
      continue;
    }

    if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {
      braces++;

      while (eos() !== true && (code = advance())) {
        if (code === CHAR_BACKWARD_SLASH) {
          backslashes = token.backslashes = true;
          advance();
          continue;
        }

        if (code === CHAR_LEFT_CURLY_BRACE) {
          braces++;
          continue;
        }

        if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {
          isBrace = token.isBrace = true;
          isGlob = token.isGlob = true;
          finished = true;

          if (scanToEnd === true) {
            continue;
          }

          break;
        }

        if (braceEscaped !== true && code === CHAR_COMMA) {
          isBrace = token.isBrace = true;
          isGlob = token.isGlob = true;
          finished = true;

          if (scanToEnd === true) {
            continue;
          }

          break;
        }

        if (code === CHAR_RIGHT_CURLY_BRACE) {
          braces--;

          if (braces === 0) {
            braceEscaped = false;
            isBrace = token.isBrace = true;
            finished = true;
            break;
          }
        }
      }

      if (scanToEnd === true) {
        continue;
      }

      break;
    }

    if (code === CHAR_FORWARD_SLASH) {
      slashes.push(index);
      tokens.push(token);
      token = { value: '', depth: 0, isGlob: false };

      if (finished === true) continue;
      if (prev === CHAR_DOT && index === (start + 1)) {
        start += 2;
        continue;
      }

      lastIndex = index + 1;
      continue;
    }

    if (opts.noext !== true) {
      const isExtglobChar = code === CHAR_PLUS
        || code === CHAR_AT
        || code === CHAR_ASTERISK
        || code === CHAR_QUESTION_MARK
        || code === CHAR_EXCLAMATION_MARK;

      if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {
        isGlob = token.isGlob = true;
        isExtglob = token.isExtglob = true;
        finished = true;
        if (code === CHAR_EXCLAMATION_MARK && index === start) {
          negatedExtglob = true;
        }

        if (scanToEnd === true) {
          while (eos() !== true && (code = advance())) {
            if (code === CHAR_BACKWARD_SLASH) {
              backslashes = token.backslashes = true;
              code = advance();
              continue;
            }

            if (code === CHAR_RIGHT_PARENTHESES) {
              isGlob = token.isGlob = true;
              finished = true;
              break;
            }
          }
          continue;
        }
        break;
      }
    }

    if (code === CHAR_ASTERISK) {
      if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
      isGlob = token.isGlob = true;
      finished = true;

      if (scanToEnd === true) {
        continue;
      }
      break;
    }

    if (code === CHAR_QUESTION_MARK) {
      isGlob = token.isGlob = true;
      finished = true;

      if (scanToEnd === true) {
        continue;
      }
      break;
    }

    if (code === CHAR_LEFT_SQUARE_BRACKET) {
      while (eos() !== true && (next = advance())) {
        if (next === CHAR_BACKWARD_SLASH) {
          backslashes = token.backslashes = true;
          advance();
          continue;
        }

        if (next === CHAR_RIGHT_SQUARE_BRACKET) {
          isBracket = token.isBracket = true;
          isGlob = token.isGlob = true;
          finished = true;
          break;
        }
      }

      if (scanToEnd === true) {
        continue;
      }

      break;
    }

    if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
      negated = token.negated = true;
      start++;
      continue;
    }

    if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {
      isGlob = token.isGlob = true;

      if (scanToEnd === true) {
        while (eos() !== true && (code = advance())) {
          if (code === CHAR_LEFT_PARENTHESES) {
            backslashes = token.backslashes = true;
            code = advance();
            continue;
          }

          if (code === CHAR_RIGHT_PARENTHESES) {
            finished = true;
            break;
          }
        }
        continue;
      }
      break;
    }

    if (isGlob === true) {
      finished = true;

      if (scanToEnd === true) {
        continue;
      }

      break;
    }
  }

  if (opts.noext === true) {
    isExtglob = false;
    isGlob = false;
  }

  let base = str;
  let prefix = '';
  let glob = '';

  if (start > 0) {
    prefix = str.slice(0, start);
    str = str.slice(start);
    lastIndex -= start;
  }

  if (base && isGlob === true && lastIndex > 0) {
    base = str.slice(0, lastIndex);
    glob = str.slice(lastIndex);
  } else if (isGlob === true) {
    base = '';
    glob = str;
  } else {
    base = str;
  }

  if (base && base !== '' && base !== '/' && base !== str) {
    if (isPathSeparator(base.charCodeAt(base.length - 1))) {
      base = base.slice(0, -1);
    }
  }

  if (opts.unescape === true) {
    if (glob) glob = utils.removeBackslashes(glob);

    if (base && backslashes === true) {
      base = utils.removeBackslashes(base);
    }
  }

  const state = {
    prefix,
    input,
    start,
    base,
    glob,
    isBrace,
    isBracket,
    isGlob,
    isExtglob,
    isGlobstar,
    negated,
    negatedExtglob
  };

  if (opts.tokens === true) {
    state.maxDepth = 0;
    if (!isPathSeparator(code)) {
      tokens.push(token);
    }
    state.tokens = tokens;
  }

  if (opts.parts === true || opts.tokens === true) {
    let prevIndex;

    for (let idx = 0; idx < slashes.length; idx++) {
      const n = prevIndex ? prevIndex + 1 : start;
      const i = slashes[idx];
      const value = input.slice(n, i);
      if (opts.tokens) {
        if (idx === 0 && start !== 0) {
          tokens[idx].isPrefix = true;
          tokens[idx].value = prefix;
        } else {
          tokens[idx].value = value;
        }
        depth(tokens[idx]);
        state.maxDepth += tokens[idx].depth;
      }
      if (idx !== 0 || value !== '') {
        parts.push(value);
      }
      prevIndex = i;
    }

    if (prevIndex && prevIndex + 1 < input.length) {
      const value = input.slice(prevIndex + 1);
      parts.push(value);

      if (opts.tokens) {
        tokens[tokens.length - 1].value = value;
        depth(tokens[tokens.length - 1]);
        state.maxDepth += tokens[tokens.length - 1].depth;
      }
    }

    state.slashes = slashes;
    state.parts = parts;
  }

  return state;
};

module.exports = scan;
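The scanner is what lets callers split a pattern into a literal base and a glob part without compiling a regex, and with `tokens`/`parts` it keeps per-segment data that can drive directory-walk depth. A small sketch; the first result mirrors the doc comment above, while the second pattern and its expected outputs are illustrative and should be treated as approximate:

```js
const picomatch = require('picomatch');

// Separate the static base from the glob part.
const quick = picomatch.scan('foo/bar/*.js');
console.log(quick.base, quick.glob, quick.isGlob); // 'foo/bar' '*.js' true

// With `tokens`/`parts` the scanner keeps per-segment information.
const state = picomatch.scan('!./src/**/*.spec.js', { tokens: true, parts: true });
console.log(state.negated);  // true: leading '!' (but not '!(')
console.log(state.parts);    // e.g. [ 'src', '**', '*.spec.js' ]
console.log(state.maxDepth); // Infinity, because '**' can match any depth
```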
72
web/admin-spa/node_modules/unimport/node_modules/picomatch/lib/utils.js
generated
vendored
@@ -1,72 +0,0 @@
/*global navigator*/
'use strict';

const {
  REGEX_BACKSLASH,
  REGEX_REMOVE_BACKSLASH,
  REGEX_SPECIAL_CHARS,
  REGEX_SPECIAL_CHARS_GLOBAL
} = require('./constants');

exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');

exports.isWindows = () => {
  if (typeof navigator !== 'undefined' && navigator.platform) {
    const platform = navigator.platform.toLowerCase();
    return platform === 'win32' || platform === 'windows';
  }

  if (typeof process !== 'undefined' && process.platform) {
    return process.platform === 'win32';
  }

  return false;
};

exports.removeBackslashes = str => {
  return str.replace(REGEX_REMOVE_BACKSLASH, match => {
    return match === '\\' ? '' : match;
  });
};

exports.escapeLast = (input, char, lastIdx) => {
  const idx = input.lastIndexOf(char, lastIdx);
  if (idx === -1) return input;
  if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1);
  return `${input.slice(0, idx)}\\${input.slice(idx)}`;
};

exports.removePrefix = (input, state = {}) => {
  let output = input;
  if (output.startsWith('./')) {
    output = output.slice(2);
    state.prefix = './';
  }
  return output;
};

exports.wrapOutput = (input, state = {}, options = {}) => {
  const prepend = options.contains ? '' : '^';
  const append = options.contains ? '' : '$';

  let output = `${prepend}(?:${input})${append}`;
  if (state.negated === true) {
    output = `(?:^(?!${output}).*$)`;
  }
  return output;
};

exports.basename = (path, { windows } = {}) => {
  const segs = path.split(windows ? /[\\/]/ : '/');
  const last = segs[segs.length - 1];

  if (last === '') {
    return segs[segs.length - 2];
  }

  return last;
};
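These helpers are internal, but they are reachable because `lib` is published (see the package.json below) and the manifest does not restrict subpath requires; that deep-require path is an assumption, and the inputs are made up for illustration:

```js
const utils = require('picomatch/lib/utils');

// Normalize Windows separators before matching against a posix-style glob.
console.log(utils.toPosixSlashes('src\\app\\main.js')); // 'src/app/main.js'

// basename() splits on both separator styles only when asked to.
console.log(utils.basename('src\\app\\main.js', { windows: true })); // 'main.js'

// Escape a literal string so it can be embedded in generated regex source.
console.log(utils.escapeRegex('file(1).js')); // 'file\(1\)\.js'
```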
83
web/admin-spa/node_modules/unimport/node_modules/picomatch/package.json
generated
vendored
@@ -1,83 +0,0 @@
{
  "name": "picomatch",
  "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.",
  "version": "4.0.3",
  "homepage": "https://github.com/micromatch/picomatch",
  "author": "Jon Schlinkert (https://github.com/jonschlinkert)",
  "funding": "https://github.com/sponsors/jonschlinkert",
  "repository": "micromatch/picomatch",
  "bugs": {
    "url": "https://github.com/micromatch/picomatch/issues"
  },
  "license": "MIT",
  "files": [
    "index.js",
    "posix.js",
    "lib"
  ],
  "sideEffects": false,
  "main": "index.js",
  "engines": {
    "node": ">=12"
  },
  "scripts": {
    "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .",
    "mocha": "mocha --reporter dot",
    "test": "npm run lint && npm run mocha",
    "test:ci": "npm run test:cover",
    "test:cover": "nyc npm run mocha"
  },
  "devDependencies": {
    "eslint": "^8.57.0",
    "fill-range": "^7.0.1",
    "gulp-format-md": "^2.0.0",
    "mocha": "^10.4.0",
    "nyc": "^15.1.0",
    "time-require": "github:jonschlinkert/time-require"
  },
  "keywords": [
    "glob",
    "match",
    "picomatch"
  ],
  "nyc": {
    "reporter": [
      "html",
      "lcov",
      "text-summary"
    ]
  },
  "verb": {
    "toc": {
      "render": true,
      "method": "preWrite",
      "maxdepth": 3
    },
    "layout": "empty",
    "tasks": [
      "readme"
    ],
    "plugins": [
      "gulp-format-md"
    ],
    "lint": {
      "reflinks": true
    },
    "related": {
      "list": [
        "braces",
        "micromatch"
      ]
    },
    "reflinks": [
      "braces",
      "expand-brackets",
      "extglob",
      "fill-range",
      "micromatch",
      "minimatch",
      "nanomatch",
      "picomatch"
    ]
  }
}
3
web/admin-spa/node_modules/unimport/node_modules/picomatch/posix.js
generated
vendored
@@ -1,3 +0,0 @@
'use strict';

module.exports = require('./lib/picomatch');
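Because this entry re-exports `lib/picomatch` directly, it skips the platform-detecting wrapper in index.js: `windows` is never defaulted, so `\` stays an escape character rather than a path separator. A hedged sketch of the difference; the second expectation about the root entry assumes its auto-detection on a Windows host:

```js
const posix = require('picomatch/posix');

const isMatch = posix('foo/*.js');
console.log(isMatch('foo/bar.js'));  // true
console.log(isMatch('foo\\bar.js')); // false: '\' is not a separator here,
                                     // whereas the root entry on Windows
                                     // would typically accept this input
```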
75
web/admin-spa/node_modules/unimport/package.json
generated
vendored
@@ -1,75 +0,0 @@
{
  "name": "unimport",
  "type": "module",
  "version": "3.14.6",
  "description": "Unified utils for auto importing APIs in modules",
  "license": "MIT",
  "repository": "unjs/unimport",
  "sideEffects": false,
  "exports": {
    ".": {
      "import": "./dist/index.mjs",
      "require": "./dist/index.cjs"
    },
    "./unplugin": {
      "import": "./dist/unplugin.mjs",
      "require": "./dist/unplugin.cjs"
    },
    "./addons": {
      "import": "./dist/addons.mjs",
      "require": "./dist/addons.cjs"
    },
    "./*": "./*"
  },
  "main": "./dist/index.cjs",
  "module": "./dist/index.mjs",
  "types": "./dist/index.d.ts",
  "files": [
    "*.d.ts",
    "dist"
  ],
  "dependencies": {
    "@rollup/pluginutils": "^5.1.4",
    "acorn": "^8.14.0",
    "escape-string-regexp": "^5.0.0",
    "estree-walker": "^3.0.3",
    "fast-glob": "^3.3.3",
    "local-pkg": "^1.0.0",
    "magic-string": "^0.30.17",
    "mlly": "^1.7.4",
    "pathe": "^2.0.1",
    "picomatch": "^4.0.2",
    "pkg-types": "^1.3.0",
    "scule": "^1.3.0",
    "strip-literal": "^2.1.1",
    "unplugin": "^1.16.1"
  },
  "devDependencies": {
    "@antfu/eslint-config": "^3.14.0",
    "@types/estree": "^1.0.6",
    "@types/node": "^22.10.6",
    "@types/picomatch": "^3.0.1",
    "@vitest/coverage-v8": "^2.1.8",
    "bumpp": "^9.10.0",
    "conventional-changelog-cli": "^5.0.0",
    "eslint": "^9.18.0",
    "h3": "^1.13.1",
    "jquery": "^3.7.1",
    "lit": "^3.2.1",
    "typescript": "^5.7.3",
    "unbuild": "^3.3.1",
    "vitest": "^2.1.8",
    "vue-tsc": "^2.2.0"
  },
  "scripts": {
    "build": "unbuild",
    "dev": "vitest dev",
    "lint": "eslint .",
    "play": "pnpm -C playground run dev",
    "play:build": "pnpm -C playground run build",
    "typecheck": "vue-tsc --noEmit",
    "changelog": "conventional-changelog -p angular -i CHANGELOG.md -s",
    "release": "pnpm run test --run && bumpp -x \"pnpm run changelog\" --all && pnpm publish",
    "test": "vitest --coverage"
  }
}
3
web/admin-spa/node_modules/unimport/unplugin.d.ts
generated
vendored
@@ -1,3 +0,0 @@
// redirect for TypeScript to pick it up
export * from './dist/unplugin'
export { default } from './dist/unplugin'