chore: stop tracking node_modules

This commit is contained in:
mahdahar 2026-03-26 16:54:31 +07:00
parent 88c8c951b1
commit b76f57e1b9
3273 changed files with 1 additions and 443205 deletions

2
.gitignore vendored
View File

@ -1 +1 @@
/node_modules/
node_modules/

16
node_modules/.bin/node-gyp generated vendored
View File

@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../node-gyp/bin/node-gyp.js" "$@"
else
exec node "$basedir/../node-gyp/bin/node-gyp.js" "$@"
fi

17
node_modules/.bin/node-gyp.cmd generated vendored
View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp\bin\node-gyp.js" %*

28
node_modules/.bin/node-gyp.ps1 generated vendored
View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../node-gyp/bin/node-gyp.js" $args
} else {
& "$basedir/node$exe" "$basedir/../node-gyp/bin/node-gyp.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../node-gyp/bin/node-gyp.js" $args
} else {
& "node$exe" "$basedir/../node-gyp/bin/node-gyp.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

16
node_modules/.bin/node-which generated vendored
View File

@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../which/bin/which.js" "$@"
else
exec node "$basedir/../which/bin/which.js" "$@"
fi

17
node_modules/.bin/node-which.cmd generated vendored
View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\which\bin\which.js" %*

28
node_modules/.bin/node-which.ps1 generated vendored
View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../which/bin/which.js" $args
} else {
& "$basedir/node$exe" "$basedir/../which/bin/which.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../which/bin/which.js" $args
} else {
& "node$exe" "$basedir/../which/bin/which.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

16
node_modules/.bin/nopt generated vendored
View File

@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../nopt/bin/nopt.js" "$@"
else
exec node "$basedir/../nopt/bin/nopt.js" "$@"
fi

17
node_modules/.bin/nopt.cmd generated vendored
View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\nopt\bin\nopt.js" %*

28
node_modules/.bin/nopt.ps1 generated vendored
View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../nopt/bin/nopt.js" $args
} else {
& "$basedir/node$exe" "$basedir/../nopt/bin/nopt.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../nopt/bin/nopt.js" $args
} else {
& "node$exe" "$basedir/../nopt/bin/nopt.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

16
node_modules/.bin/pino generated vendored
View File

@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../pino/bin.js" "$@"
else
exec node "$basedir/../pino/bin.js" "$@"
fi

17
node_modules/.bin/pino.cmd generated vendored
View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\pino\bin.js" %*

28
node_modules/.bin/pino.ps1 generated vendored
View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../pino/bin.js" $args
} else {
& "$basedir/node$exe" "$basedir/../pino/bin.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../pino/bin.js" $args
} else {
& "node$exe" "$basedir/../pino/bin.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

16
node_modules/.bin/prebuild-install generated vendored
View File

@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../prebuild-install/bin.js" "$@"
else
exec node "$basedir/../prebuild-install/bin.js" "$@"
fi

View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\prebuild-install\bin.js" %*

View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args
} else {
& "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../prebuild-install/bin.js" $args
} else {
& "node$exe" "$basedir/../prebuild-install/bin.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

16
node_modules/.bin/rc generated vendored
View File

@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../rc/cli.js" "$@"
else
exec node "$basedir/../rc/cli.js" "$@"
fi

17
node_modules/.bin/rc.cmd generated vendored
View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rc\cli.js" %*

28
node_modules/.bin/rc.ps1 generated vendored
View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../rc/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../rc/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../rc/cli.js" $args
} else {
& "node$exe" "$basedir/../rc/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

16
node_modules/.bin/semver generated vendored
View File

@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@"
else
exec node "$basedir/../semver/bin/semver.js" "$@"
fi

17
node_modules/.bin/semver.cmd generated vendored
View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %*

28
node_modules/.bin/semver.ps1 generated vendored
View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

2052
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large Load Diff

View File

@ -1,20 +0,0 @@
Copyright (c) 2011 Tim Koschützki (tim@debuggable.com), Felix Geisendörfer (felix@debuggable.com)
Copyright (c) 2014 IndigoUnited
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is furnished
to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -1,86 +0,0 @@
# @gar/promise-retry
This is a fork of [promise-retry](https://npm.im/promise-retry). See the [CHANGELOG.md](./CHANGELOG.md) for more info.
It also inlines and updates the original [retry](https://github.com/tim-kos/node-retry) package that was being promisified.
Retries a function that returns a promise, leveraging the power of the [retry](https://github.com/tim-kos/node-retry) module to the promises world.
There's already some modules that are able to retry functions that return promises but they were rather difficult to use or do not offer an easy way to do conditional retries.
## Installation
`$ npm install promise-retry`
## Usage
### retry(fn(retry, number, operation), [options])
Calls `fn` until the returned promise ends up fulfilled or rejected with an error different than a `retry` error.
The `options` argument is an object which maps to the original [retry](https://github.com/tim-kos/node-retry) module options:
- `retries`: The maximum amount of times to retry the operation. Default is `10`.
- `factor`: The exponential factor to use. Default is `2`.
- `minTimeout`: The number of milliseconds before starting the first retry. Default is `1000`.
- `maxTimeout`: The maximum number of milliseconds between two retries. Default is `Infinity`.
- `randomize`: Randomizes the timeouts by multiplying with a factor between `1` to `2`. Default is `false`.
- `forever`: Whether to retry forver, default is false.
- `unref`: Whether to [unref](https://nodejs.org/api/timers.html#timers_unref) the underlying `setTimeout`s.
- `maxRetryTime`: Maximum number of milliseconds that the retried operation is allowed to run.
`options` can also be a Number, which is effectively the same as pasing `{ retries: Number }`
The `fn` function will be called with the following parameters:
- A `retry` function as its first argument that should be called with an error whenever you want to retry `fn`. The `retry` function will always throw an error.
- The current retry number being attempted
- The retry operation object itself from which will allow you to call things like `operation.reset()`
If there are retries left, it will throw a special `retry` error that will be handled internally to call `fn` again.
If there are no retries left, it will throw the actual error passed to it.
## Example
```js
const { retry } = require('@gar/promise-retry');
// Simple example
retry(function (retry, number) {
console.log('attempt number', number);
return doSomething()
.catch(retry);
})
.then(function (value) {
// ..
}, function (err) {
// ..
});
// Conditional example
retry(function (retry, number) {
console.log('attempt number', number);
return doSomething()
.catch(function (err) {
if (err.code === 'ETIMEDOUT') {
retry(err);
}
throw err;
});
})
.then(function (value) {
// ..
}, function (err) {
// ..
});
```
## Tests
`$ npm test`
## License
Released under the [MIT License](http://www.opensource.org/licenses/mit-license.php).

View File

@ -1,114 +0,0 @@
type OperationOptions = {
/**
* The exponential factor to use.
* @default 2
*/
factor?: number | undefined;
/**
* The number of milliseconds before starting the first retry.
* @default 1000
*/
minTimeout?: number | undefined;
/**
* The maximum number of milliseconds between two retries.
* @default Infinity
*/
maxTimeout?: number | undefined;
/**
* Randomizes the timeouts by multiplying a factor between 1-2.
* @default false
*/
randomize?: boolean | undefined;
/**
* The maximum amount of times to retry the operation.
* @default 10
*/
retries?: number | undefined;
/**
* Whether to retry forever.
* @default false
*/
forever?: boolean | undefined;
/**
* Whether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's.
* @default false
*/
unref?: boolean | undefined;
/**
* The maximum time (in milliseconds) that the retried operation is allowed to run.
* @default Infinity
*/
maxRetryTime?: number | undefined;
} | number[];
type RetryOperation = {
/**
* Returns an array of all errors that have been passed to `retryOperation.retry()` so far.
* The returning array has the errors ordered chronologically based on when they were passed to
* `retryOperation.retry()`, which means the first passed error is at index zero and the last is at the last index.
*/
errors(): Error[];
/**
* A reference to the error object that occured most frequently.
* Errors are compared using the `error.message` property.
* If multiple error messages occured the same amount of time, the last error object with that message is returned.
*
* @return If no errors occured so far the value will be `null`.
*/
mainError(): Error | null;
/**
* Defines the function that is to be retried and executes it for the first time right away.
*
* @param fn The function that is to be retried. `currentAttempt` represents the number of attempts callback has been executed so far.
*/
attempt(fn: (currentAttempt: number) => void): void;
/**
* Returns `false` when no `error` value is given, or the maximum amount of retries has been reached.
* Otherwise it returns `true`, and retries the operation after the timeout for the current attempt number.
*/
retry(err?: Error): boolean;
/**
* Stops the operation being retried. Useful for aborting the operation on a fatal error etc.
*/
stop(): void;
/**
* Resets the internal state of the operation object, so that you can call `attempt()` again as if
* this was a new operation object.
*/
reset(): void;
/**
* Returns an int representing the number of attempts it took to call `fn` before it was successful.
*/
attempts(): number;
}
/**
* A function that is retryable, by having implicitly-bound params for both an error handler and an attempt number.
*
* @param retry The retry callback upon any rejection. Essentially throws the error on in the form of a { retried: err }
* wrapper, and tags it with a 'code' field of value "EPROMISERETRY" so that it is recognised as needing retrying. Call
* this from the catch() block when you want to retry a rejected attempt.
* @param attempt The number of the attempt.
* @param operation The operation object from the underlying retry module.
* @returns A Promise for anything (eg. a HTTP response).
*/
type RetryableFn<ResolutionType> = (retry: (error: any) => never, attempt: number, operation: RetryOperation) => Promise<ResolutionType>;
/**
* Wrap all functions of the object with retry. The params can be entered in either order, just like in the original library.
*
* @param retryableFn The function to retry.
* @param options The options for how long/often to retry the function for.
* @returns The Promise resolved by the input retryableFn, or rejected (if not retried) from its catch block.
*/
declare function promiseRetry<ResolutionType>(
retryableFn: RetryableFn<ResolutionType>,
options?: OperationOptions,
): Promise<ResolutionType>;
export { promiseRetry };

View File

@ -1,62 +0,0 @@
const { RetryOperation } = require('./retry')
const createTimeout = (attempt, opts) => Math.min(Math.round((1 + (opts.randomize ? Math.random() : 0)) * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)), opts.maxTimeout)
const isRetryError = err => err?.code === 'EPROMISERETRY' && Object.hasOwn(err, 'retried')
const promiseRetry = async (fn, options = {}) => {
let timeouts = []
if (options instanceof Array) {
timeouts = [...options]
} else {
if (options.retries === Infinity) {
options.forever = true
delete options.retries
}
const opts = {
retries: 10,
factor: 2,
minTimeout: 1 * 1000,
maxTimeout: Infinity,
randomize: false,
...options
}
if (opts.minTimeout > opts.maxTimeout) {
throw new Error('minTimeout is greater than maxTimeout')
}
if (opts.retries) {
for (let i = 0; i < opts.retries; i++) {
timeouts.push(createTimeout(i, opts))
}
// sort the array numerically ascending (since the timeouts may be out of order at factor < 1)
timeouts.sort((a, b) => a - b)
} else if (options.forever) {
timeouts.push(createTimeout(0, opts))
}
}
const operation = new RetryOperation(timeouts, {
forever: options.forever,
unref: options.unref,
maxRetryTime: options.maxRetryTime
})
return new Promise(function (resolve, reject) {
operation.attempt(async number => {
try {
const result = await fn(err => {
throw Object.assign(new Error('Retrying'), { code: 'EPROMISERETRY', retried: err })
}, number, operation)
return resolve(result)
} catch (err) {
if (!isRetryError(err)) {
return reject(err)
}
if (!operation.retry(err.retried || new Error())) {
return reject(err.retried)
}
}
})
})
}
module.exports = { promiseRetry }

View File

@ -1,109 +0,0 @@
class RetryOperation {
#attempts = 1
#cachedTimeouts = null
#errors = []
#fn = null
#maxRetryTime
#operationStart = null
#originalTimeouts
#timeouts
#timer = null
#unref
constructor (timeouts, options = {}) {
this.#originalTimeouts = [...timeouts]
this.#timeouts = [...timeouts]
this.#unref = options.unref
this.#maxRetryTime = options.maxRetryTime || Infinity
if (options.forever) {
this.#cachedTimeouts = [...this.#timeouts]
}
}
get timeouts () {
return [...this.#timeouts]
}
get errors () {
return [...this.#errors]
}
get attempts () {
return this.#attempts
}
get mainError () {
let mainError = null
if (this.#errors.length) {
let mainErrorCount = 0
const counts = {}
for (let i = 0; i < this.#errors.length; i++) {
const error = this.#errors[i]
const { message } = error
if (!counts[message]) {
counts[message] = 0
}
counts[message]++
if (counts[message] >= mainErrorCount) {
mainError = error
mainErrorCount = counts[message]
}
}
}
return mainError
}
reset () {
this.#attempts = 1
this.#timeouts = [...this.#originalTimeouts]
}
stop () {
if (this.#timer) {
clearTimeout(this.#timer)
}
this.#timeouts = []
this.#cachedTimeouts = null
}
retry (err) {
this.#errors.push(err)
if (new Date().getTime() - this.#operationStart >= this.#maxRetryTime) {
// XXX This puts the timeout error first, meaning it will never show as mainError, there may be no way to ever see this
this.#errors.unshift(new Error('RetryOperation timeout occurred'))
return false
}
let timeout = this.#timeouts.shift()
if (timeout === undefined) {
// We're out of timeouts, clear the last error and repeat the final timeout
if (this.#cachedTimeouts) {
this.#errors.pop()
timeout = this.#cachedTimeouts.at(-1)
} else {
return false
}
}
// TODO what if there already is a timer?
this.#timer = setTimeout(() => {
this.#attempts++
this.#fn(this.#attempts)
}, timeout)
if (this.#unref) {
this.#timer.unref()
}
return true
}
attempt (fn) {
this.#fn = fn
this.#operationStart = new Date().getTime()
this.#fn(this.#attempts)
}
}
module.exports = { RetryOperation }

View File

@ -1,45 +0,0 @@
{
"name": "@gar/promise-retry",
"version": "1.0.3",
"description": "Retries a function that returns a promise, leveraging the power of the retry module.",
"main": "./lib/index.js",
"files": [
"lib"
],
"type": "commonjs",
"exports": {
".": [
{
"default": "./lib/index.js",
"types": "./lib/index.d.ts"
},
"./lib/index.js"
]
},
"scripts": {
"lint": "npx standard",
"lint:fix": "npx standard --fix",
"test": "node --test --experimental-test-coverage --test-coverage-lines=100 --test-coverage-functions=100 --test-coverage-branches=100",
"typelint": "npx -p typescript tsc ./lib/index.d.ts",
"posttest": "npm run lint",
"postlint": "npm run typelint"
},
"bugs": {
"url": "https://github.com/wraithgar/node-promise-retry/issues/"
},
"repository": {
"type": "git",
"url": "git://github.com/wraithgar/node-promise-retry.git"
},
"keywords": [
"retry",
"promise",
"backoff",
"repeat",
"replay"
],
"license": "MIT",
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
}

View File

@ -1,15 +0,0 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@ -1,71 +0,0 @@
# fs-minipass
Filesystem streams based on [minipass](http://npm.im/minipass).
4 classes are exported:
- ReadStream
- ReadStreamSync
- WriteStream
- WriteStreamSync
When using `ReadStreamSync`, all of the data is made available
immediately upon consuming the stream. Nothing is buffered in memory
when the stream is constructed. If the stream is piped to a writer,
then it will synchronously `read()` and emit data into the writer as
fast as the writer can consume it. (That is, it will respect
backpressure.) If you call `stream.read()` then it will read the
entire file and return the contents.
When using `WriteStreamSync`, every write is flushed to the file
synchronously. If your writes all come in a single tick, then it'll
write it all out in a single tick. It's as synchronous as you are.
The async versions work much like their node builtin counterparts,
with the exception of introducing significantly less Stream machinery
overhead.
## USAGE
It's just streams, you pipe them or read() them or write() to them.
```js
import { ReadStream, WriteStream } from 'fs-minipass'
// or: const { ReadStream, WriteStream } = require('fs-minipass')
const readStream = new ReadStream('file.txt')
const writeStream = new WriteStream('output.txt')
writeStream.write('some file header or whatever\n')
readStream.pipe(writeStream)
```
## ReadStream(path, options)
Path string is required, but somewhat irrelevant if an open file
descriptor is passed in as an option.
Options:
- `fd` Pass in a numeric file descriptor, if the file is already open.
- `readSize` The size of reads to do, defaults to 16MB
- `size` The size of the file, if known. Prevents zero-byte read()
call at the end.
- `autoClose` Set to `false` to prevent the file descriptor from being
closed when the file is done being read.
## WriteStream(path, options)
Path string is required, but somewhat irrelevant if an open file
descriptor is passed in as an option.
Options:
- `fd` Pass in a numeric file descriptor, if the file is already open.
- `mode` The mode to create the file with. Defaults to `0o666`.
- `start` The position in the file to start reading. If not
specified, then the file will start writing at position zero, and be
truncated by default.
- `autoClose` Set to `false` to prevent the file descriptor from being
closed when the stream is ended.
- `flags` Flags to use when opening the file. Irrelevant if `fd` is
passed in, since file won't be opened in that case. Defaults to
`'a'` if a `pos` is specified, or `'w'` otherwise.

View File

@ -1,118 +0,0 @@
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import EE from 'events';
import { Minipass } from 'minipass';
declare const _autoClose: unique symbol;
declare const _close: unique symbol;
declare const _ended: unique symbol;
declare const _fd: unique symbol;
declare const _finished: unique symbol;
declare const _flags: unique symbol;
declare const _flush: unique symbol;
declare const _handleChunk: unique symbol;
declare const _makeBuf: unique symbol;
declare const _mode: unique symbol;
declare const _needDrain: unique symbol;
declare const _onerror: unique symbol;
declare const _onopen: unique symbol;
declare const _onread: unique symbol;
declare const _onwrite: unique symbol;
declare const _open: unique symbol;
declare const _path: unique symbol;
declare const _pos: unique symbol;
declare const _queue: unique symbol;
declare const _read: unique symbol;
declare const _readSize: unique symbol;
declare const _reading: unique symbol;
declare const _remain: unique symbol;
declare const _size: unique symbol;
declare const _write: unique symbol;
declare const _writing: unique symbol;
declare const _defaultFlag: unique symbol;
declare const _errored: unique symbol;
export type ReadStreamOptions = Minipass.Options<Minipass.ContiguousData> & {
fd?: number;
readSize?: number;
size?: number;
autoClose?: boolean;
};
export type ReadStreamEvents = Minipass.Events<Minipass.ContiguousData> & {
open: [fd: number];
};
export declare class ReadStream extends Minipass<Minipass.ContiguousData, Buffer, ReadStreamEvents> {
[_errored]: boolean;
[_fd]?: number;
[_path]: string;
[_readSize]: number;
[_reading]: boolean;
[_size]: number;
[_remain]: number;
[_autoClose]: boolean;
constructor(path: string, opt: ReadStreamOptions);
get fd(): number | undefined;
get path(): string;
write(): void;
end(): void;
[_open](): void;
[_onopen](er?: NodeJS.ErrnoException | null, fd?: number): void;
[_makeBuf](): Buffer;
[_read](): void;
[_onread](er?: NodeJS.ErrnoException | null, br?: number, buf?: Buffer): void;
[_close](): void;
[_onerror](er: NodeJS.ErrnoException): void;
[_handleChunk](br: number, buf: Buffer): boolean;
emit<Event extends keyof ReadStreamEvents>(ev: Event, ...args: ReadStreamEvents[Event]): boolean;
}
export declare class ReadStreamSync extends ReadStream {
[_open](): void;
[_read](): void;
[_close](): void;
}
export type WriteStreamOptions = {
fd?: number;
autoClose?: boolean;
mode?: number;
captureRejections?: boolean;
start?: number;
flags?: string;
};
export declare class WriteStream extends EE {
readable: false;
writable: boolean;
[_errored]: boolean;
[_writing]: boolean;
[_ended]: boolean;
[_queue]: Buffer[];
[_needDrain]: boolean;
[_path]: string;
[_mode]: number;
[_autoClose]: boolean;
[_fd]?: number;
[_defaultFlag]: boolean;
[_flags]: string;
[_finished]: boolean;
[_pos]?: number;
constructor(path: string, opt: WriteStreamOptions);
emit(ev: string, ...args: any[]): boolean;
get fd(): number | undefined;
get path(): string;
[_onerror](er: NodeJS.ErrnoException): void;
[_open](): void;
[_onopen](er?: null | NodeJS.ErrnoException, fd?: number): void;
end(buf: string, enc?: BufferEncoding): this;
end(buf?: Buffer, enc?: undefined): this;
write(buf: string, enc?: BufferEncoding): boolean;
write(buf: Buffer, enc?: undefined): boolean;
[_write](buf: Buffer): void;
[_onwrite](er?: null | NodeJS.ErrnoException, bw?: number): void;
[_flush](): void;
[_close](): void;
}
export declare class WriteStreamSync extends WriteStream {
[_open](): void;
[_close](): void;
[_write](buf: Buffer): void;
}
export {};
//# sourceMappingURL=index.d.ts.map

View File

@ -1 +0,0 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,OAAO,EAAE,MAAM,QAAQ,CAAA;AAEvB,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAInC,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,GAAG,eAAgB,CAAA;AACzB,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,QAAQ,eAAqB,CAAA;AAEnC,MAAM,MAAM,iBAAiB,GAC3B,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG;IAC1C,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB,CAAA;AAEH,MAAM,MAAM,gBAAgB,GAAG,QAAQ,CAAC,MAAM,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG;IACxE,IAAI,EAAE,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;CACnB,CAAA;AAED,qBAAa,UAAW,SAAQ,QAAQ,CACtC,QAAQ,CAAC,cAAc,EACvB,MAAM,EACN,gBAAgB,CACjB;IACC,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC;IACpB,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAClB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAA;gBAET,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,iBAAiB;IA4BhD,IAAI,EAAE,uBAEL;IAED,IAAI,IAAI,WAEP;IAGD,KAAK;IAKL,GAAG;IAIH,CAAC,KAAK,CAAC;IAIP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,EAAE,CAAC,EAAE,MAAM;IAUxD,
CAAC,QAAQ,CAAC;IAIV,CAAC,KAAK,CAAC;IAeP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,EAAE,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,MAAM;IAStE,CAAC,MAAM,CAAC;IAUR,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,MAAM,CAAC,cAAc;IAMpC,CAAC,YAAY,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM;IAiBtC,IAAI,CAAC,KAAK,SAAS,MAAM,gBAAgB,EACvC,EAAE,EAAE,KAAK,EACT,GAAG,IAAI,EAAE,gBAAgB,CAAC,KAAK,CAAC,GAC/B,OAAO;CAuBX;AAED,qBAAa,cAAe,SAAQ,UAAU;IAC5C,CAAC,KAAK,CAAC;IAYP,CAAC,KAAK,CAAC;IA2BP,CAAC,MAAM,CAAC;CAQT;AAED,MAAM,MAAM,kBAAkB,GAAG;IAC/B,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,iBAAiB,CAAC,EAAE,OAAO,CAAA;IAC3B,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,qBAAa,WAAY,SAAQ,EAAE;IACjC,QAAQ,EAAE,KAAK,CAAQ;IACvB,QAAQ,EAAE,OAAO,CAAQ;IACzB,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,MAAM,CAAC,EAAE,OAAO,CAAS;IAC1B,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,CAAM;IACxB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAS;IAC9B,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC;IACtB,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,YAAY,CAAC,EAAE,OAAO,CAAC;IACxB,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;IACjB,CAAC,SAAS,CAAC,EAAE,OAAO,CAAS;IAC7B,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAA;gBAEH,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,kBAAkB;IAoBjD,IAAI,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE;IAU/B,IAAI,EAAE,uBAEL;IAED,IAAI,IAAI,WAEP;IAED,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,MAAM,CAAC,cAAc;IAMpC,CAAC,KAAK,CAAC;IAMP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,MAAM;IAoBxD,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,cAAc,GAAG,IAAI;IAC5C,GAAG,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,SAAS,GAAG,IAAI;IAoBxC,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,cAAc,GAAG,OAAO;IACjD,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,SAAS,GAAG,OAAO;IAsB5C,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM;IAWpB,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,MAAM;IAwBzD,CAAC,
MAAM,CAAC;IAgBR,CAAC,MAAM,CAAC;CAST;AAED,qBAAa,eAAgB,SAAQ,WAAW;IAC9C,CAAC,KAAK,CAAC,IAAI,IAAI;IAsBf,CAAC,MAAM,CAAC;IASR,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM;CAmBrB"}

View File

@ -1,430 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.WriteStreamSync = exports.WriteStream = exports.ReadStreamSync = exports.ReadStream = void 0;
const events_1 = __importDefault(require("events"));
const fs_1 = __importDefault(require("fs"));
const minipass_1 = require("minipass");
const writev = fs_1.default.writev;
const _autoClose = Symbol('_autoClose');
const _close = Symbol('_close');
const _ended = Symbol('_ended');
const _fd = Symbol('_fd');
const _finished = Symbol('_finished');
const _flags = Symbol('_flags');
const _flush = Symbol('_flush');
const _handleChunk = Symbol('_handleChunk');
const _makeBuf = Symbol('_makeBuf');
const _mode = Symbol('_mode');
const _needDrain = Symbol('_needDrain');
const _onerror = Symbol('_onerror');
const _onopen = Symbol('_onopen');
const _onread = Symbol('_onread');
const _onwrite = Symbol('_onwrite');
const _open = Symbol('_open');
const _path = Symbol('_path');
const _pos = Symbol('_pos');
const _queue = Symbol('_queue');
const _read = Symbol('_read');
const _readSize = Symbol('_readSize');
const _reading = Symbol('_reading');
const _remain = Symbol('_remain');
const _size = Symbol('_size');
const _write = Symbol('_write');
const _writing = Symbol('_writing');
const _defaultFlag = Symbol('_defaultFlag');
const _errored = Symbol('_errored');
class ReadStream extends minipass_1.Minipass {
[_errored] = false;
[_fd];
[_path];
[_readSize];
[_reading] = false;
[_size];
[_remain];
[_autoClose];
constructor(path, opt) {
opt = opt || {};
super(opt);
this.readable = true;
this.writable = false;
if (typeof path !== 'string') {
throw new TypeError('path must be a string');
}
this[_errored] = false;
this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined;
this[_path] = path;
this[_readSize] = opt.readSize || 16 * 1024 * 1024;
this[_reading] = false;
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity;
this[_remain] = this[_size];
this[_autoClose] =
typeof opt.autoClose === 'boolean' ? opt.autoClose : true;
if (typeof this[_fd] === 'number') {
this[_read]();
}
else {
this[_open]();
}
}
get fd() {
return this[_fd];
}
get path() {
return this[_path];
}
//@ts-ignore
write() {
throw new TypeError('this is a readable stream');
}
//@ts-ignore
end() {
throw new TypeError('this is a readable stream');
}
[_open]() {
fs_1.default.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd));
}
[_onopen](er, fd) {
if (er) {
this[_onerror](er);
}
else {
this[_fd] = fd;
this.emit('open', fd);
this[_read]();
}
}
[_makeBuf]() {
return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]));
}
[_read]() {
if (!this[_reading]) {
this[_reading] = true;
const buf = this[_makeBuf]();
/* c8 ignore start */
if (buf.length === 0) {
return process.nextTick(() => this[_onread](null, 0, buf));
}
/* c8 ignore stop */
fs_1.default.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b));
}
}
[_onread](er, br, buf) {
this[_reading] = false;
if (er) {
this[_onerror](er);
}
else if (this[_handleChunk](br, buf)) {
this[_read]();
}
}
[_close]() {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd];
this[_fd] = undefined;
fs_1.default.close(fd, er => er ? this.emit('error', er) : this.emit('close'));
}
}
[_onerror](er) {
this[_reading] = true;
this[_close]();
this.emit('error', er);
}
[_handleChunk](br, buf) {
let ret = false;
// no effect if infinite
this[_remain] -= br;
if (br > 0) {
ret = super.write(br < buf.length ? buf.subarray(0, br) : buf);
}
if (br === 0 || this[_remain] <= 0) {
ret = false;
this[_close]();
super.end();
}
return ret;
}
emit(ev, ...args) {
switch (ev) {
case 'prefinish':
case 'finish':
return false;
case 'drain':
if (typeof this[_fd] === 'number') {
this[_read]();
}
return false;
case 'error':
if (this[_errored]) {
return false;
}
this[_errored] = true;
return super.emit(ev, ...args);
default:
return super.emit(ev, ...args);
}
}
}
exports.ReadStream = ReadStream;
class ReadStreamSync extends ReadStream {
[_open]() {
let threw = true;
try {
this[_onopen](null, fs_1.default.openSync(this[_path], 'r'));
threw = false;
}
finally {
if (threw) {
this[_close]();
}
}
}
[_read]() {
let threw = true;
try {
if (!this[_reading]) {
this[_reading] = true;
do {
const buf = this[_makeBuf]();
/* c8 ignore start */
const br = buf.length === 0
? 0
: fs_1.default.readSync(this[_fd], buf, 0, buf.length, null);
/* c8 ignore stop */
if (!this[_handleChunk](br, buf)) {
break;
}
} while (true);
this[_reading] = false;
}
threw = false;
}
finally {
if (threw) {
this[_close]();
}
}
}
[_close]() {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd];
this[_fd] = undefined;
fs_1.default.closeSync(fd);
this.emit('close');
}
}
}
exports.ReadStreamSync = ReadStreamSync;
class WriteStream extends events_1.default {
readable = false;
writable = true;
[_errored] = false;
[_writing] = false;
[_ended] = false;
[_queue] = [];
[_needDrain] = false;
[_path];
[_mode];
[_autoClose];
[_fd];
[_defaultFlag];
[_flags];
[_finished] = false;
[_pos];
constructor(path, opt) {
opt = opt || {};
super(opt);
this[_path] = path;
this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined;
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode;
this[_pos] = typeof opt.start === 'number' ? opt.start : undefined;
this[_autoClose] =
typeof opt.autoClose === 'boolean' ? opt.autoClose : true;
// truncating makes no sense when writing into the middle
const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w';
this[_defaultFlag] = opt.flags === undefined;
this[_flags] = opt.flags === undefined ? defaultFlag : opt.flags;
if (this[_fd] === undefined) {
this[_open]();
}
}
emit(ev, ...args) {
if (ev === 'error') {
if (this[_errored]) {
return false;
}
this[_errored] = true;
}
return super.emit(ev, ...args);
}
get fd() {
return this[_fd];
}
get path() {
return this[_path];
}
[_onerror](er) {
this[_close]();
this[_writing] = true;
this.emit('error', er);
}
[_open]() {
fs_1.default.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd));
}
[_onopen](er, fd) {
if (this[_defaultFlag] &&
this[_flags] === 'r+' &&
er &&
er.code === 'ENOENT') {
this[_flags] = 'w';
this[_open]();
}
else if (er) {
this[_onerror](er);
}
else {
this[_fd] = fd;
this.emit('open', fd);
if (!this[_writing]) {
this[_flush]();
}
}
}
end(buf, enc) {
if (buf) {
//@ts-ignore
this.write(buf, enc);
}
this[_ended] = true;
// synthetic after-write logic, where drain/finish live
if (!this[_writing] &&
!this[_queue].length &&
typeof this[_fd] === 'number') {
this[_onwrite](null, 0);
}
return this;
}
write(buf, enc) {
if (typeof buf === 'string') {
buf = Buffer.from(buf, enc);
}
if (this[_ended]) {
this.emit('error', new Error('write() after end()'));
return false;
}
if (this[_fd] === undefined || this[_writing] || this[_queue].length) {
this[_queue].push(buf);
this[_needDrain] = true;
return false;
}
this[_writing] = true;
this[_write](buf);
return true;
}
[_write](buf) {
fs_1.default.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw));
}
[_onwrite](er, bw) {
if (er) {
this[_onerror](er);
}
else {
if (this[_pos] !== undefined && typeof bw === 'number') {
this[_pos] += bw;
}
if (this[_queue].length) {
this[_flush]();
}
else {
this[_writing] = false;
if (this[_ended] && !this[_finished]) {
this[_finished] = true;
this[_close]();
this.emit('finish');
}
else if (this[_needDrain]) {
this[_needDrain] = false;
this.emit('drain');
}
}
}
}
[_flush]() {
if (this[_queue].length === 0) {
if (this[_ended]) {
this[_onwrite](null, 0);
}
}
else if (this[_queue].length === 1) {
this[_write](this[_queue].pop());
}
else {
const iovec = this[_queue];
this[_queue] = [];
writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw));
}
}
[_close]() {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd];
this[_fd] = undefined;
fs_1.default.close(fd, er => er ? this.emit('error', er) : this.emit('close'));
}
}
}
exports.WriteStream = WriteStream;
class WriteStreamSync extends WriteStream {
[_open]() {
let fd;
// only wrap in a try{} block if we know we'll retry, to avoid
// the rethrow obscuring the error's source frame in most cases.
if (this[_defaultFlag] && this[_flags] === 'r+') {
try {
fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]);
}
catch (er) {
if (er?.code === 'ENOENT') {
this[_flags] = 'w';
return this[_open]();
}
else {
throw er;
}
}
}
else {
fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]);
}
this[_onopen](null, fd);
}
[_close]() {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd];
this[_fd] = undefined;
fs_1.default.closeSync(fd);
this.emit('close');
}
}
[_write](buf) {
// throw the original, but try to close if it fails
let threw = true;
try {
this[_onwrite](null, fs_1.default.writeSync(this[_fd], buf, 0, buf.length, this[_pos]));
threw = false;
}
finally {
if (threw) {
try {
this[_close]();
}
catch {
// ok error
}
}
}
}
}
exports.WriteStreamSync = WriteStreamSync;
//# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long

View File

@ -1,3 +0,0 @@
{
"type": "commonjs"
}

View File

@ -1,118 +0,0 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import EE from 'events';
import { Minipass } from 'minipass';
declare const _autoClose: unique symbol;
declare const _close: unique symbol;
declare const _ended: unique symbol;
declare const _fd: unique symbol;
declare const _finished: unique symbol;
declare const _flags: unique symbol;
declare const _flush: unique symbol;
declare const _handleChunk: unique symbol;
declare const _makeBuf: unique symbol;
declare const _mode: unique symbol;
declare const _needDrain: unique symbol;
declare const _onerror: unique symbol;
declare const _onopen: unique symbol;
declare const _onread: unique symbol;
declare const _onwrite: unique symbol;
declare const _open: unique symbol;
declare const _path: unique symbol;
declare const _pos: unique symbol;
declare const _queue: unique symbol;
declare const _read: unique symbol;
declare const _readSize: unique symbol;
declare const _reading: unique symbol;
declare const _remain: unique symbol;
declare const _size: unique symbol;
declare const _write: unique symbol;
declare const _writing: unique symbol;
declare const _defaultFlag: unique symbol;
declare const _errored: unique symbol;
export type ReadStreamOptions = Minipass.Options<Minipass.ContiguousData> & {
fd?: number;
readSize?: number;
size?: number;
autoClose?: boolean;
};
export type ReadStreamEvents = Minipass.Events<Minipass.ContiguousData> & {
open: [fd: number];
};
export declare class ReadStream extends Minipass<Minipass.ContiguousData, Buffer, ReadStreamEvents> {
[_errored]: boolean;
[_fd]?: number;
[_path]: string;
[_readSize]: number;
[_reading]: boolean;
[_size]: number;
[_remain]: number;
[_autoClose]: boolean;
constructor(path: string, opt: ReadStreamOptions);
get fd(): number | undefined;
get path(): string;
write(): void;
end(): void;
[_open](): void;
[_onopen](er?: NodeJS.ErrnoException | null, fd?: number): void;
[_makeBuf](): Buffer;
[_read](): void;
[_onread](er?: NodeJS.ErrnoException | null, br?: number, buf?: Buffer): void;
[_close](): void;
[_onerror](er: NodeJS.ErrnoException): void;
[_handleChunk](br: number, buf: Buffer): boolean;
emit<Event extends keyof ReadStreamEvents>(ev: Event, ...args: ReadStreamEvents[Event]): boolean;
}
export declare class ReadStreamSync extends ReadStream {
[_open](): void;
[_read](): void;
[_close](): void;
}
export type WriteStreamOptions = {
fd?: number;
autoClose?: boolean;
mode?: number;
captureRejections?: boolean;
start?: number;
flags?: string;
};
export declare class WriteStream extends EE {
readable: false;
writable: boolean;
[_errored]: boolean;
[_writing]: boolean;
[_ended]: boolean;
[_queue]: Buffer[];
[_needDrain]: boolean;
[_path]: string;
[_mode]: number;
[_autoClose]: boolean;
[_fd]?: number;
[_defaultFlag]: boolean;
[_flags]: string;
[_finished]: boolean;
[_pos]?: number;
constructor(path: string, opt: WriteStreamOptions);
emit(ev: string, ...args: any[]): boolean;
get fd(): number | undefined;
get path(): string;
[_onerror](er: NodeJS.ErrnoException): void;
[_open](): void;
[_onopen](er?: null | NodeJS.ErrnoException, fd?: number): void;
end(buf: string, enc?: BufferEncoding): this;
end(buf?: Buffer, enc?: undefined): this;
write(buf: string, enc?: BufferEncoding): boolean;
write(buf: Buffer, enc?: undefined): boolean;
[_write](buf: Buffer): void;
[_onwrite](er?: null | NodeJS.ErrnoException, bw?: number): void;
[_flush](): void;
[_close](): void;
}
export declare class WriteStreamSync extends WriteStream {
[_open](): void;
[_close](): void;
[_write](buf: Buffer): void;
}
export {};
//# sourceMappingURL=index.d.ts.map

View File

@ -1 +0,0 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,OAAO,EAAE,MAAM,QAAQ,CAAA;AAEvB,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAInC,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,GAAG,eAAgB,CAAA;AACzB,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,QAAQ,eAAqB,CAAA;AAEnC,MAAM,MAAM,iBAAiB,GAC3B,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG;IAC1C,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB,CAAA;AAEH,MAAM,MAAM,gBAAgB,GAAG,QAAQ,CAAC,MAAM,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG;IACxE,IAAI,EAAE,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;CACnB,CAAA;AAED,qBAAa,UAAW,SAAQ,QAAQ,CACtC,QAAQ,CAAC,cAAc,EACvB,MAAM,EACN,gBAAgB,CACjB;IACC,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC;IACpB,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAClB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAA;gBAET,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,iBAAiB;IA4BhD,IAAI,EAAE,uBAEL;IAED,IAAI,IAAI,WAEP;IAGD,KAAK;IAKL,GAAG;IAIH,CAAC,KAAK,CAAC;IAIP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,EAAE,CAAC,EAAE,MAAM;IAUxD,
CAAC,QAAQ,CAAC;IAIV,CAAC,KAAK,CAAC;IAeP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,EAAE,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,MAAM;IAStE,CAAC,MAAM,CAAC;IAUR,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,MAAM,CAAC,cAAc;IAMpC,CAAC,YAAY,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM;IAiBtC,IAAI,CAAC,KAAK,SAAS,MAAM,gBAAgB,EACvC,EAAE,EAAE,KAAK,EACT,GAAG,IAAI,EAAE,gBAAgB,CAAC,KAAK,CAAC,GAC/B,OAAO;CAuBX;AAED,qBAAa,cAAe,SAAQ,UAAU;IAC5C,CAAC,KAAK,CAAC;IAYP,CAAC,KAAK,CAAC;IA2BP,CAAC,MAAM,CAAC;CAQT;AAED,MAAM,MAAM,kBAAkB,GAAG;IAC/B,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,iBAAiB,CAAC,EAAE,OAAO,CAAA;IAC3B,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,qBAAa,WAAY,SAAQ,EAAE;IACjC,QAAQ,EAAE,KAAK,CAAQ;IACvB,QAAQ,EAAE,OAAO,CAAQ;IACzB,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,MAAM,CAAC,EAAE,OAAO,CAAS;IAC1B,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,CAAM;IACxB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAS;IAC9B,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC;IACtB,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,YAAY,CAAC,EAAE,OAAO,CAAC;IACxB,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;IACjB,CAAC,SAAS,CAAC,EAAE,OAAO,CAAS;IAC7B,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAA;gBAEH,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,kBAAkB;IAoBjD,IAAI,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE;IAU/B,IAAI,EAAE,uBAEL;IAED,IAAI,IAAI,WAEP;IAED,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,MAAM,CAAC,cAAc;IAMpC,CAAC,KAAK,CAAC;IAMP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,MAAM;IAoBxD,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,cAAc,GAAG,IAAI;IAC5C,GAAG,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,SAAS,GAAG,IAAI;IAoBxC,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,cAAc,GAAG,OAAO;IACjD,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,SAAS,GAAG,OAAO;IAsB5C,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM;IAWpB,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,MAAM;IAwBzD,CAAC,
MAAM,CAAC;IAgBR,CAAC,MAAM,CAAC;CAST;AAED,qBAAa,eAAgB,SAAQ,WAAW;IAC9C,CAAC,KAAK,CAAC,IAAI,IAAI;IAsBf,CAAC,MAAM,CAAC;IASR,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM;CAmBrB"}

View File

@ -1,420 +0,0 @@
import EE from 'events';
import fs from 'fs';
import { Minipass } from 'minipass';
const writev = fs.writev;
const _autoClose = Symbol('_autoClose');
const _close = Symbol('_close');
const _ended = Symbol('_ended');
const _fd = Symbol('_fd');
const _finished = Symbol('_finished');
const _flags = Symbol('_flags');
const _flush = Symbol('_flush');
const _handleChunk = Symbol('_handleChunk');
const _makeBuf = Symbol('_makeBuf');
const _mode = Symbol('_mode');
const _needDrain = Symbol('_needDrain');
const _onerror = Symbol('_onerror');
const _onopen = Symbol('_onopen');
const _onread = Symbol('_onread');
const _onwrite = Symbol('_onwrite');
const _open = Symbol('_open');
const _path = Symbol('_path');
const _pos = Symbol('_pos');
const _queue = Symbol('_queue');
const _read = Symbol('_read');
const _readSize = Symbol('_readSize');
const _reading = Symbol('_reading');
const _remain = Symbol('_remain');
const _size = Symbol('_size');
const _write = Symbol('_write');
const _writing = Symbol('_writing');
const _defaultFlag = Symbol('_defaultFlag');
const _errored = Symbol('_errored');
export class ReadStream extends Minipass {
[_errored] = false;
[_fd];
[_path];
[_readSize];
[_reading] = false;
[_size];
[_remain];
[_autoClose];
constructor(path, opt) {
opt = opt || {};
super(opt);
this.readable = true;
this.writable = false;
if (typeof path !== 'string') {
throw new TypeError('path must be a string');
}
this[_errored] = false;
this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined;
this[_path] = path;
this[_readSize] = opt.readSize || 16 * 1024 * 1024;
this[_reading] = false;
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity;
this[_remain] = this[_size];
this[_autoClose] =
typeof opt.autoClose === 'boolean' ? opt.autoClose : true;
if (typeof this[_fd] === 'number') {
this[_read]();
}
else {
this[_open]();
}
}
get fd() {
return this[_fd];
}
get path() {
return this[_path];
}
//@ts-ignore
write() {
throw new TypeError('this is a readable stream');
}
//@ts-ignore
end() {
throw new TypeError('this is a readable stream');
}
[_open]() {
fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd));
}
[_onopen](er, fd) {
if (er) {
this[_onerror](er);
}
else {
this[_fd] = fd;
this.emit('open', fd);
this[_read]();
}
}
[_makeBuf]() {
return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]));
}
[_read]() {
if (!this[_reading]) {
this[_reading] = true;
const buf = this[_makeBuf]();
/* c8 ignore start */
if (buf.length === 0) {
return process.nextTick(() => this[_onread](null, 0, buf));
}
/* c8 ignore stop */
fs.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b));
}
}
[_onread](er, br, buf) {
this[_reading] = false;
if (er) {
this[_onerror](er);
}
else if (this[_handleChunk](br, buf)) {
this[_read]();
}
}
[_close]() {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd];
this[_fd] = undefined;
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'));
}
}
[_onerror](er) {
this[_reading] = true;
this[_close]();
this.emit('error', er);
}
[_handleChunk](br, buf) {
let ret = false;
// no effect if infinite
this[_remain] -= br;
if (br > 0) {
ret = super.write(br < buf.length ? buf.subarray(0, br) : buf);
}
if (br === 0 || this[_remain] <= 0) {
ret = false;
this[_close]();
super.end();
}
return ret;
}
emit(ev, ...args) {
switch (ev) {
case 'prefinish':
case 'finish':
return false;
case 'drain':
if (typeof this[_fd] === 'number') {
this[_read]();
}
return false;
case 'error':
if (this[_errored]) {
return false;
}
this[_errored] = true;
return super.emit(ev, ...args);
default:
return super.emit(ev, ...args);
}
}
}
export class ReadStreamSync extends ReadStream {
[_open]() {
let threw = true;
try {
this[_onopen](null, fs.openSync(this[_path], 'r'));
threw = false;
}
finally {
if (threw) {
this[_close]();
}
}
}
[_read]() {
let threw = true;
try {
if (!this[_reading]) {
this[_reading] = true;
do {
const buf = this[_makeBuf]();
/* c8 ignore start */
const br = buf.length === 0
? 0
: fs.readSync(this[_fd], buf, 0, buf.length, null);
/* c8 ignore stop */
if (!this[_handleChunk](br, buf)) {
break;
}
} while (true);
this[_reading] = false;
}
threw = false;
}
finally {
if (threw) {
this[_close]();
}
}
}
[_close]() {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd];
this[_fd] = undefined;
fs.closeSync(fd);
this.emit('close');
}
}
}
export class WriteStream extends EE {
readable = false;
writable = true;
[_errored] = false;
[_writing] = false;
[_ended] = false;
[_queue] = [];
[_needDrain] = false;
[_path];
[_mode];
[_autoClose];
[_fd];
[_defaultFlag];
[_flags];
[_finished] = false;
[_pos];
constructor(path, opt) {
opt = opt || {};
super(opt);
this[_path] = path;
this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined;
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode;
this[_pos] = typeof opt.start === 'number' ? opt.start : undefined;
this[_autoClose] =
typeof opt.autoClose === 'boolean' ? opt.autoClose : true;
// truncating makes no sense when writing into the middle
const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w';
this[_defaultFlag] = opt.flags === undefined;
this[_flags] = opt.flags === undefined ? defaultFlag : opt.flags;
if (this[_fd] === undefined) {
this[_open]();
}
}
emit(ev, ...args) {
if (ev === 'error') {
if (this[_errored]) {
return false;
}
this[_errored] = true;
}
return super.emit(ev, ...args);
}
get fd() {
return this[_fd];
}
get path() {
return this[_path];
}
[_onerror](er) {
this[_close]();
this[_writing] = true;
this.emit('error', er);
}
[_open]() {
fs.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd));
}
[_onopen](er, fd) {
if (this[_defaultFlag] &&
this[_flags] === 'r+' &&
er &&
er.code === 'ENOENT') {
this[_flags] = 'w';
this[_open]();
}
else if (er) {
this[_onerror](er);
}
else {
this[_fd] = fd;
this.emit('open', fd);
if (!this[_writing]) {
this[_flush]();
}
}
}
end(buf, enc) {
if (buf) {
//@ts-ignore
this.write(buf, enc);
}
this[_ended] = true;
// synthetic after-write logic, where drain/finish live
if (!this[_writing] &&
!this[_queue].length &&
typeof this[_fd] === 'number') {
this[_onwrite](null, 0);
}
return this;
}
write(buf, enc) {
if (typeof buf === 'string') {
buf = Buffer.from(buf, enc);
}
if (this[_ended]) {
this.emit('error', new Error('write() after end()'));
return false;
}
if (this[_fd] === undefined || this[_writing] || this[_queue].length) {
this[_queue].push(buf);
this[_needDrain] = true;
return false;
}
this[_writing] = true;
this[_write](buf);
return true;
}
[_write](buf) {
fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw));
}
[_onwrite](er, bw) {
if (er) {
this[_onerror](er);
}
else {
if (this[_pos] !== undefined && typeof bw === 'number') {
this[_pos] += bw;
}
if (this[_queue].length) {
this[_flush]();
}
else {
this[_writing] = false;
if (this[_ended] && !this[_finished]) {
this[_finished] = true;
this[_close]();
this.emit('finish');
}
else if (this[_needDrain]) {
this[_needDrain] = false;
this.emit('drain');
}
}
}
}
[_flush]() {
if (this[_queue].length === 0) {
if (this[_ended]) {
this[_onwrite](null, 0);
}
}
else if (this[_queue].length === 1) {
this[_write](this[_queue].pop());
}
else {
const iovec = this[_queue];
this[_queue] = [];
writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw));
}
}
[_close]() {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd];
this[_fd] = undefined;
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'));
}
}
}
export class WriteStreamSync extends WriteStream {
[_open]() {
let fd;
// only wrap in a try{} block if we know we'll retry, to avoid
// the rethrow obscuring the error's source frame in most cases.
if (this[_defaultFlag] && this[_flags] === 'r+') {
try {
fd = fs.openSync(this[_path], this[_flags], this[_mode]);
}
catch (er) {
if (er?.code === 'ENOENT') {
this[_flags] = 'w';
return this[_open]();
}
else {
throw er;
}
}
}
else {
fd = fs.openSync(this[_path], this[_flags], this[_mode]);
}
this[_onopen](null, fd);
}
[_close]() {
if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd];
this[_fd] = undefined;
fs.closeSync(fd);
this.emit('close');
}
}
[_write](buf) {
// throw the original, but try to close if it fails
let threw = true;
try {
this[_onwrite](null, fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]));
threw = false;
}
finally {
if (threw) {
try {
this[_close]();
}
catch {
// ok error
}
}
}
}
}
//# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long

View File

@ -1,3 +0,0 @@
{
"type": "module"
}

View File

@ -1,72 +0,0 @@
{
"name": "@isaacs/fs-minipass",
"version": "4.0.1",
"main": "./dist/commonjs/index.js",
"scripts": {
"prepare": "tshy",
"pretest": "npm run prepare",
"test": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags",
"format": "prettier --write . --loglevel warn",
"typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
},
"keywords": [],
"author": "Isaac Z. Schlueter",
"license": "ISC",
"repository": {
"type": "git",
"url": "https://github.com/npm/fs-minipass.git"
},
"description": "fs read and write streams based on minipass",
"dependencies": {
"minipass": "^7.0.4"
},
"devDependencies": {
"@types/node": "^20.11.30",
"mutate-fs": "^2.1.1",
"prettier": "^3.2.5",
"tap": "^18.7.1",
"tshy": "^1.12.0",
"typedoc": "^0.25.12"
},
"files": [
"dist"
],
"engines": {
"node": ">=18.0.0"
},
"tshy": {
"exports": {
"./package.json": "./package.json",
".": "./src/index.ts"
}
},
"exports": {
"./package.json": "./package.json",
".": {
"import": {
"types": "./dist/esm/index.d.ts",
"default": "./dist/esm/index.js"
},
"require": {
"types": "./dist/commonjs/index.d.ts",
"default": "./dist/commonjs/index.js"
}
}
},
"types": "./dist/commonjs/index.d.ts",
"type": "module",
"prettier": {
"semi": false,
"printWidth": 75,
"tabWidth": 2,
"useTabs": false,
"singleQuote": true,
"jsxSingleQuote": false,
"bracketSameLine": true,
"arrowParens": "avoid",
"endOfLine": "lf"
}
}

40
node_modules/@npmcli/agent/README.md generated vendored
View File

@ -1,40 +0,0 @@
## @npmcli/agent
A pair of Agent implementations for nodejs that provide consistent keep-alives, granular timeouts, dns caching, and proxy support.
### Usage
```js
const { getAgent, HttpAgent } = require('@npmcli/agent')
const fetch = require('minipass-fetch')
const main = async () => {
// if you know what agent you need, you can create one directly
const agent = new HttpAgent(agentOptions)
// or you can use the getAgent helper, it will determine and create an Agent
// instance for you as well as reuse that agent for new requests as appropriate
const agent = getAgent('https://registry.npmjs.org/npm', agentOptions)
// minipass-fetch is just an example, this will work for any http client that
// supports node's Agents
const res = await fetch('https://registry.npmjs.org/npm', { agent })
}
main()
```
### Options
All options supported by the node Agent implementations are supported here, see [the docs](https://nodejs.org/api/http.html#new-agentoptions) for those.
Options that have been added by this module include:
- `family`: what tcp family to use, can be `4` for IPv4, `6` for IPv6 or `0` for both.
- `proxy`: a URL to a supported proxy, currently supports `HTTP CONNECT` based http/https proxies as well as socks4 and 5.
- `dns`: configuration for the built-in dns cache
- `ttl`: how long (in milliseconds) to keep cached dns entries, defaults to `5 * 60 * 1000` (5 minutes)
- `lookup`: optional function to override how dns lookups are performed, defaults to `require('dns').lookup`
- `timeouts`: a set of granular timeouts, all default to `0`
- `connection`: time between initiating connection and actually connecting
- `idle`: time between data packets (if a top level `timeout` is provided, it will be copied here)
- `response`: time between sending a request and receiving a response
- `transfer`: time between starting to receive a request and consuming the response fully

View File

@ -1,206 +0,0 @@
'use strict'
const net = require('net')
const tls = require('tls')
const { once } = require('events')
const timers = require('timers/promises')
const { normalizeOptions, cacheOptions } = require('./options')
const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js')
const Errors = require('./errors.js')
const { Agent: AgentBase } = require('agent-base')
module.exports = class Agent extends AgentBase {
#options
#timeouts
#proxy
#noProxy
#ProxyAgent
constructor (options = {}) {
const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options)
super(normalizedOptions)
this.#options = normalizedOptions
this.#timeouts = timeouts
if (proxy) {
this.#proxy = new URL(proxy)
this.#noProxy = noProxy
this.#ProxyAgent = getProxyAgent(proxy)
}
}
get proxy () {
return this.#proxy ? { url: this.#proxy } : {}
}
#getProxy (options) {
if (!this.#proxy) {
return
}
const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, {
proxy: this.#proxy,
noProxy: this.#noProxy,
})
if (!proxy) {
return
}
const cacheKey = cacheOptions({
...options,
...this.#options,
timeouts: this.#timeouts,
proxy,
})
if (proxyCache.has(cacheKey)) {
return proxyCache.get(cacheKey)
}
let ProxyAgent = this.#ProxyAgent
if (Array.isArray(ProxyAgent)) {
ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0]
}
const proxyAgent = new ProxyAgent(proxy, {
...this.#options,
socketOptions: { family: this.#options.family },
})
proxyCache.set(cacheKey, proxyAgent)
return proxyAgent
}
// takes an array of promises and races them against the connection timeout
// which will throw the necessary error if it is hit. This will return the
// result of the promise race.
async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) {
if (timeout) {
const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal })
.then(() => {
throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`)
}).catch((err) => {
if (err.name === 'AbortError') {
return
}
throw err
})
promises.push(connectionTimeout)
}
let result
try {
result = await Promise.race(promises)
ac.abort()
} catch (err) {
ac.abort()
throw err
}
return result
}
async connect (request, options) {
// if the connection does not have its own lookup function
// set, then use the one from our options
options.lookup ??= this.#options.lookup
let socket
let timeout = this.#timeouts.connection
const isSecureEndpoint = this.isSecureEndpoint(options)
const proxy = this.#getProxy(options)
if (proxy) {
// some of the proxies will wait for the socket to fully connect before
// returning so we have to await this while also racing it against the
// connection timeout.
const start = Date.now()
socket = await this.#timeoutConnection({
options,
timeout,
promises: [proxy.connect(request, options)],
})
// see how much time proxy.connect took and subtract it from
// the timeout
if (timeout) {
timeout = timeout - (Date.now() - start)
}
} else {
socket = (isSecureEndpoint ? tls : net).connect(options)
}
socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
socket.setNoDelay(this.keepAlive)
const abortController = new AbortController()
const { signal } = abortController
const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting']
? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal })
: Promise.resolve()
await this.#timeoutConnection({
options,
timeout,
promises: [
connectPromise,
once(socket, 'error', { signal }).then((err) => {
throw err[0]
}),
],
}, abortController)
if (this.#timeouts.idle) {
socket.setTimeout(this.#timeouts.idle, () => {
socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`))
})
}
return socket
}
addRequest (request, options) {
const proxy = this.#getProxy(options)
// it would be better to call proxy.addRequest here but this causes the
// http-proxy-agent to call its super.addRequest which causes the request
// to be added to the agent twice. since we only support 3 agents
// currently (see the required agents in proxy.js) we have manually
// checked that the only public methods we need to call are called in the
// next block. this could change in the future and presumably we would get
// failing tests until we have properly called the necessary methods on
// each of our proxy agents
if (proxy?.setRequestProps) {
proxy.setRequestProps(request, options)
}
request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
if (this.#timeouts.response) {
let responseTimeout
request.once('finish', () => {
setTimeout(() => {
request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
}, this.#timeouts.response)
})
request.once('response', () => {
clearTimeout(responseTimeout)
})
}
if (this.#timeouts.transfer) {
let transferTimeout
request.once('response', (res) => {
setTimeout(() => {
res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
}, this.#timeouts.transfer)
res.once('close', () => {
clearTimeout(transferTimeout)
})
})
}
return super.addRequest(request, options)
}
}

View File

@ -1,53 +0,0 @@
'use strict'
const { LRUCache } = require('lru-cache')
const dns = require('dns')
// this is a factory so that each request can have its own opts (i.e. ttl)
// while still sharing the cache across all requests
const cache = new LRUCache({ max: 50 })
// Build the dns-related connect options: a cached `lookup` function plus
// the `hints` value, both consumed directly by (net|tls).connect.
const getOptions = ({
  family = 0,
  hints = dns.ADDRCONFIG,
  all = false,
  verbatim = undefined,
  ttl = 5 * 60 * 1000,
  lookup = dns.lookup,
}) => ({
  // hints and lookup are returned since both are top level properties to (net|tls).connect
  hints,
  lookup: (hostname, ...args) => {
    const callback = args.pop() // callback is always last arg
    // node's lookup() may be called as (hostname, cb), (hostname, family, cb),
    // or (hostname, options, cb) — normalize all three shapes here
    const lookupOptions = args[0] ?? {}
    const options = {
      family,
      hints,
      all,
      verbatim,
      ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
    }
    // the cache key includes every option that can change the result
    const key = JSON.stringify({ hostname, ...options })
    if (cache.has(key)) {
      const cached = cache.get(key)
      // stay async even on a cache hit, matching dns.lookup's contract
      return process.nextTick(callback, null, ...cached)
    }
    lookup(hostname, options, (err, ...result) => {
      if (err) {
        return callback(err)
      }
      // only successful lookups are cached; errors always retry
      cache.set(key, result, { ttl })
      return callback(null, ...result)
    })
  },
})
module.exports = {
  cache,
  getOptions,
}

View File

@ -1,61 +0,0 @@
'use strict'
// Thrown when a proxy url uses a protocol we have no agent implementation for.
class InvalidProxyProtocolError extends Error {
  constructor (url) {
    const message = `Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``
    super(message)
    this.code = 'EINVALIDPROXY'
    this.proxy = url
  }
}
// Raised when a socket fails to connect within timeouts.connection ms.
class ConnectionTimeoutError extends Error {
  constructor (host) {
    const message = `Timeout connecting to host \`${host}\``
    super(message)
    this.code = 'ECONNECTIONTIMEOUT'
    this.host = host
  }
}
// Raised when a connected socket sees no data for timeouts.idle ms.
class IdleTimeoutError extends Error {
  constructor (host) {
    const message = `Idle timeout reached for host \`${host}\``
    super(message)
    this.code = 'EIDLETIMEOUT'
    this.host = host
  }
}
// Raised when no response arrives within timeouts.response ms of the
// request finishing; mentions the proxy when one was in use.
class ResponseTimeoutError extends Error {
  constructor (request, proxy) {
    const via = proxy ? `from proxy \`${proxy.host}\` ` : ''
    super(`Response timeout ${via}connecting to host \`${request.host}\``)
    this.code = 'ERESPONSETIMEOUT'
    this.proxy = proxy
    this.request = request
  }
}
// Raised when a response body is not fully consumed within
// timeouts.transfer ms; mentions the proxy when one was in use.
class TransferTimeoutError extends Error {
  constructor (request, proxy) {
    const via = proxy ? `from proxy \`${proxy.host}\` ` : ''
    super(`Transfer timeout ${via}for \`${request.host}\``)
    this.code = 'ETRANSFERTIMEOUT'
    this.proxy = proxy
    this.request = request
  }
}
// all timeout/proxy error classes used by agents.js and proxy.js
module.exports = {
  InvalidProxyProtocolError,
  ConnectionTimeoutError,
  IdleTimeoutError,
  ResponseTimeoutError,
  TransferTimeoutError,
}

View File

@ -1,56 +0,0 @@
'use strict'
const { LRUCache } = require('lru-cache')
const { normalizeOptions, cacheOptions } = require('./options')
const { getProxy, proxyCache } = require('./proxy.js')
const dns = require('./dns.js')
const Agent = require('./agents.js')
const agentCache = new LRUCache({ max: 20 })
const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => {
// false has meaning so this can't be a simple truthiness check
if (agent != null) {
return agent
}
url = new URL(url)
const proxyForUrl = getProxy(url, { proxy, noProxy })
const normalizedOptions = {
...normalizeOptions(options),
proxy: proxyForUrl,
}
const cacheKey = cacheOptions({
...normalizedOptions,
secureEndpoint: url.protocol === 'https:',
})
if (agentCache.has(cacheKey)) {
return agentCache.get(cacheKey)
}
const newAgent = new Agent(normalizedOptions)
agentCache.set(cacheKey, newAgent)
return newAgent
}
module.exports = {
getAgent,
Agent,
// these are exported for backwards compatability
HttpAgent: Agent,
HttpsAgent: Agent,
cache: {
proxy: proxyCache,
agent: agentCache,
dns: dns.cache,
clear: () => {
proxyCache.clear()
agentCache.clear()
dns.cache.clear()
},
},
}

View File

@ -1,86 +0,0 @@
'use strict'
const dns = require('./dns')
// Normalize user options into the shape the Agent constructor expects:
// node http-agent defaults are filled in explicitly (to improve cache-key
// hits), the standard `timeout` option is folded into `timeouts.idle`,
// and dns cache options are expanded into top-level connect options.
const normalizeOptions = (opts) => {
  // family may arrive as a string; coerce to the numeric form net expects
  const family = parseInt(opts.family ?? '0', 10)
  const keepAlive = opts.keepAlive ?? true
  const normalized = {
    // nodejs http agent options. these are all the defaults
    // but kept here to increase the likelihood of cache hits
    // https://nodejs.org/api/http.html#new-agentoptions
    keepAliveMsecs: keepAlive ? 1000 : undefined,
    maxSockets: opts.maxSockets ?? 15,
    maxTotalSockets: Infinity,
    maxFreeSockets: keepAlive ? 256 : undefined,
    scheduling: 'fifo',
    // then spread the rest of the options
    ...opts,
    // we already set these to their defaults that we want
    family,
    keepAlive,
    // our custom timeout options
    timeouts: {
      // the standard timeout option is mapped to our idle timeout
      // and then deleted below
      idle: opts.timeout ?? 0,
      connection: 0,
      response: 0,
      transfer: 0,
      ...opts.timeouts,
    },
    // get the dns options that go at the top level of socket connection
    ...dns.getOptions({ family, ...opts.dns }),
  }
  // remove timeout since we already used it to set our own idle timeout
  delete normalized.timeout
  return normalized
}
// Build a deterministic string key from an options object so that two
// option sets with the same contents produce the same cache key
// regardless of property insertion order. Nested objects are keyed
// recursively, URLs are stringified, and null/undefined become 'null'.
const createKey = (obj) => {
  let key = ''
  // BUG FIX: the previous comparator used numeric subtraction on string
  // keys, which yields NaN and never sorted — insertion order leaked
  // into the cache key. Compare the key strings explicitly instead.
  const sorted = Object.entries(obj).sort(([a], [b]) => (a < b ? -1 : a > b ? 1 : 0))
  for (let [k, v] of sorted) {
    if (v == null) {
      v = 'null'
    } else if (v instanceof URL) {
      v = v.toString()
    } else if (typeof v === 'object') {
      v = createKey(v)
    }
    key += `${k}:${v}:`
  }
  return key
}
const cacheOptions = ({ secureEndpoint, ...options }) => createKey({
secureEndpoint: !!secureEndpoint,
// socket connect options
family: options.family,
hints: options.hints,
localAddress: options.localAddress,
// tls specific connect options
strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false,
ca: secureEndpoint ? options.ca : null,
cert: secureEndpoint ? options.cert : null,
key: secureEndpoint ? options.key : null,
// http agent options
keepAlive: options.keepAlive,
keepAliveMsecs: options.keepAliveMsecs,
maxSockets: options.maxSockets,
maxTotalSockets: options.maxTotalSockets,
maxFreeSockets: options.maxFreeSockets,
scheduling: options.scheduling,
// timeout options
timeouts: options.timeouts,
// proxy
proxy: options.proxy,
})
module.exports = {
normalizeOptions,
cacheOptions,
}

View File

@ -1,88 +0,0 @@
'use strict'
const { HttpProxyAgent } = require('http-proxy-agent')
const { HttpsProxyAgent } = require('https-proxy-agent')
const { SocksProxyAgent } = require('socks-proxy-agent')
const { LRUCache } = require('lru-cache')
const { InvalidProxyProtocolError } = require('./errors.js')
// shared LRU of constructed proxy agent instances, keyed by cacheOptions()
const PROXY_CACHE = new LRUCache({ max: 20 })
// the url protocols handled by socks-proxy-agent
const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
// the proxy-related environment variables we honor (matched case-insensitively)
const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
// snapshot of those variables, lower-cased, taken once at module load
const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => {
  key = key.toLowerCase()
  if (PROXY_ENV_KEYS.has(key)) {
    acc[key] = value
  }
  return acc
}, {})
/**
 * Pick the proxy Agent implementation(s) for a proxy url. socks urls map
 * to SocksProxyAgent; http(s) urls return the pair
 * [HttpProxyAgent, HttpsProxyAgent] to be chosen per-endpoint later.
 * Anything else throws InvalidProxyProtocolError.
 */
const getProxyAgent = (url) => {
  const parsed = new URL(url)
  // URL.protocol includes the trailing ':'; drop it for comparison
  const protocol = parsed.protocol.replace(/:$/, '')
  if (SOCKS_PROTOCOLS.has(protocol)) {
    return SocksProxyAgent
  }
  if (protocol === 'http' || protocol === 'https') {
    return [HttpProxyAgent, HttpsProxyAgent]
  }
  throw new InvalidProxyProtocolError(parsed)
}
/**
 * Return true when `url` matches the no-proxy configuration. `noProxy`
 * may be an array of domain suffixes or a comma-separated string; a host
 * matches when its trailing domain labels equal one entry's labels.
 */
const isNoProxy = (url, noProxy) => {
  let entries = noProxy
  if (typeof entries === 'string') {
    entries = entries.split(',').map((p) => p.trim()).filter(Boolean)
  }
  if (!entries || !entries.length) {
    return false
  }
  // compare labels from the TLD inward
  const hostSegments = url.hostname.split('.').reverse()
  return entries.some((entry) => {
    const entrySegments = entry.split('.').filter(Boolean).reverse()
    if (!entrySegments.length) {
      return false
    }
    return entrySegments.every((segment, i) => hostSegments[i] === segment)
  })
}
const getProxy = (url, { proxy, noProxy }) => {
url = new URL(url)
if (!proxy) {
proxy = url.protocol === 'https:'
? PROXY_ENV.https_proxy
: PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy
}
if (!noProxy) {
noProxy = PROXY_ENV.no_proxy
}
if (!proxy || isNoProxy(url, noProxy)) {
return null
}
return new URL(proxy)
}
module.exports = {
getProxyAgent,
getProxy,
proxyCache: PROXY_CACHE,
}

View File

@ -1,60 +0,0 @@
{
"name": "@npmcli/agent",
"version": "4.0.0",
"description": "the http/https agent used by the npm cli",
"main": "lib/index.js",
"scripts": {
"gencerts": "bash scripts/create-cert.sh",
"test": "tap",
"lint": "npm run eslint",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
"lintfix": "npm run eslint -- --fix",
"snap": "tap",
"posttest": "npm run lint",
"eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
},
"author": "GitHub Inc.",
"license": "ISC",
"bugs": {
"url": "https://github.com/npm/agent/issues"
},
"homepage": "https://github.com/npm/agent#readme",
"files": [
"bin/",
"lib/"
],
"engines": {
"node": "^20.17.0 || >=22.9.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.25.0",
"publish": "true"
},
"dependencies": {
"agent-base": "^7.1.0",
"http-proxy-agent": "^7.0.0",
"https-proxy-agent": "^7.0.1",
"lru-cache": "^11.2.1",
"socks-proxy-agent": "^8.0.3"
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
"@npmcli/template-oss": "4.25.0",
"minipass-fetch": "^4.0.1",
"nock": "^14.0.3",
"socksv5": "^0.0.6",
"tap": "^16.3.0"
},
"repository": {
"type": "git",
"url": "git+https://github.com/npm/agent.git"
},
"tap": {
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
]
}
}

20
node_modules/@npmcli/fs/LICENSE.md generated vendored
View File

@ -1,20 +0,0 @@
<!-- This file is automatically added by @npmcli/template-oss. Do not edit. -->
ISC License
Copyright npm, Inc.
Permission to use, copy, modify, and/or distribute this
software for any purpose with or without fee is hereby
granted, provided that the above copyright notice and this
permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
USE OR PERFORMANCE OF THIS SOFTWARE.

97
node_modules/@npmcli/fs/README.md generated vendored
View File

@ -1,97 +0,0 @@
# @npmcli/fs
polyfills, and extensions, of the core `fs` module.
## Features
- `fs.cp` polyfill for node < 16.7.0
- `fs.withTempDir` added
- `fs.readdirScoped` added
- `fs.moveFile` added
## `fs.withTempDir(root, fn, options) -> Promise`
### Parameters
- `root`: the directory in which to create the temporary directory
- `fn`: a function that will be called with the path to the temporary directory
- `options`
- `tmpPrefix`: a prefix to be used in the generated directory name
### Usage
The `withTempDir` function creates a temporary directory, runs the provided
function (`fn`), then removes the temporary directory and resolves or rejects
based on the result of `fn`.
```js
const fs = require('@npmcli/fs')
const os = require('os')
// this function will be called with the full path to the temporary directory
// it is called with `await` behind the scenes, so can be async if desired.
const myFunction = async (tempPath) => {
return 'done!'
}
const main = async () => {
const result = await fs.withTempDir(os.tmpdir(), myFunction)
// result === 'done!'
}
main()
```
## `fs.readdirScoped(root) -> Promise`
### Parameters
- `root`: the directory to read
### Usage
Like `fs.readdir` but handling `@org/module` dirs as if they were
a single entry.
```javascript
const { readdirScoped } = require('@npmcli/fs')
const entries = await readdirScoped('node_modules')
// entries will be something like: ['a', '@org/foo', '@org/bar']
```
## `fs.moveFile(source, dest, options) -> Promise`
A fork of [move-file](https://github.com/sindresorhus/move-file) with
support for Common JS.
### Highlights
- Promise API.
- Supports moving a file across partitions and devices.
- Optionally prevent overwriting an existing file.
- Creates non-existent destination directories for you.
- Automatically recurses when source is a directory.
### Parameters
- `source`: File, or directory, you want to move.
- `dest`: Where you want the file or directory moved.
- `options`
- `overwrite` (`boolean`, default: `true`): Overwrite existing destination file(s).
### Usage
The built-in
[`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback)
is just a JavaScript wrapper for the C `rename(2)` function, which doesn't
support moving files across partitions or devices. This module is what you
would have expected `fs.rename()` to be.
```js
const { moveFile } = require('@npmcli/fs');
(async () => {
await moveFile('source/unicorn.png', 'destination/unicorn.png');
console.log('The file has been moved');
})();
```

View File

@ -1,20 +0,0 @@
// given an input that may or may not be an object, return an object that has
// a copy of every defined property listed in 'copy'. if the input is not an
// object, assign it to the property named by 'wrap'
/**
 * Normalize user-supplied options. When `input` is an object, pick only
 * the defined properties named in `copy`; any non-object value (including
 * null) is wrapped as the single option named by `wrap`.
 */
const getOptions = (input, { copy, wrap }) => {
  if (input === null || typeof input !== 'object') {
    return { [wrap]: input }
  }
  const result = {}
  for (const prop of copy) {
    const value = input[prop]
    if (value !== undefined) {
      result[prop] = value
    }
  }
  return result
}
// this module's sole export is the options normalizer
module.exports = getOptions

View File

@ -1,9 +0,0 @@
const semver = require('semver')
const satisfies = (range) => {
return semver.satisfies(process.version, range, { includePrerelease: true })
}
module.exports = {
satisfies,
}

View File

@ -1,15 +0,0 @@
(The MIT License)
Copyright (c) 2011-2017 JP Richardson
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,129 +0,0 @@
'use strict'
const { inspect } = require('util')
// adapted from node's internal/errors
// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js
// close copy of node's internal SystemError class.
// close copy of node's internal SystemError class.
// Builds a "<prefix>: <syscall> returned <code> (<message>) [path][ => dest]"
// message and exposes errno/syscall (and path/dest when present) as live
// accessors over the original context object.
class SystemError {
  constructor (code, prefix, context) {
    // XXX context.code is undefined in all constructors used in cp/polyfill
    // that may be a bug copied from node, maybe the constructor should use
    // `code` not `errno`? nodejs/node#41104
    let message = `${prefix}: ${context.syscall} returned ` +
                  `${context.code} (${context.message})`
    if (context.path !== undefined) {
      message += ` ${context.path}`
    }
    if (context.dest !== undefined) {
      message += ` => ${context.dest}`
    }
    this.code = code
    // name/message are non-enumerable to match Error's property shape
    Object.defineProperties(this, {
      name: {
        value: 'SystemError',
        enumerable: false,
        writable: true,
        configurable: true,
      },
      message: {
        value: message,
        enumerable: false,
        writable: true,
        configurable: true,
      },
      info: {
        value: context,
        enumerable: true,
        configurable: true,
        writable: false,
      },
      // errno/syscall read and write through to the context object, so
      // mutations stay visible via `info` as well
      errno: {
        get () {
          return context.errno
        },
        set (value) {
          context.errno = value
        },
        enumerable: true,
        configurable: true,
      },
      syscall: {
        get () {
          return context.syscall
        },
        set (value) {
          context.syscall = value
        },
        enumerable: true,
        configurable: true,
      },
    })
    // path/dest accessors are only defined when the context carried them
    if (context.path !== undefined) {
      Object.defineProperty(this, 'path', {
        get () {
          return context.path
        },
        set (value) {
          context.path = value
        },
        enumerable: true,
        configurable: true,
      })
    }
    if (context.dest !== undefined) {
      Object.defineProperty(this, 'dest', {
        get () {
          return context.dest
        },
        set (value) {
          context.dest = value
        },
        enumerable: true,
        configurable: true,
      })
    }
  }

  toString () {
    return `${this.name} [${this.code}]: ${this.message}`
  }

  // render the getters (errno/syscall/path/dest) as plain values in inspect
  [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) {
    return inspect(this, {
      ...ctx,
      getters: true,
      customInspect: false,
    })
  }
}
// Factory: registers an error class named `code` on module.exports whose
// message is composed by SystemError from `message` plus the call-site
// context (syscall, path, dest, errno).
function E (code, message) {
  module.exports[code] = class NodeError extends SystemError {
    constructor (ctx) {
      super(code, message, ctx)
    }
  }
}
E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory')
E('ERR_FS_CP_EEXIST', 'Target already exists')
E('ERR_FS_CP_EINVAL', 'Invalid src or dest')
E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe')
E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory')
E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file')
E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self')
E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type')
E('ERR_FS_EISDIR', 'Path is a directory')
// not a SystemError: mirrors node's argument-type error, thrown when the
// cp polyfill receives a non-object `options` value
module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error {
  constructor (name, expected, actual) {
    super()
    this.code = 'ERR_INVALID_ARG_TYPE'
    this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}`
  }
}

View File

@ -1,22 +0,0 @@
const fs = require('fs/promises')
const getOptions = require('../common/get-options.js')
const node = require('../common/node.js')
const polyfill = require('./polyfill.js')
// node 16.7.0 added fs.cp
// fs.cp landed in node 16.7.0; prefer the native implementation when present
const useNative = node.satisfies('>=16.7.0')

/**
 * Copy src to dest, accepting the fs.cp option set (unknown options are
 * stripped by getOptions). Falls back to the bundled polyfill on node
 * versions without fs.cp.
 */
const cp = async (src, dest, opts) => {
  const options = getOptions(opts, {
    copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'],
  })
  // the polyfill is tested separately from this module, no need to hack
  // process.version to try to trigger it just for coverage
  // istanbul ignore next
  if (useNative) {
    return fs.cp(src, dest, options)
  }
  return polyfill(src, dest, options)
}

module.exports = cp

View File

@ -1,428 +0,0 @@
// this file is a modified version of the code in node 17.2.0
// which is, in turn, a modified version of the fs-extra module on npm
// node core changes:
// - Use of the assert module has been replaced with core's error system.
// - All code related to the glob dependency has been removed.
// - Bring your own custom fs module is not currently supported.
// - Some basic code cleanup.
// changes here:
// - remove all callback related code
// - drop sync support
// - change assertions back to non-internal methods (see options.js)
// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows
'use strict'
const {
ERR_FS_CP_DIR_TO_NON_DIR,
ERR_FS_CP_EEXIST,
ERR_FS_CP_EINVAL,
ERR_FS_CP_FIFO_PIPE,
ERR_FS_CP_NON_DIR_TO_DIR,
ERR_FS_CP_SOCKET,
ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY,
ERR_FS_CP_UNKNOWN,
ERR_FS_EISDIR,
ERR_INVALID_ARG_TYPE,
} = require('./errors.js')
const {
constants: {
errno: {
EEXIST,
EISDIR,
EINVAL,
ENOTDIR,
},
},
} = require('os')
const {
chmod,
copyFile,
lstat,
mkdir,
readdir,
readlink,
stat,
symlink,
unlink,
utimes,
} = require('fs/promises')
const {
dirname,
isAbsolute,
join,
parse,
resolve,
sep,
toNamespacedPath,
} = require('path')
const { fileURLToPath } = require('url')
// defaults mirror fs.cp's documented option defaults
const defaultOptions = {
  dereference: false,
  errorOnExist: false,
  filter: undefined,
  force: true,
  preserveTimestamps: false,
  recursive: false,
}
/**
 * Polyfill entry point: validate the options argument, normalize both
 * paths (file URLs allowed), merge defaults, and hand off to cpFn.
 */
async function cp (src, dest, opts) {
  if (opts != null && typeof opts !== 'object') {
    throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts)
  }
  const srcPath = toNamespacedPath(getValidatedPath(src))
  const destPath = toNamespacedPath(getValidatedPath(dest))
  return cpFn(srcPath, destPath, { ...defaultOptions, ...opts })
}
// Accept either a path string or a WHATWG file URL object; anything with
// truthy `href` and `origin` is treated as a URL and converted to a
// filesystem path, everything else is returned untouched.
function getValidatedPath (fileURLOrPath) {
  const isUrlLike = fileURLOrPath != null &&
    Boolean(fileURLOrPath.href) &&
    Boolean(fileURLOrPath.origin)
  return isUrlLike ? fileURLToPath(fileURLOrPath) : fileURLOrPath
}
// Orchestrate one copy: validate the src/dest relationship, then
// dispatch to the filtered or unfiltered copy path.
async function cpFn (src, dest, opts) {
  // Warn about using preserveTimestamps on 32-bit node
  // istanbul ignore next
  if (opts.preserveTimestamps && process.arch === 'ia32') {
    process.emitWarning(
      'Using the preserveTimestamps option in 32-bit node is not recommended',
      'TimestampPrecisionWarning')
  }
  const { srcStat, destStat } = await checkPaths(src, dest, opts)
  await checkParentPaths(src, srcStat, dest)
  return opts.filter
    ? handleFilter(checkParentDir, destStat, src, dest, opts)
    : checkParentDir(destStat, src, dest, opts)
}
// Stat both paths and reject illegal copies up front: src and dest must
// not be the same inode, a directory may not overwrite a non-directory
// (or vice versa), and dest must not be a subdirectory of src.
async function checkPaths (src, dest, opts) {
  const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts)
  if (destStat) {
    // copying a path onto itself (same device + inode) is an error
    if (areIdentical(srcStat, destStat)) {
      throw new ERR_FS_CP_EINVAL({
        message: 'src and dest cannot be the same',
        path: dest,
        syscall: 'cp',
        errno: EINVAL,
      })
    }
    if (srcStat.isDirectory() && !destStat.isDirectory()) {
      throw new ERR_FS_CP_DIR_TO_NON_DIR({
        message: `cannot overwrite directory ${src} ` +
            `with non-directory ${dest}`,
        path: dest,
        syscall: 'cp',
        errno: EISDIR,
      })
    }
    if (!srcStat.isDirectory() && destStat.isDirectory()) {
      throw new ERR_FS_CP_NON_DIR_TO_DIR({
        message: `cannot overwrite non-directory ${src} ` +
            `with directory ${dest}`,
        path: dest,
        syscall: 'cp',
        errno: ENOTDIR,
      })
    }
  }
  // string-level containment check; inode-level checks happen later in
  // checkParentPaths while directories are created
  if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
    throw new ERR_FS_CP_EINVAL({
      message: `cannot copy ${src} to a subdirectory of self ${dest}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  }
  return { srcStat, destStat }
}
// true when both stats refer to the same file: matching (nonzero) inode
// and device numbers. Stats are bigint-based, so comparisons are exact.
function areIdentical (srcStat, destStat) {
  const sameInode = destStat.ino && destStat.ino === srcStat.ino
  return sameInode && destStat.dev && destStat.dev === srcStat.dev
}
// stat (or lstat when not dereferencing symlinks) both paths in
// parallel with bigint stats so inode/device comparisons are exact.
// A missing dest resolves to null instead of rejecting.
function getStats (src, dest, opts) {
  const statFunc = opts.dereference
    ? (file) => stat(file, { bigint: true })
    : (file) => lstat(file, { bigint: true })
  const statSrc = statFunc(src)
  const statDest = statFunc(dest).catch((err) => {
    // istanbul ignore next: unsure how to cover.
    if (err.code === 'ENOENT') {
      return null
    }
    // istanbul ignore next: unsure how to cover.
    throw err
  })
  return Promise.all([statSrc, statDest])
}
// Ensure dest's parent directory exists (creating it recursively when
// missing) before copying into it.
async function checkParentDir (destStat, src, dest, opts) {
  const destParent = dirname(dest)
  if (!(await pathExists(destParent))) {
    await mkdir(destParent, { recursive: true })
  }
  return getStatsForCopy(destStat, src, dest, opts)
}
// true when dest can be stat'd; ENOENT maps to false, any other stat
// failure propagates to the caller.
async function pathExists (dest) {
  try {
    await stat(dest)
    return true
  } catch (err) {
    // istanbul ignore next: not sure when this would occur
    if (err.code === 'ENOENT') {
      return false
    }
    throw err
  }
}
// Recursively check if dest parent is a subdirectory of src.
// It works for all file types including symlinks since it
// checks the src and dest inodes. It starts from the deepest
// parent and stops once it reaches the src parent or the root path.
async function checkParentPaths (src, srcStat, dest) {
  const srcParent = resolve(dirname(src))
  const destParent = resolve(dirname(dest))
  // base cases: walked up to src's own parent, or hit the filesystem root
  if (destParent === srcParent || destParent === parse(destParent).root) {
    return
  }
  let destStat
  try {
    destStat = await stat(destParent, { bigint: true })
  } catch (err) {
    // istanbul ignore else: not sure when this would occur
    if (err.code === 'ENOENT') {
      // parent does not exist yet, so dest cannot be inside src
      return
    }
    // istanbul ignore next: not sure when this would occur
    throw err
  }
  if (areIdentical(srcStat, destStat)) {
    throw new ERR_FS_CP_EINVAL({
      message: `cannot copy ${src} to a subdirectory of self ${dest}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  }
  // keep walking toward the root, one parent at a time
  return checkParentPaths(src, srcStat, destParent)
}
// Resolve a path to absolute form and split it into non-empty segments.
const normalizePathToArray = (path) => {
  const segments = resolve(path).split(sep)
  return segments.filter(Boolean)
}
// Return true if dest is inside (or equal to) src, comparing resolved
// path segments only — inode-level checks are done elsewhere.
function isSrcSubdir (src, dest) {
  const srcSegments = normalizePathToArray(src)
  const destSegments = normalizePathToArray(dest)
  return srcSegments.every((segment, i) => destSegments[i] === segment)
}
// Apply the user-supplied filter; `onInclude` is only invoked when the
// (possibly async) filter accepts the src/dest pair, otherwise the entry
// is silently skipped.
async function handleFilter (onInclude, destStat, src, dest, opts, cb) {
  if (await opts.filter(src, dest)) {
    return onInclude(destStat, src, dest, opts, cb)
  }
}
// Copy a single directory entry, honoring the filter when configured.
function startCopy (destStat, src, dest, opts) {
  return opts.filter
    ? handleFilter(getStatsForCopy, destStat, src, dest, opts)
    : getStatsForCopy(destStat, src, dest, opts)
}
// Dispatch on the source's file type: directories recurse (when allowed),
// regular files and char/block devices are copied, symlinks re-linked,
// sockets and FIFOs are rejected.
async function getStatsForCopy (destStat, src, dest, opts) {
  const statFn = opts.dereference ? stat : lstat
  const srcStat = await statFn(src)
  // istanbul ignore else: can't portably test FIFO
  if (srcStat.isDirectory() && opts.recursive) {
    return onDir(srcStat, destStat, src, dest, opts)
  } else if (srcStat.isDirectory()) {
    // directory source without recursive: true is an error, same as fs.cp
    throw new ERR_FS_EISDIR({
      message: `${src} is a directory (not copied)`,
      path: src,
      syscall: 'cp',
      errno: EINVAL,
    })
  } else if (srcStat.isFile() ||
            srcStat.isCharacterDevice() ||
            srcStat.isBlockDevice()) {
    return onFile(srcStat, destStat, src, dest, opts)
  } else if (srcStat.isSymbolicLink()) {
    return onLink(destStat, src, dest)
  } else if (srcStat.isSocket()) {
    throw new ERR_FS_CP_SOCKET({
      message: `cannot copy a socket file: ${dest}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  } else if (srcStat.isFIFO()) {
    throw new ERR_FS_CP_FIFO_PIPE({
      message: `cannot copy a FIFO pipe: ${dest}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  }
  // istanbul ignore next: should be unreachable
  throw new ERR_FS_CP_UNKNOWN({
    message: `cannot copy an unknown file type: ${dest}`,
    path: dest,
    syscall: 'cp',
    errno: EINVAL,
  })
}
// Copy a file; when something already exists at dest, defer to the
// force/errorOnExist rules in mayCopyFile.
function onFile (srcStat, destStat, src, dest, opts) {
  return destStat
    ? mayCopyFile(srcStat, src, dest, opts)
    : _copyFile(srcStat, src, dest, opts)
}
// Dest already exists: with opts.force, unlink and overwrite it; with
// opts.errorOnExist, reject with EEXIST; otherwise silently do nothing.
async function mayCopyFile (srcStat, src, dest, opts) {
  if (opts.force) {
    await unlink(dest)
    return _copyFile(srcStat, src, dest, opts)
  }
  if (opts.errorOnExist) {
    throw new ERR_FS_CP_EEXIST({
      message: `${dest} already exists`,
      path: dest,
      syscall: 'cp',
      errno: EEXIST,
    })
  }
}
// Copy the file contents, then restore either timestamps+mode (when
// opts.preserveTimestamps is set) or just the mode bits.
async function _copyFile (srcStat, src, dest, opts) {
  await copyFile(src, dest)
  return opts.preserveTimestamps
    ? handleTimestampsAndMode(srcStat.mode, src, dest)
    : setDestMode(dest, srcStat.mode)
}
// Make sure the file is writable before setting the timestamp
// otherwise open fails with EPERM when invoked with 'r+'
// (through utimes call). Either way, finish by applying timestamps and
// the original mode.
async function handleTimestampsAndMode (srcMode, src, dest) {
  if (fileIsNotWritable(srcMode)) {
    await makeFileWritable(dest, srcMode)
  }
  return setDestTimestampsAndMode(srcMode, src, dest)
}
// True when the owner-write permission bit (0o200) is absent from the mode.
function fileIsNotWritable (srcMode) {
  return !(srcMode & 0o200)
}
// Add the owner-write bit to the destination's mode so utimes can succeed.
function makeFileWritable (dest, srcMode) {
  const writableMode = srcMode | 0o200
  return setDestMode(dest, writableMode)
}
// Apply the source's timestamps first, then its mode bits. Order matters:
// utimes must run while the file is still writable (see
// handleTimestampsAndMode), and the final chmod restores the real mode.
async function setDestTimestampsAndMode (srcMode, src, dest) {
  await setDestTimestamps(src, dest)
  return setDestMode(dest, srcMode)
}
// Propagate the source's permission bits to the destination via chmod.
function setDestMode (dest, srcMode) {
  return chmod(dest, srcMode)
}
// Copy atime/mtime from src to dest.
// The initial srcStat.atime cannot be trusted
// because it is modified by the read(2) system call
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
async function setDestTimestamps (src, dest) {
  const freshSrcStat = await stat(src)
  return utimes(dest, freshSrcStat.atime, freshSrcStat.mtime)
}
// Copy a directory: when dest already exists just recurse into it,
// otherwise create it (propagating the source mode) first.
function onDir (srcStat, destStat, src, dest, opts) {
  if (destStat) {
    return copyDir(src, dest, opts)
  }
  return mkDirAndCopy(srcStat.mode, src, dest, opts)
}
// Create dest, copy the directory contents into it, then apply the source
// mode last (so a read-only source dir doesn't block the copy itself).
async function mkDirAndCopy (srcMode, src, dest, opts) {
  await mkdir(dest)
  await copyDir(src, dest, opts)
  return setDestMode(dest, srcMode)
}
// Copy every entry of `src` into `dest`, sequentially. Each child is
// re-validated via checkPaths before being copied, so filters and
// self-copy guards apply at every level of the tree.
async function copyDir (src, dest, opts) {
  const entries = await readdir(src)
  for (const entry of entries) {
    const srcItem = join(src, entry)
    const destItem = join(dest, entry)
    const { destStat } = await checkPaths(srcItem, destItem, opts)
    await startCopy(destStat, srcItem, destItem, opts)
  }
}
// Copy a symlink from src to dest. Relative targets are re-anchored at the
// link's own directory, and copies that would create a self-referential or
// broken link are rejected before anything is unlinked.
async function onLink (destStat, src, dest) {
  let resolvedSrc = await readlink(src)
  if (!isAbsolute(resolvedSrc)) {
    // resolve a relative target against the directory containing the link
    resolvedSrc = resolve(dirname(src), resolvedSrc)
  }
  if (!destStat) {
    // nothing at dest yet: just create the link
    return symlink(resolvedSrc, dest)
  }
  let resolvedDest
  try {
    resolvedDest = await readlink(dest)
  } catch (err) {
    // Dest exists and is a regular file or directory,
    // Windows may throw UNKNOWN error. If dest already exists,
    // fs throws error anyway, so no need to guard against it here.
    // istanbul ignore next: can only test on windows
    if (err.code === 'EINVAL' || err.code === 'UNKNOWN') {
      return symlink(resolvedSrc, dest)
    }
    // istanbul ignore next: should not be possible
    throw err
  }
  if (!isAbsolute(resolvedDest)) {
    resolvedDest = resolve(dirname(dest), resolvedDest)
  }
  // refuse to copy a link into a subdirectory of its own target
  if (isSrcSubdir(resolvedSrc, resolvedDest)) {
    throw new ERR_FS_CP_EINVAL({
      message: `cannot copy ${resolvedSrc} to a subdirectory of self ` +
        `${resolvedDest}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  }
  // Do not copy if src is a subdir of dest since unlinking
  // dest in this case would result in removing src contents
  // and therefore a broken symlink would be created.
  const srcStat = await stat(src)
  if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) {
    throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({
      message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`,
      path: dest,
      syscall: 'cp',
      errno: EINVAL,
    })
  }
  return copyLink(resolvedSrc, dest)
}
// Replace whatever link currently sits at dest with a fresh symlink
// pointing at resolvedSrc (dest is known to be a symlink at this point).
async function copyLink (resolvedSrc, dest) {
  await unlink(dest)
  return symlink(resolvedSrc, dest)
}
module.exports = cp

13
node_modules/@npmcli/fs/lib/index.js generated vendored
View File

@ -1,13 +0,0 @@
'use strict'

// Public entry point for @npmcli/fs: aggregate and re-export the
// individual filesystem helpers shipped with this package.
const cp = require('./cp/index.js')
const withTempDir = require('./with-temp-dir.js')
const readdirScoped = require('./readdir-scoped.js')
const moveFile = require('./move-file.js')

module.exports = {
  cp,
  withTempDir,
  readdirScoped,
  moveFile,
}

View File

@ -1,78 +0,0 @@
const { dirname, join, resolve, relative, isAbsolute } = require('path')
const fs = require('fs/promises')
// Resolve true when `path` exists on disk. Only a missing entry (ENOENT)
// counts as "does not exist"; any other access failure (e.g. EACCES) still
// means something is there, so it resolves true.
const pathExists = async path => {
  try {
    await fs.access(path)
  } catch (er) {
    return er.code !== 'ENOENT'
  }
  return true
}
const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => {
if (!source || !destination) {
throw new TypeError('`source` and `destination` file required')
}
options = {
overwrite: true,
...options,
}
if (!options.overwrite && await pathExists(destination)) {
throw new Error(`The destination file exists: ${destination}`)
}
await fs.mkdir(dirname(destination), { recursive: true })
try {
await fs.rename(source, destination)
} catch (error) {
if (error.code === 'EXDEV' || error.code === 'EPERM') {
const sourceStat = await fs.lstat(source)
if (sourceStat.isDirectory()) {
const files = await fs.readdir(source)
await Promise.all(files.map((file) =>
moveFile(join(source, file), join(destination, file), options, false, symlinks)
))
} else if (sourceStat.isSymbolicLink()) {
symlinks.push({ source, destination })
} else {
await fs.copyFile(source, destination)
}
} else {
throw error
}
}
if (root) {
await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => {
let target = await fs.readlink(symSource)
// junction symlinks in windows will be absolute paths, so we need to
// make sure they point to the symlink destination
if (isAbsolute(target)) {
target = resolve(symDestination, relative(symSource, target))
}
// try to determine what the actual file is so we can create the correct
// type of symlink in windows
let targetStat = 'file'
try {
targetStat = await fs.stat(resolve(dirname(symSource), target))
if (targetStat.isDirectory()) {
targetStat = 'junction'
}
} catch {
// targetStat remains 'file'
}
await fs.symlink(
target,
symDestination,
targetStat
)
}))
await fs.rm(source, { recursive: true, force: true })
}
}
module.exports = moveFile

View File

@ -1,20 +0,0 @@
const { readdir } = require('fs/promises')
const { join } = require('path')
// List a node_modules-style directory, descending one level into "@scope"
// directories so scoped packages come back as "@scope/name" entries.
const readdirScoped = async (dir) => {
  const results = []
  const entries = await readdir(dir)
  for (const entry of entries) {
    if (!entry.startsWith('@')) {
      results.push(entry)
      continue
    }
    const scopedEntries = await readdir(join(dir, entry))
    for (const scopedEntry of scopedEntries) {
      results.push(join(entry, scopedEntry))
    }
  }
  return results
}

View File

@ -1,39 +0,0 @@
const { join, sep } = require('path')
const getOptions = require('./common/get-options.js')
const { mkdir, mkdtemp, rm } = require('fs/promises')
// create a temp directory, ensure its permissions match its parent, then call
// the supplied function passing it the path to the directory. clean up after
// the function finishes, whether it throws or not
// Create a unique temp directory under `root`, invoke `fn(target)` with its
// path, then remove the directory whether `fn` succeeded or threw. The
// original error (if any) is rethrown after cleanup; cleanup failures are
// deliberately swallowed.
const withTempDir = async (root, fn, opts) => {
  const options = getOptions(opts, {
    copy: ['tmpPrefix'],
  })
  // create the directory
  await mkdir(root, { recursive: true })

  const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''))
  let err
  let result

  try {
    result = await fn(target)
  } catch (_err) {
    // hold the error so cleanup still runs before we rethrow
    err = _err
  }

  try {
    await rm(target, { force: true, recursive: true })
  } catch {
    // ignore errors
  }

  if (err) {
    throw err
  }

  return result
}
module.exports = withTempDir

54
node_modules/@npmcli/fs/package.json generated vendored
View File

@ -1,54 +0,0 @@
{
"name": "@npmcli/fs",
"version": "5.0.0",
"description": "filesystem utilities for the npm cli",
"main": "lib/index.js",
"files": [
"bin/",
"lib/"
],
"scripts": {
"snap": "tap",
"test": "tap",
"npmclilint": "npmcli-lint",
"lint": "npm run eslint",
"lintfix": "npm run eslint -- --fix",
"posttest": "npm run lint",
"postsnap": "npm run lintfix --",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
"eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
},
"repository": {
"type": "git",
"url": "git+https://github.com/npm/fs.git"
},
"keywords": [
"npm",
"oss"
],
"author": "GitHub Inc.",
"license": "ISC",
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
"@npmcli/template-oss": "4.27.1",
"tap": "^16.0.1"
},
"dependencies": {
"semver": "^7.3.5"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.27.1",
"publish": true
},
"tap": {
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
]
}
}

21
node_modules/@npmcli/redact/LICENSE generated vendored
View File

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2024 npm
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,42 +0,0 @@
# @npmcli/redact
Redact sensitive npm information from output.
## API
This will redact `npm_` prefixed tokens and UUIDs from values.
It will also replace passwords in stringified URLs.
### `redact(string)`
Redact values from a single value
```js
const { redact } = require('@npmcli/redact')
redact('https://user:pass@registry.npmjs.org/')
// https://user:***@registry.npmjs.org/
redact(`https://registry.npmjs.org/path/npm_${'a'.repeat(36)}`)
// https://registry.npmjs.org/path/npm_***
```
### `redactLog(string | string[])`
Redact values from a string or array of strings.
This method will also split all strings on `\s` and `=` and iterate over them.
```js
const { redactLog } = require('@npmcli/redact')
redactLog([
'Something --x=https://user:pass@registry.npmjs.org/ foo bar',
'--url=http://foo:bar@registry.npmjs.org',
])
// [
// 'Something --x=https://user:***@registry.npmjs.org/ foo bar',
// '--url=http://foo:***@registry.npmjs.org/',
// ]
```

View File

@ -1,71 +0,0 @@
const { serializeError } = require('./error')
// Recursively walk `input`, calling `handler(value, path)` on every value.
// Errors are first serialized to plain objects, binary payloads are replaced
// with placeholders, circular references are skipped via `seen`, and
// underscore-prefixed properties are omitted.
const deepMap = (input, handler = v => v, path = ['$'], seen = new Set([input])) => {
  // this is in an effort to maintain bole's error logging behavior
  if (path.join('.') === '$' && input instanceof Error) {
    return deepMap({ err: serializeError(input) }, handler, path, seen)
  }
  if (input instanceof Error) {
    return deepMap(serializeError(input), handler, path, seen)
  }
  // allows for non-node js environments, such as workers
  if (typeof Buffer !== 'undefined' && input instanceof Buffer) {
    return `[unable to log instanceof buffer]`
  }
  if (input instanceof Uint8Array) {
    return `[unable to log instanceof Uint8Array]`
  }

  if (Array.isArray(input)) {
    const result = []
    for (let i = 0; i < input.length; i++) {
      const element = input[i]
      const elementPath = [...path, i]
      if (element instanceof Object) {
        if (!seen.has(element)) { // avoid getting stuck in circular reference
          seen.add(element)
          result.push(deepMap(handler(element, elementPath), handler, elementPath, seen))
        }
      } else {
        result.push(handler(element, elementPath))
      }
    }
    return result
  }

  if (input === null) {
    return null
  } else if (typeof input === 'object' || typeof input === 'function') {
    const result = {}

    for (const propertyName of Object.getOwnPropertyNames(input)) {
      // skip logging internal properties
      if (propertyName.startsWith('_')) {
        continue
      }

      try {
        const property = input[propertyName]
        const propertyPath = [...path, propertyName]
        if (property instanceof Object) {
          if (!seen.has(property)) { // avoid getting stuck in circular reference
            seen.add(property)
            result[propertyName] = deepMap(
              handler(property, propertyPath), handler, propertyPath, seen
            )
          }
        } else {
          result[propertyName] = handler(property, propertyPath)
        }
      } catch (err) {
        // a getter may throw an error
        result[propertyName] = `[error getting value: ${err.message}]`
      }
    }
    return result
  }

  // primitive leaf value
  return handler(input, path)
}

module.exports = { deepMap }

View File

@ -1,28 +0,0 @@
/** takes an error object and serializes it to a plain object */
function serializeError (input) {
  if (!(input instanceof Error)) {
    // non-errors are wrapped in a descriptive Error and re-serialized
    if (typeof input === 'string') {
      const error = new Error(`attempted to serialize a non-error, string String, "${input}"`)
      return serializeError(error)
    }
    const error = new Error(`attempted to serialize a non-error, ${typeof input} ${input?.constructor?.name}`)
    return serializeError(error)
  }
  // different error objects store status code differently
  // AxiosError uses `status`, other services use `statusCode`
  const statusCode = input.statusCode ?? input.status
  // CAUTION: what we serialize here gets added to the size of logs
  return {
    errorType: input.errorType ?? input.constructor.name,
    ...(input.message ? { message: input.message } : {}),
    ...(input.stack ? { stack: input.stack } : {}),
    // think of this as error code
    ...(input.code ? { code: input.code } : {}),
    // think of this as http status code
    ...(statusCode ? { statusCode } : {}),
  }
}

module.exports = {
  serializeError,
}

View File

@ -1,44 +0,0 @@
const matchers = require('./matchers')
const { redactUrlPassword } = require('./utils')
const REPLACE = '***'
// Redact sensitive material from a single string: URL passwords, npm_
// tokens, and UUIDs. Non-string (or empty) values pass through untouched.
const redact = (value) => {
  if (!value || typeof value !== 'string') {
    return value
  }
  const withoutPassword = redactUrlPassword(value, REPLACE)
  return withoutPassword
    .replace(matchers.NPM_SECRET.pattern, `npm_${REPLACE}`)
    .replace(matchers.UUID.pattern, REPLACE)
}
// split on \s|= similar to how nopt parses options
// so each token of a CLI-style string is redacted independently while the
// original separators are preserved in the output.
const splitAndRedact = (str) => {
  // stateful regex, don't move out of this scope
  const splitChars = /[\s=]/g

  let match = null
  let result = ''
  let index = 0
  while (match = splitChars.exec(str)) {
    // redact the token before this separator, then re-append the separator
    result += redact(str.slice(index, match.index)) + match[0]
    index = splitChars.lastIndex
  }

  // trailing token after the last separator
  return result + redact(str.slice(index))
}
// replaces auth info in an array of arguments or in a strings
const redactLog = (arg) => {
  if (Array.isArray(arg)) {
    return arg.map((item) => typeof item === 'string' ? splitAndRedact(item) : item)
  }
  return typeof arg === 'string' ? splitAndRedact(arg) : arg
}
module.exports = {
redact,
redactLog,
}

View File

@ -1,88 +0,0 @@
const TYPE_REGEX = 'regex'
const TYPE_URL = 'url'
const TYPE_PATH = 'path'
const NPM_SECRET = {
type: TYPE_REGEX,
pattern: /\b(npms?_)[a-zA-Z0-9]{36,48}\b/gi,
replacement: `[REDACTED_NPM_SECRET]`,
}
const AUTH_HEADER = {
type: TYPE_REGEX,
pattern: /\b(Basic\s+|Bearer\s+)[\w+=\-.]+\b/gi,
replacement: `[REDACTED_AUTH_HEADER]`,
}
const JSON_WEB_TOKEN = {
type: TYPE_REGEX,
pattern: /\b[A-Za-z0-9-_]{10,}(?!\.\d+\.)\.[A-Za-z0-9-_]{3,}\.[A-Za-z0-9-_]{20,}\b/gi,
replacement: `[REDACTED_JSON_WEB_TOKEN]`,
}
const UUID = {
type: TYPE_REGEX,
pattern: /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/gi,
replacement: `[REDACTED_UUID]`,
}
const URL_MATCHER = {
type: TYPE_REGEX,
pattern: /(?:https?|ftp):\/\/[^\s/"$.?#].[^\s"]*/gi,
replacement: '[REDACTED_URL]',
}
const DEEP_HEADER_AUTHORIZATION = {
type: TYPE_PATH,
predicate: ({ path }) => path.endsWith('.headers.authorization'),
replacement: '[REDACTED_HEADER_AUTHORIZATION]',
}
const DEEP_HEADER_SET_COOKIE = {
type: TYPE_PATH,
predicate: ({ path }) => path.endsWith('.headers.set-cookie'),
replacement: '[REDACTED_HEADER_SET_COOKIE]',
}
const DEEP_HEADER_COOKIE = {
type: TYPE_PATH,
predicate: ({ path }) => path.endsWith('.headers.cookie'),
replacement: '[REDACTED_HEADER_COOKIE]',
}
const REWRITE_REQUEST = {
type: TYPE_PATH,
predicate: ({ path }) => path.endsWith('.request'),
replacement: (input) => ({
method: input?.method,
path: input?.path,
headers: input?.headers,
url: input?.url,
}),
}
const REWRITE_RESPONSE = {
type: TYPE_PATH,
predicate: ({ path }) => path.endsWith('.response'),
replacement: (input) => ({
data: input?.data,
status: input?.status,
headers: input?.headers,
}),
}
module.exports = {
TYPE_REGEX,
TYPE_URL,
TYPE_PATH,
NPM_SECRET,
AUTH_HEADER,
JSON_WEB_TOKEN,
UUID,
URL_MATCHER,
DEEP_HEADER_AUTHORIZATION,
DEEP_HEADER_SET_COOKIE,
DEEP_HEADER_COOKIE,
REWRITE_REQUEST,
REWRITE_RESPONSE,
}

View File

@ -1,59 +0,0 @@
const {
AUTH_HEADER,
JSON_WEB_TOKEN,
NPM_SECRET,
DEEP_HEADER_AUTHORIZATION,
DEEP_HEADER_SET_COOKIE,
REWRITE_REQUEST,
REWRITE_RESPONSE,
DEEP_HEADER_COOKIE,
} = require('./matchers')
const {
redactUrlMatcher,
redactUrlPasswordMatcher,
redactMatchers,
} = require('./utils')
const { serializeError } = require('./error')
const { deepMap } = require('./deep-map')
const _redact = redactMatchers(
NPM_SECRET,
AUTH_HEADER,
JSON_WEB_TOKEN,
DEEP_HEADER_AUTHORIZATION,
DEEP_HEADER_SET_COOKIE,
DEEP_HEADER_COOKIE,
REWRITE_REQUEST,
REWRITE_RESPONSE,
redactUrlMatcher(
redactUrlPasswordMatcher()
)
)
const redact = (input) => deepMap(input, (value, path) => _redact(value, { path }))
/** takes an error returns new error keeping some custom properties */
function redactError (input) {
  // serialize first so only plain, loggable fields survive
  const { message, ...data } = serializeError(input)
  const output = new Error(redact(message))
  // copy the redacted remaining fields (stack, code, statusCode, ...) over
  return Object.assign(output, redact(data))
}
/** runs a function within try / catch and throws error wrapped in redactError */
function redactThrow (func) {
  if (typeof func !== 'function') {
    throw new Error('redactThrow expects a function')
  }
  // returns an async wrapper: any rejection is rethrown in redacted form
  return async (...args) => {
    try {
      return await func(...args)
    } catch (error) {
      throw redactError(error)
    }
  }
}
module.exports = { redact, redactError, redactThrow }

View File

@ -1,202 +0,0 @@
const {
URL_MATCHER,
TYPE_URL,
TYPE_REGEX,
TYPE_PATH,
} = require('./matchers')
/**
* creates a string of asterisks,
* this forces a minimum asterisk for security purposes
*/
/**
 * creates a string of asterisks,
 * this forces a minimum of eight asterisks for security purposes
 * (a string argument is measured by its length)
 */
const asterisk = (length = 0) => {
  const size = typeof length === 'string' ? length.length : length
  return '*'.repeat(Math.max(size, 8))
}
/**
* escapes all special regex chars
* @see https://stackoverflow.com/a/9310752
* @see https://github.com/tc39/proposal-regex-escaping
*/
/**
 * escapes all special regex chars so the text matches literally
 * @see https://stackoverflow.com/a/9310752
 * @see https://github.com/tc39/proposal-regex-escaping
 */
const escapeRegExp = (text) => text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
/**
 * provides a regex "or" of the url versions of a string
*/
// Build a regex alternation that matches the value in encoded, decoded, and
// raw form, deduplicated and with every variant regex-escaped.
const urlEncodeRegexGroup = (value) => {
  const variants = new Set([encodeURIComponent(value), decodeURIComponent(value), value])
  return [...variants].map(escapeRegExp).join('|')
}
/**
 * a tagged template literal that returns a regex and ensures all variables are escaped
*/
const urlEncodeRegexTag = (strings, ...values) => {
let pattern = ''
for (let i = 0; i < values.length; i++) {
pattern += strings[i] + `(${urlEncodeRegexGroup(values[i])})`
}
pattern += strings[strings.length - 1]
return new RegExp(pattern)
}
/**
* creates a matcher for redacting url hostname
*/
const redactUrlHostnameMatcher = ({ hostname, replacement } = {}) => ({
type: TYPE_URL,
predicate: ({ url }) => url.hostname === hostname,
pattern: ({ url }) => {
return urlEncodeRegexTag`(^${url.protocol}//${url.username}:.+@)?${url.hostname}`
},
replacement: `$1${replacement || asterisk()}`,
})
/**
* creates a matcher for redacting url search / query parameter values
*/
const redactUrlSearchParamsMatcher = ({ param, replacement } = {}) => ({
type: TYPE_URL,
predicate: ({ url }) => url.searchParams.has(param),
pattern: ({ url }) => urlEncodeRegexTag`(${param}=)${url.searchParams.get(param)}`,
replacement: `$1${replacement || asterisk()}`,
})
/** creates a matcher for redacting the url password */
const redactUrlPasswordMatcher = ({ replacement } = {}) => ({
type: TYPE_URL,
predicate: ({ url }) => url.password,
pattern: ({ url }) => urlEncodeRegexTag`(^${url.protocol}//${url.username}:)${url.password}`,
replacement: `$1${replacement || asterisk()}`,
})
const redactUrlReplacement = (...matchers) => (subValue) => {
try {
const url = new URL(subValue)
return redactMatchers(...matchers)(subValue, { url })
} catch (err) {
return subValue
}
}
/**
* creates a matcher / submatcher for urls, this function allows you to first
* collect all urls within a larger string and then pass those urls to a
* submatcher
*
* @example
 * console.log("this will first match all urls, then pass those urls to the password matcher")
* redactMatchers(redactUrlMatcher(redactUrlPasswordMatcher()))
*
* @example
* console.log(
* "this will assume you are passing in a string that is a url, and will redact the password"
* )
* redactMatchers(redactUrlPasswordMatcher())
*
*/
const redactUrlMatcher = (...matchers) => {
return {
...URL_MATCHER,
replacement: redactUrlReplacement(...matchers),
}
}
const matcherFunctions = {
[TYPE_REGEX]: (matcher) => (value) => {
if (typeof value === 'string') {
value = value.replace(matcher.pattern, matcher.replacement)
}
return value
},
[TYPE_URL]: (matcher) => (value, ctx) => {
if (typeof value === 'string') {
try {
const url = ctx?.url || new URL(value)
const { predicate, pattern } = matcher
const predicateValue = predicate({ url })
if (predicateValue) {
value = value.replace(pattern({ url }), matcher.replacement)
}
} catch (_e) {
return value
}
}
return value
},
[TYPE_PATH]: (matcher) => (value, ctx) => {
const rawPath = ctx?.path
const path = rawPath.join('.').toLowerCase()
const { predicate, replacement } = matcher
const replace = typeof replacement === 'function' ? replacement : () => replacement
const shouldRun = predicate({ rawPath, path })
if (shouldRun) {
value = replace(value, { rawPath, path })
}
return value
},
}
/** converts a matcher to a function */
const redactMatcher = (matcher) => {
return matcherFunctions[matcher.type](matcher)
}
/** converts a series of matchers to a function */
const redactMatchers = (...matchers) => (value, ctx) => {
const flatMatchers = matchers.flat()
return flatMatchers.reduce((result, matcher) => {
const fn = (typeof matcher === 'function') ? matcher : redactMatcher(matcher)
return fn(result, ctx)
}, value)
}
/**
* replacement handler, keeping $1 (if it exists) and replacing the
* rest of the string with asterisks, maintaining string length
*/
const redactDynamicReplacement = () => (value, start) => {
if (typeof start === 'number') {
return asterisk(value)
}
return start + asterisk(value.substring(start.length).length)
}
/**
* replacement handler, keeping $1 (if it exists) and replacing the
* rest of the string with a fixed number of asterisks
*/
const redactFixedReplacement = (length) => (_value, start) => {
if (typeof start === 'number') {
return asterisk(length)
}
return start + asterisk(length)
}
const redactUrlPassword = (value, replacement) => {
return redactMatchers(redactUrlPasswordMatcher({ replacement }))(value)
}
module.exports = {
asterisk,
escapeRegExp,
urlEncodeRegexGroup,
urlEncodeRegexTag,
redactUrlHostnameMatcher,
redactUrlSearchParamsMatcher,
redactUrlPasswordMatcher,
redactUrlMatcher,
redactUrlReplacement,
redactDynamicReplacement,
redactFixedReplacement,
redactMatchers,
redactUrlPassword,
}

View File

@ -1,52 +0,0 @@
{
"name": "@npmcli/redact",
"version": "4.0.0",
"description": "Redact sensitive npm information from output",
"main": "lib/index.js",
"exports": {
".": "./lib/index.js",
"./server": "./lib/server.js",
"./package.json": "./package.json"
},
"scripts": {
"test": "tap",
"lint": "npm run eslint",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
"lintfix": "npm run eslint -- --fix",
"snap": "tap",
"posttest": "npm run lint",
"eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
},
"keywords": [],
"author": "GitHub Inc.",
"license": "ISC",
"files": [
"bin/",
"lib/"
],
"repository": {
"type": "git",
"url": "git+https://github.com/npm/redact.git"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.27.1",
"publish": true
},
"tap": {
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
],
"timeout": 120
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
"@npmcli/template-oss": "4.27.1",
"tap": "^16.3.10"
},
"engines": {
"node": "^20.17.0 || >=22.9.0"
}
}

View File

@ -1,13 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10

View File

@ -1,48 +0,0 @@
name: CI
on:
push:
branches:
- main
- 'v*'
paths-ignore:
- 'docs/**'
- '*.md'
pull_request:
paths-ignore:
- 'docs/**'
- '*.md'
# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true
jobs:
test:
name: ${{ matrix.node-version }} ${{ matrix.os }}
runs-on: ${{ matrix.os }}
permissions:
contents: read
strategy:
fail-fast: false
matrix:
os: [macOS-latest, windows-latest, ubuntu-latest]
node-version: [18, 20, 22, 24]
steps:
- name: Check out repo
uses: actions/checkout@v5.0.0
with:
persist-credentials: false
- name: Setup Node ${{ matrix.node-version }}
uses: actions/setup-node@v5
with:
node-version: ${{ matrix.node-version }}
- name: Install dependencies
run: npm i --ignore-scripts
- name: Run tests
run: npm run test

View File

@ -1,43 +0,0 @@
name: Publish release
on:
workflow_dispatch:
inputs:
version:
description: 'The version number to tag and release'
required: true
type: string
prerelease:
description: 'Release as pre-release'
required: false
type: boolean
default: false
jobs:
release-npm:
runs-on: ubuntu-latest
environment: main
permissions:
contents: write
id-token: write
steps:
- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v4
- uses: actions/setup-node@v5
with:
node-version: '22'
registry-url: 'https://registry.npmjs.org'
- run: npm install npm -g
- run: npm install
- name: Change version number and sync
run: |
node scripts/sync-version.mjs ${{ inputs.version }}
- name: GIT commit and push all changed files
run: |
git config --global user.name "mcollina"
git config --global user.email "hello@matteocollina.com"
git commit -n -a -m "Bumped v${{ inputs.version }}"
git push origin HEAD:${{ github.ref }}
- run: npm publish --access public --tag ${{ inputs.prerelease == true && 'next' || 'latest' }}
- name: 'Create release notes'
run: |
npx @matteo.collina/release-notes -a ${{ secrets.GITHUB_TOKEN }} -t v${{ inputs.version }} -r redact -o pinojs ${{ github.event.inputs.prerelease == 'true' && '-p' || '' }} -c ${{ github.ref }}

21
node_modules/@pinojs/redact/LICENSE generated vendored
View File

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2025 pinojs contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

350
node_modules/@pinojs/redact/README.md generated vendored
View File

@ -1,350 +0,0 @@
# @pinojs/redact
> Smart object redaction for JavaScript applications - safe AND fast!
Redact JS objects with the same API as [fast-redact](https://github.com/davidmarkclements/fast-redact), but uses innovative **selective cloning** instead of mutating the original. This provides immutability guarantees with **performance competitive** to fast-redact for real-world usage patterns.
## Install
```bash
npm install @pinojs/redact
```
## Usage
```js
const slowRedact = require('@pinojs/redact')
const redact = slowRedact({
paths: ['headers.cookie', 'headers.authorization', 'user.password']
})
const obj = {
headers: {
cookie: 'secret-session-token',
authorization: 'Bearer abc123',
'x-forwarded-for': '192.168.1.1'
},
user: {
name: 'john',
password: 'secret123'
}
}
console.log(redact(obj))
// Output: {"headers":{"cookie":"[REDACTED]","authorization":"[REDACTED]","x-forwarded-for":"192.168.1.1"},"user":{"name":"john","password":"[REDACTED]"}}
// Original object is completely unchanged:
console.log(obj.headers.cookie) // 'secret-session-token'
```
## API
### slowRedact(options) → Function
Creates a redaction function with the specified options.
#### Options
- **paths** `string[]` (required): An array of strings describing the nested location of a key in an object
- **censor** `any` (optional, default: `'[REDACTED]'`): The value to replace sensitive data with. Can be a static value or function.
- **serialize** `Function|boolean` (optional, default: `JSON.stringify`): Serialization function. Set to `false` to return the redacted object.
- **remove** `boolean` (optional, default: `false`): Remove redacted keys from serialized output
- **strict** `boolean` (optional, default: `true`): Throw on non-object values or pass through primitives
#### Path Syntax
Supports the same path syntax as fast-redact:
- **Dot notation**: `'user.name'`, `'headers.cookie'`
- **Bracket notation**: `'user["password"]'`, `'headers["X-Forwarded-For"]'`
- **Array indices**: `'users[0].password'`, `'items[1].secret'`
- **Wildcards**:
- Terminal: `'users.*.password'` (redacts password for all users)
- Intermediate: `'*.password'` (redacts password at any level)
- Array wildcard: `'items.*'` (redacts all array elements)
#### Examples
**Custom censor value:**
```js
const redact = slowRedact({
paths: ['password'],
censor: '***HIDDEN***'
})
```
**Dynamic censor function:**
```js
const redact = slowRedact({
paths: ['password'],
censor: (value, path) => `REDACTED:${path}`
})
```
**Return object instead of JSON string:**
```js
const redact = slowRedact({
paths: ['secret'],
serialize: false
})
const result = redact({ secret: 'hidden', public: 'data' })
console.log(result.secret) // '[REDACTED]'
console.log(result.public) // 'data'
// Restore original values
const restored = result.restore()
console.log(restored.secret) // 'hidden'
```
**Custom serialization:**
```js
const redact = slowRedact({
paths: ['password'],
serialize: obj => JSON.stringify(obj, null, 2)
})
```
**Remove keys instead of redacting:**
```js
const redact = slowRedact({
paths: ['password', 'user.secret'],
remove: true
})
const obj = { username: 'john', password: 'secret123', user: { name: 'Jane', secret: 'hidden' } }
console.log(redact(obj))
// Output: {"username":"john","user":{"name":"Jane"}}
// Note: 'password' and 'user.secret' are completely absent, not redacted
```
**Wildcard patterns:**
```js
// Redact all properties in secrets object
const redact1 = slowRedact({ paths: ['secrets.*'] })
// Redact password for any user
const redact2 = slowRedact({ paths: ['users.*.password'] })
// Redact all items in an array
const redact3 = slowRedact({ paths: ['items.*'] })
// Remove all secrets instead of redacting them
const redact4 = slowRedact({ paths: ['secrets.*'], remove: true })
```
## Key Differences from fast-redact
### Safety First
- **No mutation**: Original objects are never modified
- **Selective cloning**: Only clones paths that need redaction, shares references for everything else
- **Restore capability**: Can restore original values when `serialize: false`
### Feature Compatibility
- **Remove option**: Full compatibility with fast-redact's `remove: true` option to completely omit keys from output
- **All path patterns**: Supports same syntax including wildcards, bracket notation, and array indices
- **Censor functions**: Dynamic censoring with path information passed as arrays
- **Serialization**: Custom serializers and `serialize: false` mode
### Smart Performance Approach
- **Selective cloning**: Analyzes redaction paths and only clones necessary object branches
- **Reference sharing**: Non-redacted properties maintain original object references
- **Memory efficiency**: Dramatically reduced memory usage for large objects with minimal redaction
- **Setup-time optimization**: Path analysis happens once during setup, not per redaction
### When to Use @pinojs/redact
- When immutability is critical
- When you need to preserve original objects
- When objects are shared across multiple contexts
- In functional programming environments
- When debugging and you need to compare before/after
- **Large objects with selective redaction** (now performance-competitive!)
- When memory efficiency with reference sharing is important
### When to Use fast-redact
- When absolute maximum performance is critical
- In extremely high-throughput scenarios (>100,000 ops/sec)
- When you control the object lifecycle and mutation is acceptable
- Very small objects where setup overhead matters
## Performance Benchmarks
@pinojs/redact uses **selective cloning** that provides good performance while maintaining immutability guarantees:
### Performance Results
| Operation Type | @pinojs/redact | fast-redact | Performance Ratio |
|---------------|-------------|-------------|-------------------|
| **Small objects** | ~690ns | ~200ns | ~3.5x slower |
| **Large objects (minimal redaction)** | **~18μs** | ~17μs | **~same performance** |
| **Large objects (wildcards)** | **~48μs** | ~37μs | **~1.3x slower** |
| **No redaction (large objects)** | **~18μs** | ~17μs | **~same performance** |
### Performance Improvements
@pinojs/redact is performance-competitive with fast-redact for large objects.
1. **Selective cloning approach**: Only clones object paths that need redaction
2. **Reference sharing**: Non-redacted properties share original object references
3. **Setup-time optimization**: Path analysis happens once, not per redaction
4. **Memory efficiency**: Dramatically reduced memory usage for typical use cases
### Benchmark Details
**Small Objects (~180 bytes)**:
- @pinojs/redact: **690ns** per operation
- fast-redact: **200ns** per operation
- **Slight setup overhead for small objects**
**Large Objects (~18KB, minimal redaction)**:
- @pinojs/redact: **18μs** per operation
- fast-redact: **17μs** per operation
- Near-identical performance
**Large Objects (~18KB, wildcard patterns)**:
- @pinojs/redact: **48μs** per operation
- fast-redact: **37μs** per operation
- Competitive performance for complex patterns
**Memory Considerations**:
- @pinojs/redact: **Selective reference sharing** (much lower memory usage than before)
- fast-redact: Mutates in-place (lowest memory usage)
- Large objects with few redacted paths now share most references
### When Performance Matters
Choose **fast-redact** when:
- Absolute maximum performance is critical (>100,000 ops/sec)
- Working with very small objects frequently
- Mutation is acceptable and controlled
- Every microsecond counts
Choose **@pinojs/redact** when:
- Immutability is required (with competitive performance)
- Objects are shared across contexts
- Large objects with selective redaction
- Memory efficiency through reference sharing is important
- Safety and functionality are priorities
- Most production applications (performance gap is minimal)
Run benchmarks yourself:
```bash
npm run bench
```
## How Selective Cloning Works
@pinojs/redact uses an innovative **selective cloning** approach that provides immutability guarantees while dramatically improving performance:
### Traditional Approach (before optimization)
```js
// Old approach: Deep clone entire object, then redact
const fullClone = deepClone(originalObject) // Clone everything
redact(fullClone, paths) // Then redact specific paths
```
### Selective Cloning Approach (current)
```js
// New approach: Analyze paths, clone only what's needed
const pathStructure = buildPathStructure(paths) // One-time setup
const selectiveClone = cloneOnlyNeededPaths(obj, pathStructure) // Smart cloning
redact(selectiveClone, paths) // Redact pre-identified paths
```
### Key Innovations
1. **Path Analysis**: Pre-processes redaction paths into an efficient tree structure
2. **Selective Cloning**: Only creates new objects for branches that contain redaction targets
3. **Reference Sharing**: Non-redacted properties maintain exact same object references
4. **Setup Optimization**: Path parsing happens once during redactor creation, not per redaction
### Example: Reference Sharing in Action
```js
const largeConfig = {
database: { /* large config object */ },
api: { /* another large config */ },
secrets: { password: 'hidden', apiKey: 'secret' }
}
const redact = slowRedact({ paths: ['secrets.password'] })
const result = redact(largeConfig)
// Only secrets object is cloned, database and api share original references
console.log(result.database === largeConfig.database) // true - shared reference!
console.log(result.api === largeConfig.api) // true - shared reference!
console.log(result.secrets === largeConfig.secrets) // false - cloned for redaction
```
This approach provides **immutability where it matters** while **sharing references where it's safe**.
## Remove Option
The `remove: true` option provides full compatibility with fast-redact's key removal functionality:
```js
const redact = slowRedact({
paths: ['password', 'secrets.*', 'users.*.credentials'],
remove: true
})
const data = {
username: 'john',
password: 'secret123',
secrets: { apiKey: 'abc', token: 'xyz' },
users: [
{ name: 'Alice', credentials: { password: 'pass1' } },
{ name: 'Bob', credentials: { password: 'pass2' } }
]
}
console.log(redact(data))
// Output: {"username":"john","secrets":{},"users":[{"name":"Alice"},{"name":"Bob"}]}
```
### Remove vs Redact Behavior
| Option | Behavior | Output Example |
|--------|----------|----------------|
| Default (redact) | Replaces values with censor | `{"password":"[REDACTED]"}` |
| `remove: true` | Completely omits keys | `{}` |
### Compatibility Notes
- **Same output as fast-redact**: Identical JSON output when using `remove: true`
- **Wildcard support**: Works with all wildcard patterns (`*`, `users.*`, `items.*.secret`)
- **Array handling**: Array items are set to `undefined` (`JSON.stringify` serializes such slots as `null`, since array positions cannot be omitted)
- **Nested paths**: Supports deep removal (`users.*.credentials.password`)
- **Serialize compatibility**: Only works with `JSON.stringify` serializer (like fast-redact)
## Testing
```bash
# Run unit tests
npm test
# Run integration tests comparing with fast-redact
npm run test:integration
# Run all tests (unit + integration)
npm run test:all
# Run benchmarks
npm run bench
```
### Test Coverage
- **16 unit tests**: Core functionality and edge cases
- **16 integration tests**: Output compatibility with fast-redact
- **All major features**: Paths, wildcards, serialization, custom censors
- **Performance benchmarks**: Direct comparison with fast-redact
## License
MIT
## Contributing
Pull requests welcome! Please ensure all tests pass and add tests for new features.

View File

@ -1,184 +0,0 @@
// Mitata benchmark suite comparing @pinojs/redact ("slow") against
// fast-redact across small/large objects, wildcards, and clone overhead.
const { bench, group, run } = require('mitata')
const slowRedact = require('../index.js')
const fastRedact = require('fast-redact')
// Test objects
const smallObj = {
  user: { name: 'john', password: 'secret123' },
  headers: { cookie: 'session-token', authorization: 'Bearer abc123' }
}
const largeObj = {
  users: [],
  metadata: {
    version: '1.0.0',
    secret: 'app-secret-key',
    database: {
      host: 'localhost',
      password: 'db-password'
    }
  }
}
// Populate users array with for loop instead of Array.from
for (let i = 0; i < 100; i++) {
  largeObj.users.push({
    id: i,
    name: `user${i}`,
    email: `user${i}@example.com`,
    password: `secret${i}`,
    profile: {
      age: 20 + (i % 50),
      preferences: {
        theme: 'dark',
        notifications: true,
        apiKey: `key-${i}-secret`
      }
    }
  })
}
// Redaction configurations — built once up front so these benches
// measure redaction only; benches that construct a redactor inline
// below also include setup cost in their timing.
const basicSlowRedact = slowRedact({
  paths: ['user.password', 'headers.cookie']
})
const basicFastRedact = fastRedact({
  paths: ['user.password', 'headers.cookie']
})
const wildcardSlowRedact = slowRedact({
  paths: ['users.*.password', 'users.*.profile.preferences.apiKey']
})
const wildcardFastRedact = fastRedact({
  paths: ['users.*.password', 'users.*.profile.preferences.apiKey']
})
const deepSlowRedact = slowRedact({
  paths: ['metadata.secret', 'metadata.database.password']
})
const deepFastRedact = fastRedact({
  paths: ['metadata.secret', 'metadata.database.password']
})
group('Small Object Redaction - @pinojs/redact', () => {
  bench('basic paths', () => {
    basicSlowRedact(smallObj)
  })
  bench('serialize: false', () => {
    const redact = slowRedact({
      paths: ['user.password'],
      serialize: false
    })
    redact(smallObj)
  })
  bench('custom censor function', () => {
    const redact = slowRedact({
      paths: ['user.password'],
      censor: (value, path) => `HIDDEN:${path}`
    })
    redact(smallObj)
  })
})
group('Small Object Redaction - fast-redact', () => {
  bench('basic paths', () => {
    basicFastRedact(smallObj)
  })
  bench('serialize: false', () => {
    const redact = fastRedact({
      paths: ['user.password'],
      serialize: false
    })
    redact(smallObj)
  })
  bench('custom censor function', () => {
    const redact = fastRedact({
      paths: ['user.password'],
      censor: (value, path) => `HIDDEN:${path}`
    })
    redact(smallObj)
  })
})
group('Large Object Redaction - @pinojs/redact', () => {
  bench('wildcard patterns', () => {
    wildcardSlowRedact(largeObj)
  })
  bench('deep nested paths', () => {
    deepSlowRedact(largeObj)
  })
  bench('multiple wildcards', () => {
    const redact = slowRedact({
      paths: ['users.*.password', 'users.*.profile.preferences.*']
    })
    redact(largeObj)
  })
})
group('Large Object Redaction - fast-redact', () => {
  bench('wildcard patterns', () => {
    wildcardFastRedact(largeObj)
  })
  bench('deep nested paths', () => {
    deepFastRedact(largeObj)
  })
  bench('multiple wildcards', () => {
    const redact = fastRedact({
      paths: ['users.*.password', 'users.*.profile.preferences.*']
    })
    redact(largeObj)
  })
})
group('Direct Performance Comparison', () => {
  bench('@pinojs/redact - basic paths', () => {
    basicSlowRedact(smallObj)
  })
  bench('fast-redact - basic paths', () => {
    basicFastRedact(smallObj)
  })
  bench('@pinojs/redact - wildcards', () => {
    wildcardSlowRedact(largeObj)
  })
  bench('fast-redact - wildcards', () => {
    wildcardFastRedact(largeObj)
  })
})
// Empty-paths redactors isolate the cost of cloning/serializing alone
group('Object Cloning Overhead', () => {
  bench('@pinojs/redact - no redaction (clone only)', () => {
    const redact = slowRedact({ paths: [] })
    redact(smallObj)
  })
  bench('fast-redact - no redaction', () => {
    const redact = fastRedact({ paths: [] })
    redact(smallObj)
  })
  bench('@pinojs/redact - large object clone', () => {
    const redact = slowRedact({ paths: [] })
    redact(largeObj)
  })
  bench('fast-redact - large object', () => {
    const redact = fastRedact({ paths: [] })
    redact(largeObj)
  })
})
run()

View File

@ -1 +0,0 @@
// Flat ESLint config: neostandard's shared ruleset, used unmodified
module.exports = require('neostandard')()

View File

@ -1,52 +0,0 @@
export = F;
/**
 * When called without any options, or with a zero length paths array, @pinojs/redact will return JSON.stringify or the serialize option, if set.
 * @param redactOptions
 * @param redactOptions.paths An array of strings describing the nested location of a key in an object.
 * @param redactOptions.censor This is the value which overwrites redacted properties. May also be a function receiving the original value and the parsed path segments.
 * @param redactOptions.remove The remove option, when set to true will cause keys to be removed from the serialized output.
 * @param redactOptions.serialize The serialize option may either be a function or a boolean. If a function is supplied, this will be used to serialize the redacted object.
 * @param redactOptions.strict The strict option, when set to true, causes non-object values to be passed directly to the serializer instead of being cloned and redacted.
 * @returns Redacted value from input
 */
declare function F(
  redactOptions: F.RedactOptionsNoSerialize
): F.redactFnNoSerialize;
declare function F(redactOptions?: F.RedactOptions): F.redactFn;
declare namespace F {
  /** Redacts input */
  type redactFn = <T>(input: T) => string | T;
  /** Redacts input without serialization */
  type redactFnNoSerialize = redactFn & {
    /**
     * Method allowing the redacted keys to be restored with the original data.
     * Supplied only when the serialize option is set to false. The argument is
     * optional: the implementation restores from the original input it captured.
     */
    restore<T>(input?: T): T;
  };
  interface RedactOptions {
    /** An array of strings describing the nested location of a key in an object. */
    paths?: string[] | undefined;
    /**
     * This is the value which overwrites redacted properties.
     * A censor function is called with the original value and the parsed
     * path segments of the redacted key.
     */
    censor?: string | ((value: any, path?: string[]) => any) | undefined;
    /** The remove option, when set to true will cause keys to be removed from the serialized output. */
    remove?: boolean | undefined;
    /**
     * The serialize option may either be a function or a boolean. If a function is supplied, this will be used to serialize the redacted object.
     * The default serialize is the function JSON.stringify
     */
    serialize?: boolean | ((v: any) => any) | undefined;
    /** The strict option, when set to true, causes non-object values to be passed directly to the serializer instead of being cloned and redacted. */
    strict?: boolean | undefined;
  }
  /** RedactOptions without serialization. Instead of the serialized object, the output of the redactor function will be the redacted copy itself. */
  interface RedactOptionsNoSerialize extends RedactOptions {
    serialize: false;
  }
}

529
node_modules/@pinojs/redact/index.js generated vendored
View File

@ -1,529 +0,0 @@
'use strict'
/**
 * Recursively copies a value. Dates are rebuilt, arrays and objects are
 * cloned key-by-key (preserving the prototype, own enumerable keys only),
 * and primitives are returned as-is. Note Map/Set/RegExp instances fall
 * through to the generic object branch and lose their internal state.
 */
function deepClone (obj) {
  // Primitives and null need no copying
  if (obj === null || typeof obj !== 'object') {
    return obj
  }
  if (obj instanceof Date) {
    return new Date(obj.getTime())
  }
  if (obj instanceof Array) {
    const copy = []
    for (let index = 0; index < obj.length; index++) {
      copy[index] = deepClone(obj[index])
    }
    return copy
  }
  const copy = Object.create(Object.getPrototypeOf(obj))
  for (const prop in obj) {
    if (Object.prototype.hasOwnProperty.call(obj, prop)) {
      copy[prop] = deepClone(obj[prop])
    }
  }
  return copy
}
/**
 * Splits a redaction path string into its key segments.
 * Handles dot notation ('a.b'), bracket notation ('a["b"]', 'a[0]') and
 * quoted bracket keys that contain dots. A closing bracket always emits
 * a segment, so an empty bracket pair yields an empty-string segment.
 * @param {string} path
 * @returns {string[]} ordered key segments
 */
function parsePath (path) {
  const segments = []
  let buffer = ''
  let insideBrackets = false
  let insideQuotes = false
  let activeQuote = ''
  for (let i = 0; i < path.length; i++) {
    const ch = path[i]
    if (ch === '.' && !insideBrackets) {
      // Dots separate segments only outside brackets
      if (buffer !== '') {
        segments.push(buffer)
        buffer = ''
      }
      continue
    }
    if (ch === '[') {
      if (buffer !== '') {
        segments.push(buffer)
        buffer = ''
      }
      insideBrackets = true
      continue
    }
    if (ch === ']' && insideBrackets) {
      // Closing bracket always emits a segment, even an empty one
      segments.push(buffer)
      buffer = ''
      insideBrackets = false
      insideQuotes = false
      continue
    }
    if ((ch === '"' || ch === "'") && insideBrackets) {
      if (!insideQuotes) {
        insideQuotes = true
        activeQuote = ch
      } else if (ch === activeQuote) {
        insideQuotes = false
        activeQuote = ''
      } else {
        // The other quote character is literal content inside quotes
        buffer += ch
      }
      continue
    }
    buffer += ch
  }
  if (buffer !== '') {
    segments.push(buffer)
  }
  return segments
}
/**
 * Assigns `value` at the location described by `parts`, walking only
 * pre-existing intermediate objects (missing steps are never created).
 * A terminal '*' assigns the value to every array index / own key of the
 * parent. A plain terminal key is only written when it is an own
 * property of the parent.
 * @returns {boolean} false when an intermediate step is missing or not
 *   an object; true otherwise (even when the final key did not exist).
 */
function setValue (obj, parts, value) {
  let node = obj
  const last = parts.length - 1
  for (let i = 0; i < last; i++) {
    const step = parts[i]
    if (node === null || typeof node !== 'object' || !(step in node)) {
      return false // intermediate step missing — never create it
    }
    const next = node[step]
    if (next === null || typeof next !== 'object') {
      return false // intermediate step is a primitive
    }
    node = next
  }
  const finalKey = parts[last]
  if (finalKey === '*') {
    if (Array.isArray(node)) {
      for (let i = 0; i < node.length; i++) {
        node[i] = value
      }
    } else if (node !== null && typeof node === 'object') {
      for (const key in node) {
        if (Object.prototype.hasOwnProperty.call(node, key)) {
          node[key] = value
        }
      }
    }
  } else if (
    node !== null && typeof node === 'object' &&
    finalKey in node &&
    Object.prototype.hasOwnProperty.call(node, finalKey)
  ) {
    node[finalKey] = value
  }
  return true
}
/**
 * Deletes the key addressed by `parts`, walking only pre-existing
 * intermediate objects. A terminal '*' deletes every own key of the
 * parent object; for arrays the entries are set to `undefined` instead,
 * keeping indices stable (JSON.stringify renders undefined array slots
 * as null, since array positions cannot be omitted).
 * @returns {boolean} false when an intermediate step is missing or not
 *   an object; true otherwise.
 */
function removeKey (obj, parts) {
  let node = obj
  const last = parts.length - 1
  for (let i = 0; i < last; i++) {
    const step = parts[i]
    if (node === null || typeof node !== 'object' || !(step in node)) {
      return false // intermediate step missing — never create it
    }
    const next = node[step]
    if (next === null || typeof next !== 'object') {
      return false // intermediate step is a primitive
    }
    node = next
  }
  const finalKey = parts[last]
  if (finalKey === '*') {
    if (Array.isArray(node)) {
      // Keep indices stable: blank entries rather than splicing
      for (let i = 0; i < node.length; i++) {
        node[i] = undefined
      }
    } else if (node !== null && typeof node === 'object') {
      for (const key in node) {
        if (Object.prototype.hasOwnProperty.call(node, key)) {
          delete node[key]
        }
      }
    }
  } else if (
    node !== null && typeof node === 'object' &&
    finalKey in node &&
    Object.prototype.hasOwnProperty.call(node, finalKey)
  ) {
    delete node[finalKey]
  }
  return true
}
// Sentinel distinguishing "path does not exist" from a stored undefined
const PATH_NOT_FOUND = Symbol('PATH_NOT_FOUND')

/**
 * Walks `parts` through `obj` and returns the value found there, or the
 * PATH_NOT_FOUND sentinel when any step is absent or crosses a
 * non-object. A stored `undefined` is therefore distinguishable from a
 * missing key.
 */
function getValueIfExists (obj, parts) {
  let node = obj
  for (const step of parts) {
    // null/undefined/primitives cannot hold properties of interest
    if (node === null || node === undefined || typeof node !== 'object') {
      return PATH_NOT_FOUND
    }
    if (!(step in node)) {
      return PATH_NOT_FOUND
    }
    node = node[step]
  }
  return node
}
/**
 * Walks `parts` through `obj`, returning `undefined` when any step is
 * missing or crosses a non-object. Unlike getValueIfExists, a missing
 * path is indistinguishable from a stored undefined.
 */
function getValue (obj, parts) {
  let node = obj
  for (const step of parts) {
    if (node === null || node === undefined || typeof node !== 'object') {
      return undefined
    }
    node = node[step]
  }
  return node
}
/**
 * Applies every redaction path to `obj` in place. Wildcard paths are
 * delegated to redactWildcardPath; plain paths are either removed or
 * overwritten with the censor. A censor function receives the current
 * value and the parsed path segments.
 */
function redactPaths (obj, paths, censor, remove = false) {
  for (const path of paths) {
    const segments = parsePath(path)
    if (segments.includes('*')) {
      redactWildcardPath(obj, segments, censor, path, remove)
      continue
    }
    if (remove) {
      removeKey(obj, segments)
      continue
    }
    // Single traversal: skip paths that do not exist in the object
    const existing = getValueIfExists(obj, segments)
    if (existing === PATH_NOT_FOUND) {
      continue
    }
    const replacement = typeof censor === 'function'
      ? censor(existing, segments)
      : censor
    setValue(obj, segments, replacement)
  }
}
/**
 * Applies a wildcard redaction path to `obj` in place. When '*' is the
 * final segment, every element/key of the parent container is censored
 * (or removed). When '*' is an intermediate segment, traversal fans out
 * via redactIntermediateWildcard.
 */
function redactWildcardPath (obj, parts, censor, originalPath, remove = false) {
  const wildcardIndex = parts.indexOf('*')
  if (wildcardIndex === parts.length - 1) {
    // Terminal wildcard: walk down to the parent container first
    const parentParts = parts.slice(0, -1)
    let current = obj
    for (const part of parentParts) {
      if (current === null || current === undefined) return
      // Type safety: Check if current is an object before property access
      if (typeof current !== 'object' || current === null) return
      current = current[part]
    }
    if (Array.isArray(current)) {
      if (remove) {
        // For arrays, set all items to undefined; JSON.stringify emits
        // such slots as null (array positions cannot be omitted)
        for (let i = 0; i < current.length; i++) {
          current[i] = undefined
        }
      } else {
        for (let i = 0; i < current.length; i++) {
          // Censor functions receive the full path including the index
          const indexPath = [...parentParts, i.toString()]
          const actualCensor = typeof censor === 'function'
            ? censor(current[i], indexPath)
            : censor
          current[i] = actualCensor
        }
      }
    } else if (typeof current === 'object' && current !== null) {
      if (remove) {
        // Collect keys to delete to avoid issues with deleting during iteration
        const keysToDelete = []
        for (const key in current) {
          if (Object.prototype.hasOwnProperty.call(current, key)) {
            keysToDelete.push(key)
          }
        }
        for (const key of keysToDelete) {
          delete current[key]
        }
      } else {
        // NOTE(review): no hasOwnProperty guard here (unlike the remove
        // branch above), so inherited enumerable keys would also be
        // censored — confirm whether that is intentional
        for (const key in current) {
          const keyPath = [...parentParts, key]
          const actualCensor = typeof censor === 'function'
            ? censor(current[key], keyPath)
            : censor
          current[key] = actualCensor
        }
      }
    }
  } else {
    // Intermediate wildcard: fan out over every child at that depth
    redactIntermediateWildcard(obj, parts, censor, wildcardIndex, originalPath, remove)
  }
}
/**
 * Handles a wildcard that appears mid-path (e.g. 'users.*.password').
 * Walks the concrete prefix, fans out over every element/key at the
 * wildcard position, then applies the remaining suffix — recursing back
 * through redactWildcardPath when the suffix contains more wildcards.
 */
function redactIntermediateWildcard (obj, parts, censor, wildcardIndex, originalPath, remove = false) {
  const beforeWildcard = parts.slice(0, wildcardIndex)
  const afterWildcard = parts.slice(wildcardIndex + 1)
  const pathArray = [] // Cached array to avoid allocations
  function traverse (current, pathLength) {
    if (pathLength === beforeWildcard.length) {
      // At the wildcard position: fan out over every element/own key
      if (Array.isArray(current)) {
        for (let i = 0; i < current.length; i++) {
          pathArray[pathLength] = i.toString()
          traverse(current[i], pathLength + 1)
        }
      } else if (typeof current === 'object' && current !== null) {
        for (const key in current) {
          pathArray[pathLength] = key
          traverse(current[key], pathLength + 1)
        }
      }
    } else if (pathLength < beforeWildcard.length) {
      // Still consuming the concrete prefix
      const nextKey = beforeWildcard[pathLength]
      // Type safety: Check if current is an object before using 'in' operator
      if (current && typeof current === 'object' && current !== null && nextKey in current) {
        pathArray[pathLength] = nextKey
        traverse(current[nextKey], pathLength + 1)
      }
    } else {
      // Past the wildcard: apply the suffix
      if (afterWildcard.includes('*')) {
        // Recursively handle remaining wildcards
        // Wrap censor to prepend current path context
        const wrappedCensor = typeof censor === 'function'
          ? (value, path) => {
              const fullPath = [...pathArray.slice(0, pathLength), ...path]
              return censor(value, fullPath)
            }
          : censor
        redactWildcardPath(current, afterWildcard, wrappedCensor, originalPath, remove)
      } else {
        // No more wildcards, apply the redaction directly
        if (remove) {
          removeKey(current, afterWildcard)
        } else {
          const actualCensor = typeof censor === 'function'
            ? censor(getValue(current, afterWildcard), [...pathArray.slice(0, pathLength), ...afterWildcard])
            : censor
          setValue(current, afterWildcard, actualCensor)
        }
      }
    }
  }
  if (beforeWildcard.length === 0) {
    // Leading wildcard ('*.password'): start fanning out at the root
    traverse(obj, 0)
  } else {
    // Walk the concrete prefix once before starting the traversal
    let current = obj
    for (let i = 0; i < beforeWildcard.length; i++) {
      const part = beforeWildcard[i]
      if (current === null || current === undefined) return
      // Type safety: Check if current is an object before property access
      if (typeof current !== 'object' || current === null) return
      current = current[part]
      pathArray[i] = part
    }
    if (current !== null && current !== undefined) {
      traverse(current, beforeWildcard.length)
    }
  }
}
/**
 * Pre-parses redaction paths into a nested Map tree (one Map level per
 * path segment), consumed by selectiveClone to decide which branches of
 * the input need copying. Returns null when there is nothing to redact.
 */
function buildPathStructure (pathsToClone) {
  if (pathsToClone.length === 0) {
    return null // nothing to redact
  }
  const root = new Map()
  for (const path of pathsToClone) {
    let node = root
    for (const segment of parsePath(path)) {
      if (!node.has(segment)) {
        node.set(segment, new Map())
      }
      node = node.get(segment)
    }
  }
  return root
}
/**
 * Clones only the branches of `obj` that appear in `pathStructure`
 * (the nested Map tree from buildPathStructure); every other property
 * keeps its original reference, so untouched subtrees are shared rather
 * than copied. Returns the original object when there is nothing to
 * redact (null structure), or the original value at any leaf of the
 * structure (an empty Map means "stop cloning here").
 */
function selectiveClone (obj, pathStructure) {
  if (!pathStructure) {
    return obj // no paths to redact — share the whole object
  }

  const copyBranch = (source, pathMap) => {
    if (!pathMap || pathMap.size === 0) {
      return source // past the last redacted segment: share reference
    }
    if (source === null || typeof source !== 'object') {
      return source
    }
    if (source instanceof Date) {
      return new Date(source.getTime())
    }
    if (Array.isArray(source)) {
      const out = []
      for (let i = 0; i < source.length; i++) {
        const key = i.toString()
        out[i] = (pathMap.has(key) || pathMap.has('*'))
          ? copyBranch(source[i], pathMap.get(key) || pathMap.get('*'))
          : source[i] // share reference for non-redacted items
      }
      return out
    }
    const out = Object.create(Object.getPrototypeOf(source))
    for (const key in source) {
      if (!Object.prototype.hasOwnProperty.call(source, key)) continue
      out[key] = (pathMap.has(key) || pathMap.has('*'))
        ? copyBranch(source[key], pathMap.get(key) || pathMap.get('*'))
        : source[key] // share reference for non-redacted properties
    }
    return out
  }

  return copyBranch(obj, pathStructure)
}
/**
 * Throws when `path` is not a usable redaction path: non-string, empty,
 * containing '..' or ',', or having unbalanced square brackets. Quote
 * characters are only honoured inside brackets, so quoted bracket keys
 * may legally contain '[' or ']'.
 * @param {string} path
 * @throws {Error} when the path is malformed
 */
function validatePath (path) {
  if (typeof path !== 'string') {
    throw new Error('Paths must be (non-empty) strings')
  }
  if (path === '') {
    throw new Error('Invalid redaction path ()')
  }
  // Reject double dots and comma-separated lists outright
  if (path.includes('..') || path.includes(',')) {
    throw new Error(`Invalid redaction path (${path})`)
  }
  let depth = 0
  let quoted = false
  let openQuote = ''
  for (let i = 0; i < path.length; i++) {
    const ch = path[i]
    if ((ch === '"' || ch === "'") && depth > 0) {
      if (!quoted) {
        quoted = true
        openQuote = ch
      } else if (ch === openQuote) {
        quoted = false
        openQuote = ''
      }
    } else if (ch === '[' && !quoted) {
      depth++
    } else if (ch === ']' && !quoted) {
      depth--
      if (depth < 0) {
        // Closing bracket with no matching opener
        throw new Error(`Invalid redaction path (${path})`)
      }
    }
  }
  if (depth !== 0) {
    throw new Error(`Invalid redaction path (${path})`)
  }
}
/**
 * Validates the `paths` option: must be an array whose every entry
 * passes validatePath.
 * @throws {TypeError} when `paths` is not an array
 * @throws {Error} when any entry is a malformed path
 */
function validatePaths (paths) {
  if (!Array.isArray(paths)) {
    throw new TypeError('paths must be an array')
  }
  paths.forEach((path) => validatePath(path))
}
/**
 * Creates a redactor function. Unlike fast-redact, the returned redactor
 * never mutates its input: it selectively clones only the branches named
 * in `paths` and applies the censor (or removal) to the clone.
 *
 * @param {object} [options]
 * @param {string[]} [options.paths=[]] dot/bracket paths to redact
 * @param {*|Function} [options.censor='[REDACTED]'] replacement value, or
 *   a function (value, pathSegments) => replacement
 * @param {Function|boolean} [options.serialize=JSON.stringify] serializer
 *   for the redacted copy, or false to return the copy itself (with a
 *   .restore() method that deep-clones the original input)
 * @param {boolean} [options.strict=true] when true, non-object inputs are
 *   passed straight to the serializer instead of being cloned/redacted
 * @param {boolean} [options.remove=false] delete keys instead of
 *   overwriting them with the censor
 * @returns {Function} redactor taking the object to redact
 * @throws {TypeError|Error} when `paths` is malformed (validated upfront
 *   to match fast-redact behaviour)
 */
function slowRedact (options = {}) {
  const {
    paths = [],
    censor = '[REDACTED]',
    serialize = JSON.stringify,
    strict = true,
    remove = false
  } = options

  // Fail fast on malformed paths, mirroring fast-redact
  validatePaths(paths)

  // One-time path analysis; per-call work is just the selective clone
  const pathStructure = buildPathStructure(paths)

  return function redact (obj) {
    // In strict mode, non-objects bypass cloning/redaction entirely
    // (both former inner branches returned the same value, collapsed here)
    if (strict && (obj === null || typeof obj !== 'object')) {
      return serialize ? serialize(obj) : obj
    }

    // Only clone the branches that need redaction; everything else
    // keeps its original reference
    const cloned = selectiveClone(obj, pathStructure)
    redactPaths(cloned, paths, censor, remove)

    if (serialize === false) {
      const original = obj
      // Full clone cost is only paid if restore() is actually called
      cloned.restore = function () {
        return deepClone(original)
      }
      return cloned
    }

    return typeof serialize === 'function' ? serialize(cloned) : JSON.stringify(cloned)
  }
}

module.exports = slowRedact

View File

@ -1,22 +0,0 @@
// Type-level tests (tsd) covering the public slowRedact signature.
import { expectType, expectAssignable } from "tsd";
import slowRedact from ".";
import type { redactFn, redactFnNoSerialize } from ".";

// should return redactFn
expectType<redactFn>(slowRedact());
expectType<redactFn>(slowRedact({ paths: [] }));
expectType<redactFn>(slowRedact({ paths: ["some.path"] }));
expectType<redactFn>(slowRedact({ paths: [], censor: "[REDACTED]" }));
expectType<redactFn>(slowRedact({ paths: [], strict: true }));
expectType<redactFn>(slowRedact({ paths: [], serialize: JSON.stringify }));
expectType<redactFn>(slowRedact({ paths: [], serialize: true }));
// serialize: false is the only shape that yields the restore-capable variant
expectType<redactFnNoSerialize>(slowRedact({ paths: [], serialize: false }));
expectType<redactFn>(slowRedact({ paths: [], remove: true }));
// should return string
expectType<string>(slowRedact()(""));
// should return string or T
expectAssignable<string | { someField: string }>(
  slowRedact()({ someField: "someValue" })
);

View File

@ -1,37 +0,0 @@
{
"name": "@pinojs/redact",
"version": "0.4.0",
"description": "Redact JS objects",
"main": "index.js",
"types": "index.d.ts",
"scripts": {
"test": "node --test && npm run test:types",
"test:integration": "node --test test/integration.test.js",
"test:types": "tsd",
"test:all": "node --test test/*.test.js",
"lint": "eslint .",
"lint:fix": "eslint . --fix",
"bench": "node benchmarks/basic.js"
},
"repository": {
"type": "git",
"url": "git+https://github.com/pinojs/redact.git"
},
"keywords": [
"redact"
],
"author": "Matteo Collina <hello@matteocollina.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/pinojs/redact/issues"
},
"homepage": "https://github.com/pinojs/redact#readme",
"devDependencies": {
"eslint": "^9.36.0",
"fast-redact": "^3.5.0",
"mitata": "^1.0.34",
"neostandard": "^0.12.2",
"tsd": "^0.33.0",
"typescript": "^5.9.2"
}
}

View File

@ -1,20 +0,0 @@
import fs from 'node:fs'
import path from 'node:path'

// Syncs package.json's "version" field with the version passed as the
// first CLI argument (a leading "v", as in git tags, is stripped).
const packageJsonPath = path.resolve(import.meta.dirname, '../package.json')
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'))

let passedVersion = process.argv[2]
if (!passedVersion) {
  throw new Error('Version argument is required')
}
passedVersion = passedVersion.trim().replace(/^v/, '')

if (packageJson.version !== passedVersion) {
  console.log(`Syncing version from ${packageJson.version} to ${passedVersion}`)
  packageJson.version = passedVersion
  // Write back to the same file we read (script-relative), not the
  // process CWD — the previous './package.json' target broke when the
  // script was invoked from any other directory.
  fs.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n', { encoding: 'utf-8' })
}

View File

@ -1,211 +0,0 @@
'use strict'
// Node.js test comparing @pinojs/redact vs fast-redact for multiple wildcard patterns
// This test validates that @pinojs/redact correctly handles 3+ consecutive wildcards
// matching the behavior of fast-redact
const { test } = require('node:test')
const { strict: assert } = require('node:assert')
const fastRedact = require('fast-redact')
const slowRedact = require('../index.js')
/**
 * Runs a single redaction pattern through the chosen library and records
 * every censored string value containing 'secret', together with the
 * path the censor callback reported. Never throws: failures are captured
 * in the returned result object.
 * @param {'@pinojs/redact'|'fast-redact'} library which implementation to exercise
 * @param {string} pattern a single redaction path
 * @param {object} [testData] fixture object to redact
 * @returns {{library, pattern, matches, success, error?, testData}}
 */
function testRedactDirect (library, pattern, testData = {}) {
  const matches = []
  const redactor = library === '@pinojs/redact' ? slowRedact : fastRedact
  try {
    const redact = redactor({
      paths: [pattern],
      censor: (value, path) => {
        const isSecretString =
          value !== undefined &&
          value !== null &&
          typeof value === 'string' &&
          value.includes('secret')
        if (isSecretString) {
          matches.push({
            value,
            path: path ? path.join('.') : 'unknown'
          })
        }
        return '[REDACTED]'
      }
    })
    // Deep-copy the fixture so fast-redact's in-place mutation cannot
    // leak between test cases
    redact(JSON.parse(JSON.stringify(testData)))
    return { library, pattern, matches, success: true, testData }
  } catch (error) {
    return { library, pattern, matches: [], success: false, error: error.message, testData }
  }
}
// Convenience wrapper: run `pattern` against @pinojs/redact
function testSlowRedactDirect (pattern, testData) {
  return testRedactDirect('@pinojs/redact', pattern, testData)
}
// Convenience wrapper: run `pattern` against fast-redact
function testFastRedactDirect (pattern, testData) {
  return testRedactDirect('fast-redact', pattern, testData)
}
// Wildcard depth matrix for @pinojs/redact: each case adds one '*' segment
// to the pattern and the fixture gains one password nested one level deeper.
// Only the password at the exact depth the pattern targets may match.
const pinoWildcardDepthCases = [
  {
    pattern: '*.password',
    label: '2 levels',
    expected: 'secret-2-levels',
    data: {
      simple: { password: 'secret-2-levels' }
    }
  },
  {
    pattern: '*.*.password',
    label: '3 levels',
    expected: 'secret-3-levels',
    data: {
      simple: { password: 'secret-2-levels' },
      user: { auth: { password: 'secret-3-levels' } }
    }
  },
  {
    pattern: '*.*.*.password',
    label: '4 levels',
    expected: 'secret-4-levels',
    data: {
      simple: { password: 'secret-2-levels' },
      user: { auth: { password: 'secret-3-levels' } },
      nested: { deep: { auth: { password: 'secret-4-levels' } } }
    }
  },
  {
    pattern: '*.*.*.*.password',
    label: '5 levels',
    expected: 'secret-5-levels',
    data: {
      simple: { password: 'secret-2-levels' },
      user: { auth: { password: 'secret-3-levels' } },
      nested: { deep: { auth: { password: 'secret-4-levels' } } },
      config: {
        user: { auth: { settings: { password: 'secret-5-levels' } } }
      }
    }
  },
  {
    pattern: '*.*.*.*.*.password',
    label: '6 levels',
    expected: 'real-secret-6-levels',
    data: {
      simple: { password: 'secret-2-levels' },
      user: { auth: { password: 'secret-3-levels' } },
      nested: { deep: { auth: { password: 'secret-4-levels' } } },
      config: {
        user: { auth: { settings: { password: 'secret-5-levels' } } }
      },
      data: {
        reqConfig: {
          data: {
            credentials: {
              settings: {
                password: 'real-secret-6-levels'
              }
            }
          }
        }
      }
    }
  }
]
for (const { pattern, label, expected, data } of pinoWildcardDepthCases) {
  test(`@pinojs/redact: ${pattern} (${label})`, () => {
    const result = testSlowRedactDirect(pattern, data)
    assert.strictEqual(result.success, true)
    // Exactly one secret — the one at the targeted depth — must be censored.
    assert.strictEqual(result.matches.length, 1)
    assert.strictEqual(result.matches[0].value, expected)
  })
}
// Same wildcard depth matrix as the @pinojs/redact suite above, but driven
// through fast-redact so both libraries are proven to agree per depth.
const fastWildcardDepthCases = [
  {
    pattern: '*.password',
    label: '2 levels',
    expected: 'secret-2-levels',
    data: {
      simple: { password: 'secret-2-levels' }
    }
  },
  {
    pattern: '*.*.password',
    label: '3 levels',
    expected: 'secret-3-levels',
    data: {
      simple: { password: 'secret-2-levels' },
      user: { auth: { password: 'secret-3-levels' } }
    }
  },
  {
    pattern: '*.*.*.password',
    label: '4 levels',
    expected: 'secret-4-levels',
    data: {
      simple: { password: 'secret-2-levels' },
      user: { auth: { password: 'secret-3-levels' } },
      nested: { deep: { auth: { password: 'secret-4-levels' } } }
    }
  },
  {
    pattern: '*.*.*.*.password',
    label: '5 levels',
    expected: 'secret-5-levels',
    data: {
      simple: { password: 'secret-2-levels' },
      user: { auth: { password: 'secret-3-levels' } },
      nested: { deep: { auth: { password: 'secret-4-levels' } } },
      config: {
        user: { auth: { settings: { password: 'secret-5-levels' } } }
      }
    }
  },
  {
    pattern: '*.*.*.*.*.password',
    label: '6 levels',
    expected: 'real-secret-6-levels',
    data: {
      simple: { password: 'secret-2-levels' },
      user: { auth: { password: 'secret-3-levels' } },
      nested: { deep: { auth: { password: 'secret-4-levels' } } },
      config: {
        user: { auth: { settings: { password: 'secret-5-levels' } } }
      },
      data: {
        reqConfig: {
          data: {
            credentials: {
              settings: {
                password: 'real-secret-6-levels'
              }
            }
          }
        }
      }
    }
  }
]
for (const { pattern, label, expected, data } of fastWildcardDepthCases) {
  test(`fast-redact: ${pattern} (${label})`, () => {
    const result = testFastRedactDirect(pattern, data)
    assert.strictEqual(result.success, true)
    // Exactly one secret — the one at the targeted depth — must be censored.
    assert.strictEqual(result.matches.length, 1)
    assert.strictEqual(result.matches[0].value, expected)
  })
}

View File

@ -1,824 +0,0 @@
const { test } = require('node:test')
const { strict: assert } = require('node:assert')
const slowRedact = require('../index.js')
// --- Core redaction behavior ---------------------------------------------

// Single dotted path: only the targeted leaf is censored, and the input
// object is never mutated (redact() serializes a redacted copy).
test('basic path redaction', () => {
  const obj = {
    headers: {
      cookie: 'secret-cookie',
      authorization: 'Bearer token'
    },
    body: { message: 'hello' }
  }
  const redact = slowRedact({ paths: ['headers.cookie'] })
  const result = redact(obj)
  // Original object should remain unchanged
  assert.strictEqual(obj.headers.cookie, 'secret-cookie')
  // Result should have redacted path
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.headers.cookie, '[REDACTED]')
  assert.strictEqual(parsed.headers.authorization, 'Bearer token')
  assert.strictEqual(parsed.body.message, 'hello')
})
// Several independent paths can be redacted by one redactor instance.
test('multiple paths redaction', () => {
  const obj = {
    user: { name: 'john', password: 'secret' },
    session: { token: 'abc123' }
  }
  const redact = slowRedact({
    paths: ['user.password', 'session.token']
  })
  const result = redact(obj)
  // Original unchanged
  assert.strictEqual(obj.user.password, 'secret')
  assert.strictEqual(obj.session.token, 'abc123')
  // Result redacted
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.user.password, '[REDACTED]')
  assert.strictEqual(parsed.session.token, '[REDACTED]')
  assert.strictEqual(parsed.user.name, 'john')
})
// A string censor replaces the default '[REDACTED]' placeholder.
test('custom censor value', () => {
  const obj = { secret: 'hidden' }
  const redact = slowRedact({
    paths: ['secret'],
    censor: '***'
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.secret, '***')
})
// With serialize: false the redactor returns a redacted object (not a JSON
// string) carrying a restore() method that yields the pre-redaction values.
test('serialize: false returns object with restore method', () => {
  const obj = { secret: 'hidden' }
  const redact = slowRedact({
    paths: ['secret'],
    serialize: false
  })
  const result = redact(obj)
  // Should be object, not string
  assert.strictEqual(typeof result, 'object')
  assert.strictEqual(result.secret, '[REDACTED]')
  // Should have restore method
  assert.strictEqual(typeof result.restore, 'function')
  const restored = result.restore()
  assert.strictEqual(restored.secret, 'hidden')
})
// Bracket notation handles keys that are not valid dotted identifiers.
test('bracket notation paths', () => {
  const obj = {
    'weird-key': { 'another-weird': 'secret' },
    normal: 'public'
  }
  const redact = slowRedact({
    paths: ['["weird-key"]["another-weird"]']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed['weird-key']['another-weird'], '[REDACTED]')
  assert.strictEqual(parsed.normal, 'public')
})
// Numeric bracket indices address individual array elements.
test('array paths', () => {
  const obj = {
    users: [
      { name: 'john', password: 'secret1' },
      { name: 'jane', password: 'secret2' }
    ]
  }
  const redact = slowRedact({
    paths: ['users[0].password', 'users[1].password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.users[0].password, '[REDACTED]')
  assert.strictEqual(parsed.users[1].password, '[REDACTED]')
  assert.strictEqual(parsed.users[0].name, 'john')
  assert.strictEqual(parsed.users[1].name, 'jane')
})
// A trailing '*' redacts every own property of the parent object.
test('wildcard at end of path', () => {
  const obj = {
    secrets: {
      key1: 'secret1',
      key2: 'secret2'
    },
    public: 'data'
  }
  const redact = slowRedact({
    paths: ['secrets.*']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.secrets.key1, '[REDACTED]')
  assert.strictEqual(parsed.secrets.key2, '[REDACTED]')
  assert.strictEqual(parsed.public, 'data')
})
// A trailing '*' also redacts every element of an array.
test('wildcard with arrays', () => {
  const obj = {
    items: ['secret1', 'secret2', 'secret3']
  }
  const redact = slowRedact({
    paths: ['items.*']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.items[0], '[REDACTED]')
  assert.strictEqual(parsed.items[1], '[REDACTED]')
  assert.strictEqual(parsed.items[2], '[REDACTED]')
})
// A '*' in the middle of a path fans out over every intermediate key.
test('intermediate wildcard', () => {
  const obj = {
    users: {
      user1: { password: 'secret1' },
      user2: { password: 'secret2' }
    }
  }
  const redact = slowRedact({
    paths: ['users.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.users.user1.password, '[REDACTED]')
  assert.strictEqual(parsed.users.user2.password, '[REDACTED]')
})
// A censor function receives (value, pathSegments) and its return value is
// used as the replacement.
test('censor function', () => {
  const obj = { secret: 'hidden' }
  const redact = slowRedact({
    paths: ['secret'],
    censor: (value, path) => `REDACTED:${path.join('.')}`
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.secret, 'REDACTED:secret')
})
// A serialize function replaces JSON.stringify as the final output step.
test('custom serialize function', () => {
  const obj = { secret: 'hidden', public: 'data' }
  const redact = slowRedact({
    paths: ['secret'],
    serialize: (obj) => `custom:${JSON.stringify(obj)}`
  })
  const result = redact(obj)
  assert(result.startsWith('custom:'))
  const parsed = JSON.parse(result.slice(7))
  assert.strictEqual(parsed.secret, '[REDACTED]')
  assert.strictEqual(parsed.public, 'data')
})
// Deeply nested dotted paths resolve all the way down.
test('nested paths', () => {
  const obj = {
    level1: {
      level2: {
        level3: {
          secret: 'hidden'
        }
      }
    }
  }
  const redact = slowRedact({
    paths: ['level1.level2.level3.secret']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.level1.level2.level3.secret, '[REDACTED]')
})
// Paths that don't resolve in the target object are silently skipped.
test('non-existent paths are ignored', () => {
  const obj = { existing: 'value' }
  const redact = slowRedact({
    paths: ['nonexistent.path']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.existing, 'value')
  assert.strictEqual(parsed.nonexistent, undefined)
})
// Existing null-valued keys ARE redacted (a documented divergence from
// fast-redact — see the integration suite below).
test('null and undefined handling', () => {
  const obj = {
    nullValue: null,
    undefinedValue: undefined,
    nested: {
      nullValue: null
    }
  }
  const redact = slowRedact({
    paths: ['nullValue', 'nested.nullValue']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.nullValue, '[REDACTED]')
  assert.strictEqual(parsed.nested.nullValue, '[REDACTED]')
})
// Redaction must never mutate the caller's object, even with nested paths.
test('original object remains unchanged', () => {
  const original = {
    secret: 'hidden',
    nested: { secret: 'hidden2' }
  }
  const copy = JSON.parse(JSON.stringify(original))
  const redact = slowRedact({
    paths: ['secret', 'nested.secret']
  })
  redact(original)
  // Original should be completely unchanged
  assert.deepStrictEqual(original, copy)
})
// Strict mode: primitives pass through as their JSON serialization rather
// than throwing (fast-redact throws here — see the integration suite).
test('strict mode with primitives', () => {
  const redact = slowRedact({
    paths: ['test'],
    strict: true
  })
  const stringResult = redact('primitive')
  assert.strictEqual(stringResult, '"primitive"')
  const numberResult = redact(42)
  assert.strictEqual(numberResult, '42')
})
// Path validation tests to match fast-redact behavior.
// Non-string entries are rejected with a generic message before any
// per-path syntax checking happens.
test('path validation - non-string paths should throw', () => {
  for (const badPath of [123, null, undefined]) {
    assert.throws(() => {
      slowRedact({ paths: [badPath] })
    }, {
      message: 'Paths must be (non-empty) strings'
    })
  }
})
test('path validation - empty string should throw', () => {
  assert.throws(() => {
    slowRedact({ paths: [''] })
  }, {
    message: 'Invalid redaction path ()'
  })
})
// Syntactically malformed path strings are rejected with the offending
// path echoed back in the error message.
test('path validation - double dots should throw', () => {
  for (const badPath of ['invalid..path', 'a..b..c']) {
    assert.throws(() => {
      slowRedact({ paths: [badPath] })
    }, {
      message: `Invalid redaction path (${badPath})`
    })
  }
})
test('path validation - unmatched brackets should throw', () => {
  for (const badPath of ['invalid[unclosed', 'invalid]unopened', 'nested[a[b]']) {
    assert.throws(() => {
      slowRedact({ paths: [badPath] })
    }, {
      message: `Invalid redaction path (${badPath})`
    })
  }
})
test('path validation - comma-separated paths should throw', () => {
  for (const badPath of ['req,headers.cookie', 'user,profile,name', 'a,b']) {
    assert.throws(() => {
      slowRedact({ paths: [badPath] })
    }, {
      message: `Invalid redaction path (${badPath})`
    })
  }
})
// One bad entry poisons the whole list, even when valid paths surround it.
test('path validation - mixed valid and invalid should throw', () => {
  assert.throws(() => {
    slowRedact({ paths: ['valid.path', 123, 'another.valid'] })
  }, {
    message: 'Paths must be (non-empty) strings'
  })
  for (const badPath of ['invalid..path', 'req,headers.cookie']) {
    assert.throws(() => {
      slowRedact({ paths: ['valid.path', badPath] })
    }, {
      message: `Invalid redaction path (${badPath})`
    })
  }
})
test('path validation - valid paths should work', () => {
  // These should not throw
  const validPathLists = [
    [],
    ['valid.path'],
    ['user.password', 'data[0].secret'],
    ['["quoted-key"].value'],
    ["['single-quoted'].value"],
    ['array[0]', 'object.property', 'wildcard.*']
  ]
  for (const paths of validPathLists) {
    assert.doesNotThrow(() => {
      slowRedact({ paths })
    })
  }
})
// fast-redact compatibility tests: the censor callback must always receive
// the path as an ARRAY of segments (not a joined string).
test('censor function receives path as array (fast-redact compatibility)', () => {
  const obj = {
    headers: {
      authorization: 'Bearer token',
      'x-api-key': 'secret-key'
    }
  }
  const pathsReceived = []
  const redact = slowRedact({
    paths: ['headers.authorization', 'headers["x-api-key"]'],
    censor: (value, path) => {
      pathsReceived.push(path)
      assert(Array.isArray(path), 'Path should be an array')
      return '[REDACTED]'
    }
  })
  redact(obj)
  // Verify paths are arrays
  assert.strictEqual(pathsReceived.length, 2)
  assert.deepStrictEqual(pathsReceived[0], ['headers', 'authorization'])
  assert.deepStrictEqual(pathsReceived[1], ['headers', 'x-api-key'])
})
// Deep dotted paths arrive as one segment per level.
test('censor function with nested paths receives correct array', () => {
  const obj = {
    user: {
      profile: {
        credentials: {
          password: 'secret123'
        }
      }
    }
  }
  let receivedPath
  const redact = slowRedact({
    paths: ['user.profile.credentials.password'],
    censor: (value, path) => {
      receivedPath = path
      assert.strictEqual(value, 'secret123')
      assert(Array.isArray(path))
      return '[REDACTED]'
    }
  })
  redact(obj)
  assert.deepStrictEqual(receivedPath, ['user', 'profile', 'credentials', 'password'])
})
// Intermediate wildcards expand to the concrete key that matched.
test('censor function with wildcards receives correct array paths', () => {
  const obj = {
    users: {
      user1: { password: 'secret1' },
      user2: { password: 'secret2' }
    }
  }
  const pathsReceived = []
  const redact = slowRedact({
    paths: ['users.*.password'],
    censor: (value, path) => {
      pathsReceived.push([...path]) // copy the array
      assert(Array.isArray(path))
      return '[REDACTED]'
    }
  })
  redact(obj)
  assert.strictEqual(pathsReceived.length, 2)
  assert.deepStrictEqual(pathsReceived[0], ['users', 'user1', 'password'])
  assert.deepStrictEqual(pathsReceived[1], ['users', 'user2', 'password'])
})
// Array indices reached through a wildcard show up as STRING segments.
test('censor function with array wildcard receives correct array paths', () => {
  const obj = {
    items: [
      { secret: 'value1' },
      { secret: 'value2' }
    ]
  }
  const pathsReceived = []
  const redact = slowRedact({
    paths: ['items.*.secret'],
    censor: (value, path) => {
      pathsReceived.push([...path])
      assert(Array.isArray(path))
      return '[REDACTED]'
    }
  })
  redact(obj)
  assert.strictEqual(pathsReceived.length, 2)
  assert.deepStrictEqual(pathsReceived[0], ['items', '0', 'secret'])
  assert.deepStrictEqual(pathsReceived[1], ['items', '1', 'secret'])
})
// Trailing wildcards deliver one censor call per redacted key.
test('censor function with end wildcard receives correct array paths', () => {
  const obj = {
    secrets: {
      key1: 'secret1',
      key2: 'secret2'
    }
  }
  const pathsReceived = []
  const redact = slowRedact({
    paths: ['secrets.*'],
    censor: (value, path) => {
      pathsReceived.push([...path])
      assert(Array.isArray(path))
      return '[REDACTED]'
    }
  })
  redact(obj)
  assert.strictEqual(pathsReceived.length, 2)
  // Sort paths for consistent testing since object iteration order isn't guaranteed
  pathsReceived.sort((a, b) => a[1].localeCompare(b[1]))
  assert.deepStrictEqual(pathsReceived[0], ['secrets', 'key1'])
  assert.deepStrictEqual(pathsReceived[1], ['secrets', 'key2'])
})
// Traversing a path through a primitive (number/string) must be a no-op,
// never a TypeError — regression coverage for GitHub issue #5.
test('type safety: accessing properties on primitive values should not throw', () => {
  // Test case from GitHub issue #5
  const redactor = slowRedact({ paths: ['headers.authorization'] })
  const data = {
    headers: 123 // primitive value
  }
  assert.doesNotThrow(() => {
    const result = redactor(data)
    const parsed = JSON.parse(result)
    assert.strictEqual(parsed.headers, 123) // Should remain unchanged
  })
  // Test wildcards with primitives
  const redactor2 = slowRedact({ paths: ['data.*.nested'] })
  const data2 = {
    data: {
      item1: 123, // primitive, trying to access .nested on it
      item2: { nested: 'secret' }
    }
  }
  assert.doesNotThrow(() => {
    const result2 = redactor2(data2)
    const parsed2 = JSON.parse(result2)
    assert.strictEqual(parsed2.data.item1, 123) // Primitive unchanged
    assert.strictEqual(parsed2.data.item2.nested, '[REDACTED]') // Object property redacted
  })
  // Test deep nested access on primitives
  const redactor3 = slowRedact({ paths: ['user.name.first.charAt'] })
  const data3 = {
    user: {
      name: 'John' // string primitive
    }
  }
  assert.doesNotThrow(() => {
    const result3 = redactor3(data3)
    const parsed3 = JSON.parse(result3)
    assert.strictEqual(parsed3.user.name, 'John') // Should remain unchanged
  })
})
// Remove option tests: with remove: true matched keys are deleted from the
// output entirely instead of being replaced by a censor value.
test('remove option: basic key removal', () => {
  const obj = { username: 'john', password: 'secret123' }
  const redact = slowRedact({ paths: ['password'], remove: true })
  const result = redact(obj)
  // Original object should remain unchanged
  assert.strictEqual(obj.password, 'secret123')
  // Result should have password completely removed
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.username, 'john')
  assert.strictEqual('password' in parsed, false)
  assert.strictEqual(parsed.password, undefined)
})
test('remove option: multiple paths removal', () => {
  const obj = {
    user: { name: 'john', password: 'secret' },
    session: { token: 'abc123', id: 'session1' }
  }
  const redact = slowRedact({
    paths: ['user.password', 'session.token'],
    remove: true
  })
  const result = redact(obj)
  // Original unchanged
  assert.strictEqual(obj.user.password, 'secret')
  assert.strictEqual(obj.session.token, 'abc123')
  // Result has keys completely removed
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.user.name, 'john')
  assert.strictEqual(parsed.session.id, 'session1')
  assert.strictEqual('password' in parsed.user, false)
  assert.strictEqual('token' in parsed.session, false)
})
// remove + trailing wildcard empties the parent object.
test('remove option: wildcard removal', () => {
  const obj = {
    secrets: {
      key1: 'secret1',
      key2: 'secret2'
    },
    public: 'data'
  }
  const redact = slowRedact({
    paths: ['secrets.*'],
    remove: true
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.public, 'data')
  assert.deepStrictEqual(parsed.secrets, {}) // All keys removed
})
test('remove option: array wildcard removal', () => {
  const obj = {
    items: ['secret1', 'secret2', 'secret3'],
    meta: 'data'
  }
  const redact = slowRedact({
    paths: ['items.*'],
    remove: true
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.meta, 'data')
  // Removed array slots become undefined, which JSON.stringify serializes
  // as null (array length is preserved, elements are not dropped)
  assert.deepStrictEqual(parsed.items, [null, null, null])
})
test('remove option: intermediate wildcard removal', () => {
  const obj = {
    users: {
      user1: { password: 'secret1', name: 'john' },
      user2: { password: 'secret2', name: 'jane' }
    }
  }
  const redact = slowRedact({
    paths: ['users.*.password'],
    remove: true
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.users.user1.name, 'john')
  assert.strictEqual(parsed.users.user2.name, 'jane')
  assert.strictEqual('password' in parsed.users.user1, false)
  assert.strictEqual('password' in parsed.users.user2, false)
})
// remove works with serialize: false too, and restore() brings keys back.
test('remove option: serialize false returns object with removed keys', () => {
  const obj = { secret: 'hidden', public: 'data' }
  const redact = slowRedact({
    paths: ['secret'],
    remove: true,
    serialize: false
  })
  const result = redact(obj)
  // Should be object, not string
  assert.strictEqual(typeof result, 'object')
  assert.strictEqual(result.public, 'data')
  assert.strictEqual('secret' in result, false)
  // Should have restore method
  assert.strictEqual(typeof result.restore, 'function')
  const restored = result.restore()
  assert.strictEqual(restored.secret, 'hidden')
})
test('remove option: non-existent paths are ignored', () => {
  const obj = { existing: 'value' }
  const redact = slowRedact({
    paths: ['nonexistent.path'],
    remove: true
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed.existing, 'value')
  assert.strictEqual(parsed.nonexistent, undefined)
})
// Test for Issue #13: Empty string bracket notation paths not being redacted correctly.
// '' is a legal object key; [''] / [""] must address it like any other key.
test('empty string bracket notation path', () => {
  const obj = { '': { c: 'sensitive-data' } }
  const redact = slowRedact({ paths: ["[''].c"] })
  const result = redact(obj)
  // Original object should remain unchanged
  assert.strictEqual(obj[''].c, 'sensitive-data')
  // Result should have redacted path
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed[''].c, '[REDACTED]')
})
// Same path but with double-quoted bracket syntax.
test('empty string bracket notation with double quotes', () => {
  const obj = { '': { c: 'sensitive-data' } }
  const redact = slowRedact({ paths: ['[""].c'] })
  const result = redact(obj)
  // Original object should remain unchanged
  assert.strictEqual(obj[''].c, 'sensitive-data')
  // Result should have redacted path
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed[''].c, '[REDACTED]')
})
// Two consecutive empty-string segments must also resolve.
test('empty string key with nested bracket notation', () => {
  const obj = { '': { '': { secret: 'value' } } }
  const redact = slowRedact({ paths: ["[''][''].secret"] })
  const result = redact(obj)
  // Original object should remain unchanged
  assert.strictEqual(obj[''][''].secret, 'value')
  // Result should have redacted path
  const parsed = JSON.parse(result)
  assert.strictEqual(parsed[''][''].secret, '[REDACTED]')
})
// Test for Pino issue #2313: censor should only be called when path exists.
// A censor with side effects (counters, logging) must not fire for paths
// that do not resolve in the target object.
test('censor function not called for non-existent paths', () => {
  let censorCallCount = 0
  const censorCalls = []
  const redact = slowRedact({
    paths: ['a.b.c', 'req.authorization', 'url'],
    serialize: false,
    censor (value, path) {
      censorCallCount++
      censorCalls.push({ value, path: path.slice() })
      return '***'
    }
  })
  // Test case 1: { req: { id: 'test' } }
  // req.authorization doesn't exist, censor should not be called for it
  censorCallCount = 0
  censorCalls.length = 0
  redact({ req: { id: 'test' } })
  // Should not have been called for any path since none exist
  assert.strictEqual(censorCallCount, 0, 'censor should not be called when paths do not exist')
  // Test case 2: { a: { d: 'test' } }
  // a.b.c doesn't exist (a.d exists, but not a.b.c)
  censorCallCount = 0
  redact({ a: { d: 'test' } })
  assert.strictEqual(censorCallCount, 0)
  // Test case 3: paths that do exist should still call censor
  censorCallCount = 0
  censorCalls.length = 0
  const result = redact({ req: { authorization: 'bearer token' } })
  assert.strictEqual(censorCallCount, 1, 'censor should be called when path exists')
  assert.deepStrictEqual(censorCalls[0].path, ['req', 'authorization'])
  assert.strictEqual(censorCalls[0].value, 'bearer token')
  assert.strictEqual(result.req.authorization, '***')
})
// Same guarantee when only part of the path resolves (parent exists, leaf
// doesn't) and when the parent is absent entirely.
test('censor function not called for non-existent nested paths', () => {
  let censorCallCount = 0
  const redact = slowRedact({
    paths: ['headers.authorization'],
    serialize: false,
    censor (value, path) {
      censorCallCount++
      return '[REDACTED]'
    }
  })
  // headers exists but authorization doesn't
  censorCallCount = 0
  const result1 = redact({ headers: { 'content-type': 'application/json' } })
  assert.strictEqual(censorCallCount, 0)
  assert.deepStrictEqual(result1.headers, { 'content-type': 'application/json' })
  // headers doesn't exist at all
  censorCallCount = 0
  const result2 = redact({ body: 'data' })
  assert.strictEqual(censorCallCount, 0)
  assert.strictEqual(result2.body, 'data')
  assert.strictEqual(typeof result2.restore, 'function')
  // headers.authorization exists - should call censor
  censorCallCount = 0
  const result3 = redact({ headers: { authorization: 'Bearer token' } })
  assert.strictEqual(censorCallCount, 1)
  assert.strictEqual(result3.headers.authorization, '[REDACTED]')
})

View File

@ -1,390 +0,0 @@
const { test } = require('node:test')
const { strict: assert } = require('node:assert')
const slowRedact = require('../index.js')
const fastRedact = require('fast-redact')
// --- Integration parity: identical serialized output from both libraries ---
test('integration: basic path redaction matches fast-redact', () => {
  const obj = {
    headers: {
      cookie: 'secret-cookie',
      authorization: 'Bearer token'
    },
    body: { message: 'hello' }
  }
  const slowResult = slowRedact({ paths: ['headers.cookie'] })(obj)
  const fastResult = fastRedact({ paths: ['headers.cookie'] })(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: multiple paths match fast-redact', () => {
  const obj = {
    user: { name: 'john', password: 'secret' },
    session: { token: 'abc123' }
  }
  const paths = ['user.password', 'session.token']
  const slowResult = slowRedact({ paths })(obj)
  const fastResult = fastRedact({ paths })(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: custom censor value matches fast-redact', () => {
  const obj = { secret: 'hidden' }
  const options = { paths: ['secret'], censor: '***' }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: bracket notation matches fast-redact', () => {
  const obj = {
    'weird-key': { 'another-weird': 'secret' },
    normal: 'public'
  }
  const options = { paths: ['["weird-key"]["another-weird"]'] }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: array paths match fast-redact', () => {
  const obj = {
    users: [
      { name: 'john', password: 'secret1' },
      { name: 'jane', password: 'secret2' }
    ]
  }
  const options = { paths: ['users[0].password', 'users[1].password'] }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: wildcard at end matches fast-redact', () => {
  const obj = {
    secrets: {
      key1: 'secret1',
      key2: 'secret2'
    },
    public: 'data'
  }
  const options = { paths: ['secrets.*'] }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: wildcard with arrays matches fast-redact', () => {
  const obj = {
    items: ['secret1', 'secret2', 'secret3']
  }
  const options = { paths: ['items.*'] }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: intermediate wildcard matches fast-redact', () => {
  const obj = {
    users: {
      user1: { password: 'secret1' },
      user2: { password: 'secret2' }
    }
  }
  const options = { paths: ['users.*.password'] }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: custom serialize function matches fast-redact', () => {
  const obj = { secret: 'hidden', public: 'data' }
  const options = {
    paths: ['secret'],
    serialize: (obj) => `custom:${JSON.stringify(obj)}`
  }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: nested paths match fast-redact', () => {
  const obj = {
    level1: {
      level2: {
        level3: {
          secret: 'hidden'
        }
      }
    }
  }
  const options = { paths: ['level1.level2.level3.secret'] }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: non-existent paths match fast-redact', () => {
  const obj = { existing: 'value' }
  const options = { paths: ['nonexistent.path'] }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
// --- Intentional behavioral differences between the two libraries ---------
test('integration: null and undefined handling - legitimate difference', () => {
  const obj = {
    nullValue: null,
    undefinedValue: undefined,
    nested: {
      nullValue: null
    }
  }
  const options = { paths: ['nullValue', 'nested.nullValue'] }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  // This is a legitimate behavioral difference:
  // @pinojs/redact redacts null values, fast-redact doesn't
  const slowParsed = JSON.parse(slowResult)
  const fastParsed = JSON.parse(fastResult)
  // @pinojs/redact redacts nulls
  assert.strictEqual(slowParsed.nullValue, '[REDACTED]')
  assert.strictEqual(slowParsed.nested.nullValue, '[REDACTED]')
  // fast-redact preserves nulls
  assert.strictEqual(fastParsed.nullValue, null)
  assert.strictEqual(fastParsed.nested.nullValue, null)
})
test('integration: strict mode with primitives - different error handling', () => {
  const options = { paths: ['test'], strict: true }
  const slowRedactFn = slowRedact(options)
  const fastRedactFn = fastRedact(options)
  // @pinojs/redact handles primitives gracefully
  const stringSlowResult = slowRedactFn('primitive')
  assert.strictEqual(stringSlowResult, '"primitive"')
  const numberSlowResult = slowRedactFn(42)
  assert.strictEqual(numberSlowResult, '42')
  // fast-redact throws an error for primitives in strict mode
  assert.throws(() => {
    fastRedactFn('primitive')
  }, /primitives cannot be redacted/)
  assert.throws(() => {
    fastRedactFn(42)
  }, /primitives cannot be redacted/)
})
// Separate fixture objects per library: fast-redact mutates its input in
// serialize:false mode, so sharing one object would contaminate the test.
test('integration: serialize false behavior difference', () => {
  const slowObj = { secret: 'hidden' }
  const fastObj = { secret: 'hidden' }
  const options = { paths: ['secret'], serialize: false }
  const slowResult = slowRedact(options)(slowObj)
  const fastResult = fastRedact(options)(fastObj)
  // Both should redact the secret
  assert.strictEqual(slowResult.secret, '[REDACTED]')
  assert.strictEqual(fastResult.secret, '[REDACTED]')
  // @pinojs/redact always has restore method
  assert.strictEqual(typeof slowResult.restore, 'function')
  // @pinojs/redact should restore to original value
  assert.strictEqual(slowResult.restore().secret, 'hidden')
  // Key difference: original object state
  // fast-redact mutates the original, @pinojs/redact doesn't
  assert.strictEqual(slowObj.secret, 'hidden') // @pinojs/redact preserves original
  assert.strictEqual(fastObj.secret, '[REDACTED]') // fast-redact mutates original
})
// Censor function parity: `${path}` stringifies the segment array the same
// way in both libraries, so serialized output must match exactly.
test('integration: censor function behavior', () => {
  const obj = { secret: 'hidden' }
  const options = {
    paths: ['secret'],
    censor: (value, path) => `REDACTED:${path}`
  }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
// Kitchen-sink parity check: arrays, intermediate wildcards, trailing
// wildcards and plain paths combined in one redactor.
test('integration: complex object with mixed patterns', () => {
  const obj = {
    users: [
      {
        id: 1,
        name: 'john',
        credentials: { password: 'secret1', apiKey: 'key1' }
      },
      {
        id: 2,
        name: 'jane',
        credentials: { password: 'secret2', apiKey: 'key2' }
      }
    ],
    config: {
      database: { password: 'db-secret' },
      api: { keys: ['key1', 'key2', 'key3'] }
    }
  }
  const options = {
    paths: [
      'users.*.credentials.password',
      'users.*.credentials.apiKey',
      'config.database.password',
      'config.api.keys.*'
    ]
  }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
// Remove option integration tests - comparing with fast-redact
test('integration: remove option basic comparison with fast-redact', () => {
  const obj = { username: 'john', password: 'secret123' }
  const options = { paths: ['password'], remove: true }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
  // Verify the key is actually removed
  const parsed = JSON.parse(slowResult)
  assert.strictEqual(parsed.username, 'john')
  assert.strictEqual('password' in parsed, false)
})
test('integration: remove option multiple paths comparison with fast-redact', () => {
  const obj = {
    user: { name: 'john', password: 'secret' },
    session: { token: 'abc123', id: 'session1' }
  }
  const options = {
    paths: ['user.password', 'session.token'],
    remove: true
  }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: remove option wildcard comparison with fast-redact', () => {
  const obj = {
    secrets: {
      key1: 'secret1',
      key2: 'secret2'
    },
    public: 'data'
  }
  const options = {
    paths: ['secrets.*'],
    remove: true
  }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: remove option intermediate wildcard comparison with fast-redact', () => {
  const obj = {
    users: {
      user1: { password: 'secret1', name: 'john' },
      user2: { password: 'secret2', name: 'jane' }
    }
  }
  const options = {
    paths: ['users.*.password'],
    remove: true
  }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
})
test('integration: remove option with custom censor comparison with fast-redact', () => {
  const obj = { secret: 'hidden', public: 'data' }
  const options = {
    paths: ['secret'],
    censor: '***',
    remove: true
  }
  const slowResult = slowRedact(options)(obj)
  const fastResult = fastRedact(options)(obj)
  assert.strictEqual(slowResult, fastResult)
  // With remove: true, censor value should be ignored
  const parsed = JSON.parse(slowResult)
  assert.strictEqual('secret' in parsed, false)
  assert.strictEqual(parsed.public, 'data')
})
test('integration: remove option serialize false behavior - @pinojs/redact only', () => {
  // fast-redact doesn't support remove option with serialize: false
  // so we test @pinojs/redact's behavior only
  const obj = { secret: 'hidden', public: 'data' }
  const options = { paths: ['secret'], remove: true, serialize: false }
  const result = slowRedact(options)(obj)
  // Should have the key removed
  assert.strictEqual('secret' in result, false)
  assert.strictEqual(result.public, 'data')
  // Should have restore method
  assert.strictEqual(typeof result.restore, 'function')
  // Original object should be preserved
  assert.strictEqual(obj.secret, 'hidden')
  // Restore should bring back the removed key
  const restored = result.restore()
  assert.strictEqual(restored.secret, 'hidden')
})

View File

@ -1,227 +0,0 @@
'use strict'
const { test } = require('node:test')
const { strict: assert } = require('node:assert')
const slowRedact = require('../index.js')
// Tests for Issue #2319: @pinojs/redact fails to redact patterns with 3+ consecutive wildcards
// Each `*` matches exactly one key level, so e.g. `*.*.*.password` must hit
// ONLY passwords nested exactly four levels deep - shallower matches must be
// left untouched. Fixtures deliberately mix depths to catch over-matching.
test('three consecutive wildcards: *.*.*.password (4 levels deep)', () => {
  const obj = {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } }
  }
  const redact = slowRedact({
    paths: ['*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  // Only the 4-level deep password should be redacted
  assert.strictEqual(parsed.simple.password, 'secret-2-levels', '2-level password should NOT be redacted')
  assert.strictEqual(parsed.user.auth.password, 'secret-3-levels', '3-level password should NOT be redacted')
  assert.strictEqual(parsed.nested.deep.auth.password, '[REDACTED]', '4-level password SHOULD be redacted')
})
test('four consecutive wildcards: *.*.*.*.password (5 levels deep)', () => {
  const obj = {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } },
    config: { user: { auth: { settings: { password: 'secret-5-levels' } } } }
  }
  const redact = slowRedact({
    paths: ['*.*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  // Only the 5-level deep password should be redacted
  assert.strictEqual(parsed.simple.password, 'secret-2-levels', '2-level password should NOT be redacted')
  assert.strictEqual(parsed.user.auth.password, 'secret-3-levels', '3-level password should NOT be redacted')
  assert.strictEqual(parsed.nested.deep.auth.password, 'secret-4-levels', '4-level password should NOT be redacted')
  assert.strictEqual(parsed.config.user.auth.settings.password, '[REDACTED]', '5-level password SHOULD be redacted')
})
test('five consecutive wildcards: *.*.*.*.*.password (6 levels deep)', () => {
  const obj = {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } },
    config: { user: { auth: { settings: { password: 'secret-5-levels' } } } },
    data: {
      reqConfig: {
        data: {
          credentials: {
            settings: {
              password: 'secret-6-levels'
            }
          }
        }
      }
    }
  }
  const redact = slowRedact({
    paths: ['*.*.*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  // Only the 6-level deep password should be redacted
  assert.strictEqual(parsed.simple.password, 'secret-2-levels', '2-level password should NOT be redacted')
  assert.strictEqual(parsed.user.auth.password, 'secret-3-levels', '3-level password should NOT be redacted')
  assert.strictEqual(parsed.nested.deep.auth.password, 'secret-4-levels', '4-level password should NOT be redacted')
  assert.strictEqual(parsed.config.user.auth.settings.password, 'secret-5-levels', '5-level password should NOT be redacted')
  assert.strictEqual(parsed.data.reqConfig.data.credentials.settings.password, '[REDACTED]', '6-level password SHOULD be redacted')
})
// The censor callback contract: called once per matched leaf with the
// original value and the full concrete path (wildcards resolved).
test('three wildcards with censor function receives correct values', () => {
  const obj = {
    nested: { deep: { auth: { password: 'secret-value' } } }
  }
  const censorCalls = []
  const redact = slowRedact({
    paths: ['*.*.*.password'],
    censor: (value, path) => {
      // Copy the path array: the implementation may reuse/mutate it.
      censorCalls.push({ value, path: [...path] })
      return '[REDACTED]'
    }
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  // Should have been called exactly once with the correct value
  assert.strictEqual(censorCalls.length, 1, 'censor should be called once')
  assert.strictEqual(censorCalls[0].value, 'secret-value', 'censor should receive the actual value')
  assert.deepStrictEqual(censorCalls[0].path, ['nested', 'deep', 'auth', 'password'], 'censor should receive correct path')
  assert.strictEqual(parsed.nested.deep.auth.password, '[REDACTED]')
})
test('three wildcards with multiple matches', () => {
  const obj = {
    api1: { v1: { auth: { token: 'token1' } } },
    api2: { v2: { auth: { token: 'token2' } } },
    api3: { v1: { auth: { token: 'token3' } } }
  }
  const redact = slowRedact({
    paths: ['*.*.*.token']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  // All three tokens should be redacted
  assert.strictEqual(parsed.api1.v1.auth.token, '[REDACTED]')
  assert.strictEqual(parsed.api2.v2.auth.token, '[REDACTED]')
  assert.strictEqual(parsed.api3.v1.auth.token, '[REDACTED]')
})
test('three wildcards with remove option', () => {
  const obj = {
    nested: { deep: { auth: { password: 'secret', username: 'admin' } } }
  }
  const redact = slowRedact({
    paths: ['*.*.*.password'],
    remove: true
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  // Password should be removed entirely
  assert.strictEqual('password' in parsed.nested.deep.auth, false, 'password key should be removed')
  assert.strictEqual(parsed.nested.deep.auth.username, 'admin', 'username should remain')
})
test('mixed: two and three wildcards in same redactor', () => {
  const obj = {
    user: { auth: { password: 'secret-3-levels' } },
    config: { deep: { auth: { password: 'secret-4-levels' } } }
  }
  const redact = slowRedact({
    paths: ['*.*.password', '*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  // Both should be redacted
  assert.strictEqual(parsed.user.auth.password, '[REDACTED]', '3-level should be redacted by *.*.password')
  assert.strictEqual(parsed.config.deep.auth.password, '[REDACTED]', '4-level should be redacted by *.*.*.password')
})
test('three wildcards should not call censor for non-existent paths', () => {
  const obj = {
    shallow: { data: 'value' },
    nested: { deep: { auth: { password: 'secret' } } }
  }
  let censorCallCount = 0
  const redact = slowRedact({
    paths: ['*.*.*.password'],
    censor: (value, path) => {
      censorCallCount++
      return '[REDACTED]'
    }
  })
  redact(obj)
  // Should only be called once for the path that exists
  assert.strictEqual(censorCallCount, 1, 'censor should only be called for existing paths')
})
// Numeric array indices count as one wildcard level, same as object keys.
test('three wildcards with arrays', () => {
  const obj = {
    users: [
      { auth: { password: 'secret1' } },
      { auth: { password: 'secret2' } }
    ]
  }
  const redact = slowRedact({
    paths: ['*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  // Both passwords should be redacted (users[0].auth.password is 4 levels)
  assert.strictEqual(parsed.users[0].auth.password, '[REDACTED]')
  assert.strictEqual(parsed.users[1].auth.password, '[REDACTED]')
})
test('four wildcards with authorization header (real-world case)', () => {
  const obj = {
    requests: {
      api1: {
        config: {
          headers: {
            authorization: 'Bearer secret-token'
          }
        }
      },
      api2: {
        config: {
          headers: {
            authorization: 'Bearer another-token'
          }
        }
      }
    }
  }
  const redact = slowRedact({
    paths: ['*.*.*.*.authorization']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)
  // Both authorization headers should be redacted
  assert.strictEqual(parsed.requests.api1.config.headers.authorization, '[REDACTED]')
  assert.strictEqual(parsed.requests.api2.config.headers.authorization, '[REDACTED]')
})

View File

@ -1,223 +0,0 @@
const { test } = require('node:test')
const { strict: assert } = require('node:assert')
const slowRedact = require('../index.js')
/* eslint-disable no-proto */
// Security tests: redacting attacker-controlled paths such as
// `__proto__.isAdmin` or `constructor.prototype.x` must never write through
// to Object.prototype (prototype pollution). After every redaction we assert
// that a fresh `{}` is unaffected.
// NOTE: in an object LITERAL, `__proto__: {...}` sets the object's prototype
// rather than creating an own property - several fixtures rely on exactly
// that, which is why assertions read `result.__proto__.isAdmin`.
test('prototype pollution: __proto__ path should not pollute Object prototype', () => {
  const obj = {
    user: { name: 'john' },
    __proto__: { isAdmin: true }
  }
  const redact = slowRedact({
    paths: ['__proto__.isAdmin'],
    serialize: false
  })
  const result = redact(obj)
  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)
  // Should redact the __proto__ property if it exists as a regular property
  assert.strictEqual(result.__proto__.isAdmin, '[REDACTED]')
})
test('prototype pollution: constructor.prototype path should not pollute', () => {
  const obj = {
    user: { name: 'john' },
    constructor: {
      prototype: { isAdmin: true }
    }
  }
  const redact = slowRedact({
    paths: ['constructor.prototype.isAdmin'],
    serialize: false
  })
  const result = redact(obj)
  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)
  // Should redact the constructor.prototype property if it exists as a regular property
  assert.strictEqual(result.constructor.prototype.isAdmin, '[REDACTED]')
})
test('prototype pollution: nested __proto__ should not pollute', () => {
  const obj = {
    user: {
      settings: {
        __proto__: { isAdmin: true }
      }
    }
  }
  const redact = slowRedact({
    paths: ['user.settings.__proto__.isAdmin'],
    serialize: false
  })
  const result = redact(obj)
  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)
  // Should redact the nested __proto__ property
  assert.strictEqual(result.user.settings.__proto__.isAdmin, '[REDACTED]')
})
test('prototype pollution: bracket notation __proto__ should not pollute', () => {
  const obj = {
    user: { name: 'john' },
    __proto__: { isAdmin: true }
  }
  const redact = slowRedact({
    paths: ['["__proto__"]["isAdmin"]'],
    serialize: false
  })
  const result = redact(obj)
  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)
  // Should redact the __proto__ property when accessed via bracket notation
  assert.strictEqual(result.__proto__.isAdmin, '[REDACTED]')
})
test('prototype pollution: wildcard with __proto__ should not pollute', () => {
  const obj = {
    users: {
      __proto__: { isAdmin: true },
      user1: { name: 'john' },
      user2: { name: 'jane' }
    }
  }
  const redact = slowRedact({
    paths: ['users.*'],
    serialize: false
  })
  const result = redact(obj)
  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)
  // Should redact only own properties
  assert.strictEqual(result.users.user1, '[REDACTED]')
  assert.strictEqual(result.users.user2, '[REDACTED]')
  // __proto__ should only be redacted if it's an own property, not inherited
  if (Object.prototype.hasOwnProperty.call(obj.users, '__proto__')) {
    assert.strictEqual(result.users.__proto__, '[REDACTED]')
  }
})
test('prototype pollution: malicious JSON payload should not pollute', () => {
  // Simulate a malicious payload that might come from JSON.parse
  // (unlike a literal, JSON.parse creates "__proto__" as an OWN property)
  const maliciousObj = JSON.parse('{"user": {"name": "john"}, "__proto__": {"isAdmin": true}}')
  const redact = slowRedact({
    paths: ['__proto__.isAdmin'],
    serialize: false
  })
  const result = redact(maliciousObj)
  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)
  // The malicious payload should have been redacted
  assert.strictEqual(result.__proto__.isAdmin, '[REDACTED]')
})
test('prototype pollution: verify prototype chain is preserved', () => {
  function CustomClass () {
    this.data = 'test'
  }
  CustomClass.prototype.method = function () { return 'original' }
  const obj = new CustomClass()
  const redact = slowRedact({
    paths: ['data'],
    serialize: false
  })
  const result = redact(obj)
  // Should redact the data property
  assert.strictEqual(result.data, '[REDACTED]')
  // Should preserve the original prototype chain
  assert.strictEqual(result.method(), 'original')
  assert.strictEqual(Object.getPrototypeOf(result), CustomClass.prototype)
})
test('prototype pollution: setValue should not create prototype pollution', () => {
  const obj = { user: { name: 'john' } }
  // Try to pollute via non-existent path that could create __proto__
  const redact = slowRedact({
    paths: ['__proto__.isAdmin'],
    serialize: false
  })
  const result = redact(obj)
  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)
  // Should not create the path if it doesn't exist
  // The __proto__ property may exist due to Object.create, but should not contain our redacted value
  if (result.__proto__) {
    assert.strictEqual(result.__proto__.isAdmin, undefined)
  }
})
test('prototype pollution: deep nested prototype properties should not pollute', () => {
  const obj = {
    level1: {
      level2: {
        level3: {
          __proto__: { isAdmin: true },
          constructor: {
            prototype: { isEvil: true }
          }
        }
      }
    }
  }
  const redact = slowRedact({
    paths: [
      'level1.level2.level3.__proto__.isAdmin',
      'level1.level2.level3.constructor.prototype.isEvil'
    ],
    serialize: false
  })
  const result = redact(obj)
  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual(Object.prototype.isEvil, undefined)
  assert.strictEqual({}.isAdmin, undefined)
  assert.strictEqual({}.isEvil, undefined)
  // Should redact the deep nested properties
  assert.strictEqual(result.level1.level2.level3.__proto__.isAdmin, '[REDACTED]')
  assert.strictEqual(result.level1.level2.level3.constructor.prototype.isEvil, '[REDACTED]')
})

View File

@ -1,115 +0,0 @@
const { test } = require('node:test')
const { strict: assert } = require('node:assert')
const slowRedact = require('../index.js')
test('selective cloning shares references for non-redacted paths', () => {
  const common = { unchanged: 'data' }
  const input = {
    toRedact: 'secret',
    shared: common,
    nested: {
      toRedact: 'secret2',
      shared: common
    }
  }
  const output = slowRedact({
    paths: ['toRedact', 'nested.toRedact'],
    serialize: false
  })(input)
  // Redacted values should be different
  assert.strictEqual(output.toRedact, '[REDACTED]')
  assert.strictEqual(output.nested.toRedact, '[REDACTED]')
  // Non-redacted references should be shared (same object reference)
  assert.strictEqual(output.shared, input.shared)
  assert.strictEqual(output.nested.shared, input.nested.shared)
  // The shared object should be the exact same reference
  assert.strictEqual(output.shared, common)
  assert.strictEqual(output.nested.shared, common)
})

test('selective cloning works with arrays', () => {
  const commonItem = { unchanged: 'data' }
  const input = {
    items: [
      { secret: 'hidden1', shared: commonItem },
      { secret: 'hidden2', shared: commonItem },
      commonItem
    ]
  }
  const output = slowRedact({
    paths: ['items.*.secret'],
    serialize: false
  })(input)
  // Secrets should be redacted
  assert.strictEqual(output.items[0].secret, '[REDACTED]')
  assert.strictEqual(output.items[1].secret, '[REDACTED]')
  // Shared references should be preserved where possible
  // Note: array items with secrets will be cloned, but their shared properties should still reference the original
  assert.strictEqual(output.items[0].shared, commonItem)
  assert.strictEqual(output.items[1].shared, commonItem)
  // The third item gets cloned due to wildcard, but should have the same content
  assert.deepStrictEqual(output.items[2], commonItem)
  // Note: Due to wildcard '*', all array items are cloned, even if they don't need redaction
  // This is still a significant optimization for object properties that aren't in wildcard paths
})

test('selective cloning with no paths returns original object', () => {
  const input = { data: 'unchanged' }
  const output = slowRedact({
    paths: [],
    serialize: false
  })(input)
  // Should return the exact same object reference
  assert.strictEqual(output, input)
})

test('selective cloning performance - large objects with minimal redaction', () => {
  // Create a large object with mostly shared data
  const bulk = { large: 'data'.repeat(1000) }
  const input = {
    secret: 'hidden',
    shared1: bulk,
    shared2: bulk,
    nested: {
      secret: 'hidden2',
      shared3: bulk,
      deep: {
        shared4: bulk,
        moreShared: bulk
      }
    }
  }
  const output = slowRedact({
    paths: ['secret', 'nested.secret'],
    serialize: false
  })(input)
  // Verify redaction worked
  assert.strictEqual(output.secret, '[REDACTED]')
  assert.strictEqual(output.nested.secret, '[REDACTED]')
  // Verify shared references are preserved
  assert.strictEqual(output.shared1, bulk)
  assert.strictEqual(output.shared2, bulk)
  assert.strictEqual(output.nested.shared3, bulk)
  assert.strictEqual(output.nested.deep.shared4, bulk)
  assert.strictEqual(output.nested.deep.moreShared, bulk)
})

View File

@ -1,19 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"lib": [
"es6"
],
"noImplicitAny": true,
"noImplicitThis": true,
"strictFunctionTypes": true,
"strictNullChecks": true,
"types": [],
"noEmit": true,
"forceConsistentCasingInFileNames": true
},
"files": [
"index.d.ts",
"index.test-d.ts"
]
}

46
node_modules/abbrev/LICENSE generated vendored
View File

@ -1,46 +0,0 @@
This software is dual-licensed under the ISC and MIT licenses.
You may use this software under EITHER of the following licenses.
----------
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
----------
Copyright Isaac Z. Schlueter and Contributors
All rights reserved.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

23
node_modules/abbrev/README.md generated vendored
View File

@ -1,23 +0,0 @@
# abbrev-js
Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).
Usage:
var abbrev = require("abbrev");
abbrev("foo", "fool", "folding", "flop");
// returns:
{ fl: 'flop'
, flo: 'flop'
, flop: 'flop'
, fol: 'folding'
, fold: 'folding'
, foldi: 'folding'
, foldin: 'folding'
, folding: 'folding'
, foo: 'foo'
, fool: 'fool'
}
This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.

53
node_modules/abbrev/lib/index.js generated vendored
View File

@ -1,53 +0,0 @@
module.exports = abbrev
function abbrev (...args) {
let list = args
if (args.length === 1 && (Array.isArray(args[0]) || typeof args[0] === 'string')) {
list = [].concat(args[0])
}
for (let i = 0, l = list.length; i < l; i++) {
list[i] = typeof list[i] === 'string' ? list[i] : String(list[i])
}
// sort them lexicographically, so that they're next to their nearest kin
list = list.sort(lexSort)
// walk through each, seeing how much it has in common with the next and previous
const abbrevs = {}
let prev = ''
for (let ii = 0, ll = list.length; ii < ll; ii++) {
const current = list[ii]
const next = list[ii + 1] || ''
let nextMatches = true
let prevMatches = true
if (current === next) {
continue
}
let j = 0
const cl = current.length
for (; j < cl; j++) {
const curChar = current.charAt(j)
nextMatches = nextMatches && curChar === next.charAt(j)
prevMatches = prevMatches && curChar === prev.charAt(j)
if (!nextMatches && !prevMatches) {
j++
break
}
}
prev = current
if (j === cl) {
abbrevs[current] = current
continue
}
for (let a = current.slice(0, j); j <= cl; j++) {
abbrevs[a] = current
a += current.charAt(j)
}
}
return abbrevs
}
function lexSort (a, b) {
return a === b ? 0 : a > b ? 1 : -1
}

41
node_modules/abbrev/package.json generated vendored
View File

@ -1,41 +0,0 @@
{
"name": "abbrev",
"version": "4.0.0",
"description": "Like ruby's abbrev module, but in js",
"author": "GitHub Inc.",
"main": "lib/index.js",
"scripts": {
"test": "node --test",
"lint": "npm run eslint",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
"lintfix": "npm run eslint -- --fix",
"snap": "node --test --test-update-snapshots",
"posttest": "npm run lint",
"eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"test:cover": "node --test --experimental-test-coverage --test-timeout=3000 --test-coverage-lines=100 --test-coverage-functions=100 --test-coverage-branches=100"
},
"repository": {
"type": "git",
"url": "git+https://github.com/npm/abbrev-js.git"
},
"license": "ISC",
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
"@npmcli/template-oss": "4.26.1"
},
"files": [
"bin/",
"lib/"
],
"engines": {
"node": "^20.17.0 || >=22.9.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.26.1",
"publish": true,
"testRunner": "node:test",
"latestCiVersion": 24
}
}

250
node_modules/accepts/HISTORY.md generated vendored
View File

@ -1,250 +0,0 @@
2.0.0 / 2024-08-31
==================
* Drop node <18 support
* deps: mime-types@^3.0.0
* deps: negotiator@^1.0.0
1.3.8 / 2022-02-02
==================
* deps: mime-types@~2.1.34
- deps: mime-db@~1.51.0
* deps: negotiator@0.6.3
1.3.7 / 2019-04-29
==================
* deps: negotiator@0.6.2
- Fix sorting charset, encoding, and language with extra parameters
1.3.6 / 2019-04-28
==================
* deps: mime-types@~2.1.24
- deps: mime-db@~1.40.0
1.3.5 / 2018-02-28
==================
* deps: mime-types@~2.1.18
- deps: mime-db@~1.33.0
1.3.4 / 2017-08-22
==================
* deps: mime-types@~2.1.16
- deps: mime-db@~1.29.0
1.3.3 / 2016-05-02
==================
* deps: mime-types@~2.1.11
- deps: mime-db@~1.23.0
* deps: negotiator@0.6.1
- perf: improve `Accept` parsing speed
- perf: improve `Accept-Charset` parsing speed
- perf: improve `Accept-Encoding` parsing speed
- perf: improve `Accept-Language` parsing speed
1.3.2 / 2016-03-08
==================
* deps: mime-types@~2.1.10
- Fix extension of `application/dash+xml`
- Update primary extension for `audio/mp4`
- deps: mime-db@~1.22.0
1.3.1 / 2016-01-19
==================
* deps: mime-types@~2.1.9
- deps: mime-db@~1.21.0
1.3.0 / 2015-09-29
==================
* deps: mime-types@~2.1.7
- deps: mime-db@~1.19.0
* deps: negotiator@0.6.0
- Fix including type extensions in parameters in `Accept` parsing
- Fix parsing `Accept` parameters with quoted equals
- Fix parsing `Accept` parameters with quoted semicolons
- Lazy-load modules from main entry point
- perf: delay type concatenation until needed
- perf: enable strict mode
- perf: hoist regular expressions
- perf: remove closures getting spec properties
- perf: remove a closure from media type parsing
- perf: remove property delete from media type parsing
1.2.13 / 2015-09-06
===================
* deps: mime-types@~2.1.6
- deps: mime-db@~1.18.0
1.2.12 / 2015-07-30
===================
* deps: mime-types@~2.1.4
- deps: mime-db@~1.16.0
1.2.11 / 2015-07-16
===================
* deps: mime-types@~2.1.3
- deps: mime-db@~1.15.0
1.2.10 / 2015-07-01
===================
* deps: mime-types@~2.1.2
- deps: mime-db@~1.14.0
1.2.9 / 2015-06-08
==================
* deps: mime-types@~2.1.1
- perf: fix deopt during mapping
1.2.8 / 2015-06-07
==================
* deps: mime-types@~2.1.0
- deps: mime-db@~1.13.0
* perf: avoid argument reassignment & argument slice
* perf: avoid negotiator recursive construction
* perf: enable strict mode
* perf: remove unnecessary bitwise operator
1.2.7 / 2015-05-10
==================
* deps: negotiator@0.5.3
- Fix media type parameter matching to be case-insensitive
1.2.6 / 2015-05-07
==================
* deps: mime-types@~2.0.11
- deps: mime-db@~1.9.1
* deps: negotiator@0.5.2
- Fix comparing media types with quoted values
- Fix splitting media types with quoted commas
1.2.5 / 2015-03-13
==================
* deps: mime-types@~2.0.10
- deps: mime-db@~1.8.0
1.2.4 / 2015-02-14
==================
* Support Node.js 0.6
* deps: mime-types@~2.0.9
- deps: mime-db@~1.7.0
* deps: negotiator@0.5.1
- Fix preference sorting to be stable for long acceptable lists
1.2.3 / 2015-01-31
==================
* deps: mime-types@~2.0.8
- deps: mime-db@~1.6.0
1.2.2 / 2014-12-30
==================
* deps: mime-types@~2.0.7
- deps: mime-db@~1.5.0
1.2.1 / 2014-12-30
==================
* deps: mime-types@~2.0.5
- deps: mime-db@~1.3.1
1.2.0 / 2014-12-19
==================
* deps: negotiator@0.5.0
- Fix list return order when large accepted list
- Fix missing identity encoding when q=0 exists
- Remove dynamic building of Negotiator class
1.1.4 / 2014-12-10
==================
* deps: mime-types@~2.0.4
- deps: mime-db@~1.3.0
1.1.3 / 2014-11-09
==================
* deps: mime-types@~2.0.3
- deps: mime-db@~1.2.0
1.1.2 / 2014-10-14
==================
* deps: negotiator@0.4.9
- Fix error when media type has invalid parameter
1.1.1 / 2014-09-28
==================
* deps: mime-types@~2.0.2
- deps: mime-db@~1.1.0
* deps: negotiator@0.4.8
- Fix all negotiations to be case-insensitive
- Stable sort preferences of same quality according to client order
1.1.0 / 2014-09-02
==================
* update `mime-types`
1.0.7 / 2014-07-04
==================
* Fix wrong type returned from `type` when match after unknown extension
1.0.6 / 2014-06-24
==================
* deps: negotiator@0.4.7
1.0.5 / 2014-06-20
==================
* fix crash when unknown extension given
1.0.4 / 2014-06-19
==================
* use `mime-types`
1.0.3 / 2014-06-11
==================
* deps: negotiator@0.4.6
- Order by specificity when quality is the same
1.0.2 / 2014-05-29
==================
* Fix interpretation when header not in request
* deps: pin negotiator@0.4.5
1.0.1 / 2014-01-18
==================
* Identity encoding isn't always acceptable
* deps: negotiator@~0.4.0
1.0.0 / 2013-12-27
==================
* Genesis

23
node_modules/accepts/LICENSE generated vendored
View File

@ -1,23 +0,0 @@
(The MIT License)
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

140
node_modules/accepts/README.md generated vendored
View File

@ -1,140 +0,0 @@
# accepts
[![NPM Version][npm-version-image]][npm-url]
[![NPM Downloads][npm-downloads-image]][npm-url]
[![Node.js Version][node-version-image]][node-version-url]
[![Build Status][github-actions-ci-image]][github-actions-ci-url]
[![Test Coverage][coveralls-image]][coveralls-url]
Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator).
Extracted from [koa](https://www.npmjs.com/package/koa) for general use.
In addition to negotiator, it allows:
- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])`
as well as `('text/html', 'application/json')`.
- Allows type shorthands such as `json`.
- Returns `false` when no types match
- Treats non-existent headers as `*`
## Installation
This is a [Node.js](https://nodejs.org/en/) module available through the
[npm registry](https://www.npmjs.com/). Installation is done using the
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
```sh
$ npm install accepts
```
## API
```js
var accepts = require('accepts')
```
### accepts(req)
Create a new `Accepts` object for the given `req`.
#### .charset(charsets)
Return the first accepted charset. If nothing in `charsets` is accepted,
then `false` is returned.
#### .charsets()
Return the charsets that the request accepts, in the order of the client's
preference (most preferred first).
#### .encoding(encodings)
Return the first accepted encoding. If nothing in `encodings` is accepted,
then `false` is returned.
#### .encodings()
Return the encodings that the request accepts, in the order of the client's
preference (most preferred first).
#### .language(languages)
Return the first accepted language. If nothing in `languages` is accepted,
then `false` is returned.
#### .languages()
Return the languages that the request accepts, in the order of the client's
preference (most preferred first).
#### .type(types)
Return the first accepted type (and it is returned as the same text as what
appears in the `types` array). If nothing in `types` is accepted, then `false`
is returned.
The `types` array can contain full MIME types or file extensions. Any value
that is not a full MIME type is passed to `require('mime-types').lookup`.
#### .types()
Return the types that the request accepts, in the order of the client's
preference (most preferred first).
## Examples
### Simple type negotiation
This simple example shows how to use `accepts` to return a different typed
response body based on what the client wants to accept. The server lists its
preferences in order and will get back the best match between the client and
server.
```js
var accepts = require('accepts')
var http = require('http')
function app (req, res) {
var accept = accepts(req)
// the order of this list is significant; should be server preferred order
switch (accept.type(['json', 'html'])) {
case 'json':
res.setHeader('Content-Type', 'application/json')
res.write('{"hello":"world!"}')
break
case 'html':
res.setHeader('Content-Type', 'text/html')
res.write('<b>hello, world!</b>')
break
default:
// the fallback is text/plain, so no need to specify it above
res.setHeader('Content-Type', 'text/plain')
res.write('hello, world!')
break
}
res.end()
}
http.createServer(app).listen(3000)
```
You can test this out with the cURL program:
```sh
curl -I -H'Accept: text/html' http://localhost:3000/
```
## License
[MIT](LICENSE)
[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master
[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master
[github-actions-ci-image]: https://badgen.net/github/checks/jshttp/accepts/master?label=ci
[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml
[node-version-image]: https://badgen.net/npm/node/accepts
[node-version-url]: https://nodejs.org/en/download
[npm-downloads-image]: https://badgen.net/npm/dm/accepts
[npm-url]: https://npmjs.org/package/accepts
[npm-version-image]: https://badgen.net/npm/v/accepts

238
node_modules/accepts/index.js generated vendored
View File

@ -1,238 +0,0 @@
/*!
* accepts
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var Negotiator = require('negotiator')
var mime = require('mime-types')
/**
* Module exports.
* @public
*/
module.exports = Accepts
/**
* Create a new Accepts object for the given req.
*
* @param {object} req
* @public
*/
function Accepts (req) {
  // support invocation without `new`
  if (this instanceof Accepts === false) {
    return new Accepts(req)
  }

  // keep the raw headers around (types() checks for an Accept header
  // directly) and delegate all parsing/matching to negotiator
  this.headers = req.headers
  this.negotiator = new Negotiator(req)
}
/**
* Check if the given `type(s)` is acceptable, returning
* the best match when true, otherwise `undefined`, in which
* case you should respond with 406 "Not Acceptable".
*
* The `type` value may be a single mime type string
* such as "application/json", the extension name
 * such as "json" or an array `["json", "html", "text/plain"]`. When a list
 * or array is given, the _best_ match, if any, is returned.
*
* Examples:
*
* // Accept: text/html
* this.types('html');
* // => "html"
*
* // Accept: text/*, application/json
* this.types('html');
* // => "html"
* this.types('text/html');
* // => "text/html"
* this.types('json', 'text');
* // => "json"
* this.types('application/json');
* // => "application/json"
*
* // Accept: text/*, application/json
* this.types('image/png');
* this.types('png');
* // => undefined
*
* // Accept: text/*;q=.5, application/json
* this.types(['html', 'json']);
* this.types('html', 'json');
* // => "json"
*
* @param {String|Array} types...
* @return {String|Array|Boolean}
* @public
*/
Accepts.prototype.type =
Accepts.prototype.types = function (types_) {
  var list = types_

  // callers may pass the types as separate arguments instead of an array
  if (list && !Array.isArray(list)) {
    list = []
    for (var i = 0; i < arguments.length; i++) {
      list.push(arguments[i])
    }
  }

  // with no candidates, expose everything the client asked for
  if (!list || list.length === 0) {
    return this.negotiator.mediaTypes()
  }

  // a request without an Accept header accepts anything: honor server order
  if (!this.headers.accept) {
    return list[0]
  }

  // normalize extensions ("json") to full mime types, negotiate on those,
  // then map the winner back to the caller's original spelling
  var fullTypes = list.map(extToMime)
  var best = this.negotiator.mediaTypes(fullTypes.filter(validMime))[0]

  if (!best) {
    return false
  }

  return list[fullTypes.indexOf(best)]
}
/**
* Return accepted encodings or best fit based on `encodings`.
*
* Given `Accept-Encoding: gzip, deflate`
* an array sorted by quality is returned:
*
* ['gzip', 'deflate']
*
* @param {String|Array} encodings...
* @return {String|Array}
* @public
*/
Accepts.prototype.encoding =
Accepts.prototype.encodings = function (encodings_) {
  var list = encodings_

  // callers may pass the encodings as separate arguments instead of an array
  if (list && !Array.isArray(list)) {
    list = []
    for (var i = 0; i < arguments.length; i++) {
      list.push(arguments[i])
    }
  }

  // no candidates given: report everything the client will accept
  if (!list || list.length === 0) {
    return this.negotiator.encodings()
  }

  // otherwise hand back the single best match, or false when nothing fits
  return this.negotiator.encodings(list)[0] || false
}
/**
* Return accepted charsets or best fit based on `charsets`.
*
* Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5`
* an array sorted by quality is returned:
*
* ['utf-8', 'utf-7', 'iso-8859-1']
*
* @param {String|Array} charsets...
* @return {String|Array}
* @public
*/
Accepts.prototype.charset =
Accepts.prototype.charsets = function (charsets_) {
  var list = charsets_

  // callers may pass the charsets as separate arguments instead of an array
  if (list && !Array.isArray(list)) {
    list = []
    for (var i = 0; i < arguments.length; i++) {
      list.push(arguments[i])
    }
  }

  // no candidates given: report everything the client will accept
  if (!list || list.length === 0) {
    return this.negotiator.charsets()
  }

  // otherwise hand back the single best match, or false when nothing fits
  return this.negotiator.charsets(list)[0] || false
}
/**
* Return accepted languages or best fit based on `langs`.
*
* Given `Accept-Language: en;q=0.8, es, pt`
* an array sorted by quality is returned:
*
* ['es', 'pt', 'en']
*
* @param {String|Array} langs...
* @return {Array|String}
* @public
*/
Accepts.prototype.lang =
Accepts.prototype.langs =
Accepts.prototype.language =
Accepts.prototype.languages = function (languages_) {
  var list = languages_

  // callers may pass the languages as separate arguments instead of an array
  if (list && !Array.isArray(list)) {
    list = []
    for (var i = 0; i < arguments.length; i++) {
      list.push(arguments[i])
    }
  }

  // no candidates given: report everything the client will accept
  if (!list || list.length === 0) {
    return this.negotiator.languages()
  }

  // otherwise hand back the single best match, or false when nothing fits
  return this.negotiator.languages(list)[0] || false
}
/**
* Convert extnames to mime.
*
* @param {String} type
* @return {String}
* @private
*/
function extToMime (type) {
  // a value containing "/" is already a full mime type; pass it through
  if (type.indexOf('/') !== -1) {
    return type
  }

  // otherwise treat it as a file extension (e.g. "json" -> "application/json")
  return mime.lookup(type)
}
/**
* Check if mime is valid.
*
* @param {String} type
* @return {Boolean}
* @private
*/
function validMime (type) {
  // mime.lookup() yields `false` for unknown extensions; only strings are
  // real mime types worth negotiating on
  return (typeof type === 'string')
}

47
node_modules/accepts/package.json generated vendored
View File

@ -1,47 +0,0 @@
{
"name": "accepts",
"description": "Higher-level content negotiation",
"version": "2.0.0",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
],
"license": "MIT",
"repository": "jshttp/accepts",
"dependencies": {
"mime-types": "^3.0.0",
"negotiator": "^1.0.0"
},
"devDependencies": {
"deep-equal": "1.0.1",
"eslint": "7.32.0",
"eslint-config-standard": "14.1.1",
"eslint-plugin-import": "2.25.4",
"eslint-plugin-markdown": "2.2.1",
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "4.3.1",
"eslint-plugin-standard": "4.1.0",
"mocha": "9.2.0",
"nyc": "15.1.0"
},
"files": [
"LICENSE",
"HISTORY.md",
"index.js"
],
"engines": {
"node": ">= 0.6"
},
"scripts": {
"lint": "eslint .",
"test": "mocha --reporter spec --check-leaks --bail test/",
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test"
},
"keywords": [
"content",
"negotiation",
"accept",
"accepts"
]
}

Some files were not shown because too many files have changed in this diff Show More