text
stringlengths
2
4k
import { Meta } from '../base';
import { Context } from '../context';

/**
 * A single unit of work in a validation chain (a validator, sanitizer, or
 * flow-control step) that is executed against one field value.
 */
export interface ContextItem {
    /**
     * Runs this item against a field.
     * @param context the context this chain is running under
     * @param value the current value of the field
     * @param meta metadata about the field being processed
     */
    run(context: Context, value: any, meta: Meta): Promise<void>;
}
import { Context } from '../context';
import { ContextItem } from './context-item';

/**
 * Context item backing `.bail()`.
 * NOTE(review): presumably stops further items from running when the context
 * already has errors — confirm against the implementation; only the
 * declaration is visible here.
 */
export declare class Bail implements ContextItem {
    run(context: Context): Promise<void>;
}
import { Meta } from '../base';
import { ContextRunner } from '../chain';
import { Context } from '../context';
import { ContextItem } from './context-item';

/**
 * Context item backing `.if(chain)`: gates further validation on whether
 * another validation chain (the condition) passes.
 */
export declare class ChainCondition implements ContextItem {
    // The chain used as the condition.
    private readonly chain;
    constructor(chain: ContextRunner);
    run(_context: Context, _value: any, meta: Meta): Promise<void>;
}
import { CustomValidator, Meta } from '../base';
import { Context } from '../context';
import { ContextItem } from './context-item';

/**
 * Context item backing `.if(fn)`: gates further validation on the result of a
 * custom validator-like function.
 */
export declare class CustomCondition implements ContextItem {
    // The user-supplied condition function.
    private readonly condition;
    constructor(condition: CustomValidator);
    run(_context: Context, value: any, meta: Meta): Promise<void>;
}
import { CustomValidator, Meta } from '../base';
import { Context } from '../context';
import { ContextItem } from './context-item';

/**
 * Context item wrapping a user-supplied custom validator (`.custom()`).
 */
export declare class CustomValidation implements ContextItem {
    private readonly validator;
    // Whether the validator's result should be negated (set via `.not()`).
    private readonly negated;
    // Error message for this validation; settable later via `.withMessage()`.
    message: any;
    constructor(validator: CustomValidator, negated: boolean);
    run(context: Context, value: any, meta: Meta): Promise<void>;
}
// Barrel file: re-exports every context-item module.
export * from './chain-condition';
export * from './context-item';
export * from './custom-condition';
export * from './custom-validation';
export * from './standard-validation';
import { Meta, StandardValidator } from '../base';
import { toString as toStringImpl } from '../utils';
import { Context } from '../context';
import { ContextItem } from './context-item';

/**
 * Context item wrapping a standard (validator.js-style) validator.
 */
export declare class StandardValidation implements ContextItem {
    private readonly validator;
    // Whether the validator's result should be negated (set via `.not()`).
    private readonly negated;
    // Extra arguments forwarded to the validator.
    private readonly options;
    // Function used to coerce the field value to string before validating.
    private readonly stringify;
    // Error message for this validation; settable later via `.withMessage()`.
    message: any;
    constructor(validator: StandardValidator, negated: boolean, options?: any[], stringify?: typeof toStringImpl);
    run(context: Context, value: any, meta: Meta): Promise<void>;
}
import { Context } from '../context';
import { CustomSanitizer, Meta, StandardSanitizer } from '../base';
import { toString as toStringImpl } from '../utils';
import { ContextItem } from './context-item';

/**
 * Context item wrapping a sanitizer — either a standard (validator.js-style)
 * one or a user-supplied custom sanitizer.
 */
export declare class Sanitization implements ContextItem {
    private readonly sanitizer;
    // True when `sanitizer` is a CustomSanitizer rather than a standard one.
    private readonly custom;
    // Extra arguments forwarded to the sanitizer.
    private readonly options;
    // Function used to coerce the field value to string before sanitizing.
    private readonly stringify;
    constructor(sanitizer: StandardSanitizer | CustomSanitizer, custom: boolean, options?: any[], stringify?: typeof toStringImpl);
    run(context: Context, value: any, meta: Meta): Promise<void>;
}
import { ErrorMessage, Location, Middleware, UnknownFieldMessageFactory } from '../base';
import { ContextRunner, ValidationChain } from '../chain';
declare type CheckExactOptions = {
    /**
     * The list of locations which `checkExact()` should check.
     * @default ['body', 'params', 'query']
     */
    locations?: readonly Location[];
    /** The error message to use when unknown fields are found. */
    message?: UnknownFieldMessageFactory | ErrorMessage;
};
declare type CheckExactInput = ValidationChain | ValidationChain[] | (ValidationChain | ValidationChain[])[];
/**
 * Checks whether the request contains exactly only those fields that have been validated.
 *
 * Unknown fields, if found, will generate an error of type `unknown_fields`.
 *
 * @param chains either a single chain, an array of chains, or a mixed array of chains and array of chains.
 * This means that all of the below are valid:
 * ```
 * checkExact(check('foo'))
 * checkExact([check('foo'), check('bar')])
 * checkExact(checkSchema({ ... }))
 * checkExact([checkSchema({ ... }), check('foo')])
 * ```
 * @param opts
 */
export declare function checkExact(chains?: CheckExactInput, opts?: CheckExactOptions): Middleware & ContextRunner;
export {};
import { ErrorMessage, FieldMessageFactory, Location } from '../base';
/**
 * Creates a variant of `check()` that checks the given request locations.
 *
 * @example
 * const checkBodyAndQuery = buildCheckFunction(['body', 'query']);
 */
export declare function buildCheckFunction(locations: Location[]): (fields?: string | string[] | undefined, message?: FieldMessageFactory | ErrorMessage | undefined) => import("..").ValidationChain;
/**
 * Creates a middleware/validation chain for one or more fields that may be located in
 * any of the following:
 *
 * - `req.body`
 * - `req.cookies`
 * - `req.headers`
 * - `req.params`
 * - `req.query`
 *
 * @param fields a string or array of field names to validate/sanitize
 * @param message an error message to use when failed validations don't specify a custom message.
 * Defaults to `Invalid Value`.
 */
export declare const check: (fields?: string | string[] | undefined, message?: FieldMessageFactory | ErrorMessage | undefined) => import("..").ValidationChain;
/**
 * Same as {@link check()}, but only validates `req.body`.
 */
export declare const body: (fields?: string | string[] | undefined, message?: FieldMessageFactory | ErrorMessage | undefined) => import("..").ValidationChain;
/**
 * Same as {@link check()}, but only validates `req.cookies`.
 */
export declare const cookie: (fields?: string | string[] | undefined, message?: FieldMessageFactory | ErrorMessage | undefined) => import("..").ValidationChain;
/**
 * Same as {@link check()}, but only validates `req.headers`.
 */
export declare const header: (fields?: string | string[] | undefined, message?: FieldMessageFactory | ErrorMessage | undefined) => import("..").ValidationChain;
/**
 * Same as {@link check()}, but only validates `req.params`.
 */
export declare const param: (fields?: string | string[] | undefined, message?: FieldMessageFactory | ErrorMessage | undefined) => import("..").ValidationChain;
/**
 * Same as {@link check()}, but only validates `req.query`.
 */
export declare const query: (fields?: string | string[] | undefined, message?: FieldMessageFactory | ErrorMessage | undefined) => import("..").ValidationChain;
import { ErrorMessage, FieldMessageFactory, Location } from '../base';
import { ValidationChain } from '../chain';

/**
 * Internal factory for a validation chain over the given fields and request
 * locations. The public `check()`/`body()`/etc. variants are built on top of
 * this (see `buildCheckFunction`).
 */
export declare function check(fields?: string | string[], locations?: Location[], message?: FieldMessageFactory | ErrorMessage): ValidationChain;
import { CustomSanitizer, CustomValidator, ErrorMessage, FieldMessageFactory, Location, Request } from '../base';
import { BailOptions, OptionalOptions, ValidationChain, ValidationChainLike } from '../chain';
import { ResultWithContext } from '../chain/context-runner';
import { Sanitizers } from '../chain/sanitizers';
import { Validators } from '../chain/validators';
declare type BaseValidatorSchemaOptions = {
    /**
     * The error message if there's a validation error,
     * or a function for creating an error message dynamically.
     */
    errorMessage?: FieldMessageFactory | ErrorMessage;
    /**
     * Whether the validation should be reversed.
     */
    negated?: boolean;
    /**
     * Whether the validation should bail after running this validator
     */
    bail?: boolean | BailOptions;
    /**
     * Specify a condition upon which this validator should run.
     * Can either be a validation chain, or a custom validator function.
     */
    if?: CustomValidator | ValidationChain;
};
declare type ValidatorSchemaOptions<K extends keyof Validators<any>> = boolean | (BaseValidatorSchemaOptions & {
    /**
     * Options to pass to the validator.
     */
    options?: Parameters<Validators<any>[K]> | Parameters<Validators<any>[K]>[0];
});
declare type CustomValidatorSchemaOptions = BaseValidatorSchemaOptions & {
    /**
     * The implementation of a custom validator.
     */
    custom: CustomValidator;
};
export declare type ExtensionValidatorSchemaOptions = boolean | BaseValidatorSchemaOptions;
export declare type ValidatorsSchema = {
    [K in Exclude<keyof Validators<any>, 'not' | 'withMessage'>]?: ValidatorSchemaOptions<K>;
};
declare type SanitizerSchemaOptions<K extends keyof Sanitizers<any>> = boolean | {
    /**
     * Options to pass to the sanitizer.
     */
    options?: Parameters<Sanitizers<any>[K]> | Parameters<Sanitizers<any>[K]>[0];
};
declare type CustomSanitizerSchemaOptions = {
    /**
     * The implementation of a custom sanitizer.
     */
    customSanitizer: CustomSanitizer;
};
export declare type ExtensionSanitizerSchemaOptions = true;
export declare type SanitizersSchema = {
    [K in keyof Sanitizers<any>]?: SanitizerSchemaOptions<K>;
};
declare type BaseParamSchema = {
    /**
     * Which request location(s) the field to validate is.
     * If unset, the field will be checked in every request location.
     */
    in?: Location | Location[];
    /**
     * The general error message in case a validator doesn't specify one,
     * or a function for creating the error message dynamically.
     *
     * NOTE(review): `FieldMessageFactory | any` collapses to `any`, so this
     * property is effectively untyped. `BaseValidatorSchemaOptions.errorMessage`
     * above uses `FieldMessageFactory | ErrorMessage` — confirm whether that was
     * intended here too.
     */
    errorMessage?: FieldMessageFactory | any;
    /**
     * Whether this field should be considered optional
     */
    optional?: boolean | {
        options?: OptionalOptions;
    };
};
export declare type DefaultSchemaKeys = keyof BaseParamSchema | keyof ValidatorsSchema | keyof SanitizersSchema;
/**
 * Defines a schema of validations/sanitizations for a field
 */
export declare type ParamSchema<T extends string = DefaultSchemaKeys> = BaseParamSchema & ValidatorsSchema & SanitizersSchema & {
    [K in T]?: K extends keyof BaseParamSchema ? BaseParamSchema[K] : K extends keyof ValidatorsSchema ? ValidatorsSchema[K] : K extends keyof SanitizersSchema ? SanitizersSchema[K] : CustomValidatorSchemaOptions | CustomSanitizerSchemaOptions;
};
/**
 * Defines a mapping from field name to a validations/sanitizations schema.
 */
export declare type Schema<T extends string = DefaultSchemaKeys> = Record<string, ParamSchema<T>>;
/**
 * Shortcut type for the return of a {@link checkSchema()}-like function.
 */
export declare type RunnableValidationChains<C extends ValidationChainLike> = C[] & {
    run(req: Request): Promise<ResultWithContext[]>;
};
/**
 * Factory for a {@link checkSchema()} function which can have extension validators and sanitizers.
 *
 * @see {@link checkSchema()}
 */
export declare function createCheckSchema<C extends ValidationChainLike>(createChain: (fields?: string | string[], locations?: Location[], errorMessage?: any) => C, extraValidators?: (keyof C)[], extraSanitizers?: (keyof C)[]): <T extends string = DefaultSchemaKeys>(schema: Schema<T>, defaultLocations?: Location[]) => RunnableValidationChains<C>;
/**
 * Creates an express middleware with validations for multiple fields at once in the form of
 * a schema object.
 *
 * @param schema the schema to validate.
 * @param defaultLocations the locations to check when a field's schema doesn't specify `in`.
 * @returns the validation chains, which can also be run as a unit via `.run(req)`.
 */
export declare const checkSchema: <T extends string = DefaultSchemaKeys>(schema: Schema<T>, defaultLocations?: Location[] | undefined) => RunnableValidationChains<ValidationChain>;
export {};
import { AlternativeMessageFactory, ErrorMessage, GroupedAlternativeMessageFactory, Middleware } from '../base';
import { ContextRunner, ValidationChain } from '../chain';
/** How errors from the failed chains are reported when no alternative passes. */
export declare type OneOfErrorType = 'grouped' | 'least_errored' | 'flat';
export declare type OneOfOptions = {
    /**
     * The error message to use in case none of the chains are valid.
     */
    message?: AlternativeMessageFactory | ErrorMessage;
    errorType?: Exclude<OneOfErrorType, 'grouped'>;
} | {
    /**
     * The error message to use in case none of the chain groups are valid.
     */
    message?: GroupedAlternativeMessageFactory | ErrorMessage;
    errorType?: 'grouped';
};
/**
 * Creates a middleware that will ensure that at least one of the given validation chains
 * or validation chain groups are valid.
 *
 * If none are, a single `AlternativeValidationError` or `GroupedAlternativeValidationError`
 * is added to the request, with the errors of each chain made available under the `nestedErrors` property.
 *
 * @param chains an array of validation chains to check if are valid.
 * If any of the items of `chains` is an array of validation chains, then all of them
 * must be valid together for the request to be considered valid.
 */
export declare function oneOf(chains: (ValidationChain | ValidationChain[])[], options?: OneOfOptions): Middleware & ContextRunner;
import { Request, ValidationError } from '../base';
import { ReadonlyContext } from '../context';
import { Result } from '../validation-result';
export declare type ContextRunningOptions = {
    /**
     * Defines whether errors and sanitization should be persisted to `req`.
     * @default false
     */
    dryRun?: boolean;
};
/** A validation {@link Result} that also exposes the context it was produced from. */
export interface ResultWithContext extends Result<ValidationError> {
    readonly context: ReadonlyContext;
}
export interface ContextRunner {
    /**
     * Runs the current validation chain.
     * @param req the express request to validate
     * @param options an object of options to customize how the chain will be run
     * @returns a promise for a {@link Result} that resolves when the validation chain has finished
     */
    run(req: Request, options?: ContextRunningOptions): Promise<ResultWithContext>;
}
import { Request } from '../base';
import { Context, ReadonlyContext } from '../context';
import { ContextBuilder } from '../context-builder';
import { SelectFields } from '../field-selection';
import { Result } from '../validation-result';
import { ContextRunner, ResultWithContext } from './context-runner';

/** Default implementation of {@link ResultWithContext}. */
export declare class ResultWithContextImpl extends Result implements ResultWithContext {
    readonly context: ReadonlyContext;
    constructor(context: ReadonlyContext);
}

/** Default implementation of {@link ContextRunner}. */
export declare class ContextRunnerImpl implements ContextRunner {
    // Either a builder used to create a fresh context per run, or a fixed context.
    private readonly builderOrContext;
    // Strategy used to select the request fields to run the chain against.
    private readonly selectFields;
    constructor(builderOrContext: ContextBuilder | Context, selectFields?: SelectFields);
    run(req: Request, options?: {
        dryRun?: boolean;
    }): Promise<ResultWithContextImpl>;
}
import { CustomValidator, ErrorMessage, FieldMessageFactory } from '../base';
import * as Options from '../options';
export declare type ExistsOptions = {
    /**
     * Defines which kind of value makes a field _NOT_ exist.
     *
     * - `undefined`: only `undefined` values; equivalent to `value !== undefined`
     * - `null`: only `undefined` and `null` values; equivalent to `value != null`
     * - `falsy`: all falsy values; equivalent to `!!value`
     *
     * @default 'undefined'
     */
    values?: 'undefined' | 'null' | 'falsy';
    /**
     * Whether a field whose value is falsy should be considered non-existent.
     * @default false
     * @deprecated Use `values` instead
     */
    checkFalsy?: boolean;
    /**
     * Whether a field whose value is `null` or `undefined` should be considered non-existent.
     * @default false
     * @deprecated Use `values` instead
     */
    checkNull?: boolean;
};
export interface Validators<Return> {
    /**
     * Negates the result of the next validator.
     *
     * @example check('weekday').not().isIn(['sunday', 'saturday'])
     * @returns the current validation chain
     */
    not(): Return;
    /**
     * Sets the error message for the previous validator.
     *
     * @param message the message, which can be any value, or a function for dynamically creating the
     * error message based on the field value
     * @returns the current validation chain
     */
    withMessage(message: FieldMessageFactory | ErrorMessage): Return;
    /**
     * Adds a custom validator to the validation chain.
     *
     * @param validator the custom validator
     * @returns the current validation chain
     */
    custom(validator: CustomValidator): Return;
    /**
     * Adds a validator to check that the fields exist in the request.
     * By default, this means that the value of the fields may not be `undefined`;
     * all other values are acceptable.
     *
     * @param options
     * @returns the current validation chain
     */
    exists(options?: ExistsOptions): Return;
    /**
     * Adds a validator to check if a value is an array.
     *
     * @param options
     * @returns the current validation chain
     */
    isArray(options?: {
        min?: number;
        max?: number;
    }): Return;
    /**
     * Adds a validator to check if a value is an object.
     *
     * @param options
     * @returns the current validation chain
     */
    isObject(options?: {
        strict?: boolean;
    }): Return;
    /**
     * Adds a validator to check if a value is a string.
     *
     * @returns the current validation chain
     */
    isString(): Return;
    /**
     * Adds a validator to check if a value is not empty; that is, a string with length of 1 or more.
     *
     * @param options
     * @returns the current validation chain
     */
    notEmpty(options?: Options.IsEmptyOptions): Return;
    // Standard validators, delegating to their validator.js counterparts of the
    // same name. Each returns the current validation chain.
    contains(elem: any, options?: Options.ContainsOptions): Return;
    equals(comparison: string): Return;
    isAfter(dateOrOptions?: string | Options.IsAfterOptions): Return;
    isAlpha(locale?: Options.AlphaLocale, options?: Options.IsAlphaOptions): Return;
    isAlphanumeric(locale?: Options.AlphanumericLocale, options?: Options.IsAlphanumericOptions): Return;
    isAscii(): Return;
    isBase32(options?: Options.IsBase32Options): Return;
    isBase58(): Return;
    isBase64(options?: Options.IsBase64Options): Return;
    isBefore(date?: string): Return;
    isBIC(): Return;
    isBoolean(options?: Options.IsBooleanOptions): Return;
    isBtcAddress(): Return;
    isByteLength(options: Options.MinMaxExtendedOptions): Return;
    isCreditCard(options?: Options.IsCreditCard): Return;
    isCurrency(options?: Options.IsCurrencyOptions): Return;
    isDataURI(): Return;
    isDate(options?: Options.IsDateOptions): Return;
    isDecimal(options?: Options.IsDecimalOptions): Return;
    isDivisibleBy(number: number): Return;
    isEAN(): Return;
    isEmail(options?: Options.IsEmailOptions): Return;
    isEmpty(options?: Options.IsEmptyOptions): Return;
    isEthereumAddress(): Return;
    isFQDN(options?: Options.IsFQDNOptions): Return;
    isFloat(options?: Options.IsFloatOptions): Return;
    isFullWidth(): Return;
    isHalfWidth(): Return;
    isHash(algorithm: Options.HashAlgorithm): Return;
    isHexColor(): Return;
    isHexadecimal(): Return;
    isHSL(): Return;
    isIBAN(): Return;
    isIdentityCard(locale?: Options.IdentityCardLocale): Return;
    isIMEI(options?: Options.IsIMEIOptions): Return;
    isIP(version?: Options.IPVersion): Return;
    isIPRange(version?: Options.IPVersion): Return;
    isISBN(versionOrOptions?: number | Options.IsISBNOptions): Return;
    isISSN(options?: Options.IsISSNOptions): Return;
    isISIN(): Return;
    isISO6391(): Return;
    isISO8601(options?: Options.IsISO8601Options): Return;
    isISO31661Alpha2(): Return;
    isISO31661Alpha3(): Return;
    isISO4217(): Return;
    isISRC(): Return;
    isIn(values: readonly any[]): Return;
    isInt(options?: Options.IsIntOptions): Return;
    isJSON(options?: Options.IsJSONOptions): Return;
    isJWT(): Return;
    isLatLong(options?: Options.IsLatLongOptions): Return;
    isLength(options: Options.MinMaxOptions): Return;
    isLicensePlate(locale: Options.IsLicensePlateLocale): Return;
    isLocale(): Return;
    isLowercase(): Return;
    isLuhnNumber(): Return;
    isMagnetURI(): Return;
    isMACAddress(options?: Options.IsMACAddressOptions): Return;
    isMD5(): Return;
    isMimeType(): Return;
    isMobilePhone(locale: Options.MobilePhoneLocale | readonly Options.MobilePhoneLocale[], options?: Options.IsMobilePhoneOptions): Return;
    isMongoId(): Return;
    isMultibyte(): Return;
    isNumeric(options?: Options.IsNumericOptions): Return;
    isOctal(): Return;
    isPassportNumber(countryCode?: Options.PassportCountryCode): Return;
    isPort(): Return;
    isPostalCode(locale: Options.PostalCodeLocale): Return;
    isRgbColor(includePercentValues?: boolean): Return;
    isRFC3339(): Return;
    isSemVer(): Return;
    isSlug(): Return;
    isStrongPassword(options?: Options.IsStrongPasswordOptions): Return;
    isSurrogatePair(): Return;
    isTaxID(locale: Options.TaxIDLocale): Return;
    isTime(options: Options.IsTimeOptions): Return;
    isURL(options?: Options.IsURLOptions): Return;
    isUUID(version?: Options.UUIDVersion): Return;
    isUppercase(): Return;
    isVariableWidth(): Return;
    isVAT(countryCode: Options.VATCountryCode): Return;
    isWhitelisted(chars: string | readonly string[]): Return;
    matches(pattern: RegExp | string, modifiers?: string): Return;
}
import { CustomValidator } from '../base';
import { Optional } from '../context';
import { ContextRunner } from './context-runner';
export interface BailOptions {
    /**
     * Defines the level at which to stop running further validations:
     * - When set to `chain`, further validations won't be run for this validation chain if there
     * are any errors.
     * - When set to `request`, no further validations on the same request will be run either if
     * there are any errors.
     *
     * @default 'chain'
     */
    level?: 'chain' | 'request';
}
export interface OptionalOptions {
    /**
     * Defines which kind of value makes a field optional.
     *
     * - `undefined`: only `undefined` values; equivalent to `value === undefined`
     * - `null`: only `undefined` and `null` values; equivalent to `value == null`
     * - `falsy`: all falsy values; equivalent to `!value`
     *
     * @default 'undefined'
     */
    values?: Exclude<Optional, false>;
    /**
     * Whether a field whose value is `null` or `undefined` is to be considered optional.
     * @default false
     * @deprecated Use `values` instead.
     */
    nullable?: boolean;
    /**
     * Whether a field whose value is falsy (that is, `0`, `false`, `null`, `undefined` or an empty
     * string) is to be considered optional.
     * @default false
     * @deprecated Use `values` instead.
     */
    checkFalsy?: boolean;
}
export interface ContextHandler<Chain> {
    /**
     * Stops running validations if any of the previous ones have failed.
     *
     * Useful to prevent a custom validator that touches a database or external API from running when
     * you know it will fail.
     *
     * May be used multiple times in the same validation chain if desired.
     *
     * @example
     * check('username')
     *   .isEmail()
     *   // If not an email, stop here
     *   .bail()
     *   .custom(checkDenylistDomain)
     *   // If domain is not allowed, don't go check if it already exists
     *   .bail()
     *   .custom(checkEmailExists)
     *
     * @returns the current validation chain
     */
    bail(opts?: BailOptions): Chain;
    /**
     * Adds a condition on whether the validation should continue on a field or not.
     * @param condition may be either
     * - a custom validator-like function, which must return a truthy value or a promise that
     * resolves in order to continue validation. If the return value is falsy, a promise that
     * rejects, or if it throws, validation will stop.
     * - a validation chain which if it would produce errors, the validation chain stops.
     * @example
     * check('newPassword')
     *   // Only validate if the old password has been provided
     *   .if((value, { req }) => req.body.oldPassword)
     *   // Or, use it with a validation chain
     *   .if(body('oldPassword').notEmpty())
     * @returns the current validation chain
     */
    if(condition: CustomValidator | ContextRunner): Chain;
    /**
     * Marks the field(s) of the validation chain as optional.
     * By default, only fields with an `undefined` value are considered optional and will be ignored
     * when validating.
     *
     * @param options an object of options to customize the behavior of optional.
     * @returns the current validation chain
     */
    optional(options?: {
        values?: Optional;
        /**
         * @deprecated use `values` instead
         */
        checkFalsy?: boolean;
        /**
         * @deprecated use `values` instead
         */
        nullable?: boolean;
    } | boolean): Chain;
}
import { Request } from '../base';
import { ContextBuilder } from '../context-builder';
import { Sanitizers } from './sanitizers';
import { Validators } from './validators';
import { ContextHandler } from './context-handler';
import { ContextRunner } from './context-runner';

/**
 * A validation chain: an express middleware function that also exposes the
 * fluent validator/sanitizer/flow-control API and can be run imperatively via
 * {@link ContextRunner.run}.
 */
export interface ValidationChain extends Validators<ValidationChain>, Sanitizers<ValidationChain>, ContextHandler<ValidationChain>, ContextRunner {
    // Express middleware signature.
    (req: Request, res: any, next: (error?: any) => void): void;
    // Builder that accumulates the items added through the fluent API.
    builder: ContextBuilder;
}
/**
 * A copy of `ValidationChain` where methods that would return the chain itself can return any other
 * value.
 * Useful for typing functions which accept either standard or custom validation chains.
 */
export declare type ValidationChainLike = {
    [K in keyof ValidationChain]: ValidationChain[K] extends (...args: infer A) => ValidationChain ? (...args: A) => any : ValidationChain[K];
};
import { CustomSanitizer } from '../base';
import { ContextBuilder } from '../context-builder';
import * as Options from '../options';
import { Sanitizers } from './sanitizers';

/**
 * Default implementation of the {@link Sanitizers} fluent API.
 * Each method registers a sanitization on the builder and returns the chain.
 */
export declare class SanitizersImpl<Chain> implements Sanitizers<Chain> {
    private readonly builder;
    private readonly chain;
    constructor(builder: ContextBuilder, chain: Chain);
    customSanitizer(sanitizer: CustomSanitizer): Chain;
    default(default_value: any): Chain;
    replace(values_to_replace: any, new_value: any): Chain;
    // Shared helper that registers a validator.js-style sanitizer.
    private addStandardSanitization;
    blacklist(chars: string): Chain;
    escape(): Chain;
    unescape(): Chain;
    ltrim(chars?: string): Chain;
    normalizeEmail(options?: Options.NormalizeEmailOptions): Chain;
    rtrim(chars?: string): Chain;
    stripLow(keep_new_lines?: boolean): Chain;
    toArray(): Chain;
    toBoolean(strict?: boolean): Chain;
    toDate(): Chain;
    toFloat(): Chain;
    toInt(radix?: number): Chain;
    toLowerCase(): Chain;
    toUpperCase(): Chain;
    trim(chars?: string): Chain;
    whitelist(chars: string): Chain;
}
import { CustomValidator, ErrorMessage, FieldMessageFactory } from '../base';
import { ContextBuilder } from '../context-builder';
import * as Options from '../options';
import { ExistsOptions, Validators } from './validators';

/**
 * Default implementation of the {@link Validators} fluent API.
 * Each method registers a validation on the builder and returns the chain.
 *
 * Review fixes applied to keep this class consistent with the `Validators`
 * interface it implements:
 * - `matches()` now returns `Chain` instead of `any` (the `any` return broke
 *   type inference for everything chained after it).
 * - `isCreditCard()` accepts the optional `options` parameter the interface declares.
 * - `isByteLength()` takes `MinMaxExtendedOptions` (a superset of `MinMaxOptions`),
 *   matching the interface.
 * - `isIdentityCard()`'s `locale` parameter is optional, matching the interface.
 */
export declare class ValidatorsImpl<Chain> implements Validators<Chain> {
    private readonly builder;
    private readonly chain;
    // Tracks the most recently added validator so `.withMessage()` can target it.
    private lastValidator;
    // Set by `.not()`; negates the next validator added.
    private negateNext;
    constructor(builder: ContextBuilder, chain: Chain);
    private addItem;
    not(): Chain;
    withMessage(message: FieldMessageFactory | ErrorMessage): Chain;
    custom(validator: CustomValidator): Chain;
    exists(options?: ExistsOptions): Chain;
    isArray(options?: {
        min?: number;
        max?: number;
    }): Chain;
    isObject(options?: {
        strict?: boolean;
    }): Chain;
    isString(): Chain;
    notEmpty(options?: Options.IsEmptyOptions): Chain;
    // Shared helper that registers a validator.js-style validator.
    private addStandardValidation;
    contains(elem: any, options?: Options.ContainsOptions): Chain;
    equals(comparison: string): Chain;
    isAfter(dateOrOptions?: string | Options.IsAfterOptions): Chain;
    isAlpha(locale?: Options.AlphaLocale, options?: Options.IsAlphaOptions): Chain;
    isAlphanumeric(locale?: Options.AlphanumericLocale, options?: Options.IsAlphanumericOptions): Chain;
    isAscii(): Chain;
    isBase32(options?: Options.IsBase32Options): Chain;
    isBase58(): Chain;
    isBase64(options?: Options.IsBase64Options): Chain;
    isBefore(date?: string): Chain;
    isBIC(): Chain;
    /**
     * There are basically three levels of strictness for this validator.
     * Passing `{ strict: true }` as option only passes the validation if the value is a JS bool. (It also overrides the loose property of the options).
     * Passing `{ loose: true|false }` along with no `strict` prop of with `strict` falsy follows the behaviour specified in validator.js docs.
     */
    isBoolean(options?: Options.IsBooleanOptions): Chain;
    isBtcAddress(): Chain;
    isByteLength(options: Options.MinMaxExtendedOptions): Chain;
    isCreditCard(options?: Options.IsCreditCard): Chain;
    isCurrency(options?: Options.IsCurrencyOptions): Chain;
    isDataURI(): Chain;
    isDate(options?: Options.IsDateOptions): Chain;
    isDecimal(options?: Options.IsDecimalOptions): Chain;
    isDivisibleBy(number: number): Chain;
    isEAN(): Chain;
    isEmail(options?: Options.IsEmailOptions): Chain;
    isEmpty(options?: Options.IsEmptyOptions): Chain;
    isEthereumAddress(): Chain;
    isFQDN(options?: Options.IsFQDNOptions): Chain;
    isFloat(options?: Options.IsFloatOptions): Chain;
    isFullWidth(): Chain;
    isHalfWidth(): Chain;
    isHash(algorithm: Options.HashAlgorithm): Chain;
    isHexColor(): Chain;
    isHexadecimal(): Chain;
    isHSL(): Chain;
    isIBAN(): Chain;
    isIdentityCard(locale?: Options.IdentityCardLocale): Chain;
    isIMEI(options?: Options.IsIMEIOptions): Chain;
    isIP(version?: Options.IPVersion): Chain;
    isIPRange(version?: Options.IPVersion): Chain;
    isISBN(versionOrOptions?: number | Options.IsISBNOptions): Chain;
    isISSN(options?: Options.IsISSNOptions): Chain;
    isISIN(): Chain;
    isISO6391(): Chain;
    isISO8601(options?: Options.IsISO8601Options): Chain;
    isISO31661Alpha2(): Chain;
    isISO31661Alpha3(): Chain;
    isISO4217(): Chain;
    isISRC(): Chain;
    isIn(values: readonly any[]): Chain;
    isInt(options?: Options.IsIntOptions): Chain;
    isJSON(options?: Options.IsJSONOptions): Chain;
    isJWT(): Chain;
    isLatLong(options?: Options.IsLatLongOptions): Chain;
    isLength(options: Options.MinMaxOptions): Chain;
    isLicensePlate(locale: Options.IsLicensePlateLocale): Chain;
    isLocale(): Chain;
    isLowercase(): Chain;
    isLuhnNumber(): Chain;
    isMagnetURI(): Chain;
    isMACAddress(options?: Options.IsMACAddressOptions): Chain;
    isMD5(): Chain;
    isMimeType(): Chain;
    isMobilePhone(locale: Options.MobilePhoneLocale | readonly Options.MobilePhoneLocale[], options?: Options.IsMobilePhoneOptions): Chain;
    isMongoId(): Chain;
    isMultibyte(): Chain;
    isNumeric(options?: Options.IsNumericOptions): Chain;
    isOctal(): Chain;
    isPassportNumber(countryCode?: Options.PassportCountryCode): Chain;
    isPort(): Chain;
    isPostalCode(locale: Options.PostalCodeLocale): Chain;
    isRFC3339(): Chain;
    isRgbColor(includePercentValues?: boolean): Chain;
    isSemVer(): Chain;
    isSlug(): Chain;
    isStrongPassword(options?: Options.IsStrongPasswordOptions): Chain;
    isSurrogatePair(): Chain;
    isTaxID(locale: Options.TaxIDLocale): Chain;
    isTime(options?: Options.IsTimeOptions): Chain;
    isURL(options?: Options.IsURLOptions): Chain;
    isUUID(version?: Options.UUIDVersion): Chain;
    isUppercase(): Chain;
    isVariableWidth(): Chain;
    isVAT(countryCode: Options.VATCountryCode): Chain;
    isWhitelisted(chars: string | readonly string[]): Chain;
    matches(pattern: RegExp | string, modifiers?: string): Chain;
}
import { ContextBuilder } from '../context-builder';
import { CustomValidator } from '../base';
import { BailOptions, ContextHandler, OptionalOptions } from './context-handler';
import { ContextRunner } from './context-runner';

/**
 * Default implementation of the {@link ContextHandler} fluent API
 * (`.bail()`, `.if()`, `.optional()`).
 */
export declare class ContextHandlerImpl<Chain> implements ContextHandler<Chain> {
    private readonly builder;
    private readonly chain;
    constructor(builder: ContextBuilder, chain: Chain);
    bail(opts?: BailOptions): Chain;
    if(condition: CustomValidator | ContextRunner): Chain;
    optional(options?: OptionalOptions | boolean): Chain;
}
// Barrel file: re-exports the whole chain module (interfaces and default implementations).
export * from './sanitizers';
export * from './sanitizers-impl';
export * from './context-handler';
export * from './context-handler-impl';
export * from './context-runner';
export * from './context-runner-impl';
export * from './validators';
export * from './validators-impl';
export * from './validation-chain';
import { CustomSanitizer } from '../base';
import * as Options from '../options';
export interface Sanitizers<Return> {
    /**
     * Adds a custom sanitizer to the validation chain.
     *
     * @param sanitizer the custom sanitizer
     * @returns the current validation chain
     */
    customSanitizer(sanitizer: CustomSanitizer): Return;
    /**
     * Replaces the value of the field if it's one of `''`, `null`, `undefined` or `NaN`.
     *
     * @param default_value the value to replace with
     * @returns the current validation chain
     */
    default(default_value: any): Return;
    /**
     * Replaces a field's value with another value.
     *
     * @param values_to_replace one or more values that should be replaced
     * @param new_value the value to replace with
     * @returns the current validation chain
     */
    replace(values_to_replace: any, new_value: any): Return;
    // Standard sanitizers, delegating to their validator.js counterparts of the
    // same name. Each returns the current validation chain.
    blacklist(chars: string): Return;
    escape(): Return;
    unescape(): Return;
    ltrim(chars?: string): Return;
    normalizeEmail(options?: Options.NormalizeEmailOptions): Return;
    rtrim(chars?: string): Return;
    stripLow(keep_new_lines?: boolean): Return;
    toArray(): Return;
    toBoolean(strict?: boolean): Return;
    toDate(): Return;
    toFloat(): Return;
    toInt(radix?: number): Return;
    toLowerCase(): Return;
    toUpperCase(): Return;
    trim(chars?: string): Return;
    whitelist(chars: string): Return;
}
// Type definitions for `execa`, a child_process wrapper: common/async/sync
// option interfaces, result and error shapes, the promise-enhanced child
// process, and the callable `execa` API with its `sync`, `command`,
// `commandSync` and `node` variants.
// NOTE: the triple-slash directive below must be on its own line — anything
// following it on the same physical line would be treated as comment text.
/// <reference types="node"/>
import {ChildProcess} from 'child_process'; import {Stream, Readable as ReadableStream} from 'stream'; declare namespace execa { type StdioOption = | 'pipe' | 'ipc' | 'ignore' | 'inherit' | Stream | number | undefined; interface CommonOptions<EncodingType> { /** Kill the spawned process when the parent process exits unless either: - the spawned process is [`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached) - the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit @default true */ readonly cleanup?: boolean; /** Prefer locally installed binaries when looking for a binary to execute. If you `$ npm install foo`, you can then `execa('foo')`. @default false */ readonly preferLocal?: boolean; /** Preferred path to find locally installed binaries in (use with `preferLocal`). @default process.cwd() */ readonly localDir?: string; /** Path to the Node.js executable to use in child processes. This can be either an absolute path or a path relative to the `cwd` option. Requires `preferLocal` to be `true`. For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process. @default process.execPath */ readonly execPath?: string; /** Buffer the output from the spawned process. When set to `false`, you must read the output of `stdout` and `stderr` (or `all` if the `all` option is `true`). Otherwise the returned promise will not be resolved/rejected. If the spawned process fails, `error.stdout`, `error.stderr`, and `error.all` will contain the buffered data. @default true */ readonly buffer?: boolean; /** Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). 
@default 'pipe' */ readonly stdin?: StdioOption; /** Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). @default 'pipe' */ readonly stdout?: StdioOption; /** Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). @default 'pipe' */ readonly stderr?: StdioOption; /** Setting this to `false` resolves the promise with the error instead of rejecting it. @default true */ readonly reject?: boolean; /** Add an `.all` property on the promise and the resolved value. The property contains the output of the process with `stdout` and `stderr` interleaved. @default false */ readonly all?: boolean; /** Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output. @default true */ readonly stripFinalNewline?: boolean; /** Set to `false` if you don't want to extend the environment variables when providing the `env` property. @default true */ readonly extendEnv?: boolean; /** Current working directory of the child process. @default process.cwd() */ readonly cwd?: string; /** Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this. @default process.env */ readonly env?: NodeJS.ProcessEnv; /** Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified. */ readonly argv0?: string; /** Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. @default 'pipe' */ readonly stdio?: 'pipe' | 'ignore' | 'inherit' | readonly StdioOption[]; /** Specify the kind of serialization used for sending messages between processes when using the `stdio: 'ipc'` option or `execa.node()`: - `json`: Uses `JSON.stringify()` and `JSON.parse()`. - `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.h
tml#v8_v8_serialize_value) Requires Node.js `13.2.0` or later. [More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization) @default 'json' */ readonly serialization?: 'json' | 'advanced'; /** Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). @default false */ readonly detached?: boolean; /** Sets the user identity of the process. */ readonly uid?: number; /** Sets the group identity of the process. */ readonly gid?: number; /** If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. We recommend against using this option since it is: - not cross-platform, encouraging shell-specific syntax. - slower, because of the additional shell interpretation. - unsafe, potentially allowing command injection. @default false */ readonly shell?: boolean | string; /** Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string. @default 'utf8' */ readonly encoding?: EncodingType; /** If `timeout` is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than `timeout` milliseconds. @default 0 */ readonly timeout?: number; /** Largest amount of data in bytes allowed on `stdout` or `stderr`. Default: 100 MB. @default 100_000_000 */ readonly maxBuffer?: number; /** Signal value to be used when the spawned process will be killed. @default 'SIGTERM' */ readonly killSignal?: string | number; /** If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. 
@default false */ readonly windowsVerbatimArguments?: boolean; /** On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows. @default true */ readonly windowsHide?: boolean; } interface Options<EncodingType = string> extends CommonOptions<EncodingType> { /** Write some input to the `stdin` of your binary. */ readonly input?: string | Buffer | ReadableStream; } interface SyncOptions<EncodingType = string> extends CommonOptions<EncodingType> { /** Write some input to the `stdin` of your binary. */ readonly input?: string | Buffer; } interface NodeOptions<EncodingType = string> extends Options<EncodingType> { /** The Node.js executable to use. @default process.execPath */ readonly nodePath?: string; /** List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable. @default process.execArgv */ readonly nodeOptions?: string[]; } interface ExecaReturnBase<StdoutStderrType> { /** The file and arguments that were run, for logging purposes. This is not escaped and should not be executed directly as a process, including using `execa()` or `execa.command()`. */ command: string; /** Same as `command` but escaped. This is meant to be copy and pasted into a shell, for debugging purposes. Since the escaping is fairly basic, this should not be executed directly as a process, including using `execa()` or `execa.command()`. */ escapedCommand: string; /** The numeric exit code of the process that was run. */ exitCode: number; /** The output of the process on stdout. */ stdout: StdoutStderrType; /** The output of the process on stderr. */ stderr: StdoutStderrType; /** Whether the process failed to run. */ failed: boolean; /**
Whether the process timed out. */ timedOut: boolean; /** Whether the process was killed. */ killed: boolean; /** The name of the signal that was used to terminate the process. For example, `SIGFPE`. If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. */ signal?: string; /** A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`. If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen. */ signalDescription?: string; } interface ExecaSyncReturnValue<StdoutErrorType = string> extends ExecaReturnBase<StdoutErrorType> { } /** Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance. The child process fails when: - its exit code is not `0` - it was killed with a signal - timing out - being canceled - there's not enough memory or there are already too many child processes */ interface ExecaReturnValue<StdoutErrorType = string> extends ExecaSyncReturnValue<StdoutErrorType> { /** The output of the process with `stdout` and `stderr` interleaved. This is `undefined` if either: - the `all` option is `false` (default value) - `execa.sync()` was used */ all?: StdoutErrorType; /** Whether the process was canceled. */ isCanceled: boolean; } interface ExecaSyncError<StdoutErrorType = string> extends Error, ExecaReturnBase<StdoutErrorType> { /** Error message when the child process failed to run. In addition to the underlying error message, it also contains some information related to why the child process errored. The child process stderr then stdout are appended to the end, separated with newlines and not interleaved. 
*/ message: string; /** This is the same as the `message` property except it does not include the child process stdout/stderr. */ shortMessage: string; /** Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa. This is `undefined` unless the child process exited due to an `error` event or a timeout. */ originalMessage?: string; } interface ExecaError<StdoutErrorType = string> extends ExecaSyncError<StdoutErrorType> { /** The output of the process with `stdout` and `stderr` interleaved. This is `undefined` if either: - the `all` option is `false` (default value) - `execa.sync()` was used */ all?: StdoutErrorType; /** Whether the process was canceled. */ isCanceled: boolean; } interface KillOptions { /** Milliseconds to wait for the child process to terminate before sending `SIGKILL`. Can be disabled with `false`. @default 5000 */ forceKillAfterTimeout?: number | false; } interface ExecaChildPromise<StdoutErrorType> { /** Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr). This is `undefined` if either: - the `all` option is `false` (the default value) - both `stdout` and `stderr` options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio) */ all?: ReadableStream; catch<ResultType = never>( onRejected?: (reason: ExecaError<StdoutErrorType>) => ResultType | PromiseLike<ResultType> ): Promise<ExecaReturnValue<StdoutErrorType> | ResultType>; /** Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal), except if `signal` is `SIGTERM` (the default va
lue) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. */ kill(signal?: string, options?: KillOptions): void; /** Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. */ cancel(): void; } type ExecaChildProcess<StdoutErrorType = string> = ChildProcess & ExecaChildPromise<StdoutErrorType> & Promise<ExecaReturnValue<StdoutErrorType>>; } declare const execa: { /** Execute a file. Think of this as a mix of `child_process.execFile` and `child_process.spawn`. @param file - The program/script to execute. @param arguments - Arguments to pass to `file` on execution. @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. @example ``` import execa = require('execa'); (async () => { const {stdout} = await execa('echo', ['unicorns']); console.log(stdout); //=> 'unicorns' // Cancelling a spawned process const subprocess = execa('node'); setTimeout(() => { subprocess.cancel() }, 1000); try { await subprocess; } catch (error) { console.log(subprocess.killed); // true console.log(error.isCanceled); // true } })(); // Pipe the child process stdout to the current stdout execa('echo', ['unicorns']).stdout.pipe(process.stdout); ``` */ ( file: string, arguments?: readonly string[], options?: execa.Options ): execa.ExecaChildProcess; ( file: string, arguments?: readonly string[], options?: execa.Options<null> ): execa.ExecaChildProcess<Buffer>; (file: string, options?: execa.Options): execa.ExecaChildProcess; (file: string, options?: execa.Options<null>): execa.ExecaChildProcess< Buffer >; /** Execute a file synchronously. 
This method throws an `Error` if the command fails. @param file - The program/script to execute. @param arguments - Arguments to pass to `file` on execution. @returns A result `Object` with `stdout` and `stderr` properties. */ sync( file: string, arguments?: readonly string[], options?: execa.SyncOptions ): execa.ExecaSyncReturnValue; sync( file: string, arguments?: readonly string[], options?: execa.SyncOptions<null> ): execa.ExecaSyncReturnValue<Buffer>; sync(file: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; sync( file: string, options?: execa.SyncOptions<null> ): execa.ExecaSyncReturnValue<Buffer>; /** Same as `execa()` except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`. If the file or an argument contains spaces, they must be escaped with backslashes. This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed. The `shell` option must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`. @param command - The program/script to execute and its arguments. @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. @example ``` import execa = require('execa'); (async () => { const {stdout} = await execa.command('echo unicorns'); console.log(stdout); //=> 'unicorns' })(); ``` */ command(command: string, options?: execa.Options): execa.ExecaChildProcess; command(command: string, options?: execa.Options<null>): execa.ExecaChildProcess<Buffer>; /** Same as `execa.command()`
but synchronous. @param command - The program/script to execute and its arguments. @returns A result `Object` with `stdout` and `stderr` properties. */ commandSync(command: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; commandSync(command: string, options?: execa.SyncOptions<null>): execa.ExecaSyncReturnValue<Buffer>; /** Execute a Node.js script as a child process. Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)): - the current Node version and options are used. This can be overridden using the `nodePath` and `nodeOptions` options. - the `shell` option cannot be used - an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio) @param scriptPath - Node.js script to execute. @param arguments - Arguments to pass to `scriptPath` on execution. @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. */ node( scriptPath: string, arguments?: readonly string[], options?: execa.NodeOptions ): execa.ExecaChildProcess; node( scriptPath: string, arguments?: readonly string[], options?: execa.Options<null> ): execa.ExecaChildProcess<Buffer>; node(scriptPath: string, options?: execa.Options): execa.ExecaChildProcess; node(scriptPath: string, options?: execa.Options<null>): execa.ExecaChildProcess<Buffer>; }; export = execa;
/**
 * A decoded mapping segment: 1, 4, or 5 numeric fields. Presumably
 * [generatedColumn], [generatedColumn, sourcesIndex, sourceLine, sourceColumn],
 * or the same plus a trailing namesIndex — confirm against the source map spec.
 */
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
/** One generated line: the segments mapping positions on that line. */
export declare type SourceMapLine = SourceMapSegment[];
/** All lines of a map, indexed by generated line number. */
export declare type SourceMapMappings = SourceMapLine[];
/** Decodes a VLQ-encoded `mappings` string into numeric segments. */
export declare function decode(mappings: string): SourceMapMappings;
/** Encodes decoded mappings back into a VLQ `mappings` string. */
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
import type { SourceMapSegment } from './sourcemap-segment';
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
/** The core fields shared by all version-3 source maps. */
export interface SourceMapV3 {
    file?: string | null;
    names: string[];
    sourceRoot?: string;
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
}
/** A source map whose `mappings` are still VLQ-encoded as a string. */
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
/** A source map whose `mappings` have been decoded into numeric segments. */
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: SourceMapSegment[][];
}
/** One section of a sectioned ("indexed") map, offset into the generated file. */
export interface Section {
    offset: {
        line: number;
        column: number;
    };
    map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
/** An "indexed map" composed of several sub-maps placed at generated offsets. */
export interface SectionedSourceMap {
    file?: string | null;
    sections: Section[];
    version: 3;
}
/** A successful generated-to-original lookup result. */
export type OriginalMapping = {
    source: string | null;
    line: number;
    column: number;
    name: string | null;
};
/** Returned when no original position exists for a generated position. */
export type InvalidOriginalMapping = {
    source: null;
    line: null;
    column: null;
    name: null;
};
/** A successful original-to-generated lookup result. */
export type GeneratedMapping = {
    line: number;
    column: number;
};
/** Returned when no generated position exists for an original position. */
export type InvalidGeneratedMapping = {
    line: null;
    column: null;
};
/** Search bias used when a needle falls between two mappings. */
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
/** Anything accepted as map input: JSON string, (readonly) map object, or TraceMap. */
export type SourceMapInput = string | Ro<EncodedSourceMap> | Ro<DecodedSourceMap> | TraceMap;
export type SectionedSourceMapInput = SourceMapInput | Ro<SectionedSourceMap>;
/** A generated position to search for, with an optional bias. */
export type Needle = {
    line: number;
    column: number;
    bias?: Bias;
};
/** An original (source) position to search for, with an optional bias. */
export type SourceNeedle = {
    source: string;
    line: number;
    column: number;
    bias?: Bias;
};
/** Callback payload for `eachMapping`: an unmapped or a mapped segment. */
export type EachMapping = {
    generatedLine: number;
    generatedColumn: number;
    source: null;
    originalLine: null;
    originalColumn: null;
    name: null;
} | {
    generatedLine: number;
    generatedColumn: number;
    source: string | null;
    originalLine: number;
    originalColumn: number;
    name: string | null;
};
/** Abstract shape implemented by `TraceMap`: raw V3 fields plus resolved source URLs. */
export declare abstract class SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: SourceMapV3['sources'];
}
/**
 * Recursively widens `T` so that both mutable and `Readonly` forms of its
 * arrays and nested objects are accepted.
 */
export type Ro<T> = T extends Array<infer V> ? V[] | Readonly<V[]> | RoArray<V> | Readonly<RoArray<V>> : T extends object ? T | Readonly<T> | RoObject<T> | Readonly<RoObject<T>> : T;
type RoArray<T> = Ro<T>[];
type RoObject<T> = {
    [K in keyof T]: T[K] | Ro<T[K]>;
};
export {};
// Declarations for the trace-mapping query API: a `TraceMap` wraps a source
// map, and the exported `let` function bindings (assigned by the
// implementation) query generated<->original positions against it.
import type { SourceMapSegment } from './sourcemap-segment'; import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types'; export type { SourceMapSegment } from './sourcemap-segment'; export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, } from './types'; export declare const LEAST_UPPER_BOUND = -1; export declare const GREATEST_LOWER_BOUND = 1; /** * Returns the encoded (VLQ string) form of the SourceMap's mappings field. */ export declare let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings']; /** * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field. */ export declare let decodedMappings: (map: TraceMap) => Readonly<DecodedSourceMap['mappings']>; /** * A low-level API to find the segment associated with a generated line/column (think, from a * stack trace). Line and column here are 0-based, unlike `originalPositionFor`. */ export declare let traceSegment: (map: TraceMap, line: number, column: number) => Readonly<SourceMapSegment> | null; /** * A higher-level API to find the source/line/column associated with a generated line/column * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in * `source-map` library. */ export declare let originalPositionFor: (map: TraceMap, needle: Needle) => OriginalMapping | InvalidOriginalMapping; /** * Finds the generated line/column position of the provided source/line/column source position. 
*/ export declare let generatedPositionFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping | InvalidGeneratedMapping; /** * Finds all generated line/column positions of the provided source/line/column source position. */ export declare let allGeneratedPositionsFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping[]; /** * Iterates each mapping in generated position order. */ export declare let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void; /** * Retrieves the source content for a particular source, if its found. Returns null if not. */ export declare let sourceContentFor: (map: TraceMap, source: string) => string | null; /** * A helper that skips sorting of the input map's mappings array, which can be expensive for larger * maps. */ export declare let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap; /** * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects * a sourcemap, or to JSON.stringify. */ export declare let decodedMap: (map: TraceMap) => Omit<DecodedSourceMap, 'mappings'> & { mappings: readonly SourceMapSegment[][]; }; /** * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects * a sourcemap, or to JSON.stringify. */ export declare let encodedMap: (map: TraceMap) => EncodedSourceMap; export { AnyMap } from './any-map'; export declare class TraceMap implements SourceMap { version: SourceMapV3['version']; file: SourceMapV3['file']; names: SourceMapV3['names']; sourceRoot: SourceMapV3['sourceRoot']; sources: SourceMapV3['sources']; sourcesContent: SourceMapV3['sourcesContent']; resolvedSources: string[]; private _encoded; private _decoded; private _decodedMemo; private _bySources; private _bySourceMemos; constructor(map: SourceMapInput, mapUrl?: string | null); }
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
/**
 * Reverse lookup table for one source file: maps an original line number to
 * the reverse segments on that line. The `__proto__: null` member keeps the
 * object prototype-free, so numeric keys cannot collide with Object.prototype.
 */
export type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
/**
 * Builds one `Source` reverse-lookup table per source file from decoded
 * (generated-order) mappings, using the supplied binary-search memo states.
 */
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
/** Memo of the last search on one array: the key, needle, and result index. */
export type MemoState = {
    lastKey: number;
    lastNeedle: number;
    lastIndex: number;
};
// NOTE(review): mutable module state — presumably set by the search functions
// to record whether the last search found an exact match; confirm against the
// implementation.
export declare let found: boolean;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
// upperBound/lowerBound adjust `index` within a run of segments sharing the
// needle's value — presumably to the last/first matching segment respectively;
// confirm against the implementation.
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
/** Creates a fresh `MemoState` for use with `memoizedBinarySearch`. */
export declare function memoizedState(): MemoState;
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
/**
 * A dual constructor/callable that accepts any source map input — including
 * sectioned ("indexed") maps — and produces a `TraceMap`. Both `new AnyMap(m)`
 * and `AnyMap(m)` are valid.
 */
type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};
/**
 * Removes everything after the last "/", but leaves the slash.
 *
 * @param path the URL or path to strip; may be null or undefined
 * @returns the portion of `path` up to and including its last "/"
 */
export default function stripFilename(path: string | undefined | null): string;
// Named aliases for the numeric fields of a mapping segment; they make the
// tuple positions below self-describing.
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
type GeneratedLine = number;
/** A decoded mapping segment: 1, 4, or 5 fields, in generated-position order. */
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
/** A segment re-keyed by original column for source-to-generated lookups. */
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
// Tuple indices into SourceMapSegment…
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
// …and into ReverseSegment.
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
import type { SourceMapSegment } from './sourcemap-segment';
/** The core fields shared by all version-3 source maps (readonly variant). */
export interface SourceMapV3 {
    file?: string | null;
    names: readonly string[];
    sourceRoot?: string;
    sources: readonly (string | null)[];
    sourcesContent?: readonly (string | null)[];
    version: 3;
}
/** A source map whose `mappings` are VLQ-encoded as a string. */
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
/** A source map whose `mappings` are decoded into numeric segments. */
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: readonly SourceMapSegment[][];
}
/** A line/column position pair. */
export interface Pos {
    line: number;
    column: number;
}
/**
 * A high-level mapping. The three union arms keep `source`/`original`/`name`
 * consistent: either all absent, or source+original present with the name
 * present or absent — a generated position can never carry a name without a
 * source.
 */
export declare type Mapping = {
    generated: Pos;
    source: undefined;
    original: undefined;
    name: undefined;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: string;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: undefined;
};
// Declarations for GenMapping: incrementally associate generated positions
// with original source positions (addSegment/addMapping) and serialize the
// result (toDecodedMap/toEncodedMap). The exported `let` function bindings
// are assigned by the implementation.
import type { SourceMapInput } from '@jridgewell/trace-mapping'; import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types'; export type { DecodedSourceMap, EncodedSourceMap, Mapping }; export declare type Options = { file?: string | null; sourceRoot?: string | null; }; /** * A low-level API to associate a generated position with an original source position. Line and * column here are 0-based, unlike `addMapping`. */ export declare let addSegment: { (map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void; (map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void; (map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void; }; /** * A high-level API to associate a generated position with an original source position. Line is * 1-based, but column is 0-based, due to legacy behavior in `source-map` library. */ export declare let addMapping: { (map: GenMapping, mapping: { generated: Pos; source?: null; original?: null; name?: null; content?: null; }): void; (map: GenMapping, mapping: { generated: Pos; source: string; original: Pos; name?: null; content?: string | null; }): void; (map: GenMapping, mapping: { generated: Pos; source: string; original: Pos; name: string; content?: string | null; }): void; }; /** * Same as `addSegment`, but will only add the segment if it generates useful information in the * resulting map. This only works correctly if segments are added **in order**, meaning you should * not add a segment with a lower generated line/column than one that came before. */ export declare let maybeAddSegment: typeof addSegment; /** * Same as `addMapping`, but will only add the mapping if it generates useful information in the * resulting map. 
This only works correctly if mappings are added **in order**, meaning you should * not add a mapping with a lower generated line/column than one that came before. */ export declare let maybeAddMapping: typeof addMapping; /** * Adds/removes the content of the source file to the source map. */ export declare let setSourceContent: (map: GenMapping, source: string, content: string | null) => void; /** * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects * a sourcemap, or to JSON.stringify. */ export declare let toDecodedMap: (map: GenMapping) => DecodedSourceMap; /** * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects * a sourcemap, or to JSON.stringify. */ export declare let toEncodedMap: (map: GenMapping) => EncodedSourceMap; /** * Constructs a new GenMapping, using the already present mappings of the input. */ export declare let fromMap: (input: SourceMapInput) => GenMapping; /** * Returns an array of high-level mapping objects for every recorded segment, which could then be * passed to the `source-map` library. */ export declare let allMappings: (map: GenMapping) => Mapping[]; /** * Provides the state to generate a sourcemap. */ export declare class GenMapping { private _names; private _sources; private _sourcesContent; private _mappings; file: string | null | undefined; sourceRoot: string | null | undefined; constructor({ file, sourceRoot }?: Options); }
// Named aliases for the numeric fields of a mapping segment.
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
/** A decoded mapping segment: 1, 4, or 5 fields, in generated-position order. */
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
// Tuple indices into SourceMapSegment.
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
// NOTE: get/put/pop are mutable `let` bindings — the implementation assigns
// them (from inside the class) so they can reach SetArray's private state
// without exposing instance methods.
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
export declare let get: (strarr: SetArray, key: string) => number | undefined;
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
export declare let put: (strarr: SetArray, key: string) => number;
/**
 * Pops the last added item out of the SetArray.
 */
export declare let pop: (strarr: SetArray) => void;
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
export declare class SetArray {
    private _indexes;
    array: readonly string[];
    constructor();
}
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
export let get: (strarr: SetArray, key: string) => number | undefined;
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
export let put: (strarr: SetArray, key: string) => number;
/**
 * Pops the last added item out of the SetArray.
 */
export let pop: (strarr: SetArray) => void;
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
export class SetArray {
    // `declare` keeps these from being emitted as class fields; they are
    // assigned in the constructor instead.
    // Maps a key to its index in `array`; built prototype-free below.
    private declare _indexes: { [key: string]: number | undefined };
    // The backing array, typed read-only externally but mutated internally
    // (via casts) by put/pop.
    declare array: readonly string[];

    constructor() {
        // Prototype-free object so arbitrary string keys cannot collide with
        // Object.prototype members.
        this._indexes = { __proto__: null } as any;
        this.array = [];
    }

    // The static block runs once at class-definition time and fills in the
    // module-level get/put/pop bindings, giving them access to the private
    // `_indexes` field without exposing instance methods.
    static {
        get = (strarr, key) => strarr._indexes[key];
        put = (strarr, key) => {
            // The key may or may not be present. If it is present, it's a number.
            const index = get(strarr, key);
            if (index !== undefined) return index;
            const { array, _indexes: indexes } = strarr;
            return (indexes[key] = (array as string[]).push(key) - 1);
        };
        pop = (strarr) => {
            const { array, _indexes: indexes } = strarr;
            if (array.length === 0) return;
            // Remove the last key and clear its index entry (set to undefined
            // rather than deleted).
            const last = (array as string[]).pop()!;
            indexes[last] = undefined;
        };
    }
}
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 *
 * @param input the URL or path to resolve
 * @param base the base URL/path to resolve against; may be undefined
 * @returns the resolved URL/path
 */
export default function resolve(input: string, base: string | undefined): string;
// Type definitions for `p-try`: wraps a (possibly synchronous) function call
// in a Promise so that synchronous throws become rejections.
declare const pTry: {
    /**
    Start a promise chain.

    @param fn - The function to run to start the promise chain.
    @param arguments - Arguments to pass to `fn`.
    @returns The value of calling `fn(...arguments)`. If the function throws an error, the returned `Promise` will be rejected with that error.

    @example
    ```
    import pTry = require('p-try');

    (async () => {
        try {
            const value = await pTry(() => {
                return synchronousFunctionThatMightThrow();
            });
            console.log(value);
        } catch (error) {
            console.error(error);
        }
    })();
    ```
    */
    <ValueType, ArgumentsType extends unknown[]>(
        fn: (...arguments: ArgumentsType) => PromiseLike<ValueType> | ValueType,
        ...arguments: ArgumentsType
    ): Promise<ValueType>;

    // TODO: remove this in the next major version, refactor the whole definition to:
    // declare function pTry<ValueType, ArgumentsType extends unknown[]>(
    //     fn: (...arguments: ArgumentsType) => PromiseLike<ValueType> | ValueType,
    //     ...arguments: ArgumentsType
    // ): Promise<ValueType>;
    // export = pTry;
    default: typeof pTry;
};

export = pTry;
// Type declarations for `camelcase`: callable CommonJS export plus an
// `Options` bag exposed through declaration merging on the namespace.
declare namespace camelcase {
  interface Options {
    /**
    Uppercase the first character: `foo-bar` → `FooBar`.

    @default false
    */
    readonly pascalCase?: boolean;
  }
}

declare const camelcase: {
  /**
  Convert a dash/dot/underscore/space separated string to camelCase or PascalCase: `foo-bar` → `fooBar`.

  @param input - String to convert to camel case.

  @example
  ```
  import camelCase = require('camelcase');

  camelCase('foo-bar');
  //=> 'fooBar'

  camelCase('foo_bar');
  //=> 'fooBar'

  camelCase('Foo-Bar');
  //=> 'fooBar'

  camelCase('Foo-Bar', {pascalCase: true});
  //=> 'FooBar'

  camelCase('--foo.bar', {pascalCase: false});
  //=> 'fooBar'

  camelCase('foo bar');
  //=> 'fooBar'

  console.log(process.argv[3]);
  //=> '--foo-bar'
  camelCase(process.argv[3]);
  //=> 'fooBar'

  camelCase(['foo', 'bar']);
  //=> 'fooBar'

  camelCase(['__foo__', '--bar'], {pascalCase: true});
  //=> 'FooBar'
  ```
  */
  (input: string | ReadonlyArray<string>, options?: camelcase.Options): string;

  // TODO: Remove this for the next major release, refactor the whole definition to:
  // declare function camelcase(
  //   input: string | ReadonlyArray<string>,
  //   options?: camelcase.Options
  // ): string;
  // export = camelcase;
  default: typeof camelcase;
};

export = camelcase;
// Type declarations for `resolve-from`: a callable CommonJS export with a
// non-throwing `silent` variant.
declare const resolveFrom: {
  /**
  Resolve the path of a module like [`require.resolve()`](https://nodejs.org/api/globals.html#globals_require_resolve) but from a given path.

  @param fromDirectory - Directory to resolve from.
  @param moduleId - What you would use in `require()`.
  @returns Resolved module path. Throws when the module can't be found.

  @example
  ```
  import resolveFrom = require('resolve-from');

  // There is a file at `./foo/bar.js`

  resolveFrom('foo', './bar');
  //=> '/Users/sindresorhus/dev/test/foo/bar.js'
  ```
  */
  (fromDirectory: string, moduleId: string): string;

  /**
  Resolve the path of a module like [`require.resolve()`](https://nodejs.org/api/globals.html#globals_require_resolve) but from a given path.

  @param fromDirectory - Directory to resolve from.
  @param moduleId - What you would use in `require()`.
  @returns Resolved module path or `undefined` when the module can't be found.
  */
  silent(fromDirectory: string, moduleId: string): string | undefined;
};

export = resolveFrom;
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

/** Repositories discovered for the queried roots plus the set of changed file paths. */
export declare type ChangedFiles = {
  repos: Repos;
  changedFiles: Paths;
};

export declare type ChangedFilesPromise = Promise<ChangedFiles>;

/** Scans the given roots and returns the repository roots found, grouped by VCS. */
export declare const findRepos: (roots: Array<string>) => Promise<Repos>;

/** Resolves the changed files for the given roots, honoring `options`. */
export declare const getChangedFilesForRoots: (
  roots: Array<string>,
  options: Options,
) => ChangedFilesPromise;

// Query options: limit to the last commit, include ancestor changes, diff
// against a named revision, or restrict to specific paths.
declare type Options = {
  lastCommit?: boolean;
  withAncestor?: boolean;
  changedSince?: string;
  includePaths?: Array<string>;
};

// Absolute file paths.
declare type Paths = Set<string>;

// One set of repository roots per supported VCS (`git`, `hg`, `sl`).
declare type Repos = {
  git: Paths;
  hg: Paths;
  sl: Paths;
};

export {};
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

/** Entry point for the command-line interface. */
export declare function runCLI(): Promise<void>;

/**
 * Runs the `create` flow. `rootDir` optionally overrides the target directory
 * — presumably defaults to the current working directory; confirm in the
 * implementation.
 */
export declare function runCreate(rootDir?: string): Promise<void>;

export {};
// Gulp build configuration (turbo-gulp) for the v8-coverage helpers package.
// Defines two targets — the publishable library (`lib`) and the mocha test
// suite (`test`) — plus aggregate tasks wired at the bottom.
import * as buildTools from "turbo-gulp";
import { LibTarget, registerLibTasks } from "turbo-gulp/targets/lib";
import { MochaTarget, registerMochaTasks } from "turbo-gulp/targets/mocha";

import gulp from "gulp";
import minimist from "minimist";

// CLI flags accepted by this gulpfile.
interface Options {
  // When set, publish a dev build: the value is appended to the version and
  // the npm dist-tag becomes "next" instead of "latest".
  devDist?: string;
}

// Parse `--dev-dist <id>` (aliased to `devDist`) from the command line.
const options: Options & minimist.ParsedArgs = minimist(process.argv.slice(2), {
  string: ["devDist"],
  default: {devDist: undefined},
  alias: {devDist: "dev-dist"},
});

// Shared project layout used by every target.
const project: buildTools.Project = {
  root: __dirname,
  packageJson: "package.json",
  buildDir: "build",
  distDir: "dist",
  srcDir: "src",
  typescript: {}
};

// Publishable library target.
const lib: LibTarget = {
  project,
  name: "lib",
  srcDir: "src/lib",
  scripts: ["**/*.ts"],
  mainModule: "index",
  dist: {
    // Rewrite package.json for publication: tag dev builds with a
    // `-build.<id>` suffix, drop scripts, and clear the private flag.
    packageJsonMap: (old: buildTools.PackageJson): buildTools.PackageJson => {
      const version: string = options.devDist !== undefined ? `${old.version}-build.${options.devDist}` : old.version;
      return <any> {...old, version, scripts: undefined, private: false};
    },
    npmPublish: {
      tag: options.devDist !== undefined ? "next" : "latest",
    },
  },
  tscOptions: {
    declaration: true,
    skipLibCheck: true,
  },
  typedoc: {
    dir: "typedoc",
    name: "Helpers for V8 coverage files",
    deploy: {
      repository: "git@github.com:demurgos/v8-coverage.git",
      branch: "gh-pages",
    },
  },
  copy: [
    {
      files: ["**/*.json"],
    },
  ],
  clean: {
    dirs: ["build/lib", "dist/lib"],
  },
};

// Mocha test target: compiles lib, unit tests, and e2e drivers together.
const test: MochaTarget = {
  project,
  name: "test",
  srcDir: "src",
  scripts: ["test/**/*.ts", "lib/**/*.ts", "e2e/*/*.ts"],
  customTypingsDir: "src/custom-typings",
  tscOptions: {
    allowSyntheticDefaultImports: true,
    esModuleInterop: true,
    skipLibCheck: true,
  },
  // generateTestMain: true,
  copy: [
    {
      src: "e2e",
      // <project-name>/(project|test-resources)/<any>
      files: ["*/project/**/*", "*/test-resources/**/*"],
      dest: "e2e",
    },
  ],
  clean: {
    dirs: ["build/test"],
  },
};

// Register the targets and the project-wide helper tasks.
const libTasks: any = registerLibTasks(gulp, lib);
registerMochaTasks(gulp, test);
buildTools.projectTasks.registerAll(gulp, project);

gulp.task("all:tsconfig.json", gulp.parallel("lib:tsconfig.json", "test:tsconfig.json"));
gulp.task("dist", libTasks.dist);
gulp.task("default", libTasks.dist);
// Declarations for the normalization helpers: canonical ordering of process,
// script, and function coverages (see normalize.ts for the implementations).
import { RangeTree } from "./range-tree";
import { FunctionCov, ProcessCov, ScriptCov } from "./types";

/**
 * Normalizes a process coverage.
 *
 * Sorts the scripts alphabetically by `url`.
 * Reassigns script ids: the script at index `0` receives `"0"`, the script at
 * index `1` receives `"1"` etc.
 * This does not normalize the script coverages.
 *
 * @param processCov Process coverage to normalize.
 */
export declare function normalizeProcessCov(processCov: ProcessCov): void;

/**
 * Normalizes a process coverage deeply.
 *
 * Normalizes the script coverages deeply, then normalizes the process coverage
 * itself.
 *
 * @param processCov Process coverage to normalize.
 */
export declare function deepNormalizeProcessCov(processCov: ProcessCov): void;

/**
 * Normalizes a script coverage.
 *
 * Sorts the function by root range (pre-order sort).
 * This does not normalize the function coverages.
 *
 * @param scriptCov Script coverage to normalize.
 */
export declare function normalizeScriptCov(scriptCov: ScriptCov): void;

/**
 * Normalizes a script coverage deeply.
 *
 * Normalizes the function coverages deeply, then normalizes the script coverage
 * itself.
 *
 * @param scriptCov Script coverage to normalize.
 */
export declare function deepNormalizeScriptCov(scriptCov: ScriptCov): void;

/**
 * Normalizes a function coverage.
 *
 * Sorts the ranges (pre-order sort).
 * TODO: Tree-based normalization of the ranges.
 *
 * @param funcCov Function coverage to normalize.
 */
export declare function normalizeFunctionCov(funcCov: FunctionCov): void;

/**
 * @internal
 */
export declare function normalizeRangeTree(tree: RangeTree): void;
/**
 * Process-level coverage: one `ScriptCov` per script, matching the shape of
 * V8/Inspector precise-coverage output.
 */
export interface ProcessCov {
  result: ScriptCov[];
}

/** Coverage for a single script, identified by `scriptId` and `url`. */
export interface ScriptCov {
  scriptId: string;
  url: string;
  functions: FunctionCov[];
}

/**
 * Coverage for a single function. `ranges[0]` is the root range spanning the
 * whole function; `isBlockCoverage` distinguishes block-level granularity
 * from function-level granularity.
 */
export interface FunctionCov {
  functionName: string;
  ranges: RangeCov[];
  isBlockCoverage: boolean;
}

/** A half-open offset interval `[start, end)`. */
export interface Range {
  readonly start: number;
  readonly end: number;
}

/** A counted source range: `count` executions of `[startOffset, endOffset)`. */
export interface RangeCov {
  startOffset: number;
  endOffset: number;
  count: number;
}
// Declarations for the deep-clone helpers (see clone.ts).
import { FunctionCov, ProcessCov, RangeCov, ScriptCov } from "./types";

/**
 * Creates a deep copy of a process coverage.
 *
 * @param processCov Process coverage to clone.
 * @return Cloned process coverage.
 */
export declare function cloneProcessCov(processCov: Readonly<ProcessCov>): ProcessCov;

/**
 * Creates a deep copy of a script coverage.
 *
 * @param scriptCov Script coverage to clone.
 * @return Cloned script coverage.
 */
export declare function cloneScriptCov(scriptCov: Readonly<ScriptCov>): ScriptCov;

/**
 * Creates a deep copy of a function coverage.
 *
 * @param functionCov Function coverage to clone.
 * @return Cloned function coverage.
 */
export declare function cloneFunctionCov(functionCov: Readonly<FunctionCov>): FunctionCov;

/**
 * Creates a deep copy of a range coverage.
 *
 * @param rangeCov Range coverage to clone.
 * @return Cloned range coverage.
 */
export declare function cloneRangeCov(rangeCov: Readonly<RangeCov>): RangeCov;
// Declaration for RangeTree: a tree of nested counted ranges used as the
// intermediate representation when normalizing and merging function coverages.
import { RangeCov } from "./types";

export declare class RangeTree {
  start: number;
  end: number;
  // Count difference relative to the parent node (root stores the absolute count).
  delta: number;
  children: RangeTree[];

  constructor(start: number, end: number, delta: number, children: RangeTree[]);

  /**
   * @precondition `ranges` are well-formed and pre-order sorted
   */
  static fromSortedRanges(ranges: ReadonlyArray<RangeCov>): RangeTree | undefined;

  normalize(): void;

  /**
   * @precondition `tree.start < value && value < tree.end`
   * @return RangeTree Right part
   */
  split(value: number): RangeTree;

  /**
   * Get the range coverages corresponding to the tree.
   *
   * The ranges are pre-order sorted.
   */
  toRanges(): RangeCov[];
}
// Public API surface: ascii debugging helpers, deep-clone utilities,
// comparators, merge functions, the RangeTree IR, and the coverage types.
export { emitForest, emitForestLines, parseFunctionRanges, parseOffsets } from "./ascii";
export { cloneFunctionCov, cloneProcessCov, cloneScriptCov, cloneRangeCov } from "./clone";
export { compareScriptCovs, compareFunctionCovs, compareRangeCovs } from "./compare";
export { mergeFunctionCovs, mergeProcessCovs, mergeScriptCovs } from "./merge";
export { RangeTree } from "./range-tree";
export { ProcessCov, ScriptCov, FunctionCov, RangeCov } from "./types";
// Declarations for the merge helpers (see merge.ts).
import { FunctionCov, ProcessCov, ScriptCov } from "./types";

/**
 * Merges a list of process coverages.
 *
 * The result is normalized.
 * The input values may be mutated, it is not safe to use them after passing
 * them to this function.
 * The computation is synchronous.
 *
 * @param processCovs Process coverages to merge.
 * @return Merged process coverage.
 */
export declare function mergeProcessCovs(processCovs: ReadonlyArray<ProcessCov>): ProcessCov;

/**
 * Merges a list of matching script coverages.
 *
 * Scripts are matching if they have the same `url`.
 * The result is normalized.
 * The input values may be mutated, it is not safe to use them after passing
 * them to this function.
 * The computation is synchronous.
 *
 * @param scriptCovs Script coverages to merge.
 * @return Merged script coverage, or `undefined` if the input list was empty.
 */
export declare function mergeScriptCovs(scriptCovs: ReadonlyArray<ScriptCov>): ScriptCov | undefined;

/**
 * Merges a list of matching function coverages.
 *
 * Functions are matching if their root ranges have the same span.
 * The result is normalized.
 * The input values may be mutated, it is not safe to use them after passing
 * them to this function.
 * The computation is synchronous.
 *
 * @param funcCovs Function coverages to merge.
 * @return Merged function coverage, or `undefined` if the input list was empty.
 */
export declare function mergeFunctionCovs(funcCovs: ReadonlyArray<FunctionCov>): FunctionCov | undefined;
// Declarations for the ascii-art serialization of range trees, used to write
// readable fixtures: trees are drawn as `[count----)` segments over an offset ruler.
import { RangeCov } from "./types";

// Read-only structural view of a range tree (matches RangeTree's shape,
// with `count` instead of `delta`).
interface ReadonlyRangeTree {
  readonly start: number;
  readonly end: number;
  readonly count: number;
  readonly children: ReadonlyRangeTree[];
}

/** Renders a forest of range trees as a single string. */
export declare function emitForest(trees: ReadonlyArray<ReadonlyRangeTree>): string;

/** Renders a forest of range trees as a list of lines (first line is the offset ruler). */
export declare function emitForestLines(trees: ReadonlyArray<ReadonlyRangeTree>): string[];

/** Parses drawn ranges back into `RangeCov`s using a column→offset map. */
export declare function parseFunctionRanges(text: string, offsetMap: Map<number, number>): RangeCov[];

/** Parses an offset ruler line into a column→offset map. */
export declare function parseOffsets(text: string): Map<number, number>;

export {};
// Declarations for the comparator helpers (see compare.ts).
import { FunctionCov, RangeCov, ScriptCov } from "./types";

/**
 * Compares two script coverages.
 *
 * The result corresponds to the comparison of their `url` value (alphabetical sort).
 */
export declare function compareScriptCovs(a: Readonly<ScriptCov>, b: Readonly<ScriptCov>): number;

/**
 * Compares two function coverages.
 *
 * The result corresponds to the comparison of the root ranges.
 */
export declare function compareFunctionCovs(a: Readonly<FunctionCov>, b: Readonly<FunctionCov>): number;

/**
 * Compares two range coverages.
 *
 * The ranges are first ordered by ascending `startOffset` and then by
 * descending `endOffset`.
 * This corresponds to a pre-order tree traversal.
 */
export declare function compareRangeCovs(a: Readonly<RangeCov>, b: Readonly<RangeCov>): number;
import { FunctionCov, ProcessCov, RangeCov, ScriptCov } from "./types";

/**
 * Creates a deep copy of a process coverage.
 *
 * @param processCov Process coverage to clone.
 * @return Cloned process coverage.
 */
export function cloneProcessCov(processCov: Readonly<ProcessCov>): ProcessCov {
  return {
    result: processCov.result.map(cloneScriptCov),
  };
}

/**
 * Creates a deep copy of a script coverage.
 *
 * @param scriptCov Script coverage to clone.
 * @return Cloned script coverage.
 */
export function cloneScriptCov(scriptCov: Readonly<ScriptCov>): ScriptCov {
  return {
    scriptId: scriptCov.scriptId,
    url: scriptCov.url,
    functions: scriptCov.functions.map(cloneFunctionCov),
  };
}

/**
 * Creates a deep copy of a function coverage.
 *
 * @param functionCov Function coverage to clone.
 * @return Cloned function coverage.
 */
export function cloneFunctionCov(functionCov: Readonly<FunctionCov>): FunctionCov {
  return {
    functionName: functionCov.functionName,
    ranges: functionCov.ranges.map(cloneRangeCov),
    isBlockCoverage: functionCov.isBlockCoverage,
  };
}

/**
 * Creates a deep copy of a range coverage.
 *
 * @param rangeCov Range coverage to clone.
 * @return Cloned range coverage.
 */
export function cloneRangeCov(rangeCov: Readonly<RangeCov>): RangeCov {
  const {startOffset, endOffset, count} = rangeCov;
  return {startOffset, endOffset, count};
}
import { FunctionCov, RangeCov, ScriptCov } from "./types";

/**
 * Compares two script coverages.
 *
 * The result corresponds to the comparison of their `url` value (alphabetical sort).
 */
export function compareScriptCovs(a: Readonly<ScriptCov>, b: Readonly<ScriptCov>): number {
  if (a.url < b.url) {
    return -1;
  }
  if (a.url > b.url) {
    return 1;
  }
  return 0;
}

/**
 * Compares two function coverages.
 *
 * The result corresponds to the comparison of the root ranges.
 */
export function compareFunctionCovs(a: Readonly<FunctionCov>, b: Readonly<FunctionCov>): number {
  const rootA: Readonly<RangeCov> = a.ranges[0];
  const rootB: Readonly<RangeCov> = b.ranges[0];
  return compareRangeCovs(rootA, rootB);
}

/**
 * Compares two range coverages.
 *
 * The ranges are first ordered by ascending `startOffset` and then by
 * descending `endOffset`.
 * This corresponds to a pre-order tree traversal.
 */
export function compareRangeCovs(a: Readonly<RangeCov>, b: Readonly<RangeCov>): number {
  const startDiff: number = a.startOffset - b.startOffset;
  return startDiff !== 0 ? startDiff : b.endOffset - a.endOffset;
}
/**
 * Process-level coverage: one `ScriptCov` per script, matching the shape of
 * V8/Inspector precise-coverage output.
 */
export interface ProcessCov {
  result: ScriptCov[];
}

/** Coverage for a single script, identified by `scriptId` and `url`. */
export interface ScriptCov {
  scriptId: string;
  url: string;
  functions: FunctionCov[];
}

/**
 * Coverage for a single function. `ranges[0]` is the root range spanning the
 * whole function; `isBlockCoverage` distinguishes block-level granularity
 * from function-level granularity.
 */
export interface FunctionCov {
  functionName: string;
  ranges: RangeCov[];
  isBlockCoverage: boolean;
}

/** A half-open offset interval `[start, end)`. */
export interface Range {
  readonly start: number;
  readonly end: number;
}

/** A counted source range: `count` executions of `[startOffset, endOffset)`. */
export interface RangeCov {
  startOffset: number;
  endOffset: number;
  count: number;
}
import { compareFunctionCovs, compareRangeCovs, compareScriptCovs } from "./compare";
import { RangeTree } from "./range-tree";
import { FunctionCov, ProcessCov, ScriptCov } from "./types";

/**
 * Normalizes a process coverage.
 *
 * Sorts the scripts alphabetically by `url`, then reassigns script ids so
 * that the script at index `0` receives `"0"`, index `1` receives `"1"`, etc.
 * This does not normalize the script coverages themselves.
 *
 * @param processCov Process coverage to normalize.
 */
export function normalizeProcessCov(processCov: ProcessCov): void {
  const scripts: ScriptCov[] = processCov.result;
  scripts.sort(compareScriptCovs);
  for (let index: number = 0; index < scripts.length; index++) {
    scripts[index].scriptId = index.toString(10);
  }
}

/**
 * Normalizes a process coverage deeply: first every script coverage, then the
 * process coverage itself.
 *
 * @param processCov Process coverage to normalize.
 */
export function deepNormalizeProcessCov(processCov: ProcessCov): void {
  for (const script of processCov.result) {
    deepNormalizeScriptCov(script);
  }
  normalizeProcessCov(processCov);
}

/**
 * Normalizes a script coverage by sorting its functions by root range
 * (pre-order sort). Function coverages are left untouched.
 *
 * @param scriptCov Script coverage to normalize.
 */
export function normalizeScriptCov(scriptCov: ScriptCov): void {
  scriptCov.functions.sort(compareFunctionCovs);
}

/**
 * Normalizes a script coverage deeply: first every function coverage, then
 * the script coverage itself.
 *
 * @param scriptCov Script coverage to normalize.
 */
export function deepNormalizeScriptCov(scriptCov: ScriptCov): void {
  for (const funcCov of scriptCov.functions) {
    normalizeFunctionCov(funcCov);
  }
  normalizeScriptCov(scriptCov);
}

/**
 * Normalizes a function coverage: sorts the ranges pre-order, then rebuilds
 * them through a normalized RangeTree so nested ranges are in canonical form.
 *
 * @param funcCov Function coverage to normalize.
 */
export function normalizeFunctionCov(funcCov: FunctionCov): void {
  funcCov.ranges.sort(compareRangeCovs);
  // Non-null assertion: valid function coverages always carry a root range,
  // so `fromSortedRanges` cannot return `undefined` here.
  const tree: RangeTree = RangeTree.fromSortedRanges(funcCov.ranges)!;
  normalizeRangeTree(tree);
  funcCov.ranges = tree.toRanges();
}

/**
 * @internal
 */
export function normalizeRangeTree(tree: RangeTree): void {
  tree.normalize();
}
// ASCII-art serialization of range trees: each tree layer is drawn as
// `[count----)` segments aligned on a ruler of source offsets. Used to write
// and read human-friendly coverage fixtures.
import { compareRangeCovs } from "./compare";
import { RangeCov } from "./types";

// Read-only structural view of a range tree (same shape as RangeTree, but
// with an absolute `count` instead of a `delta`).
interface ReadonlyRangeTree {
  readonly start: number;
  readonly end: number;
  readonly count: number;
  readonly children: ReadonlyRangeTree[];
}

// Renders a forest as one string (ruler line first, then one multi-line
// drawing per tree).
export function emitForest(trees: ReadonlyArray<ReadonlyRangeTree>): string {
  return emitForestLines(trees).join("\n");
}

// Renders a forest as a list of entries: the offset ruler, then one
// (possibly multi-line) string per tree.
export function emitForestLines(trees: ReadonlyArray<ReadonlyRangeTree>): string[] {
  const colMap: Map<number, number> = getColMap(trees);
  const header: string = emitOffsets(colMap);
  return [header, ...trees.map(tree => emitTree(tree, colMap).join("\n"))];
}

// Assigns a text column to every distinct offset appearing in the forest.
// Columns are evenly spaced wide enough for the largest offset plus the
// `[`, `)` and one separator character.
function getColMap(trees: Iterable<ReadonlyRangeTree>): Map<number, number> {
  // Collect every distinct start/end offset via an explicit DFS stack.
  const eventSet: Set<number> = new Set();
  for (const tree of trees) {
    const stack: ReadonlyRangeTree[] = [tree];
    while (stack.length > 0) {
      const cur: ReadonlyRangeTree = stack.pop()!;
      eventSet.add(cur.start);
      eventSet.add(cur.end);
      for (const child of cur.children) {
        stack.push(child);
      }
    }
  }
  const events: number[] = [...eventSet];
  events.sort((a, b) => a - b);
  let maxDigits: number = 1;
  for (const event of events) {
    maxDigits = Math.max(maxDigits, event.toString(10).length);
  }
  const colWidth: number = maxDigits + 3;
  const colMap: Map<number, number> = new Map();
  for (const [i, event] of events.entries()) {
    colMap.set(event, i * colWidth);
  }
  return colMap;
}

// Flattens a tree into breadth-first layers and draws one text line per layer.
function emitTree(tree: ReadonlyRangeTree, colMap: Map<number, number>): string[] {
  const layers: ReadonlyRangeTree[][] = [];
  let nextLayer: ReadonlyRangeTree[] = [tree];
  while (nextLayer.length > 0) {
    const layer: ReadonlyRangeTree[] = nextLayer;
    layers.push(layer);
    nextLayer = [];
    for (const node of layer) {
      for (const child of node.children) {
        nextLayer.push(child);
      }
    }
  }
  return layers.map(layer => emitTreeLayer(layer, colMap));
}

// Parses every line of a drawing back into RangeCovs, pre-order sorted.
// `offsetMap` maps text columns back to source offsets (see `parseOffsets`).
export function parseFunctionRanges(text: string, offsetMap: Map<number, number>): RangeCov[] {
  const result: RangeCov[] = [];
  for (const line of text.split("\n")) {
    for (const range of parseTreeLayer(line, offsetMap)) {
      result.push(range);
    }
  }
  result.sort(compareRangeCovs);
  return result;
}

/**
 * Draws one layer of sibling ranges as a single text line.
 *
 * @param layer Sorted list of disjoint trees.
 * @param colMap Offset → column assignment for the whole forest.
 */
function emitTreeLayer(layer: ReadonlyRangeTree[], colMap: Map<number, number>): string {
  const line: string[] = [];
  let curIdx: number = 0;
  for (const {start, end, count} of layer) {
    const startIdx: number = colMap.get(start)!;
    const endIdx: number = colMap.get(end)!;
    if (startIdx > curIdx) {
      line.push(" ".repeat(startIdx - curIdx));
    }
    line.push(emitRange(count, endIdx - startIdx));
    curIdx = endIdx;
  }
  return line.join("");
}

// Parses one drawn line: each `[count----)` segment is located by column and
// translated back to offsets through `offsetMap`.
function parseTreeLayer(text: string, offsetMap: Map<number, number>): RangeCov[] {
  const result: RangeCov[] = [];
  const regex: RegExp = /\[(\d+)-*\)/gs;
  while (true) {
    const match: RegExpMatchArray | null = regex.exec(text);
    if (match === null) {
      break;
    }
    const startIdx: number = match.index!;
    const endIdx: number = startIdx + match[0].length;
    const count: number = parseInt(match[1], 10);
    const startOffset: number | undefined = offsetMap.get(startIdx);
    const endOffset: number | undefined = offsetMap.get(endIdx);
    if (startOffset === undefined || endOffset === undefined) {
      throw new Error(`Invalid offsets for: ${JSON.stringify(text)}`);
    }
    result.push({startOffset, endOffset, count});
  }
  return result;
}

// Draws a single `[count----)` segment padded with hyphens to exactly `len`
// characters (no padding if the count alone already fills the width).
function emitRange(count: number, len: number): string {
  const rangeStart: string = `[${count.toString(10)}`;
  const rangeEnd: string = ")";
  const hyphensLen: number = len - (rangeStart.length + rangeEnd.length);
  const hyphens: string = "-".repeat(Math.max(0, hyphensLen));
  return `${rangeStart}${hyphens}${rangeEnd}`;
}

// Draws the ruler line: every offset printed at its assigned column.
function emitOffsets(colMap: Map<number, number>): string {
  let line: string = "";
  for (const [event, col] of colMap) {
    if (line.length < col) {
      line += " ".repeat(col - line.length);
    }
    line += event.toString(10);
  }
  return line;
}

// Parses a ruler line into a column → offset map (inverse of `emitOffsets`).
export function parseOffsets(text: string): Map<number, number> {
  const result: Map<number, number> = new Map();
  const regex: RegExp = /\d+/gs;
  while (true) {
    const match: RegExpExecArray | null = regex.exec(text);
    if (match === null) {
      break;
    }
    result.set(match.index, parseInt(match[0], 10));
  }
  return result;
}
// Public API surface: ascii debugging helpers, deep-clone utilities,
// comparators, merge functions, the RangeTree IR, and the coverage types.
export { emitForest, emitForestLines, parseFunctionRanges, parseOffsets } from "./ascii";
export { cloneFunctionCov, cloneProcessCov, cloneScriptCov, cloneRangeCov } from "./clone";
export { compareScriptCovs, compareFunctionCovs, compareRangeCovs } from "./compare";
export { mergeFunctionCovs, mergeProcessCovs, mergeScriptCovs } from "./merge";
export { RangeTree } from "./range-tree";
export { ProcessCov, ScriptCov, FunctionCov, RangeCov } from "./types";
import { RangeCov } from "./types";

/**
 * Tree of nested counted ranges: the intermediate representation used when
 * normalizing and merging function coverages. `delta` stores the count
 * difference relative to the parent node (the root's delta is its absolute
 * count), so re-parenting a node only requires adjusting its delta.
 */
export class RangeTree {
  start: number;
  end: number;
  delta: number;
  children: RangeTree[];

  constructor(
    start: number,
    end: number,
    delta: number,
    children: RangeTree[],
  ) {
    this.start = start;
    this.end = end;
    this.delta = delta;
    this.children = children;
  }

  /**
   * Builds a tree from flat ranges by tracking the current ancestor chain on
   * a stack: a range nests under the deepest open ancestor that still
   * contains its start.
   *
   * @precondition `ranges` are well-formed and pre-order sorted
   */
  static fromSortedRanges(ranges: ReadonlyArray<RangeCov>): RangeTree | undefined {
    let root: RangeTree | undefined;
    // Stack of parent trees and parent counts.
    const stack: [RangeTree, number][] = [];
    for (const range of ranges) {
      const node: RangeTree = new RangeTree(range.startOffset, range.endOffset, range.count, []);
      if (root === undefined) {
        root = node;
        stack.push([node, range.count]);
        continue;
      }
      let parent: RangeTree;
      let parentCount: number;
      // Pop ancestors that end before this range starts.
      while (true) {
        [parent, parentCount] = stack[stack.length - 1];
        // assert: `top !== undefined` (the ranges are sorted)
        if (range.startOffset < parent.end) {
          break;
        } else {
          stack.pop();
        }
      }
      // Store the count relative to the parent.
      node.delta -= parentCount;
      parent.children.push(node);
      stack.push([node, range.count]);
    }
    return root;
  }

  /**
   * Canonicalizes the tree in place: adjacent siblings with equal delta are
   * fused into a single node (their children are hoisted with adjusted
   * deltas), and a sole child spanning the whole parent is collapsed into it.
   */
  normalize(): void {
    const children: RangeTree[] = [];
    let curEnd: number;
    let head: RangeTree | undefined;
    const tail: RangeTree[] = [];
    for (const child of this.children) {
      if (head === undefined) {
        head = child;
      } else if (child.delta === head.delta && child.start === curEnd!) {
        // Same delta and contiguous: extend the current chain.
        tail.push(child);
      } else {
        endChain();
        head = child;
      }
      curEnd = child.end;
    }
    if (head !== undefined) {
      endChain();
    }

    // A single child covering the exact parent span is merged into the parent.
    if (children.length === 1) {
      const child: RangeTree = children[0];
      if (child.start === this.start && child.end === this.end) {
        this.delta += child.delta;
        this.children = child.children;
        // `.lazyCount` is zero for both (both are after normalization)
        return;
      }
    }

    this.children = children;

    // Fuses the chain `head + tail` into `head`, hoisting the tail's
    // children (with deltas re-based onto `head`), then recurses.
    function endChain(): void {
      if (tail.length !== 0) {
        head!.end = tail[tail.length - 1].end;
        for (const tailTree of tail) {
          for (const subChild of tailTree.children) {
            subChild.delta += tailTree.delta - head!.delta;
            head!.children.push(subChild);
          }
        }
        tail.length = 0;
      }
      head!.normalize();
      children.push(head!);
    }
  }

  /**
   * Splits the tree at `value`: this node keeps `[start, value)` and the
   * returned node takes `[value, end)`, recursively splitting the child that
   * straddles the cut.
   *
   * @precondition `tree.start < value && value < tree.end`
   * @return RangeTree Right part
   */
  split(value: number): RangeTree {
    let leftChildLen: number = this.children.length;
    let mid: RangeTree | undefined;
    // TODO(perf): Binary search (check overhead)
    for (let i: number = 0; i < this.children.length; i++) {
      const child: RangeTree = this.children[i];
      if (child.start < value && value < child.end) {
        mid = child.split(value);
        leftChildLen = i + 1;
        break;
      } else if (child.start >= value) {
        leftChildLen = i;
        break;
      }
    }
    const rightLen: number = this.children.length - leftChildLen;
    const rightChildren: RangeTree[] = this.children.splice(leftChildLen, rightLen);
    if (mid !== undefined) {
      rightChildren.unshift(mid);
    }
    const result: RangeTree = new RangeTree(
      value,
      this.end,
      this.delta,
      rightChildren,
    );
    this.end = value;
    return result;
  }

  /**
   * Get the range coverages corresponding to the tree.
   *
   * The ranges are pre-order sorted. Deltas are resolved back into absolute
   * counts by accumulating along the ancestor chain.
   */
  toRanges(): RangeCov[] {
    const ranges: RangeCov[] = [];
    // Stack of parent trees and counts.
    const stack: [RangeTree, number][] = [[this, 0]];
    while (stack.length > 0) {
      const [cur, parentCount]: [RangeTree, number] = stack.pop()!;
      const count: number = parentCount + cur.delta;
      ranges.push({startOffset: cur.start, endOffset: cur.end, count});
      // Push children in reverse so they pop in pre-order.
      for (let i: number = cur.children.length - 1; i >= 0; i--) {
        stack.push([cur.children[i], count]);
      }
    }
    return ranges;
  }
}
import { deepNormalizeScriptCov, normalizeFunctionCov, normalizeProcessCov, normalizeRangeTree, normalizeScriptCov, } from "./normalize"; import { RangeTree } from "./range-tree"; import { FunctionCov, ProcessCov, Range, RangeCov, ScriptCov } from "./types"; /** * Merges a list of process coverages. * * The result is normalized. * The input values may be mutated, it is not safe to use them after passing * them to this function. * The computation is synchronous. * * @param processCovs Process coverages to merge. * @return Merged process coverage. */ export function mergeProcessCovs(processCovs: ReadonlyArray<ProcessCov>): ProcessCov { if (processCovs.length === 0) { return {result: []}; } const urlToScripts: Map<string, ScriptCov[]> = new Map(); for (const processCov of processCovs) { for (const scriptCov of processCov.result) { let scriptCovs: ScriptCov[] | undefined = urlToScripts.get(scriptCov.url); if (scriptCovs === undefined) { scriptCovs = []; urlToScripts.set(scriptCov.url, scriptCovs); } scriptCovs.push(scriptCov); } } const result: ScriptCov[] = []; for (const scripts of urlToScripts.values()) { // assert: `scripts.length > 0` result.push(mergeScriptCovs(scripts)!); } const merged: ProcessCov = {result}; normalizeProcessCov(merged); return merged; } /** * Merges a list of matching script coverages. * * Scripts are matching if they have the same `url`. * The result is normalized. * The input values may be mutated, it is not safe to use them after passing * them to this function. * The computation is synchronous. * * @param scriptCovs Process coverages to merge. * @return Merged script coverage, or `undefined` if the input list was empty. 
*/ export function mergeScriptCovs(scriptCovs: ReadonlyArray<ScriptCov>): ScriptCov | undefined { if (scriptCovs.length === 0) { return undefined; } else if (scriptCovs.length === 1) { const merged: ScriptCov = scriptCovs[0]; deepNormalizeScriptCov(merged); return merged; } const first: ScriptCov = scriptCovs[0]; const scriptId: string = first.scriptId; const url: string = first.url; const rangeToFuncs: Map<string, FunctionCov[]> = new Map(); for (const scriptCov of scriptCovs) { for (const funcCov of scriptCov.functions) { const rootRange: string = stringifyFunctionRootRange(funcCov); let funcCovs: FunctionCov[] | undefined = rangeToFuncs.get(rootRange); if (funcCovs === undefined || // if the entry in rangeToFuncs is function-level granularity and // the new coverage is block-level, prefer block-level. (!funcCovs[0].isBlockCoverage && funcCov.isBlockCoverage)) { funcCovs = []; rangeToFuncs.set(rootRange, funcCovs); } else if (funcCovs[0].isBlockCoverage && !funcCov.isBlockCoverage) { // if the entry in rangeToFuncs is block-level granularity, we should // not append function level granularity. continue; } funcCovs.push(funcCov); } } const functions: FunctionCov[] = []; for (const funcCovs of rangeToFuncs.values()) { // assert: `funcCovs.length > 0` functions.push(mergeFunctionCovs(funcCovs)!); } const merged: ScriptCov = {scriptId, url, functions}; normalizeScriptCov(merged); return merged; } /** * Returns a string representation of the root range of the function. * * This string can be used to match function with same root range. * The string is derived from the start and end offsets of the root range of * the function. * This assumes that `ranges` is non-empty (true for valid function coverages). 
* * @param funcCov Function coverage with the range to stringify * @internal */ function stringifyFunctionRootRange(funcCov: Readonly<FunctionCov>): string { const rootRange: RangeCov = funcCov.ranges[0]; return `${rootRange.startOffset.toString(10)};${rootRange.endOffset.toString(10)}`; } /**
* Merges a list of matching function coverages. * * Functions are matching if their root ranges have the same span. * The result is normalized. * The input values may be mutated, it is not safe to use them after passing * them to this function. * The computation is synchronous. * * @param funcCovs Function coverages to merge. * @return Merged function coverage, or `undefined` if the input list was empty. */ export function mergeFunctionCovs(funcCovs: ReadonlyArray<FunctionCov>): FunctionCov | undefined { if (funcCovs.length === 0) { return undefined; } else if (funcCovs.length === 1) { const merged: FunctionCov = funcCovs[0]; normalizeFunctionCov(merged); return merged; } const functionName: string = funcCovs[0].functionName; const trees: RangeTree[] = []; for (const funcCov of funcCovs) { // assert: `fn.ranges.length > 0` // assert: `fn.ranges` is sorted trees.push(RangeTree.fromSortedRanges(funcCov.ranges)!); } // assert: `trees.length > 0` const mergedTree: RangeTree = mergeRangeTrees(trees)!; normalizeRangeTree(mergedTree); const ranges: RangeCov[] = mergedTree.toRanges(); const isBlockCoverage: boolean = !(ranges.length === 1 && ranges[0].count === 0); const merged: FunctionCov = {functionName, ranges, isBlockCoverage}; // assert: `merged` is normalized return merged; } /** * @precondition Same `start` and `end` for all the trees */ function mergeRangeTrees(trees: ReadonlyArray<RangeTree>): RangeTree | undefined { if (trees.length <= 1) { return trees[0]; } const first: RangeTree = trees[0]; let delta: number = 0; for (const tree of trees) { delta += tree.delta; } const children: RangeTree[] = mergeRangeTreeChildren(trees); return new RangeTree(first.start, first.end, delta, children); } class RangeTreeWithParent { readonly parentIndex: number; readonly tree: RangeTree; constructor(parentIndex: number, tree: RangeTree) { this.parentIndex = parentIndex; this.tree = tree; } } class StartEvent { readonly offset: number; readonly trees: RangeTreeWithParent[]; 
constructor(offset: number, trees: RangeTreeWithParent[]) { this.offset = offset; this.trees = trees; } static compare(a: StartEvent, b: StartEvent): number { return a.offset - b.offset; } } class StartEventQueue { private readonly queue: StartEvent[]; private nextIndex: number; private pendingOffset: number; private pendingTrees: RangeTreeWithParent[] | undefined; private constructor(queue: StartEvent[]) { this.queue = queue; this.nextIndex = 0; this.pendingOffset = 0; this.pendingTrees = undefined; } static fromParentTrees(parentTrees: ReadonlyArray<RangeTree>): StartEventQueue { const startToTrees: Map<number, RangeTreeWithParent[]> = new Map(); for (const [parentIndex, parentTree] of parentTrees.entries()) { for (const child of parentTree.children) { let trees: RangeTreeWithParent[] | undefined = startToTrees.get(child.start); if (trees === undefined) { trees = []; startToTrees.set(child.start, trees); } trees.push(new RangeTreeWithParent(parentIndex, child)); } } const queue: StartEvent[] = []; for (const [startOffset, trees] of startToTrees) { queue.push(new StartEvent(startOffset, trees)); } queue.sort(StartEvent.compare); return new StartEventQueue(queue); } setPendingOffset(offset: number): void { this.pendingOffset = offset; } pushPendingTree(tree: RangeTreeWithParent): void { if (this.pendingTrees === undefined) { this.pendingTrees = []; } this.pendingTrees.push(tree); } next(): StartEvent | undefined { const pendingTrees: RangeTreeWithParent[] | undefined = this.pendingTrees; const nextEvent: StartEvent | undefined = this.queue[this.nextIndex]; if (pendingTrees === undefined) { this.nextIndex++; return nextEvent; } else
if (nextEvent === undefined) { this.pendingTrees = undefined; return new StartEvent(this.pendingOffset, pendingTrees); } else { if (this.pendingOffset < nextEvent.offset) { this.pendingTrees = undefined; return new StartEvent(this.pendingOffset, pendingTrees); } else { if (this.pendingOffset === nextEvent.offset) { this.pendingTrees = undefined; for (const tree of pendingTrees) { nextEvent.trees.push(tree); } } this.nextIndex++; return nextEvent; } } } } function mergeRangeTreeChildren(parentTrees: ReadonlyArray<RangeTree>): RangeTree[] { const result: RangeTree[] = []; const startEventQueue: StartEventQueue = StartEventQueue.fromParentTrees(parentTrees); const parentToNested: Map<number, RangeTree[]> = new Map(); let openRange: Range | undefined; while (true) { const event: StartEvent | undefined = startEventQueue.next(); if (event === undefined) { break; } if (openRange !== undefined && openRange.end <= event.offset) { result.push(nextChild(openRange, parentToNested)); openRange = undefined; } if (openRange === undefined) { let openRangeEnd: number = event.offset + 1; for (const {parentIndex, tree} of event.trees) { openRangeEnd = Math.max(openRangeEnd, tree.end); insertChild(parentToNested, parentIndex, tree); } startEventQueue.setPendingOffset(openRangeEnd); openRange = {start: event.offset, end: openRangeEnd}; } else { for (const {parentIndex, tree} of event.trees) { if (tree.end > openRange.end) { const right: RangeTree = tree.split(openRange.end); startEventQueue.pushPendingTree(new RangeTreeWithParent(parentIndex, right)); } insertChild(parentToNested, parentIndex, tree); } } } if (openRange !== undefined) { result.push(nextChild(openRange, parentToNested)); } return result; } function insertChild(parentToNested: Map<number, RangeTree[]>, parentIndex: number, tree: RangeTree): void { let nested: RangeTree[] | undefined = parentToNested.get(parentIndex); if (nested === undefined) { nested = []; parentToNested.set(parentIndex, nested); } nested.push(tree); } 
/**
 * Builds the merged child for the open range from the per-parent trees and
 * resets the accumulator map for the next range.
 *
 * A parent whose single gathered tree exactly spans the open range is used
 * as-is; otherwise a zero-delta wrapper tree over the open range is created.
 */
function nextChild(openRange: Range, parentToNested: Map<number, RangeTree[]>): RangeTree {
  const perParent: RangeTree[] = [...parentToNested.values()].map((nested: RangeTree[]): RangeTree => {
    const only: RangeTree | undefined = nested.length === 1 ? nested[0] : undefined;
    if (only !== undefined && only.start === openRange.start && only.end === openRange.end) {
      return only;
    }
    return new RangeTree(openRange.start, openRange.end, 0, nested);
  });
  parentToNested.clear();
  return mergeRangeTrees(perParent)!;
}
import chai from "chai"; import fs from "fs"; import path from "path"; import { FunctionCov, mergeFunctionCovs, mergeProcessCovs, mergeScriptCovs, ProcessCov, ScriptCov } from "../lib"; const REPO_ROOT: string = path.join(__dirname, "..", "..", "..", ".."); const BENCHES_INPUT_DIR: string = path.join(REPO_ROOT, "benches"); const BENCHES_DIR: string = path.join(REPO_ROOT, "test-data", "merge", "benches"); const RANGES_DIR: string = path.join(REPO_ROOT, "test-data", "merge", "ranges"); const BENCHES_TIMEOUT: number = 20000; // 20sec interface MergeRangeItem { name: string; status: "run" | "skip" | "only"; inputs: ProcessCov[]; expected: ProcessCov; } const FIXTURES_DIR: string = path.join(REPO_ROOT, "test-data", "bugs"); function loadFixture(name: string) { const content: string = fs.readFileSync( path.resolve(FIXTURES_DIR, `${name}.json`), {encoding: "UTF-8"}, ); return JSON.parse(content); } describe("merge", () => { describe("Various", () => { it("accepts empty arrays for `mergeProcessCovs`", () => { const inputs: ProcessCov[] = []; const expected: ProcessCov = {result: []}; const actual: ProcessCov = mergeProcessCovs(inputs); chai.assert.deepEqual(actual, expected); }); it("accepts empty arrays for `mergeScriptCovs`", () => { const inputs: ScriptCov[] = []; const expected: ScriptCov | undefined = undefined; const actual: ScriptCov | undefined = mergeScriptCovs(inputs); chai.assert.deepEqual(actual, expected); }); it("accepts empty arrays for `mergeFunctionCovs`", () => { const inputs: FunctionCov[] = []; const expected: FunctionCov | undefined = undefined; const actual: FunctionCov | undefined = mergeFunctionCovs(inputs); chai.assert.deepEqual(actual, expected); }); it("accepts arrays with a single item for `mergeProcessCovs`", () => { const inputs: ProcessCov[] = [ { result: [ { scriptId: "123", url: "/lib.js", functions: [ { functionName: "test", isBlockCoverage: true, ranges: [ {startOffset: 0, endOffset: 4, count: 2}, {startOffset: 1, endOffset: 2, count: 1}, 
{startOffset: 2, endOffset: 3, count: 1}, ], }, ], }, ], }, ]; const expected: ProcessCov = { result: [ { scriptId: "0", url: "/lib.js", functions: [ { functionName: "test", isBlockCoverage: true, ranges: [ {startOffset: 0, endOffset: 4, count: 2}, {startOffset: 1, endOffset: 3, count: 1}, ], }, ], }, ], }; const actual: ProcessCov = mergeProcessCovs(inputs); chai.assert.deepEqual(actual, expected); }); describe("mergeProcessCovs", () => { // see: https://github.com/demurgos/v8-coverage/issues/2 it("handles function coverage merged into block coverage", () => { const blockCoverage: ProcessCov = loadFixture("issue-2-block-coverage"); const functionCoverage: ProcessCov = loadFixture("issue-2-func-coverage"); const inputs: ProcessCov[] = [ functionCoverage, blockCoverage, ]; const expected: ProcessCov = loadFixture("issue-2-expected"); const actual: ProcessCov = mergeProcessCovs(inputs); chai.assert.deepEqual(actual, expected); }); // see: https://github.com/demurgos/v8-coverage/issues/2 it("handles block coverage merged into function coverage", () => { const blockCoverage: ProcessCov = loadFixture("issue-2-block-coverage"); const functionCoverage: ProcessCov = loadFixture("issue-2-func-covera
ge"); const inputs: ProcessCov[] = [ blockCoverage, functionCoverage, ]; const expected: ProcessCov = loadFixture("issue-2-expected"); const actual: ProcessCov = mergeProcessCovs(inputs); chai.assert.deepEqual(actual, expected); }); }); it("accepts arrays with a single item for `mergeScriptCovs`", () => { const inputs: ScriptCov[] = [ { scriptId: "123", url: "/lib.js", functions: [ { functionName: "test", isBlockCoverage: true, ranges: [ {startOffset: 0, endOffset: 4, count: 2}, {startOffset: 1, endOffset: 2, count: 1}, {startOffset: 2, endOffset: 3, count: 1}, ], }, ], }, ]; const expected: ScriptCov | undefined = { scriptId: "123", url: "/lib.js", functions: [ { functionName: "test", isBlockCoverage: true, ranges: [ {startOffset: 0, endOffset: 4, count: 2}, {startOffset: 1, endOffset: 3, count: 1}, ], }, ], }; const actual: ScriptCov | undefined = mergeScriptCovs(inputs); chai.assert.deepEqual(actual, expected); }); it("accepts arrays with a single item for `mergeFunctionCovs`", () => { const inputs: FunctionCov[] = [ { functionName: "test", isBlockCoverage: true, ranges: [ {startOffset: 0, endOffset: 4, count: 2}, {startOffset: 1, endOffset: 2, count: 1}, {startOffset: 2, endOffset: 3, count: 1}, ], }, ]; const expected: FunctionCov = { functionName: "test", isBlockCoverage: true, ranges: [ {startOffset: 0, endOffset: 4, count: 2}, {startOffset: 1, endOffset: 3, count: 1}, ], }; const actual: FunctionCov | undefined = mergeFunctionCovs(inputs); chai.assert.deepEqual(actual, expected); }); }); describe("ranges", () => { for (const sourceFile of getSourceFiles()) { const relPath: string = path.relative(RANGES_DIR, sourceFile); describe(relPath, () => { const content: string = fs.readFileSync(sourceFile, {encoding: "UTF-8"}); const items: MergeRangeItem[] = JSON.parse(content); for (const item of items) { const test: () => void = () => { const actual: ProcessCov | undefined = mergeProcessCovs(item.inputs); chai.assert.deepEqual(actual, item.expected); }; switch 
(item.status) { case "run": it(item.name, test); break; case "only": it.only(item.name, test); break; case "skip": it.skip(item.name, test); break; default: throw new Error(`Unexpected status: ${item.status}`); } } }); } }); describe("benches", () => { for (const bench of getBenches()) { const BENCHES_TO_SKIP: Set<string> = new Set(); if (process.env.CI === "true") { // Skip very large benchmarks when running continuous integration BENCHES_TO_SKIP.add("node@10.11.0"); BENCHES_TO_SKIP.add("npm@6.4.1"); } const name: string = path.basename(bench); if (BENCHES_TO_SKIP.has(name)) { it.skip(`${name} (skipped: too large for CI)`, testBench); } else { it(name, testBench); } async function testBench(this: Mocha.Context) { this.timeout(BENCHES_TIMEOUT); const inputFileNames: string[] = await fs.promises.readdir(bench); const inputPromises: Promise<ProcessCov>[] = []; for (const inputFileName of inputFileNames) { const resolved: string = path.join(bench, inputFileName
); inputPromises.push(fs.promises.readFile(resolved).then(buffer => JSON.parse(buffer.toString("UTF-8")))); } const inputs: ProcessCov[] = await Promise.all(inputPromises); const expectedPath: string = path.join(BENCHES_DIR, `${name}.json`); const expectedContent: string = await fs.promises.readFile(expectedPath, {encoding: "UTF-8"}) as string; const expected: ProcessCov = JSON.parse(expectedContent); const startTime: number = Date.now(); const actual: ProcessCov | undefined = mergeProcessCovs(inputs); const endTime: number = Date.now(); console.error(`Time (${name}): ${(endTime - startTime) / 1000}`); chai.assert.deepEqual(actual, expected); console.error(`OK: ${name}`); } } }); }); function getSourceFiles() { return getSourcesFrom(RANGES_DIR); function* getSourcesFrom(dir: string): Iterable<string> { const names: string[] = fs.readdirSync(dir); for (const name of names) { const resolved: string = path.join(dir, name); const stat: fs.Stats = fs.statSync(resolved); if (stat.isDirectory()) { yield* getSourcesFrom(dir); } else { yield resolved; } } } } function* getBenches(): Iterable<string> { const names: string[] = fs.readdirSync(BENCHES_INPUT_DIR); for (const name of names) { const resolved: string = path.join(BENCHES_INPUT_DIR, name); const stat: fs.Stats = fs.statSync(resolved); if (stat.isDirectory()) { yield resolved; } } }
import { FunctionCov, ProcessCov, RangeCov, ScriptCov } from "./types";

/**
 * Creates a deep copy of a process coverage.
 *
 * @param processCov Process coverage to clone.
 * @return Cloned process coverage.
 */
export function cloneProcessCov(processCov: Readonly<ProcessCov>): ProcessCov {
  return {
    result: processCov.result.map(cloneScriptCov),
  };
}

/**
 * Creates a deep copy of a script coverage.
 *
 * @param scriptCov Script coverage to clone.
 * @return Cloned script coverage.
 */
export function cloneScriptCov(scriptCov: Readonly<ScriptCov>): ScriptCov {
  return {
    scriptId: scriptCov.scriptId,
    url: scriptCov.url,
    functions: scriptCov.functions.map(cloneFunctionCov),
  };
}

/**
 * Creates a deep copy of a function coverage.
 *
 * @param functionCov Function coverage to clone.
 * @return Cloned function coverage.
 */
export function cloneFunctionCov(functionCov: Readonly<FunctionCov>): FunctionCov {
  return {
    functionName: functionCov.functionName,
    ranges: functionCov.ranges.map(cloneRangeCov),
    isBlockCoverage: functionCov.isBlockCoverage,
  };
}

/**
 * Creates a deep copy of a range coverage.
 *
 * @param rangeCov Range coverage to clone.
 * @return Cloned range coverage.
 */
export function cloneRangeCov(rangeCov: Readonly<RangeCov>): RangeCov {
  const {startOffset, endOffset, count} = rangeCov;
  return {startOffset, endOffset, count};
}
import { FunctionCov, RangeCov, ScriptCov } from "./types";

/**
 * Compares two script coverages.
 *
 * The result corresponds to the comparison of their `url` value
 * (alphabetical sort): `-1`, `0` or `1`.
 */
export function compareScriptCovs(a: Readonly<ScriptCov>, b: Readonly<ScriptCov>): number {
  if (a.url < b.url) {
    return -1;
  }
  if (a.url > b.url) {
    return 1;
  }
  return 0;
}

/**
 * Compares two function coverages.
 *
 * The result corresponds to the comparison of the root ranges.
 */
export function compareFunctionCovs(a: Readonly<FunctionCov>, b: Readonly<FunctionCov>): number {
  return compareRangeCovs(a.ranges[0], b.ranges[0]);
}

/**
 * Compares two range coverages.
 *
 * The ranges are first ordered by ascending `startOffset` and then by
 * descending `endOffset`.
 * This corresponds to a pre-order tree traversal.
 */
export function compareRangeCovs(a: Readonly<RangeCov>, b: Readonly<RangeCov>): number {
  const startDiff: number = a.startOffset - b.startOffset;
  return startDiff !== 0 ? startDiff : b.endOffset - a.endOffset;
}
/**
 * Coverage for a whole process, as reported by the V8 inspector
 * (`Profiler.takePreciseCoverage`).
 */
export interface ProcessCov {
  // One entry per script observed by the process.
  result: ScriptCov[];
}

/**
 * Coverage for a single script (file or module).
 */
export interface ScriptCov {
  // Script identifier; normalization reassigns these to "0", "1", ...
  scriptId: string;
  // URL of the script; used to match scripts across processes.
  url: string;
  functions: FunctionCov[];
}

/**
 * Coverage for a single function.
 */
export interface FunctionCov {
  functionName: string;
  // Pre-order sorted ranges; `ranges[0]` is the root range of the function.
  ranges: RangeCov[];
  // `true` for block-level granularity, `false` for function-level only.
  isBlockCoverage: boolean;
}

/**
 * A half-open offset interval `[start, end)` (internal helper type).
 */
export interface Range {
  readonly start: number;
  readonly end: number;
}

/**
 * A coverage count over the half-open interval `[startOffset, endOffset)`.
 */
export interface RangeCov {
  startOffset: number;
  endOffset: number;
  count: number;
}
import { compareFunctionCovs, compareRangeCovs, compareScriptCovs } from "./compare";
import { RangeTree } from "./range-tree";
import { FunctionCov, ProcessCov, ScriptCov } from "./types";

/**
 * Normalizes a process coverage.
 *
 * Sorts the scripts alphabetically by `url`.
 * Reassigns script ids: the script at index `0` receives `"0"`, the script at
 * index `1` receives `"1"` etc.
 * This does not normalize the script coverages.
 *
 * @param processCov Process coverage to normalize.
 */
export function normalizeProcessCov(processCov: ProcessCov): void {
  processCov.result.sort(compareScriptCovs);
  processCov.result.forEach((scriptCov: ScriptCov, index: number): void => {
    scriptCov.scriptId = index.toString(10);
  });
}

/**
 * Normalizes a process coverage deeply.
 *
 * Normalizes the script coverages deeply, then normalizes the process coverage
 * itself.
 *
 * @param processCov Process coverage to normalize.
 */
export function deepNormalizeProcessCov(processCov: ProcessCov): void {
  processCov.result.forEach((scriptCov: ScriptCov): void => {
    deepNormalizeScriptCov(scriptCov);
  });
  normalizeProcessCov(processCov);
}

/**
 * Normalizes a script coverage.
 *
 * Sorts the functions by root range (pre-order sort).
 * This does not normalize the function coverages.
 *
 * @param scriptCov Script coverage to normalize.
 */
export function normalizeScriptCov(scriptCov: ScriptCov): void {
  scriptCov.functions.sort(compareFunctionCovs);
}

/**
 * Normalizes a script coverage deeply.
 *
 * Normalizes the function coverages deeply, then normalizes the script
 * coverage itself.
 *
 * @param scriptCov Script coverage to normalize.
 */
export function deepNormalizeScriptCov(scriptCov: ScriptCov): void {
  scriptCov.functions.forEach((funcCov: FunctionCov): void => {
    normalizeFunctionCov(funcCov);
  });
  normalizeScriptCov(scriptCov);
}

/**
 * Normalizes a function coverage.
 *
 * Sorts the ranges (pre-order sort), then rebuilds them through a normalized
 * range tree.
 *
 * @param funcCov Function coverage to normalize.
 */
export function normalizeFunctionCov(funcCov: FunctionCov): void {
  funcCov.ranges.sort(compareRangeCovs);
  // assumes `funcCov.ranges` is non-empty (valid function coverages always
  // have a root range), hence the non-null assertion.
  const tree: RangeTree = RangeTree.fromSortedRanges(funcCov.ranges)!;
  normalizeRangeTree(tree);
  funcCov.ranges = tree.toRanges();
}

/**
 * @internal
 */
export function normalizeRangeTree(tree: RangeTree): void {
  tree.normalize();
}
import { compareRangeCovs } from "./compare";
import { RangeCov } from "./types";

// Minimal read-only view of a range tree, used for ASCII rendering.
interface ReadonlyRangeTree {
  readonly start: number;
  readonly end: number;
  readonly count: number;
  readonly children: ReadonlyRangeTree[];
}

/**
 * Renders a forest of range trees as a single ASCII-art string.
 */
export function emitForest(trees: ReadonlyArray<ReadonlyRangeTree>): string {
  return emitForestLines(trees).join("\n");
}

/**
 * Renders a forest of range trees as lines: an offset header followed by one
 * multi-line block per tree.
 */
export function emitForestLines(trees: ReadonlyArray<ReadonlyRangeTree>): string[] {
  const colMap: Map<number, number> = getColMap(trees);
  const header: string = emitOffsets(colMap);
  return [header, ...trees.map(tree => emitTree(tree, colMap).join("\n"))];
}

/**
 * Maps every distinct start/end offset in the forest to a text column.
 * Columns are evenly spaced wide enough for the largest offset plus padding.
 */
function getColMap(trees: Iterable<ReadonlyRangeTree>): Map<number, number> {
  // Collect every offset (start and end) appearing anywhere in the forest.
  const eventSet: Set<number> = new Set();
  for (const tree of trees) {
    const stack: ReadonlyRangeTree[] = [tree];
    while (stack.length > 0) {
      const cur: ReadonlyRangeTree = stack.pop()!;
      eventSet.add(cur.start);
      eventSet.add(cur.end);
      for (const child of cur.children) {
        stack.push(child);
      }
    }
  }
  const events: number[] = [...eventSet];
  events.sort((a, b) => a - b);
  let maxDigits: number = 1;
  for (const event of events) {
    maxDigits = Math.max(maxDigits, event.toString(10).length);
  }
  // 3 extra columns leave room for the `[`, `)` and a gap between ranges.
  const colWidth: number = maxDigits + 3;
  const colMap: Map<number, number> = new Map();
  for (const [i, event] of events.entries()) {
    colMap.set(event, i * colWidth);
  }
  return colMap;
}

/**
 * Renders one tree as lines, one line per depth level (breadth-first).
 */
function emitTree(tree: ReadonlyRangeTree, colMap: Map<number, number>): string[] {
  const layers: ReadonlyRangeTree[][] = [];
  let nextLayer: ReadonlyRangeTree[] = [tree];
  while (nextLayer.length > 0) {
    const layer: ReadonlyRangeTree[] = nextLayer;
    layers.push(layer);
    nextLayer = [];
    for (const node of layer) {
      for (const child of node.children) {
        nextLayer.push(child);
      }
    }
  }
  return layers.map(layer => emitTreeLayer(layer, colMap));
}

/**
 * Parses ASCII-art function ranges back into pre-order sorted `RangeCov`s.
 *
 * @param text Multi-line ASCII rendering (as produced by `emitTree`).
 * @param offsetMap Maps text columns back to offsets (see `parseOffsets`).
 */
export function parseFunctionRanges(text: string, offsetMap: Map<number, number>): RangeCov[] {
  const result: RangeCov[] = [];
  for (const line of text.split("\n")) {
    for (const range of parseTreeLayer(line, offsetMap)) {
      result.push(range);
    }
  }
  result.sort(compareRangeCovs);
  return result;
}

/**
 * Renders one depth level of a tree as a single text line.
 *
 * @param layer Sorted list of disjoint trees.
 * @param colMap Maps offsets to text columns.
 */
function emitTreeLayer(layer: ReadonlyRangeTree[], colMap: Map<number, number>): string {
  const line: string[] = [];
  let curIdx: number = 0;
  for (const {start, end, count} of layer) {
    const startIdx: number = colMap.get(start)!;
    const endIdx: number = colMap.get(end)!;
    if (startIdx > curIdx) {
      line.push(" ".repeat(startIdx - curIdx));
    }
    line.push(emitRange(count, endIdx - startIdx));
    curIdx = endIdx;
  }
  return line.join("");
}

/**
 * Parses a single ASCII line of `[count----)` ranges into `RangeCov`s.
 */
function parseTreeLayer(text: string, offsetMap: Map<number, number>): RangeCov[] {
  const result: RangeCov[] = [];
  // Matches `[<count>---)`; the match index/length give the text columns.
  const regex: RegExp = /\[(\d+)-*\)/gs;
  while (true) {
    const match: RegExpMatchArray | null = regex.exec(text);
    if (match === null) {
      break;
    }
    const startIdx: number = match.index!;
    const endIdx: number = startIdx + match[0].length;
    const count: number = parseInt(match[1], 10);
    const startOffset: number | undefined = offsetMap.get(startIdx);
    const endOffset: number | undefined = offsetMap.get(endIdx);
    if (startOffset === undefined || endOffset === undefined) {
      throw new Error(`Invalid offsets for: ${JSON.stringify(text)}`);
    }
    result.push({startOffset, endOffset, count});
  }
  return result;
}

/**
 * Renders a single range as `[<count>----)` padded with hyphens to `len`.
 */
function emitRange(count: number, len: number): string {
  const rangeStart: string = `[${count.toString(10)}`;
  const rangeEnd: string = ")";
  const hyphensLen: number = len - (rangeStart.length + rangeEnd.length);
  const hyphens: string = "-".repeat(Math.max(0, hyphensLen));
  return `${rangeStart}${hyphens}${rangeEnd}`;
}

/**
 * Renders the header line: each offset printed at its assigned column.
 */
function emitOffsets(colMap: Map<number, number>): string {
  let line: string = "";
  for (const [event, col] of colMap) {
    if (line.length < col) {
      line += " ".repeat(col - line.length);
    }
    line += event.toString(10);
  }
  return line;
}

/**
 * Parses a header line back into a column → offset map
 * (inverse of `emitOffsets`).
 */
export function parseOffsets(text: string): Map<number, number> {
  const result: Map<number, number> = new Map();
  const regex: RegExp = /\d+/gs;
  while (true) {
    const match: RegExpExecArray | null = regex.exec(text);
    if (match === null) {
      break;
    }
    result.set(match.index, parseInt(match[0], 10));
  }
  return result;
}
// Public API surface of the package; each line re-exports symbols from the
// corresponding implementation module (specifiers kept alphabetical).
export { emitForest, emitForestLines, parseFunctionRanges, parseOffsets } from "./ascii";
export { cloneFunctionCov, cloneProcessCov, cloneRangeCov, cloneScriptCov } from "./clone";
export { compareFunctionCovs, compareRangeCovs, compareScriptCovs } from "./compare";
export { mergeFunctionCovs, mergeProcessCovs, mergeScriptCovs } from "./merge";
export { RangeTree } from "./range-tree";
export { FunctionCov, ProcessCov, RangeCov, ScriptCov } from "./types";
import { RangeCov } from "./types";

/**
 * Mutable tree of nested coverage ranges.
 *
 * Each node covers `[start, end)` and stores `delta`: the difference between
 * this node's count and its parent's count (the root's delta is its absolute
 * count). Deltas make merging trees a matter of summing roots.
 */
export class RangeTree {
  start: number;
  end: number;
  delta: number;
  children: RangeTree[];

  constructor(
    start: number,
    end: number,
    delta: number,
    children: RangeTree[],
  ) {
    this.start = start;
    this.end = end;
    this.delta = delta;
    this.children = children;
  }

  /**
   * Builds a tree from a flat list of ranges.
   *
   * @precondition `ranges` are well-formed and pre-order sorted
   * @return The root tree, or `undefined` if `ranges` is empty.
   */
  static fromSortedRanges(ranges: ReadonlyArray<RangeCov>): RangeTree | undefined {
    let root: RangeTree | undefined;
    // Stack of parent trees and parent counts.
    const stack: [RangeTree, number][] = [];
    for (const range of ranges) {
      const node: RangeTree = new RangeTree(range.startOffset, range.endOffset, range.count, []);
      if (root === undefined) {
        root = node;
        stack.push([node, range.count]);
        continue;
      }
      let parent: RangeTree;
      let parentCount: number;
      // Pop ancestors that end at or before this range's start.
      while (true) {
        [parent, parentCount] = stack[stack.length - 1];
        // assert: `top !== undefined` (the ranges are sorted)
        if (range.startOffset < parent.end) {
          break;
        } else {
          stack.pop();
        }
      }
      // Store the count relative to the parent.
      node.delta -= parentCount;
      parent.children.push(node);
      stack.push([node, range.count]);
    }
    return root;
  }

  /**
   * Canonicalizes the tree in place: merges runs of adjacent siblings with
   * equal delta into one node, and collapses an only-child that exactly
   * spans its parent into the parent.
   */
  normalize(): void {
    const children: RangeTree[] = [];
    let curEnd: number;
    // `head` is the first node of the current mergeable run; `tail` holds
    // subsequent adjacent siblings with the same delta.
    let head: RangeTree | undefined;
    const tail: RangeTree[] = [];
    for (const child of this.children) {
      if (head === undefined) {
        head = child;
      } else if (child.delta === head.delta && child.start === curEnd!) {
        tail.push(child);
      } else {
        endChain();
        head = child;
      }
      curEnd = child.end;
    }
    if (head !== undefined) {
      endChain();
    }

    // Collapse an only-child that covers this node's exact span.
    if (children.length === 1) {
      const child: RangeTree = children[0];
      if (child.start === this.start && child.end === this.end) {
        this.delta += child.delta;
        this.children = child.children;
        // `.lazyCount` is zero for both (both are after normalization)
        return;
      }
    }

    this.children = children;

    // Merges `tail` into `head`, recursively normalizes it, and appends it
    // to the new `children` list.
    function endChain(): void {
      if (tail.length !== 0) {
        head!.end = tail[tail.length - 1].end;
        for (const tailTree of tail) {
          for (const subChild of tailTree.children) {
            // Re-express the grandchild's delta relative to `head`.
            subChild.delta += tailTree.delta - head!.delta;
            head!.children.push(subChild);
          }
        }
        tail.length = 0;
      }
      head!.normalize();
      children.push(head!);
    }
  }

  /**
   * Splits this tree at `value`: this tree keeps `[start, value)` and the
   * returned tree covers `[value, end)` with the same delta.
   *
   * @precondition `tree.start < value && value < tree.end`
   * @return RangeTree Right part
   */
  split(value: number): RangeTree {
    let leftChildLen: number = this.children.length;
    let mid: RangeTree | undefined;

    // TODO(perf): Binary search (check overhead)
    for (let i: number = 0; i < this.children.length; i++) {
      const child: RangeTree = this.children[i];
      if (child.start < value && value < child.end) {
        // A child straddles the split point: split it recursively.
        mid = child.split(value);
        leftChildLen = i + 1;
        break;
      } else if (child.start >= value) {
        leftChildLen = i;
        break;
      }
    }

    const rightLen: number = this.children.length - leftChildLen;
    const rightChildren: RangeTree[] = this.children.splice(leftChildLen, rightLen);
    if (mid !== undefined) {
      rightChildren.unshift(mid);
    }
    const result: RangeTree = new RangeTree(
      value,
      this.end,
      this.delta,
      rightChildren,
    );
    this.end = value;
    return result;
  }

  /**
   * Get the range coverages corresponding to the tree.
   *
   * The ranges are pre-order sorted, with absolute counts reconstructed
   * from the deltas.
   */
  toRanges(): RangeCov[] {
    const ranges: RangeCov[] = [];
    // Stack of parent trees and counts.
    const stack: [RangeTree, number][] = [[this, 0]];
    while (stack.length > 0) {
      const [cur, parentCount]: [RangeTree, number] = stack.pop()!;
      const count: number = parentCount + cur.delta;
      ranges.push({startOffset: cur.start, endOffset: cur.end, count});
      // Push children in reverse so they pop in pre-order.
      for (let i: number = cur.children.length - 1; i >= 0; i--) {
        stack.push([cur.children[i], count]);
      }
    }
    return ranges;
  }
}
import {
  deepNormalizeScriptCov,
  normalizeFunctionCov,
  normalizeProcessCov,
  normalizeRangeTree,
  normalizeScriptCov,
} from "./normalize";
import { RangeTree } from "./range-tree";
import { FunctionCov, ProcessCov, Range, RangeCov, ScriptCov } from "./types";

/**
 * Merges a list of process coverages.
 *
 * The result is normalized.
 * The input values may be mutated, it is not safe to use them after passing
 * them to this function.
 * The computation is synchronous.
 *
 * @param processCovs Process coverages to merge.
 * @return Merged process coverage.
 */
export function mergeProcessCovs(processCovs: ReadonlyArray<ProcessCov>): ProcessCov {
  if (processCovs.length === 0) {
    return {result: []};
  }

  // Group script coverages by URL: scripts with the same URL are merged.
  const urlToScripts: Map<string, ScriptCov[]> = new Map();
  for (const processCov of processCovs) {
    for (const scriptCov of processCov.result) {
      let scriptCovs: ScriptCov[] | undefined = urlToScripts.get(scriptCov.url);
      if (scriptCovs === undefined) {
        scriptCovs = [];
        urlToScripts.set(scriptCov.url, scriptCovs);
      }
      scriptCovs.push(scriptCov);
    }
  }

  const result: ScriptCov[] = [];
  for (const scripts of urlToScripts.values()) {
    // assert: `scripts.length > 0`
    result.push(mergeScriptCovs(scripts)!);
  }
  const merged: ProcessCov = {result};

  normalizeProcessCov(merged);
  return merged;
}

/**
 * Merges a list of matching script coverages.
 *
 * Scripts are matching if they have the same `url`.
 * The result is normalized.
 * The input values may be mutated, it is not safe to use them after passing
 * them to this function.
 * The computation is synchronous.
 *
 * @param scriptCovs Process coverages to merge.
 * @return Merged script coverage, or `undefined` if the input list was empty.
 */
export function mergeScriptCovs(scriptCovs: ReadonlyArray<ScriptCov>): ScriptCov | undefined {
  if (scriptCovs.length === 0) {
    return undefined;
  } else if (scriptCovs.length === 1) {
    const merged: ScriptCov = scriptCovs[0];
    deepNormalizeScriptCov(merged);
    return merged;
  }

  // The merged script keeps the id and url of the first input.
  const first: ScriptCov = scriptCovs[0];
  const scriptId: string = first.scriptId;
  const url: string = first.url;

  // Group function coverages by their root range span.
  const rangeToFuncs: Map<string, FunctionCov[]> = new Map();
  for (const scriptCov of scriptCovs) {
    for (const funcCov of scriptCov.functions) {
      const rootRange: string = stringifyFunctionRootRange(funcCov);
      let funcCovs: FunctionCov[] | undefined = rangeToFuncs.get(rootRange);

      if (funcCovs === undefined ||
        // if the entry in rangeToFuncs is function-level granularity and
        // the new coverage is block-level, prefer block-level.
        (!funcCovs[0].isBlockCoverage && funcCov.isBlockCoverage)) {
        funcCovs = [];
        rangeToFuncs.set(rootRange, funcCovs);
      } else if (funcCovs[0].isBlockCoverage && !funcCov.isBlockCoverage) {
        // if the entry in rangeToFuncs is block-level granularity, we should
        // not append function level granularity.
        continue;
      }
      funcCovs.push(funcCov);
    }
  }

  const functions: FunctionCov[] = [];
  for (const funcCovs of rangeToFuncs.values()) {
    // assert: `funcCovs.length > 0`
    functions.push(mergeFunctionCovs(funcCovs)!);
  }

  const merged: ScriptCov = {scriptId, url, functions};
  normalizeScriptCov(merged);
  return merged;
}

/**
 * Returns a string representation of the root range of the function.
 *
 * This string can be used to match function with same root range.
 * The string is derived from the start and end offsets of the root range of
 * the function.
 * This assumes that `ranges` is non-empty (true for valid function coverages).
 *
 * @param funcCov Function coverage with the range to stringify
 * @internal
 */
function stringifyFunctionRootRange(funcCov: Readonly<FunctionCov>): string {
  const rootRange: RangeCov = funcCov.ranges[0];
  return `${rootRange.startOffset.toString(10)};${rootRange.endOffset.toString(10)}`;
}

/**
 * Merges a list of matching function coverages.
 *
 * Functions are matching if their root ranges have the same span.
 * The result is normalized.
 * The input values may be mutated, it is not safe to use them after passing
 * them to this function.
 * The computation is synchronous.
 *
 * @param funcCovs Function coverages to merge.
 * @return Merged function coverage, or `undefined` if the input list was empty.
 */
export function mergeFunctionCovs(funcCovs: ReadonlyArray<FunctionCov>): FunctionCov | undefined {
  if (funcCovs.length === 0) {
    return undefined;
  } else if (funcCovs.length === 1) {
    const merged: FunctionCov = funcCovs[0];
    normalizeFunctionCov(merged);
    return merged;
  }

  const functionName: string = funcCovs[0].functionName;

  const trees: RangeTree[] = [];
  for (const funcCov of funcCovs) {
    // assert: `fn.ranges.length > 0`
    // assert: `fn.ranges` is sorted
    trees.push(RangeTree.fromSortedRanges(funcCov.ranges)!);
  }

  // assert: `trees.length > 0`
  const mergedTree: RangeTree = mergeRangeTrees(trees)!;
  normalizeRangeTree(mergedTree);
  const ranges: RangeCov[] = mergedTree.toRanges();
  // A single zero-count root range means the function was never observed
  // with block granularity.
  const isBlockCoverage: boolean = !(ranges.length === 1 && ranges[0].count === 0);

  const merged: FunctionCov = {functionName, ranges, isBlockCoverage};
  // assert: `merged` is normalized
  return merged;
}

/**
 * Merges trees covering the same span: root deltas add up, children are
 * merged recursively.
 *
 * @precondition Same `start` and `end` for all the trees
 */
function mergeRangeTrees(trees: ReadonlyArray<RangeTree>): RangeTree | undefined {
  if (trees.length <= 1) {
    return trees[0];
  }
  const first: RangeTree = trees[0];
  let delta: number = 0;
  for (const tree of trees) {
    delta += tree.delta;
  }
  const children: RangeTree[] = mergeRangeTreeChildren(trees);
  return new RangeTree(first.start, first.end, delta, children);
}

// A child tree tagged with the index of the parent it came from.
class RangeTreeWithParent {
  readonly parentIndex: number;
  readonly tree: RangeTree;

  constructor(parentIndex: number, tree: RangeTree) {
    this.parentIndex = parentIndex;
    this.tree = tree;
  }
}

// All the child trees (across parents) starting at a given offset.
class StartEvent {
  readonly offset: number;
  readonly trees: RangeTreeWithParent[];

  constructor(offset: number, trees: RangeTreeWithParent[]) {
    this.offset = offset;
    this.trees = trees;
  }

  static compare(a: StartEvent, b: StartEvent): number {
    return a.offset - b.offset;
  }
}

/**
 * Yields start events in offset order, interleaving trees re-queued by
 * splits (`pushPendingTree`) with the original sorted queue.
 */
class StartEventQueue {
  private readonly queue: StartEvent[];
  private nextIndex: number;
  // Offset at which the pending (split-off) trees start.
  private pendingOffset: number;
  private pendingTrees: RangeTreeWithParent[] | undefined;

  private constructor(queue: StartEvent[]) {
    this.queue = queue;
    this.nextIndex = 0;
    this.pendingOffset = 0;
    this.pendingTrees = undefined;
  }

  static fromParentTrees(parentTrees: ReadonlyArray<RangeTree>): StartEventQueue {
    const startToTrees: Map<number, RangeTreeWithParent[]> = new Map();
    for (const [parentIndex, parentTree] of parentTrees.entries()) {
      for (const child of parentTree.children) {
        let trees: RangeTreeWithParent[] | undefined = startToTrees.get(child.start);
        if (trees === undefined) {
          trees = [];
          startToTrees.set(child.start, trees);
        }
        trees.push(new RangeTreeWithParent(parentIndex, child));
      }
    }
    const queue: StartEvent[] = [];
    for (const [startOffset, trees] of startToTrees) {
      queue.push(new StartEvent(startOffset, trees));
    }
    queue.sort(StartEvent.compare);
    return new StartEventQueue(queue);
  }

  setPendingOffset(offset: number): void {
    this.pendingOffset = offset;
  }

  pushPendingTree(tree: RangeTreeWithParent): void {
    if (this.pendingTrees === undefined) {
      this.pendingTrees = [];
    }
    this.pendingTrees.push(tree);
  }

  next(): StartEvent | undefined {
    const pendingTrees: RangeTreeWithParent[] | undefined = this.pendingTrees;
    const nextEvent: StartEvent | undefined = this.queue[this.nextIndex];
    if (pendingTrees === undefined) {
      this.nextIndex++;
      return nextEvent;
    } else if (nextEvent === undefined) {
      // Queue exhausted: emit the pending trees as a synthetic event.
      this.pendingTrees = undefined;
      return new StartEvent(this.pendingOffset, pendingTrees);
    } else {
      if (this.pendingOffset < nextEvent.offset) {
        // Pending trees start before the next queued event: emit them first.
        this.pendingTrees = undefined;
        return new StartEvent(this.pendingOffset, pendingTrees);
      } else {
        if (this.pendingOffset === nextEvent.offset) {
          // Same offset: fold the pending trees into the queued event.
          this.pendingTrees = undefined;
          for (const tree of pendingTrees) {
            nextEvent.trees.push(tree);
          }
        }
        this.nextIndex++;
        return nextEvent;
      }
    }
  }
}

/**
 * Merges the children of multiple range trees covering the same span.
 *
 * Sweeps start events left-to-right, keeping one "open" child range at a
 * time; trees extending past the open range are split and re-queued.
 *
 * @param parentTrees Trees whose children should be merged (same span).
 * @return Merged, disjoint child trees.
 */
function mergeRangeTreeChildren(parentTrees: ReadonlyArray<RangeTree>): RangeTree[] {
  const result: RangeTree[] = [];
  const startEventQueue: StartEventQueue = StartEventQueue.fromParentTrees(parentTrees);
  // Children gathered for the currently open range, keyed by parent index.
  const parentToNested: Map<number, RangeTree[]> = new Map();
  let openRange: Range | undefined;

  while (true) {
    const event: StartEvent | undefined = startEventQueue.next();
    if (event === undefined) {
      break;
    }

    // The open range ends before this event: flush it.
    if (openRange !== undefined && openRange.end <= event.offset) {
      result.push(nextChild(openRange, parentToNested));
      openRange = undefined;
    }

    if (openRange === undefined) {
      // Open a new range spanning the longest tree starting here.
      let openRangeEnd: number = event.offset + 1;
      for (const {parentIndex, tree} of event.trees) {
        openRangeEnd = Math.max(openRangeEnd, tree.end);
        insertChild(parentToNested, parentIndex, tree);
      }
      startEventQueue.setPendingOffset(openRangeEnd);
      openRange = {start: event.offset, end: openRangeEnd};
    } else {
      // Trees starting inside the open range: split off any part that
      // extends beyond it and re-queue the right half.
      for (const {parentIndex, tree} of event.trees) {
        if (tree.end > openRange.end) {
          const right: RangeTree = tree.split(openRange.end);
          startEventQueue.pushPendingTree(new RangeTreeWithParent(parentIndex, right));
        }
        insertChild(parentToNested, parentIndex, tree);
      }
    }
  }
  if (openRange !== undefined) {
    result.push(nextChild(openRange, parentToNested));
  }

  return result;
}

/**
 * Appends `tree` to the list of children gathered for `parentIndex`.
 */
function insertChild(parentToNested: Map<number, RangeTree[]>, parentIndex: number, tree: RangeTree): void {
  let nested: RangeTree[] | undefined = parentToNested.get(parentIndex);
  if (nested === undefined) {
    nested = [];
    parentToNested.set(parentIndex, nested);
  }
  nested.push(tree);
}
/**
 * Builds the merged child tree for the just-closed open range.
 *
 * For each parent, reuses its single gathered tree when that tree exactly
 * spans the open range; otherwise wraps the gathered trees in a zero-delta
 * node over the open range. The accumulator map is cleared for reuse.
 */
function nextChild(openRange: Range, parentToNested: Map<number, RangeTree[]>): RangeTree {
  const matchingTrees: RangeTree[] = [];
  for (const nested of parentToNested.values()) {
    const isExactMatch: boolean = nested.length === 1
      && nested[0].start === openRange.start
      && nested[0].end === openRange.end;
    matchingTrees.push(
      isExactMatch
        ? nested[0]
        : new RangeTree(openRange.start, openRange.end, 0, nested),
    );
  }
  parentToNested.clear();
  return mergeRangeTrees(matchingTrees)!;
}
// Originally by: Rogier Schouten <https://github.com/rogierschouten>
// Adapted by: Madhav Varshney <https://github.com/madhavarshney>
// Type declarations for the `kleur` terminal-color library. Every color,
// background, and modifier is a chainable `Color`: call it with a value to get
// a styled string, or with no arguments to obtain the chain for further styling.
// NOTE(review): in the original, the attribution comments and the interior
// `// Colors` / `// Backgrounds` / `// Modifiers` comments shared one physical
// line with the declarations, commenting out all following code; restored onto
// separate lines (comment/whitespace-only change).
declare namespace kleur {
  interface Color {
    (x: string | number): string;
    (): Kleur;
  }
  interface Kleur {
    // Colors
    black: Color; red: Color; green: Color; yellow: Color; blue: Color; magenta: Color; cyan: Color; white: Color; gray: Color; grey: Color;
    // Backgrounds
    bgBlack: Color; bgRed: Color; bgGreen: Color; bgYellow: Color; bgBlue: Color; bgMagenta: Color; bgCyan: Color; bgWhite: Color;
    // Modifiers
    reset: Color; bold: Color; dim: Color; italic: Color; underline: Color; inverse: Color; hidden: Color; strikethrough: Color;
  }
}
declare let kleur: kleur.Kleur & { enabled: boolean };
export = kleur;
// Ambient module declarations for `emoji-regex` and its alternate entry points
// (`/text`, `/es2015`, `/es2015/text`). Each default-exports a zero-argument
// factory returning a fresh RegExp.
declare module 'emoji-regex' { function emojiRegex(): RegExp; export default emojiRegex; } declare module 'emoji-regex/text' { function emojiRegex(): RegExp; export default emojiRegex; } declare module 'emoji-regex/es2015' { function emojiRegex(): RegExp; export default emojiRegex; } declare module 'emoji-regex/es2015/text' { function emojiRegex(): RegExp; export default emojiRegex; }
/**
 * Type declarations for `dedent`: a helper usable both as a tagged template and
 * as a plain-string function, exposed as the default export.
 */
interface DedentOptions {
  // Presumably controls processing of escape sequences in the input —
  // TODO(review): confirm against the dedent package documentation.
  escapeSpecialCharacters?: boolean;
}
interface Dedent {
  (literals: string): string;
  (strings: TemplateStringsArray, ...values: unknown[]): string;
  /** Returns a new dedent function bound to the given options. */
  withOptions: CreateDedent;
}
type CreateDedent = (options: DedentOptions) => Dedent;
// Fix: the default export previously re-declared the Dedent shape inline with
// `withOptions(...): any`, leaking `any` and losing the chained type of
// `dedent.withOptions({...})`. Declaring it as `Dedent` removes the
// duplication and is a backward-compatible narrowing for callers.
declare const _default: Dedent;
export { CreateDedent, Dedent, DedentOptions, _default as default };
// Type declarations for `locate-path`: given several candidate paths, return
// the first one that exists on disk — asynchronously (with concurrency and
// ordering options) or synchronously via `.sync`. Exported CommonJS-style via
// `export =`. (Review: fixed the JSDoc example — `console` -> `console.log`,
// and the result is the full element `'rainbow.png'`.)
declare namespace locatePath { interface Options { /** Current working directory. @default process.cwd() */ readonly cwd?: string; /** Type of path to match. @default 'file' */ readonly type?: 'file' | 'directory'; /** Allow symbolic links to match if they point to the requested path type. @default true */ readonly allowSymlinks?: boolean; } interface AsyncOptions extends Options { /** Number of concurrently pending promises. Minimum: `1`. @default Infinity */ readonly concurrency?: number; /** Preserve `paths` order when searching. Disable this to improve performance if you don't care about the order. @default true */ readonly preserveOrder?: boolean; } } declare const locatePath: { /** Get the first path that exists on disk of multiple paths. @param paths - Paths to check. @returns The first path that exists or `undefined` if none exists. @example ``` import locatePath = require('locate-path'); const files = [ 'unicorn.png', 'rainbow.png', // Only this one actually exists on disk 'pony.png' ]; (async () => { console.log(await locatePath(files)); //=> 'rainbow.png' })(); ``` */ (paths: Iterable<string>, options?: locatePath.AsyncOptions): Promise< string | undefined >; /** Synchronously get the first path that exists on disk of multiple paths. @param paths - Paths to check. @returns The first path that exists or `undefined` if none exists. */ sync( paths: Iterable<string>, options?: locatePath.Options ): string | undefined; }; export = locatePath;
// Type declarations for `jest-diff`: diffing of values, lines, and strings
// built on diff-match-patch style `Diff` tuples (DIFF_DELETE / DIFF_INSERT /
// DIFF_EQUAL), with colors, annotations, and context configurable via
// `DiffOptions`.
/** * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ import type {CompareKeys} from 'pretty-format'; /** * Class representing one diff tuple. * Attempts to look like a two-element array (which is what this used to be). * @param {number} op Operation, one of: DIFF_DELETE, DIFF_INSERT, DIFF_EQUAL. * @param {string} text Text to be deleted, inserted, or retained. * @constructor */ export declare class Diff { 0: number; 1: string; constructor(op: number, text: string); } export declare function diff( a: any, b: any, options?: DiffOptions, ): string | null; /** * The data structure representing a diff is an array of tuples: * [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']] * which means: delete 'Hello', add 'Goodbye' and keep ' world.' */ export declare var DIFF_DELETE: number; export declare var DIFF_EQUAL: number; export declare var DIFF_INSERT: number; export declare const diffLinesRaw: ( aLines: Array<string>, bLines: Array<string>, ) => Array<Diff>; export declare const diffLinesUnified: ( aLines: Array<string>, bLines: Array<string>, options?: DiffOptions, ) => string; export declare const diffLinesUnified2: ( aLinesDisplay: Array<string>, bLinesDisplay: Array<string>, aLinesCompare: Array<string>, bLinesCompare: Array<string>, options?: DiffOptions, ) => string; export declare type DiffOptions = { aAnnotation?: string; aColor?: DiffOptionsColor; aIndicator?: string; bAnnotation?: string; bColor?: DiffOptionsColor; bIndicator?: string; changeColor?: DiffOptionsColor; changeLineTrailingSpaceColor?: DiffOptionsColor; commonColor?: DiffOptionsColor; commonIndicator?: string; commonLineTrailingSpaceColor?: DiffOptionsColor; contextLines?: number; emptyFirstOrLastLinePlaceholder?: string; expand?: boolean; includeChangeCounts?: boolean; omitAnnotationLines?: boolean; patchColor?: DiffOptionsColor; compareKeys?: 
CompareKeys; }; export declare type DiffOptionsColor = (arg: string) => string; export declare const diffStringsRaw: ( a: string, b: string, cleanup: boolean, ) => Array<Diff>; export declare const diffStringsUnified: ( a: string, b: string, options?: DiffOptions, ) => string; export {};
// Type declarations for `jest-resolve-dependencies`: DependencyResolver
// resolves a module's direct dependencies and, inversely, the set of modules
// that transitively depend on given paths, using a resolver, the haste
// filesystem, and a snapshot resolver.
/** * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ import type {default as default_2} from 'jest-resolve'; import type {IHasteFS} from 'jest-haste-map'; import type {ResolveModuleConfig} from 'jest-resolve'; import {SnapshotResolver} from 'jest-snapshot'; /** * DependencyResolver is used to resolve the direct dependencies of a module or * to retrieve a list of all transitive inverse dependencies. */ export declare class DependencyResolver { private readonly _hasteFS; private readonly _resolver; private readonly _snapshotResolver; constructor( resolver: default_2, hasteFS: IHasteFS, snapshotResolver: SnapshotResolver, ); resolve(file: string, options?: ResolveModuleConfig): Array<string>; resolveInverseModuleMap( paths: Set<string>, filter: (file: string) => boolean, options?: ResolveModuleConfig, ): Array<ResolvedModule>; resolveInverse( paths: Set<string>, filter: (file: string) => boolean, options?: ResolveModuleConfig, ): Array<string>; } export declare type ResolvedModule = { file: string; dependencies: Array<string>; }; export {};
// Type declarations for `human-signals`: lookup tables of OS signals keyed by
// name and by number, where each entry carries metadata (name, number,
// description, OS support, default action, forced flag, defining standard).
/** * Object whose keys are signal names and values are signal objects. */ export declare const signalsByName: { [signalName: string]: Signal } /** * Object whose keys are signal numbers and values are signal objects. */ export declare const signalsByNumber: { [signalNumber: string]: Signal } export declare type SignalAction = | 'terminate' | 'core' | 'ignore' | 'pause' | 'unpause' export declare type SignalStandard = | 'ansi' | 'posix' | 'bsd' | 'systemv' | 'other' export declare type Signal = { /** * Standard name of the signal, for example 'SIGINT'. */ name: string /** * Code number of the signal, for example 2. While most number are cross-platform, some are different between different OS. */ number: number /** * Human-friendly description for the signal, for example 'User interruption with CTRL-C'. */ description: string /** * Whether the current OS can handle this signal in Node.js using process.on(name, handler). The list of supported signals is OS-specific. */ supported: boolean /** * What is the default action for this signal when it is not handled. */ action: SignalAction /** * Whether the signal's default action cannot be prevented. This is true for SIGTERM, SIGKILL and SIGSTOP. */ forced: boolean /** * Which standard defined that signal. */ standard: SignalStandard }
// Type declarations for the `fsevents` macOS file-watching bindings: `watch`
// (two overloads — the second takes a numeric `since` argument) returns an
// async unwatch function; `getInfo` decodes a flags bitmask into an Info
// record; `constants` enumerates the event-flag bit values.
declare type Event = "created" | "cloned" | "modified" | "deleted" | "moved" | "root-changed" | "unknown"; declare type Type = "file" | "directory" | "symlink"; declare type FileChanges = { inode: boolean; finder: boolean; access: boolean; xattrs: boolean; }; declare type Info = { event: Event; path: string; type: Type; changes: FileChanges; flags: number; }; declare type WatchHandler = (path: string, flags: number, id: string) => void; export declare function watch(path: string, handler: WatchHandler): () => Promise<void>; export declare function watch(path: string, since: number, handler: WatchHandler): () => Promise<void>; export declare function getInfo(path: string, flags: number): Info; export declare const constants: { None: 0x00000000; MustScanSubDirs: 0x00000001; UserDropped: 0x00000002; KernelDropped: 0x00000004; EventIdsWrapped: 0x00000008; HistoryDone: 0x00000010; RootChanged: 0x00000020; Mount: 0x00000040; Unmount: 0x00000080; ItemCreated: 0x00000100; ItemRemoved: 0x00000200; ItemInodeMetaMod: 0x00000400; ItemRenamed: 0x00000800; ItemModified: 0x00001000; ItemFinderInfoMod: 0x00002000; ItemChangeOwner: 0x00004000; ItemXattrMod: 0x00008000; ItemIsFile: 0x00010000; ItemIsDir: 0x00020000; ItemIsSymlink: 0x00040000; ItemIsHardlink: 0x00100000; ItemIsLastHardlink: 0x00200000; OwnEvent: 0x00080000; ItemCloned: 0x00400000; }; export {};
// Type declarations for `jest-config`: config-file name/extension constants,
// default options, deprecation entries, and the entry points that load and
// normalize Jest configuration (readConfig, readConfigs, readInitialOptions,
// normalize, replaceRootDirInPath).
/** * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ import type {Config} from '@jest/types'; import type {DeprecatedOptions} from 'jest-validate'; declare type AllOptions = Config.ProjectConfig & Config.GlobalConfig; declare namespace constants { export { NODE_MODULES, DEFAULT_JS_PATTERN, PACKAGE_JSON, JEST_CONFIG_BASE_NAME, JEST_CONFIG_EXT_CJS, JEST_CONFIG_EXT_MJS, JEST_CONFIG_EXT_JS, JEST_CONFIG_EXT_TS, JEST_CONFIG_EXT_JSON, JEST_CONFIG_EXT_ORDER, }; } export {constants}; declare const DEFAULT_JS_PATTERN = '\\.[jt]sx?$'; export declare const defaults: Config.DefaultOptions; export declare const deprecationEntries: DeprecatedOptions; export declare const descriptions: { [key in keyof Config.InitialOptions]: string; }; export declare const isJSONString: ( text?: JSONString | string, ) => text is JSONString; declare const JEST_CONFIG_BASE_NAME = 'jest.config'; declare const JEST_CONFIG_EXT_CJS = '.cjs'; declare const JEST_CONFIG_EXT_JS = '.js'; declare const JEST_CONFIG_EXT_JSON = '.json'; declare const JEST_CONFIG_EXT_MJS = '.mjs'; declare const JEST_CONFIG_EXT_ORDER: readonly string[]; declare const JEST_CONFIG_EXT_TS = '.ts'; declare type JSONString = string & { readonly $$type: never; }; declare const NODE_MODULES: string; export declare function normalize( initialOptions: Config.InitialOptions, argv: Config.Argv, configPath?: string | null, projectIndex?: number, isProjectOptions?: boolean, ): Promise<{ hasDeprecationWarnings: boolean; options: AllOptions; }>; declare const PACKAGE_JSON = 'package.json'; declare type ReadConfig = { configPath: string | null | undefined; globalConfig: Config.GlobalConfig; hasDeprecationWarnings: boolean; projectConfig: Config.ProjectConfig; }; export declare function readConfig( argv: Config.Argv, packageRootOrConfig: string | Config.InitialOptions, skipArgvConfigOption?: boolean, 
parentConfigDirname?: string | null, projectIndex?: number, skipMultipleConfigError?: boolean, ): Promise<ReadConfig>; export declare function readConfigs( argv: Config.Argv, projectPaths: Array<string>, ): Promise<{ globalConfig: Config.GlobalConfig; configs: Array<Config.ProjectConfig>; hasDeprecationWarnings: boolean; }>; /** * Reads the jest config, without validating them or filling it out with defaults. * @param config The path to the file or serialized config. * @param param1 Additional options * @returns The raw initial config (not validated) */ export declare function readInitialOptions( config?: string, { packageRootOrConfig, parentConfigDirname, readFromCwd, skipMultipleConfigError, }?: ReadJestConfigOptions, ): Promise<{ config: Config.InitialOptions; configPath: string | null; }>; export declare interface ReadJestConfigOptions { /** * The package root or deserialized config (default is cwd) */ packageRootOrConfig?: string | Config.InitialOptions; /** * When the `packageRootOrConfig` contains config, this parameter should * contain the dirname of the parent config */ parentConfigDirname?: null | string; /** * Indicates whether or not to read the specified config file from disk. * When true, jest will read try to read config from the current working directory. * (default is false) */ readFromCwd?: boolean; /** * Indicates whether or not to ignore the error of jest finding multiple config files. * (default is false) */ skipMultipleConfigError?: boolean; } export declare const replaceRootDirInPath: ( rootDir: string, filePath: string, ) => string; export {};
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */
// Type declarations for `jest-regex-util`: helpers that escape paths/strings
// for safe embedding in regular expressions and replace path separators in
// regex strings.
// NOTE(review): the original emitted this license header twice in a row; the
// duplicate comment block was dropped (comment-only change).
export declare const escapePathForRegex: (dir: string) => string;
export declare const escapeStrForRegex: (string: string) => string;
export declare const replacePathSepForRegex: (string: string) => string;
export {};
// Public type surface of the `jest` package entry point: re-exports run/runCLI
// and scheduler/search helpers from `@jest/core` and `jest-cli`, plus a
// `Config` alias for `Config.InitialOptions`.
/** * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ import type {Config as Config_2} from '@jest/types'; import {createTestScheduler} from '@jest/core'; import {getVersion} from '@jest/core'; import {run} from 'jest-cli'; import {runCLI} from '@jest/core'; import {SearchSource} from '@jest/core'; export declare type Config = Config_2.InitialOptions; export {createTestScheduler}; export {getVersion}; export {run}; export {runCLI}; export {SearchSource}; export {};
// Type declarations for `expect`: the callable `Expect` type (combining
// matchers, `.not`/`.rejects`/`.resolves` chains, and asymmetric matchers),
// the AsymmetricMatcher base class, matcher state/utils, and the full
// `Matchers<R, T>` assertion interface.
// NOTE(review): the original chunking split the tokens `unknown` and
// `provided` across physical lines; they are re-joined here (whitespace-only
// change — all code tokens are unchanged).
/** * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ import type {EqualsFunction} from '@jest/expect-utils'; import type * as jestMatcherUtils from 'jest-matcher-utils'; import {Tester} from '@jest/expect-utils'; import {TesterContext} from '@jest/expect-utils'; export declare abstract class AsymmetricMatcher<T> implements AsymmetricMatcher_2 { protected sample: T; protected inverse: boolean; $$typeof: symbol; constructor(sample: T, inverse?: boolean); protected getMatcherContext(): MatcherContext; abstract asymmetricMatch(other: unknown): boolean; abstract toString(): string; getExpectedType?(): string; toAsymmetricMatcher?(): string; } declare type AsymmetricMatcher_2 = { asymmetricMatch(other: unknown): boolean; toString(): string; getExpectedType?(): string; toAsymmetricMatcher?(): string; }; export declare interface AsymmetricMatchers { any(sample: unknown): AsymmetricMatcher_2; anything(): AsymmetricMatcher_2; arrayContaining(sample: Array<unknown>): AsymmetricMatcher_2; closeTo(sample: number, precision?: number): AsymmetricMatcher_2; objectContaining(sample: Record<string, unknown>): AsymmetricMatcher_2; stringContaining(sample: string): AsymmetricMatcher_2; stringMatching(sample: string | RegExp): AsymmetricMatcher_2; } export declare type AsyncExpectationResult = Promise<SyncExpectationResult>; export declare interface BaseExpect { assertions(numberOfAssertions: number): void; addEqualityTesters(testers: Array<Tester>): void; extend(matchers: MatchersObject): void; extractExpectedAssertionsErrors(): ExpectedAssertionsErrors; getState(): MatcherState; hasAssertions(): void; setState(state: Partial<MatcherState>): void; } export declare type Expect = { <T = unknown>(actual: T): Matchers<void, T> & Inverse<Matchers<void, T>> & PromiseMatchers<T>; } & BaseExpect & AsymmetricMatchers & Inverse<Omit<AsymmetricMatchers, 'any' | 
'anything'>>; declare const expect: Expect; export default expect; export {expect}; export declare type ExpectationResult = | SyncExpectationResult | AsyncExpectationResult; declare type ExpectedAssertionsErrors = Array<{ actual: string | number; error: Error; expected: string; }>; declare type Inverse<Matchers> = { /** * Inverse next matcher. If you know how to test something, `.not` lets you test its opposite. */ not: Matchers; }; export declare class JestAssertionError extends Error { matcherResult?: Omit<SyncExpectationResult, 'message'> & { message: string; }; } export declare type MatcherContext = MatcherUtils & Readonly<MatcherState>; export declare type MatcherFunction<Expected extends Array<unknown> = []> = MatcherFunctionWithContext<MatcherContext, Expected>; export declare type MatcherFunctionWithContext< Context extends MatcherContext = MatcherContext, Expected extends Array<any> = [] /** TODO should be: extends Array<unknown> = [] */, > = ( this: Context, actual: unknown, ...expected: Expected ) => ExpectationResult; export declare interface Matchers<R extends void | Promise<void>, T = unknown> { /** * Ensures the last call to a mock function was provided specific args. */ lastCalledWith(...expected: Array<unknown>): R; /** * Ensure that the last call to a mock function has returned a specified value. */ lastReturnedWith(expected?: unknown): R; /** * Ensure that a mock function is called with specific arguments on an Nth call. */ nthCalledWith(nth: number, ...expected: Array<unknown>): R; /** * Ensure that the nth call to a mock function has returned a specified value. */ nthReturnedWith(nth: number, expected?: unknown): R; /** * Checks that a value is what you expect. It calls `Object.is` to compare values. * Don't use `toBe` with floating-point numbers. */ toBe(expected: unknown): R; /** * Ensures that a mock function is called. */ toBeCalled(): R; /** * Ensures that a mock function is called an exact number of times. */ toBeCalledTimes(expected: number): R; /** * Ensure that a mock function is called with specific arguments. */ toBeCalledWith(...expected: Array<unknown>): R; /** * Using exact equality with floating point numbers is a bad idea. * Rounding means that intuitive things fail. * The default for `precision` is 2. */ toBeCloseTo(expected: number, precision?: number): R; /** * Ensure that a variable is not undefined. */ toBeDefined(): R; /** * When you don't care what a value is, you just want to * ensure a value is false in a boolean context. */ toBeFalsy(): R; /** * For comparing floating point numbers. */ toBeGreaterThan(expected: number | bigint): R; /** * For comparing floating point numbers. */ toBeGreaterThanOrEqual(expected: number | bigint): R; /** * Ensure that an object is an instance of a class. * This matcher uses `instanceof` underneath. */ toBeInstanceOf(expected: unknown): R; /** * For comparing floating point numbers. */ toBeLessThan(expected: number | bigint): R; /** * For comparing floating point numbers. */ toBeLessThanOrEqual(expected: number | bigint): R; /** * Used to check that a variable is NaN. */ toBeNaN(): R; /** * This is the same as `.toBe(null)` but the error messages are a bit nicer. * So use `.toBeNull()` when you want to check that something is null. */ toBeNull(): R; /** * Use when you don't care what a value is, you just want to ensure a value * is true in a boolean context. In JavaScript, there are six falsy values: * `false`, `0`, `''`, `null`, `undefined`, and `NaN`. Everything else is truthy. */ toBeTruthy(): R; /** * Used to check that a variable is undefined. */ toBeUndefined(): R; /** * Used when you want to check that an item is in a list. * For testing the items in the list, this uses `===`, a strict equality check. 
*/ toContain(expected: unknown): R; /** * Used when you want to check that an item is in a list. * For testing the items in the list, this matcher recursively checks the * equality of all fields, rather than checking for object identity. */ toContainEqual(expected: unknown): R; /** * Used when you want to check that two objects have the same value. * This matcher recursively checks the equality of all fields, rather than checking for object identity. */ toEqual(expected: unknown): R; /** * Ensures that a mock function is called. */ toHaveBeenCalled(): R; /** * Ensures that a mock function is called an exact number of times. */ toHaveBeenCalledTimes(expected: number): R; /** * Ensure that a mock function is called with specific arguments. */ toHaveBeenCalledWith(...expected: Array<unknown>): R; /** * Ensure that a mock function is called with specific arguments on an Nth call. */ toHaveBeenNthCalledWith(nth: number, ...expected: Array<unknown>): R; /** * If you have a mock function, you can use `.toHaveBeenLastCalledWith` * to test what arguments it was last called with. */ toHaveBeenLastCalledWith(...expected: Array<unknown>): R; /** * Use to test the specific value that a mock function last returned. * If the last call to the mock function threw an error, then this matcher will fail * no matter what value you provided as the expected return value. */ toHaveLastReturnedWith(expected?: unknown): R; /** * Used to check that an object has a `.length` property * and it is set to a certain numeric value. */ toHaveLength(expected: number): R; /** * Use to test the specific value that a mock function returned for the nth call. * If the nth call to the mock function threw an error, then this matcher will fail * no matter what value you provided as the expected return value. */ toHaveNthReturnedWith(nth: number, expected?: unknown): R; /** * Use to check if property at provided reference keyPath exists for an object. * For checking deeply nested properties in an object you may use dot notation or an array containing * the keyPath for deep references. * * Optionally, you can provide a value to check if it's equal to the value present at keyPath * on the target object. This matcher uses 'deep equality' (like `toEqual()`) and recursively checks * the equality of all fields. * * @example * * expect(houseForSale).toHaveProperty('kitchen.area', 20); */ toHaveProperty( expectedPath: string | Array<string>, expectedValue?: unknown, ): R; /** * Use to test that the mock function successfully returned (i.e., did not throw an error) at least one time */ toHaveReturned(): R; /** * Use to ensure that a mock function returned successfully (i.e., did not throw an error) an exact number of times. * Any calls to the mock function that throw an error are not counted toward the number of times the function returned. */ toHaveReturnedTimes(expected: number): R; /** * Use to ensure that a mock function returned a specific value. */ toHaveReturnedWith(expected?: unknown): R; /** * Check that a string matches a regular expression. */ toMatch(expected: string | RegExp): R; /** * Used to check that a JavaScript object matches a subset of the properties of an object */ toMatchObject( expected: Record<string, unknown> | Array<Record<string, unknown>>, ): R; /** * Ensure that a mock function has returned (as opposed to thrown) at least once. */ toReturn(): R; /** * Ensure that a mock function has returned (as opposed to thrown) a specified number of times. */ toReturnTimes(expected: number): R; /** * Ensure that a mock function has returned a specified value at least once. */ toReturnWith(expected?: unknown): R; /** * Use to test that objects have the same types as well as structure. 
*/ toStrictEqual(expected: unknown): R; /** * Used to test that a function throws when it is called. */ toThrow(expected?: unknown): R; /** * If you want to test that a specific error is thrown inside a function. */ toThrowError(expected?: unknown): R; } declare type MatchersObject = { [name: string]: RawMatcherFn; }; export declare interface MatcherState { assertionCalls: number; currentConcurrentTestName?: () => string | undefined; currentTestName?: string; error?: Error; expand?: boolean; expectedAssertionsNumber: number | null; expectedAssertionsNumberError?: Error; isExpectingAssertions: boolean; isExpectingAssertionsError?: Error; isNot?: boolean; numPassingAsserts: number; promise?: string; suppressedErrors: Array<Error>; testPath?: string; } export declare interface MatcherUtils { customTesters: Array<Tester>; dontThrow(): void; equals: EqualsFunction; utils: typeof jestMatcherUtils & { iterableEquality: Tester; subsetEquality: Tester; }; } declare type PromiseMatchers<T = unknown> = { /** * Unwraps the reason of a rejected promise so any other matcher can be chained. * If the promise is fulfilled the assertion fails. */ rejects: Matchers<Promise<void>, T> & Inverse<Matchers<Promise<void>, T>>; /** * Unwraps the value of a fulfilled promise so any other matcher can be chained. * If the promise is rejected the assertion fails. */ resolves: Matchers<Promise<void>, T> & Inverse<Matchers<Promise<void>, T>>; }; declare type RawMatcherFn<Context extends MatcherContext = MatcherContext> = { (this: Context, actual: any, ...expected: Array<any>): ExpectationResult; }; export declare type SyncExpectationResult = { pass: boolean; message(): string; }; export {Tester}; export {TesterContext}; export {};