Dataset columns: id (int64, values 0 – 3.78k) · code (string, 13 – 37.9k characters) · declarations (string, 16 – 64.6k characters). Rows 100 – 118 follow; each row lists its id, its code field, and its declarations field on separate lines.
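For orientation, a minimal TypeScript sketch of the row shape implied by the columns above; the interface name is hypothetical, the field names mirror the column headers, and the noted ranges are observed bounds rather than constraints:

// Hypothetical row shape for this excerpt (names taken from the column headers above).
interface DatasetRow {
  id: number // int64; observed values 0 – 3.78k
  code: string // one source function or class, 13 – 37.9k characters
  declarations: string // a type or interface declaration referenced by `code`, 16 – 64.6k characters
}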
id: 100
function resolveBlockSeq( { composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bs: BlockSequence, onError: ComposeErrorHandler ) { const seq = new YAMLSeq(ctx.schema) if (ctx.atRoot) ctx.atRoot = false let offset = bs.offset let commentEnd: number | null = null for (const { start, value } of bs.items) { const props = resolveProps(start, { indicator: 'seq-item-ind', next: value, offset, onError, startOnNewline: true }) if (!props.found) { if (props.anchor || props.tag || value) { if (value && value.type === 'block-seq') onError( props.end, 'BAD_INDENT', 'All sequence items must start at the same column' ) else onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator') } else { commentEnd = props.end if (props.comment) seq.comment = props.comment continue } } const node = value ? composeNode(ctx, value, props, onError) : composeEmptyNode(ctx, props.end, start, null, props, onError) if (ctx.schema.compat) flowIndentCheck(bs.indent, value, onError) offset = node.range[2] seq.items.push(node) } seq.range = [bs.offset, offset, commentEnd ?? offset] return seq as YAMLSeq.Parsed }
type ComposeNode = typeof CN
id: 101
[code identical to id 100 (resolveBlockSeq)]
interface ComposeContext { atRoot: boolean directives: Directives options: Readonly<Required<Omit<ParseOptions, 'lineCounter'>>> schema: Readonly<Schema> }
id: 102
[code identical to id 100 (resolveBlockSeq)]
type ComposeErrorHandler = ( source: ErrorSource, code: ErrorCode, message: string, warning?: boolean ) => void
id: 103
[code identical to id 100 (resolveBlockSeq)]
interface BlockSequence { type: 'block-seq' offset: number indent: number items: Array<{ start: SourceToken[] key?: never sep?: never value?: Token }> }
id: 104
function resolveBlockMap( { composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bm: BlockMap, onError: ComposeErrorHandler ) { const map = new YAMLMap<ParsedNode, ParsedNode>(ctx.schema) if (ctx.atRoot) ctx.atRoot = false let offset = bm.offset let commentEnd: number | null = null for (const collItem of bm.items) { const { start, key, sep, value } = collItem // key properties const keyProps = resolveProps(start, { indicator: 'explicit-key-ind', next: key ?? sep?.[0], offset, onError, startOnNewline: true }) const implicitKey = !keyProps.found if (implicitKey) { if (key) { if (key.type === 'block-seq') onError( offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key' ) else if ('indent' in key && key.indent !== bm.indent) onError(offset, 'BAD_INDENT', startColMsg) } if (!keyProps.anchor && !keyProps.tag && !sep) { commentEnd = keyProps.end if (keyProps.comment) { if (map.comment) map.comment += '\n' + keyProps.comment else map.comment = keyProps.comment } continue } if (keyProps.hasNewlineAfterProp || containsNewline(key)) { onError( key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line' ) } } else if (keyProps.found?.indent !== bm.indent) { onError(offset, 'BAD_INDENT', startColMsg) } // key value const keyStart = keyProps.end const keyNode = key ? composeNode(ctx, key, keyProps, onError) : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError) if (ctx.schema.compat) flowIndentCheck(bm.indent, key, onError) if (mapIncludes(ctx, map.items, keyNode)) onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique') // value properties const valueProps = resolveProps(sep ?? [], { indicator: 'map-value-ind', next: value, offset: keyNode.range[2], onError, startOnNewline: !key || key.type === 'block-scalar' }) offset = valueProps.end if (valueProps.found) { if (implicitKey) { if (value?.type === 'block-map' && !valueProps.hasNewline) onError( offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings' ) if ( ctx.options.strict && keyProps.start < valueProps.found.offset - 1024 ) onError( keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key' ) } // value value const valueNode = value ? composeNode(ctx, value, valueProps, onError) : composeEmptyNode(ctx, offset, sep, null, valueProps, onError) if (ctx.schema.compat) flowIndentCheck(bm.indent, value, onError) offset = valueNode.range[2] const pair = new Pair(keyNode, valueNode) if (ctx.options.keepSourceTokens) pair.srcToken = collItem map.items.push(pair) } else { // key with no value if (implicitKey) onError( keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values' ) if (valueProps.comment) { if (keyNode.comment) keyNode.comment += '\n' + valueProps.comment else keyNode.comment = valueProps.comment } const pair: Pair<ParsedNode, ParsedNode> = new Pair(keyNode) if (ctx.options.keepSourceTokens) pair.srcToken = collItem map.items.push(pair) } } if (commentEnd && commentEnd < offset) onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content') map.range = [bm.offset, offset, commentEnd ?? offset] return map as YAMLMap.Parsed }
interface BlockMap { type: 'block-map' offset: number indent: number items: Array< | { start: SourceToken[]; key?: never; sep?: never; value?: never } | { start: SourceToken[] key: Token | null sep: SourceToken[] value?: Token } > }
id: 105
[code identical to id 104 (resolveBlockMap)]
type ComposeNode = typeof CN
id: 106
[code identical to id 104 (resolveBlockMap)]
interface ComposeContext { atRoot: boolean directives: Directives options: Readonly<Required<Omit<ParseOptions, 'lineCounter'>>> schema: Readonly<Schema> }
id: 107
[code identical to id 104 (resolveBlockMap)]
type ComposeErrorHandler = ( source: ErrorSource, code: ErrorCode, message: string, warning?: boolean ) => void
id: 108
function resolveFlowCollection( { composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, fc: FlowCollection, onError: ComposeErrorHandler ) { const isMap = fc.start.source === '{' const fcName = isMap ? 'flow map' : 'flow sequence' const coll = isMap ? (new YAMLMap(ctx.schema) as YAMLMap.Parsed) : (new YAMLSeq(ctx.schema) as YAMLSeq.Parsed) coll.flow = true const atRoot = ctx.atRoot if (atRoot) ctx.atRoot = false let offset = fc.offset + fc.start.source.length for (let i = 0; i < fc.items.length; ++i) { const collItem = fc.items[i] const { start, key, sep, value } = collItem const props = resolveProps(start, { flow: fcName, indicator: 'explicit-key-ind', next: key ?? sep?.[0], offset, onError, startOnNewline: false }) if (!props.found) { if (!props.anchor && !props.tag && !sep && !value) { if (i === 0 && props.comma) onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`) else if (i < fc.items.length - 1) onError( props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}` ) if (props.comment) { if (coll.comment) coll.comment += '\n' + props.comment else coll.comment = props.comment } offset = props.end continue } if (!isMap && ctx.options.strict && containsNewline(key)) onError( key as Token, // checked by containsNewline() 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line' ) } if (i === 0) { if (props.comma) onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`) } else { if (!props.comma) onError( props.start, 'MISSING_CHAR', `Missing , between ${fcName} items` ) if (props.comment) { let prevItemComment = '' loop: for (const st of start) { switch (st.type) { case 'comma': case 'space': break case 'comment': prevItemComment = st.source.substring(1) break loop default: break loop } } if (prevItemComment) { let prev = coll.items[coll.items.length - 1] if (isPair(prev)) prev = prev.value ?? prev.key if (prev.comment) prev.comment += '\n' + prevItemComment else prev.comment = prevItemComment props.comment = props.comment.substring(prevItemComment.length + 1) } } } if (!isMap && !sep && !props.found) { // item is a value in a seq // → key & sep are empty, start does not include ? or : const valueNode = value ? composeNode(ctx, value, props, onError) : composeEmptyNode(ctx, props.end, sep, null, props, onError) ;(coll as YAMLSeq).items.push(valueNode) offset = valueNode.range[2] if (isBlock(value)) onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg) } else { // item is a key+value pair // key value const keyStart = props.end const keyNode = key ? composeNode(ctx, key, props, onError) : composeEmptyNode(ctx, keyStart, start, null, props, onError) if (isBlock(key)) onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg) // value properties const valueProps = resolveProps(sep ?? 
[], { flow: fcName, indicator: 'map-value-ind', next: value, offset: keyNode.range[2], onError, startOnNewline: false }) if (valueProps.found) { if (!isMap && !props.found && ctx.options.strict) { if (sep) for (const st of sep) { if (st === valueProps.found) break if (st.type === 'newline') { onError( st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line' ) break } } if (props.start < valueProps.found.offset - 1024) onError( valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key' ) } } else if (value) { if ('source' in value && value.source && value.source[0] === ':') onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`) else onError( valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items` ) } // value value const valueNode = value ? composeNode(ctx, value, valueProps, onError) : valueProps.found ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) : null if (valueNode) { if (isBlock(value)) onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg) } else if (valueProps.comment) { if (keyNode.comment) keyNode.comment += '\n' + valueProps.comment else keyNode.comment = valueProps.comment } const pair = new Pair(keyNode, valueNode) if (ctx.options.keepSourceTokens) pair.srcToken = collItem if (isMap) { const map = coll as YAMLMap.Parsed if (mapIncludes(ctx, map.items, keyNode)) onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique') map.items.push(pair) } else { const map = new YAMLMap(ctx.schema) map.flow = true map.items.push(pair) ;(coll as YAMLSeq).items.push(map) } offset = valueNode ? valueNode.range[2] : valueProps.end } } const expectedEnd = isMap ? '}' : ']' const [ce, ...ee] = fc.end let cePos = offset if (ce && ce.source === expectedEnd) cePos = ce.offset + ce.source.length else { const name = fcName[0].toUpperCase() + fcName.substring(1) const msg = atRoot ? `${name} must end with a ${expectedEnd}` : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}` onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg) if (ce && ce.source.length !== 1) ee.unshift(ce) } if (ee.length > 0) { const end = resolveEnd(ee, cePos, ctx.options.strict, onError) if (end.comment) { if (coll.comment) coll.comment += '\n' + end.comment else coll.comment = end.comment } coll.range = [fc.offset, cePos, end.offset] } else { coll.range = [fc.offset, cePos, cePos] } return coll }
type ComposeNode = typeof CN
id: 109
[code identical to id 108 (resolveFlowCollection)]
interface ComposeContext { atRoot: boolean directives: Directives options: Readonly<Required<Omit<ParseOptions, 'lineCounter'>>> schema: Readonly<Schema> }
id: 110
[code identical to id 108 (resolveFlowCollection)]
type ComposeErrorHandler = ( source: ErrorSource, code: ErrorCode, message: string, warning?: boolean ) => void
id: 111
[code identical to id 108 (resolveFlowCollection)]
interface FlowCollection { type: 'flow-collection' offset: number indent: number start: SourceToken items: CollectionItem[] end: SourceToken[] }
id: 112
function resolveBlockScalar( scalar: BlockScalar, strict: boolean, onError: ComposeErrorHandler ): { value: string type: Scalar.BLOCK_FOLDED | Scalar.BLOCK_LITERAL | null comment: string range: Range } { const start = scalar.offset const header = parseBlockScalarHeader(scalar, strict, onError) if (!header) return { value: '', type: null, comment: '', range: [start, start, start] } const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL const lines = scalar.source ? splitLines(scalar.source) : [] // determine the end of content & start of chomping let chompStart = lines.length for (let i = lines.length - 1; i >= 0; --i) { const content = lines[i][1] if (content === '' || content === '\r') chompStart = i else break } // shortcut for empty contents if (chompStart === 0) { const value = header.chomp === '+' && lines.length > 0 ? '\n'.repeat(Math.max(1, lines.length - 1)) : '' let end = start + header.length if (scalar.source) end += scalar.source.length return { value, type, comment: header.comment, range: [start, end, end] } } // find the indentation level to trim from start let trimIndent = scalar.indent + header.indent let offset = scalar.offset + header.length let contentStart = 0 for (let i = 0; i < chompStart; ++i) { const [indent, content] = lines[i] if (content === '' || content === '\r') { if (header.indent === 0 && indent.length > trimIndent) trimIndent = indent.length } else { if (indent.length < trimIndent) { const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator' onError(offset + indent.length, 'MISSING_CHAR', message) } if (header.indent === 0) trimIndent = indent.length contentStart = i break } offset += indent.length + content.length + 1 } // include trailing more-indented empty lines in content for (let i = lines.length - 1; i >= chompStart; --i) { if (lines[i][0].length > trimIndent) chompStart = i + 1 } let value = '' let sep = '' let prevMoreIndented = false // leading whitespace is kept intact for (let i = 0; i < contentStart; ++i) value += lines[i][0].slice(trimIndent) + '\n' for (let i = contentStart; i < chompStart; ++i) { let [indent, content] = lines[i] offset += indent.length + content.length + 1 const crlf = content[content.length - 1] === '\r' if (crlf) content = content.slice(0, -1) /* istanbul ignore if already caught in lexer */ if (content && indent.length < trimIndent) { const src = header.indent ? 'explicit indentation indicator' : 'first line' const message = `Block scalar lines must not be less indented than their ${src}` onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message) indent = '' } if (type === Scalar.BLOCK_LITERAL) { value += sep + indent.slice(trimIndent) + content sep = '\n' } else if (indent.length > trimIndent || content[0] === '\t') { // more-indented content within a folded block if (sep === ' ') sep = '\n' else if (!prevMoreIndented && sep === '\n') sep = '\n\n' value += sep + indent.slice(trimIndent) + content sep = '\n' prevMoreIndented = true } else if (content === '') { // empty line if (sep === '\n') value += '\n' else sep = '\n' } else { value += sep + content sep = ' ' prevMoreIndented = false } } switch (header.chomp) { case '-': break case '+': for (let i = chompStart; i < lines.length; ++i) value += '\n' + lines[i][0].slice(trimIndent) if (value[value.length - 1] !== '\n') value += '\n' break default: value += '\n' } const end = start + header.length + scalar.source.length return { value, type, comment: header.comment, range: [start, end, end] } }
interface BlockScalar { type: 'block-scalar' offset: number indent: number props: Token[] source: string }
id: 113
[code identical to id 112 (resolveBlockScalar)]
type ComposeErrorHandler = ( source: ErrorSource, code: ErrorCode, message: string, warning?: boolean ) => void
id: 114
function parseBlockScalarHeader( { offset, props }: BlockScalar, strict: boolean, onError: ComposeErrorHandler ) { /* istanbul ignore if should not happen */ if (props[0].type !== 'block-scalar-header') { onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found') return null } const { source } = props[0] const mode = source[0] as '>' | '|' let indent = 0 let chomp: '' | '-' | '+' = '' let error = -1 for (let i = 1; i < source.length; ++i) { const ch = source[i] if (!chomp && (ch === '-' || ch === '+')) chomp = ch else { const n = Number(ch) if (!indent && n) indent = n else if (error === -1) error = offset + i } } if (error !== -1) onError( error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}` ) let hasSpace = false let comment = '' let length = source.length for (let i = 1; i < props.length; ++i) { const token = props[i] switch (token.type) { case 'space': hasSpace = true // fallthrough case 'newline': length += token.source.length break case 'comment': if (strict && !hasSpace) { const message = 'Comments must be separated from other tokens by white space characters' onError(token, 'MISSING_CHAR', message) } length += token.source.length comment = token.source.substring(1) break case 'error': onError(token, 'UNEXPECTED_TOKEN', token.message) length += token.source.length break /* istanbul ignore next should not happen */ default: { const message = `Unexpected token in block scalar header: ${token.type}` onError(token, 'UNEXPECTED_TOKEN', message) const ts = (token as any).source if (ts && typeof ts === 'string') length += ts.length } } } return { mode, indent, chomp, comment, length } }
interface BlockScalar { type: 'block-scalar' offset: number indent: number props: Token[] source: string }
id: 115
[code identical to id 114 (parseBlockScalarHeader)]
type ComposeErrorHandler = ( source: ErrorSource, code: ErrorCode, message: string, warning?: boolean ) => void
id: 116
function applyReviver( reviver: Reviver, obj: unknown, key: unknown, val: any ) { if (val && typeof val === 'object') { if (Array.isArray(val)) { for (let i = 0, len = val.length; i < len; ++i) { const v0 = val[i] const v1 = applyReviver(reviver, val, String(i), v0) if (v1 === undefined) delete val[i] else if (v1 !== v0) val[i] = v1 } } else if (val instanceof Map) { for (const k of Array.from(val.keys())) { const v0 = val.get(k) const v1 = applyReviver(reviver, val, k, v0) if (v1 === undefined) val.delete(k) else if (v1 !== v0) val.set(k, v1) } } else if (val instanceof Set) { for (const v0 of Array.from(val)) { const v1 = applyReviver(reviver, val, v0, v0) if (v1 === undefined) val.delete(v0) else if (v1 !== v0) { val.delete(v0) val.add(v1) } } } else { for (const [k, v0] of Object.entries(val)) { const v1 = applyReviver(reviver, val, k, v0) if (v1 === undefined) delete val[k] else if (v1 !== v0) val[k] = v1 } } } return reviver.call(obj, key, val) }
type Reviver = (key: unknown, value: unknown) => unknown
id: 117
function createNodeAnchors(doc: Document, prefix: string) { const aliasObjects: unknown[] = [] const sourceObjects: CreateNodeContext['sourceObjects'] = new Map() let prevAnchors: Set<string> | null = null return { onAnchor: (source: unknown) => { aliasObjects.push(source) if (!prevAnchors) prevAnchors = anchorNames(doc) const anchor = findNewAnchor(prefix, prevAnchors) prevAnchors.add(anchor) return anchor }, /** * With circular references, the source node is only resolved after all * of its child nodes are. This is why anchors are set only after all of * the nodes have been created. */ setAnchors: () => { for (const source of aliasObjects) { const ref = sourceObjects.get(source) if ( typeof ref === 'object' && ref.anchor && (isScalar(ref.node) || isCollection(ref.node)) ) { ref.node.anchor = ref.anchor } else { const error = new Error( 'Failed to resolve repeated object (this should not happen)' ) as Error & { source: unknown } error.source = source throw error } } }, sourceObjects } }
interface Document { type: 'document' offset: number start: SourceToken[] value?: Token end?: SourceToken[] }
id: 118
[code identical to id 117 (createNodeAnchors)]
class Document<T extends Node = Node> { declare readonly [NODE_TYPE]: symbol /** A comment before this Document */ commentBefore: string | null = null /** A comment immediately after this Document */ comment: string | null = null /** The document contents. */ contents: T | null directives?: Directives /** Errors encountered during parsing. */ errors: YAMLError[] = [] options: Required< Omit< ParseOptions & DocumentOptions, '_directives' | 'lineCounter' | 'version' > > /** * The `[start, value-end, node-end]` character offsets for the part of the * source parsed into this document (undefined if not parsed). The `value-end` * and `node-end` positions are themselves not included in their respective * ranges. */ declare range?: Range // TS can't figure out that setSchema() will set this, or throw /** The schema used with the document. Use `setSchema()` to change. */ declare schema: Schema /** Warnings encountered during parsing. */ warnings: YAMLWarning[] = [] /** * @param value - The initial value for the document, which will be wrapped * in a Node container. */ constructor( value?: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions ) constructor( value: any, replacer: null | Replacer, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions ) constructor( value?: unknown, replacer?: | Replacer | (DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions) | null, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions ) { Object.defineProperty(this, NODE_TYPE, { value: DOC }) let _replacer: Replacer | null = null if (typeof replacer === 'function' || Array.isArray(replacer)) { _replacer = replacer } else if (options === undefined && replacer) { options = replacer replacer = undefined } const opt = Object.assign( { intAsBigInt: false, keepSourceTokens: false, logLevel: 'warn', prettyErrors: true, strict: true, uniqueKeys: true, version: '1.2' }, options ) this.options = opt let { version } = opt if (options?._directives) { this.directives = options._directives.atDocument() if (this.directives.yaml.explicit) version = this.directives.yaml.version } else this.directives = new Directives({ version }) this.setSchema(version, options) if (value === undefined) this.contents = null else { this.contents = this.createNode(value, _replacer, options) as unknown as T } } /** * Create a deep copy of this Document and its contents. * * Custom Node values that inherit from `Object` still refer to their original instances. */ clone(): Document<T> { const copy: Document<T> = Object.create(Document.prototype, { [NODE_TYPE]: { value: DOC } }) copy.commentBefore = this.commentBefore copy.comment = this.comment copy.errors = this.errors.slice() copy.warnings = this.warnings.slice() copy.options = Object.assign({}, this.options) if (this.directives) copy.directives = this.directives.clone() copy.schema = this.schema.clone() copy.contents = isNode(this.contents) ? (this.contents.clone(copy.schema) as unknown as T) : this.contents if (this.range) copy.range = this.range.slice() as Document['range'] return copy } /** Adds a value to the document. */ add(value: any) { if (assertCollection(this.contents)) this.contents.add(value) } /** Adds a value to the document. */ addIn(path: Iterable<unknown>, value: unknown) { if (assertCollection(this.contents)) this.contents.addIn(path, value) } /** * Create a new `Alias` node, ensuring that the target `node` has the required anchor. * * If `node` already has an anchor, `name` is ignored. 
* Otherwise, the `node.anchor` value will be set to `name`, * or if an anchor with that name is already present in the document, * `name` will be used as a prefix for a new unique anchor. * If `name` is undefined, the generated anchor will use 'a' as a prefix. */ createAlias(node: Scalar | YAMLMap | YAMLSeq, name?: string): Alias { if (!node.anchor) { const prev = anchorNames(this) node.anchor = // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing !name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name } return new Alias(node.anchor) } /** * Convert any value into a `Node` using the current schema, recursively * turning objects into collections. */ createNode<T = unknown>(value: T, options?: CreateNodeOptions): NodeType<T> createNode<T = unknown>( value: T, replacer: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions ): NodeType<T> createNode( value: unknown, replacer?: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions ): Node { let _replacer: Replacer | undefined = undefined if (typeof replacer === 'function') { value = replacer.call({ '': value }, '', value) _replacer = replacer } else if (Array.isArray(replacer)) { const keyToStr = (v: unknown) => typeof v === 'number' || v instanceof String || v instanceof Number const asStr = replacer.filter(keyToStr).map(String) if (asStr.length > 0) replacer = replacer.concat(asStr) _replacer = replacer } else if (options === undefined && replacer) { options = replacer replacer = undefined } const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {} const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors( this, // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing anchorPrefix || 'a' ) const ctx: CreateNodeContext = { aliasDuplicateObjects: aliasDuplicateObjects ?? true, keepUndefined: keepUndefined ?? false, onAnchor, onTagObj, replacer: _replacer, schema: this.schema, sourceObjects } const node = createNode(value, tag, ctx) if (flow && isCollection(node)) node.flow = true setAnchors() return node } /** * Convert a key and a value into a `Pair` using the current schema, * recursively wrapping all values as `Scalar` or `Collection` nodes. */ createPair<K extends Node = Node, V extends Node = Node>( key: unknown, value: unknown, options: CreateNodeOptions = {} ) { const k = this.createNode(key, null, options) as K const v = this.createNode(value, null, options) as V return new Pair(k, v) } /** * Removes a value from the document. * @returns `true` if the item was found and removed. */ delete(key: unknown): boolean { return assertCollection(this.contents) ? this.contents.delete(key) : false } /** * Removes a value from the document. * @returns `true` if the item was found and removed. */ deleteIn(path: Iterable<unknown> | null): boolean { if (isEmptyPath(path)) { if (this.contents == null) return false this.contents = null return true } return assertCollection(this.contents) ? this.contents.deleteIn(path) : false } /** * Returns item at `key`, or `undefined` if not found. By default unwraps * scalar values from their surrounding node; to disable set `keepScalar` to * `true` (collections are always returned intact). */ get(key: unknown, keepScalar?: boolean): unknown { return isCollection(this.contents) ? this.contents.get(key, keepScalar) : undefined } /** * Returns item at `path`, or `undefined` if not found. 
By default unwraps * scalar values from their surrounding node; to disable set `keepScalar` to * `true` (collections are always returned intact). */ getIn(path: Iterable<unknown> | null, keepScalar?: boolean): unknown { if (isEmptyPath(path)) return !keepScalar && isScalar(this.contents) ? this.contents.value : this.contents return isCollection(this.contents) ? this.contents.getIn(path, keepScalar) : undefined } /** * Checks if the document includes a value with the key `key`. */ has(key: unknown): boolean { return isCollection(this.contents) ? this.contents.has(key) : false } /** * Checks if the document includes a value at `path`. */ hasIn(path: Iterable<unknown> | null): boolean { if (isEmptyPath(path)) return this.contents !== undefined return isCollection(this.contents) ? this.contents.hasIn(path) : false } /** * Sets a value in this document. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. */ set(key: any, value: unknown): void { if (this.contents == null) { this.contents = collectionFromPath( this.schema, [key], value ) as unknown as T } else if (assertCollection(this.contents)) { this.contents.set(key, value) } } /** * Sets a value in this document. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. */ setIn(path: Iterable<unknown> | null, value: unknown): void { if (isEmptyPath(path)) this.contents = value as T else if (this.contents == null) { this.contents = collectionFromPath( this.schema, Array.from(path), value ) as unknown as T } else if (assertCollection(this.contents)) { this.contents.setIn(path, value) } } /** * Change the YAML version and schema used by the document. * A `null` version disables support for directives, explicit tags, anchors, and aliases. * It also requires the `schema` option to be given as a `Schema` instance value. * * Overrides all previously set schema options. */ setSchema( version: '1.1' | '1.2' | 'next' | null, options: SchemaOptions = {} ) { if (typeof version === 'number') version = String(version) as '1.1' | '1.2' let opt: (SchemaOptions & { schema: string }) | null switch (version) { case '1.1': if (this.directives) this.directives.yaml.version = '1.1' else this.directives = new Directives({ version: '1.1' }) opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' } break case '1.2': case 'next': if (this.directives) this.directives.yaml.version = version else this.directives = new Directives({ version }) opt = { merge: false, resolveKnownTags: true, schema: 'core' } break case null: if (this.directives) delete this.directives opt = null break default: { const sv = JSON.stringify(version) throw new Error( `Expected '1.1', '1.2' or null as first argument, but found: ${sv}` ) } } // Not using `instanceof Schema` to allow for duck typing if (options.schema instanceof Object) this.schema = options.schema else if (opt) this.schema = new Schema(Object.assign(opt, options)) else throw new Error( `With a null YAML version, the { schema: Schema } option is required` ) } /** A plain JavaScript representation of the document `contents`. */ toJS(opt?: ToJSOptions & { [ignored: string]: unknown }): any // json & jsonArg are only used from toJSON() toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver }: ToJSOptions & { json?: boolean; jsonArg?: string | null } = {}): any { const ctx: ToJSContext = { anchors: new Map(), doc: this, keep: !json, mapAsMap: mapAsMap === true, mapKeyWarned: false, maxAliasCount: typeof maxAliasCount === 'number' ? 
maxAliasCount : 100, stringify } const res = toJS(this.contents, jsonArg ?? '', ctx) if (typeof onAnchor === 'function') for (const { count, res } of ctx.anchors.values()) onAnchor(res, count) return typeof reviver === 'function' ? applyReviver(reviver, { '': res }, '', res) : res } /** * A JSON representation of the document `contents`. * * @param jsonArg Used by `JSON.stringify` to indicate the array index or * property name. */ toJSON(jsonArg?: string | null, onAnchor?: ToJSOptions['onAnchor']): any { return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }) } /** A YAML representation of the document. */ toString(options: ToStringOptions = {}): string { if (this.errors.length > 0) throw new Error('Document with errors cannot be stringified') if ( 'indent' in options && (!Number.isInteger(options.indent) || Number(options.indent) <= 0) ) { const s = JSON.stringify(options.indent) throw new Error(`"indent" option must be a positive integer, not ${s}`) } return stringifyDocument(this, options) } }
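A minimal sketch of the behaviour the anchor helper above supports, seen through the public Document API rather than the internal createNodeAnchors function; the shared object, key names, and the 'base' prefix are illustrative assumptions.

import { Document } from 'yaml'

// The same object referenced twice is aliased rather than duplicated,
// because aliasDuplicateObjects defaults to true; the anchor name is
// generated from anchorPrefix once all nodes have been created.
const shared = { retries: 3 }
const doc = new Document(
  { first: shared, second: shared },
  { anchorPrefix: 'base' }
)
console.log(String(doc))
// Expected shape (anchor names are generated as base1, base2, ...):
// first: &base1
//   retries: 3
// second: *base1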
119
createNode<T = unknown>(value: T, options?: CreateNodeOptions): NodeType<T>
type CreateNodeOptions = { /** * During node construction, use anchors and aliases to keep strictly equal * non-null objects as equivalent in YAML. * * Default: `true` */ aliasDuplicateObjects?: boolean /** * Default prefix for anchors. * * Default: `'a'`, resulting in anchors `a1`, `a2`, etc. */ anchorPrefix?: string /** Force the top-level collection node to use flow style. */ flow?: boolean /** * Keep `undefined` object values when creating mappings, rather than * discarding them. * * Default: `false` */ keepUndefined?: boolean | null onTagObj?: (tagObj: ScalarTag | CollectionTag) => void /** * Specify the top-level collection type, e.g. `"!!omap"`. Note that this * requires the corresponding tag to be available in this document's schema. */ tag?: string }
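A short usage sketch of the single-argument overload above; the sample values are arbitrary, and `flow: true` is one of the CreateNodeOptions listed here.

import { Document, isCollection } from 'yaml'

const doc = new Document({ servers: ['a', 'b'] })

// Wrap a plain value as a node tree using the document's schema;
// flow: true marks the resulting top-level collection as flow style.
const extra = doc.createNode({ region: 'eu-west-1', zones: 2 }, { flow: true })
console.log(isCollection(extra) && extra.flow) // true

doc.set('options', extra)
console.log(String(doc))
// servers:
//   - a
//   - b
// options: { region: eu-west-1, zones: 2 }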
120
createNode<T = unknown>( value: T, replacer: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions ): NodeType<T>
type CreateNodeOptions = { /** * During node construction, use anchors and aliases to keep strictly equal * non-null objects as equivalent in YAML. * * Default: `true` */ aliasDuplicateObjects?: boolean /** * Default prefix for anchors. * * Default: `'a'`, resulting in anchors `a1`, `a2`, etc. */ anchorPrefix?: string /** Force the top-level collection node to use flow style. */ flow?: boolean /** * Keep `undefined` object values when creating mappings, rather than * discarding them. * * Default: `false` */ keepUndefined?: boolean | null onTagObj?: (tagObj: ScalarTag | CollectionTag) => void /** * Specify the top-level collection type, e.g. `"!!omap"`. Note that this * requires the corresponding tag to be available in this document's schema. */ tag?: string }
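A sketch of the replacer overload above, assuming the same semantics as JSON.stringify replacers; the field names are illustrative.

import { Document } from 'yaml'

const doc = new Document({})
const input = { user: 'ada', password: 'hunter2', attempts: 3 }

// An array replacer keeps only the listed keys, mirroring JSON.stringify
// replacer semantics; a (key, value) => value function is also accepted.
doc.set('login', doc.createNode(input, ['user', 'attempts']))
console.log(String(doc))
// login:
//   user: ada
//   attempts: 3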
121
toString(options: ToStringOptions = {}): string {
  if (this.errors.length > 0)
    throw new Error('Document with errors cannot be stringified')
  if (
    'indent' in options &&
    (!Number.isInteger(options.indent) || Number(options.indent) <= 0)
  ) {
    const s = JSON.stringify(options.indent)
    throw new Error(`"indent" option must be a positive integer, not ${s}`)
  }
  return stringifyDocument(this, options)
}
type ToStringOptions = { /** * Use block quote styles for scalar values where applicable. * Set to `false` to disable block quotes completely. * * Default: `true` */ blockQuote?: boolean | 'folded' | 'literal' /** * Enforce `'block'` or `'flow'` style on maps and sequences. * Empty collections will always be stringified as `{}` or `[]`. * * Default: `'any'`, allowing each node to set its style separately * with its `flow: boolean` (default `false`) property. */ collectionStyle?: 'any' | 'block' | 'flow' /** * Comment stringifier. * Output should be valid for the current schema. * * By default, empty comment lines are left empty, * lines consisting of a single space are replaced by `#`, * and all other lines are prefixed with a `#`. */ commentString?: (comment: string) => string /** * The default type of string literal used to stringify implicit key values. * Output may use other types if required to fully represent the value. * * If `null`, the value of `defaultStringType` is used. * * Default: `null` */ defaultKeyType?: Scalar.Type | null /** * The default type of string literal used to stringify values in general. * Output may use other types if required to fully represent the value. * * Default: `'PLAIN'` */ defaultStringType?: Scalar.Type /** * Include directives in the output. * * - If `true`, at least the document-start marker `---` is always included. * This does not force the `%YAML` directive to be included. To do that, * set `doc.directives.yaml.explicit = true`. * - If `false`, no directives or marker is ever included. If using the `%TAG` * directive, you are expected to include it manually in the stream before * its use. * - If `null`, directives and marker may be included if required. * * Default: `null` */ directives?: boolean | null /** * Restrict double-quoted strings to use JSON-compatible syntax. * * Default: `false` */ doubleQuotedAsJSON?: boolean /** * Minimum length for double-quoted strings to use multiple lines to * represent the value. Ignored if `doubleQuotedAsJSON` is set. * * Default: `40` */ doubleQuotedMinMultiLineLength?: number /** * String representation for `false`. * With the core schema, use `'false'`, `'False'`, or `'FALSE'`. * * Default: `'false'` */ falseStr?: string /** * When true, a single space of padding will be added inside the delimiters * of non-empty single-line flow collections. * * Default: `true` */ flowCollectionPadding?: boolean /** * The number of spaces to use when indenting code. * * Default: `2` */ indent?: number /** * Whether block sequences should be indented. * * Default: `true` */ indentSeq?: boolean /** * Maximum line width (set to `0` to disable folding). * * This is a soft limit, as only double-quoted semantics allow for inserting * a line break in the middle of a word, as well as being influenced by the * `minContentWidth` option. * * Default: `80` */ lineWidth?: number /** * Minimum line width for highly-indented content (set to `0` to disable). * * Default: `20` */ minContentWidth?: number /** * String representation for `null`. * With the core schema, use `'null'`, `'Null'`, `'NULL'`, `'~'`, or an empty * string `''`. * * Default: `'null'` */ nullStr?: string /** * Require keys to be scalars and to use implicit rather than explicit notation. * * Default: `false` */ simpleKeys?: boolean /** * Use 'single quote' rather than "double quote" where applicable. * Set to `false` to disable single quotes completely. * * Default: `null` */ singleQuote?: boolean | null /** * String representation for `true`. 
* With the core schema, use `'true'`, `'True'`, or `'TRUE'`. * * Default: `'true'` */ trueStr?: string /** * The anchor used by an alias must be defined before the alias node. As it's * possible for the document to be modified manually, the order may be * verified during stringification. * * Default: `true` */ verifyAliasOrder?: boolean }
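A hedged sketch of how a couple of these options combine on stringification; the sample data is arbitrary. Note that a non-integer or non-positive `indent` makes toString() throw, as shown above.

import { Document } from 'yaml'

const doc = new Document({ id: 7, tags: ['alpha', 'beta'] })

// Wider indentation (indentSeq defaults to true, so items are indented):
console.log(doc.toString({ indent: 4 }))
// id: 7
// tags:
//     - alpha
//     - beta

// Force flow style on all collections:
console.log(doc.toString({ collectionStyle: 'flow' }))
// { id: 7, tags: [ alpha, beta ] }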
122
toString(doc?: Document) {
  const lines = this.yaml.explicit
    ? [`%YAML ${this.yaml.version || '1.2'}`]
    : []
  const tagEntries = Object.entries(this.tags)
  let tagNames: string[]
  if (doc && tagEntries.length > 0 && isNode(doc.contents)) {
    const tags: Record<string, boolean> = {}
    visit(doc.contents, (_key, node) => {
      if (isNode(node) && node.tag) tags[node.tag] = true
    })
    tagNames = Object.keys(tags)
  } else tagNames = []
  for (const [handle, prefix] of tagEntries) {
    if (handle === '!!' && prefix === 'tag:yaml.org,2002:') continue
    if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
      lines.push(`%TAG ${handle} ${prefix}`)
  }
  return lines.join('\n')
}
interface Document {
  type: 'document'
  offset: number
  start: SourceToken[]
  value?: Token
  end?: SourceToken[]
}
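A sketch of how the %TAG logic above plays out; the '!e!' handle, the tag prefix, and the node values are made-up assumptions. A %TAG line is only emitted when some node's tag actually starts with the registered prefix, per the toString() code above.

import { Document } from 'yaml'

const doc = new Document({})
doc.directives!.yaml.explicit = true
doc.directives!.tags['!e!'] = 'tag:example.com,2024:'

// Tag one node under the registered prefix so the directive is used.
const tagged = doc.createNode('example')
tagged.tag = 'tag:example.com,2024:thing'
doc.set('kind', tagged)

console.log(String(doc))
// %YAML 1.2
// %TAG !e! tag:example.com,2024:
// ---
// kind: !e!thing example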
123
toString(doc?: Document) { const lines = this.yaml.explicit ? [`%YAML ${this.yaml.version || '1.2'}`] : [] const tagEntries = Object.entries(this.tags) let tagNames: string[] if (doc && tagEntries.length > 0 && isNode(doc.contents)) { const tags: Record<string, boolean> = {} visit(doc.contents, (_key, node) => { if (isNode(node) && node.tag) tags[node.tag] = true }) tagNames = Object.keys(tags) } else tagNames = [] for (const [handle, prefix] of tagEntries) { if (handle === '!!' && prefix === 'tag:yaml.org,2002:') continue if (!doc || tagNames.some(tn => tn.startsWith(prefix))) lines.push(`%TAG ${handle} ${prefix}`) } return lines.join('\n') }
class Document<T extends Node = Node> { declare readonly [NODE_TYPE]: symbol /** A comment before this Document */ commentBefore: string | null = null /** A comment immediately after this Document */ comment: string | null = null /** The document contents. */ contents: T | null directives?: Directives /** Errors encountered during parsing. */ errors: YAMLError[] = [] options: Required< Omit< ParseOptions & DocumentOptions, '_directives' | 'lineCounter' | 'version' > > /** * The `[start, value-end, node-end]` character offsets for the part of the * source parsed into this document (undefined if not parsed). The `value-end` * and `node-end` positions are themselves not included in their respective * ranges. */ declare range?: Range // TS can't figure out that setSchema() will set this, or throw /** The schema used with the document. Use `setSchema()` to change. */ declare schema: Schema /** Warnings encountered during parsing. */ warnings: YAMLWarning[] = [] /** * @param value - The initial value for the document, which will be wrapped * in a Node container. */ constructor( value?: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions ) constructor( value: any, replacer: null | Replacer, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions ) constructor( value?: unknown, replacer?: | Replacer | (DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions) | null, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions ) { Object.defineProperty(this, NODE_TYPE, { value: DOC }) let _replacer: Replacer | null = null if (typeof replacer === 'function' || Array.isArray(replacer)) { _replacer = replacer } else if (options === undefined && replacer) { options = replacer replacer = undefined } const opt = Object.assign( { intAsBigInt: false, keepSourceTokens: false, logLevel: 'warn', prettyErrors: true, strict: true, uniqueKeys: true, version: '1.2' }, options ) this.options = opt let { version } = opt if (options?._directives) { this.directives = options._directives.atDocument() if (this.directives.yaml.explicit) version = this.directives.yaml.version } else this.directives = new Directives({ version }) this.setSchema(version, options) if (value === undefined) this.contents = null else { this.contents = this.createNode(value, _replacer, options) as unknown as T } } /** * Create a deep copy of this Document and its contents. * * Custom Node values that inherit from `Object` still refer to their original instances. */ clone(): Document<T> { const copy: Document<T> = Object.create(Document.prototype, { [NODE_TYPE]: { value: DOC } }) copy.commentBefore = this.commentBefore copy.comment = this.comment copy.errors = this.errors.slice() copy.warnings = this.warnings.slice() copy.options = Object.assign({}, this.options) if (this.directives) copy.directives = this.directives.clone() copy.schema = this.schema.clone() copy.contents = isNode(this.contents) ? (this.contents.clone(copy.schema) as unknown as T) : this.contents if (this.range) copy.range = this.range.slice() as Document['range'] return copy } /** Adds a value to the document. */ add(value: any) { if (assertCollection(this.contents)) this.contents.add(value) } /** Adds a value to the document. */ addIn(path: Iterable<unknown>, value: unknown) { if (assertCollection(this.contents)) this.contents.addIn(path, value) } /** * Create a new `Alias` node, ensuring that the target `node` has the required anchor. * * If `node` already has an anchor, `name` is ignored. 
* Otherwise, the `node.anchor` value will be set to `name`, * or if an anchor with that name is already present in the document, * `name` will be used as a prefix for a new unique anchor. * If `name` is undefined, the generated anchor will use 'a' as a prefix. */ createAlias(node: Scalar | YAMLMap | YAMLSeq, name?: string): Alias { if (!node.anchor) { const prev = anchorNames(this) node.anchor = // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing !name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name } return new Alias(node.anchor) } /** * Convert any value into a `Node` using the current schema, recursively * turning objects into collections. */ createNode<T = unknown>(value: T, options?: CreateNodeOptions): NodeType<T> createNode<T = unknown>( value: T, replacer: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions ): NodeType<T> createNode( value: unknown, replacer?: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions ): Node { let _replacer: Replacer | undefined = undefined if (typeof replacer === 'function') { value = replacer.call({ '': value }, '', value) _replacer = replacer } else if (Array.isArray(replacer)) { const keyToStr = (v: unknown) => typeof v === 'number' || v instanceof String || v instanceof Number const asStr = replacer.filter(keyToStr).map(String) if (asStr.length > 0) replacer = replacer.concat(asStr) _replacer = replacer } else if (options === undefined && replacer) { options = replacer replacer = undefined } const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {} const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors( this, // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing anchorPrefix || 'a' ) const ctx: CreateNodeContext = { aliasDuplicateObjects: aliasDuplicateObjects ?? true, keepUndefined: keepUndefined ?? false, onAnchor, onTagObj, replacer: _replacer, schema: this.schema, sourceObjects } const node = createNode(value, tag, ctx) if (flow && isCollection(node)) node.flow = true setAnchors() return node } /** * Convert a key and a value into a `Pair` using the current schema, * recursively wrapping all values as `Scalar` or `Collection` nodes. */ createPair<K extends Node = Node, V extends Node = Node>( key: unknown, value: unknown, options: CreateNodeOptions = {} ) { const k = this.createNode(key, null, options) as K const v = this.createNode(value, null, options) as V return new Pair(k, v) } /** * Removes a value from the document. * @returns `true` if the item was found and removed. */ delete(key: unknown): boolean { return assertCollection(this.contents) ? this.contents.delete(key) : false } /** * Removes a value from the document. * @returns `true` if the item was found and removed. */ deleteIn(path: Iterable<unknown> | null): boolean { if (isEmptyPath(path)) { if (this.contents == null) return false this.contents = null return true } return assertCollection(this.contents) ? this.contents.deleteIn(path) : false } /** * Returns item at `key`, or `undefined` if not found. By default unwraps * scalar values from their surrounding node; to disable set `keepScalar` to * `true` (collections are always returned intact). */ get(key: unknown, keepScalar?: boolean): unknown { return isCollection(this.contents) ? this.contents.get(key, keepScalar) : undefined } /** * Returns item at `path`, or `undefined` if not found. 
By default unwraps * scalar values from their surrounding node; to disable set `keepScalar` to * `true` (collections are always returned intact). */ getIn(path: Iterable<unknown> | null, keepScalar?: boolean): unknown { if (isEmptyPath(path)) return !keepScalar && isScalar(this.contents) ? this.contents.value : this.contents return isCollection(this.contents) ? this.contents.getIn(path, keepScalar) : undefined } /** * Checks if the document includes a value with the key `key`. */ has(key: unknown): boolean { return isCollection(this.contents) ? this.contents.has(key) : false } /** * Checks if the document includes a value at `path`. */ hasIn(path: Iterable<unknown> | null): boolean { if (isEmptyPath(path)) return this.contents !== undefined return isCollection(this.contents) ? this.contents.hasIn(path) : false } /** * Sets a value in this document. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. */ set(key: any, value: unknown): void { if (this.contents == null) { this.contents = collectionFromPath( this.schema, [key], value ) as unknown as T } else if (assertCollection(this.contents)) { this.contents.set(key, value) } } /** * Sets a value in this document. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. */ setIn(path: Iterable<unknown> | null, value: unknown): void { if (isEmptyPath(path)) this.contents = value as T else if (this.contents == null) { this.contents = collectionFromPath( this.schema, Array.from(path), value ) as unknown as T } else if (assertCollection(this.contents)) { this.contents.setIn(path, value) } } /** * Change the YAML version and schema used by the document. * A `null` version disables support for directives, explicit tags, anchors, and aliases. * It also requires the `schema` option to be given as a `Schema` instance value. * * Overrides all previously set schema options. */ setSchema( version: '1.1' | '1.2' | 'next' | null, options: SchemaOptions = {} ) { if (typeof version === 'number') version = String(version) as '1.1' | '1.2' let opt: (SchemaOptions & { schema: string }) | null switch (version) { case '1.1': if (this.directives) this.directives.yaml.version = '1.1' else this.directives = new Directives({ version: '1.1' }) opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' } break case '1.2': case 'next': if (this.directives) this.directives.yaml.version = version else this.directives = new Directives({ version }) opt = { merge: false, resolveKnownTags: true, schema: 'core' } break case null: if (this.directives) delete this.directives opt = null break default: { const sv = JSON.stringify(version) throw new Error( `Expected '1.1', '1.2' or null as first argument, but found: ${sv}` ) } } // Not using `instanceof Schema` to allow for duck typing if (options.schema instanceof Object) this.schema = options.schema else if (opt) this.schema = new Schema(Object.assign(opt, options)) else throw new Error( `With a null YAML version, the { schema: Schema } option is required` ) } /** A plain JavaScript representation of the document `contents`. */ toJS(opt?: ToJSOptions & { [ignored: string]: unknown }): any // json & jsonArg are only used from toJSON() toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver }: ToJSOptions & { json?: boolean; jsonArg?: string | null } = {}): any { const ctx: ToJSContext = { anchors: new Map(), doc: this, keep: !json, mapAsMap: mapAsMap === true, mapKeyWarned: false, maxAliasCount: typeof maxAliasCount === 'number' ? 
maxAliasCount : 100, stringify } const res = toJS(this.contents, jsonArg ?? '', ctx) if (typeof onAnchor === 'function') for (const { count, res } of ctx.anchors.values()) onAnchor(res, count) return typeof reviver === 'function' ? applyReviver(reviver, { '': res }, '', res) : res } /** * A JSON representation of the document `contents`. * * @param jsonArg Used by `JSON.stringify` to indicate the array index or * property name. */ toJSON(jsonArg?: string | null, onAnchor?: ToJSOptions['onAnchor']): any { return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }) } /** A YAML representation of the document. */ toString(options: ToStringOptions = {}): string { if (this.errors.length > 0) throw new Error('Document with errors cannot be stringified') if ( 'indent' in options && (!Number.isInteger(options.indent) || Number(options.indent) <= 0) ) { const s = JSON.stringify(options.indent) throw new Error(`"indent" option must be a positive integer, not ${s}`) } return stringifyDocument(this, options) } }
124
function stringifyNumber({ format, minFractionDigits, tag, value }: Scalar) {
  if (typeof value === 'bigint') return String(value)
  const num = typeof value === 'number' ? value : Number(value)
  if (!isFinite(num)) return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf'
  let n = JSON.stringify(value)
  if (
    !format &&
    minFractionDigits &&
    (!tag || tag === 'tag:yaml.org,2002:float') &&
    /^\d/.test(n)
  ) {
    let i = n.indexOf('.')
    if (i < 0) {
      i = n.length
      n += '.'
    }
    let d = minFractionDigits - (n.length - i - 1)
    while (d-- > 0) n += '0'
  }
  return n
}
class Scalar<T = unknown> extends NodeBase {
  static readonly BLOCK_FOLDED = 'BLOCK_FOLDED'
  static readonly BLOCK_LITERAL = 'BLOCK_LITERAL'
  static readonly PLAIN = 'PLAIN'
  static readonly QUOTE_DOUBLE = 'QUOTE_DOUBLE'
  static readonly QUOTE_SINGLE = 'QUOTE_SINGLE'

  value: T

  /** An optional anchor on this node. Used by alias nodes. */
  declare anchor?: string

  /**
   * By default (undefined), numbers use decimal notation.
   * The YAML 1.2 core schema only supports 'HEX' and 'OCT'.
   * The YAML 1.1 schema also supports 'BIN' and 'TIME'
   */
  declare format?: string

  /** If `value` is a number, use this value when stringifying this node. */
  declare minFractionDigits?: number

  /** Set during parsing to the source string value */
  declare source?: string

  /** The scalar style used for the node's string representation */
  declare type?: Scalar.Type

  constructor(value: T) {
    super(SCALAR)
    this.value = value
  }

  toJSON(arg?: any, ctx?: ToJSContext): any {
    return ctx?.keep ? this.value : toJS(this.value, arg, ctx)
  }

  toString() {
    return String(this.value)
  }
}
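A quick sketch of the fraction-padding and non-finite cases in stringifyNumber above; it assumes the function is in scope, and the expected outputs follow directly from the code.

import { Scalar } from 'yaml'

const n = new Scalar(3)
n.minFractionDigits = 2
console.log(stringifyNumber(n)) // '3.00' (padded to two fraction digits)

console.log(stringifyNumber(new Scalar(NaN)))       // '.nan'
console.log(stringifyNumber(new Scalar(-Infinity))) // '-.inf'
console.log(stringifyNumber(new Scalar(10n)))       // '10' (bigint passthrough)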
125
function addCommentBefore(
  { indent, options: { commentString } }: StringifyContext,
  lines: string[],
  comment: string | null | undefined,
  chompKeep: boolean
) {
  if (comment && chompKeep) comment = comment.replace(/^\n+/, '')
  if (comment) {
    const ic = indentComment(commentString(comment), indent)
    lines.push(ic.trimStart()) // Avoid double indent on first line
  }
}
type StringifyContext = { actualString?: boolean allNullValues?: boolean anchors: Set<string> doc: Document forceBlockIndent?: boolean implicitKey?: boolean indent: string indentStep: string indentAtStart?: number inFlow: boolean | null inStringifyKey?: boolean flowCollectionPadding: string options: Readonly< Required<Omit<ToStringOptions, 'collectionStyle' | 'indent'>> > resolvedAliases?: Set<Alias> }
126
function createStringifyContext( doc: Document, options: ToStringOptions ): StringifyContext { const opt = Object.assign( { blockQuote: true, commentString: stringifyComment, defaultKeyType: null, defaultStringType: 'PLAIN', directives: null, doubleQuotedAsJSON: false, doubleQuotedMinMultiLineLength: 40, falseStr: 'false', flowCollectionPadding: true, indentSeq: true, lineWidth: 80, minContentWidth: 20, nullStr: 'null', simpleKeys: false, singleQuote: null, trueStr: 'true', verifyAliasOrder: true }, doc.schema.toStringOptions, options ) let inFlow: boolean | null switch (opt.collectionStyle) { case 'block': inFlow = false break case 'flow': inFlow = true break default: inFlow = null } return { anchors: new Set(), doc, flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', indent: '', indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', inFlow, options: opt } }
interface Document {
  type: 'document'
  offset: number
  start: SourceToken[]
  value?: Token
  end?: SourceToken[]
}
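A small sketch of what the defaults above resolve to; it assumes createStringifyContext is in scope, since it is an internal helper rather than part of the public API.

import { Document } from 'yaml'

const doc = new Document({ a: 1 })

const ctx = createStringifyContext(doc, { indent: 4, collectionStyle: 'flow' })
console.log(ctx.indentStep)        // '    ' (four spaces, from indent: 4)
console.log(ctx.inFlow)            // true  (from collectionStyle: 'flow')
console.log(ctx.options.lineWidth) // 80    (default retained)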
127
function createStringifyContext( doc: Document, options: ToStringOptions ): StringifyContext { const opt = Object.assign( { blockQuote: true, commentString: stringifyComment, defaultKeyType: null, defaultStringType: 'PLAIN', directives: null, doubleQuotedAsJSON: false, doubleQuotedMinMultiLineLength: 40, falseStr: 'false', flowCollectionPadding: true, indentSeq: true, lineWidth: 80, minContentWidth: 20, nullStr: 'null', simpleKeys: false, singleQuote: null, trueStr: 'true', verifyAliasOrder: true }, doc.schema.toStringOptions, options ) let inFlow: boolean | null switch (opt.collectionStyle) { case 'block': inFlow = false break case 'flow': inFlow = true break default: inFlow = null } return { anchors: new Set(), doc, flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', indent: '', indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', inFlow, options: opt } }
class Document<T extends Node = Node> { declare readonly [NODE_TYPE]: symbol /** A comment before this Document */ commentBefore: string | null = null /** A comment immediately after this Document */ comment: string | null = null /** The document contents. */ contents: T | null directives?: Directives /** Errors encountered during parsing. */ errors: YAMLError[] = [] options: Required< Omit< ParseOptions & DocumentOptions, '_directives' | 'lineCounter' | 'version' > > /** * The `[start, value-end, node-end]` character offsets for the part of the * source parsed into this document (undefined if not parsed). The `value-end` * and `node-end` positions are themselves not included in their respective * ranges. */ declare range?: Range // TS can't figure out that setSchema() will set this, or throw /** The schema used with the document. Use `setSchema()` to change. */ declare schema: Schema /** Warnings encountered during parsing. */ warnings: YAMLWarning[] = [] /** * @param value - The initial value for the document, which will be wrapped * in a Node container. */ constructor( value?: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions ) constructor( value: any, replacer: null | Replacer, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions ) constructor( value?: unknown, replacer?: | Replacer | (DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions) | null, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions ) { Object.defineProperty(this, NODE_TYPE, { value: DOC }) let _replacer: Replacer | null = null if (typeof replacer === 'function' || Array.isArray(replacer)) { _replacer = replacer } else if (options === undefined && replacer) { options = replacer replacer = undefined } const opt = Object.assign( { intAsBigInt: false, keepSourceTokens: false, logLevel: 'warn', prettyErrors: true, strict: true, uniqueKeys: true, version: '1.2' }, options ) this.options = opt let { version } = opt if (options?._directives) { this.directives = options._directives.atDocument() if (this.directives.yaml.explicit) version = this.directives.yaml.version } else this.directives = new Directives({ version }) this.setSchema(version, options) if (value === undefined) this.contents = null else { this.contents = this.createNode(value, _replacer, options) as unknown as T } } /** * Create a deep copy of this Document and its contents. * * Custom Node values that inherit from `Object` still refer to their original instances. */ clone(): Document<T> { const copy: Document<T> = Object.create(Document.prototype, { [NODE_TYPE]: { value: DOC } }) copy.commentBefore = this.commentBefore copy.comment = this.comment copy.errors = this.errors.slice() copy.warnings = this.warnings.slice() copy.options = Object.assign({}, this.options) if (this.directives) copy.directives = this.directives.clone() copy.schema = this.schema.clone() copy.contents = isNode(this.contents) ? (this.contents.clone(copy.schema) as unknown as T) : this.contents if (this.range) copy.range = this.range.slice() as Document['range'] return copy } /** Adds a value to the document. */ add(value: any) { if (assertCollection(this.contents)) this.contents.add(value) } /** Adds a value to the document. */ addIn(path: Iterable<unknown>, value: unknown) { if (assertCollection(this.contents)) this.contents.addIn(path, value) } /** * Create a new `Alias` node, ensuring that the target `node` has the required anchor. * * If `node` already has an anchor, `name` is ignored. 
* Otherwise, the `node.anchor` value will be set to `name`, * or if an anchor with that name is already present in the document, * `name` will be used as a prefix for a new unique anchor. * If `name` is undefined, the generated anchor will use 'a' as a prefix. */ createAlias(node: Scalar | YAMLMap | YAMLSeq, name?: string): Alias { if (!node.anchor) { const prev = anchorNames(this) node.anchor = // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing !name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name } return new Alias(node.anchor) } /** * Convert any value into a `Node` using the current schema, recursively * turning objects into collections. */ createNode<T = unknown>(value: T, options?: CreateNodeOptions): NodeType<T> createNode<T = unknown>( value: T, replacer: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions ): NodeType<T> createNode( value: unknown, replacer?: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions ): Node { let _replacer: Replacer | undefined = undefined if (typeof replacer === 'function') { value = replacer.call({ '': value }, '', value) _replacer = replacer } else if (Array.isArray(replacer)) { const keyToStr = (v: unknown) => typeof v === 'number' || v instanceof String || v instanceof Number const asStr = replacer.filter(keyToStr).map(String) if (asStr.length > 0) replacer = replacer.concat(asStr) _replacer = replacer } else if (options === undefined && replacer) { options = replacer replacer = undefined } const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {} const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors( this, // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing anchorPrefix || 'a' ) const ctx: CreateNodeContext = { aliasDuplicateObjects: aliasDuplicateObjects ?? true, keepUndefined: keepUndefined ?? false, onAnchor, onTagObj, replacer: _replacer, schema: this.schema, sourceObjects } const node = createNode(value, tag, ctx) if (flow && isCollection(node)) node.flow = true setAnchors() return node } /** * Convert a key and a value into a `Pair` using the current schema, * recursively wrapping all values as `Scalar` or `Collection` nodes. */ createPair<K extends Node = Node, V extends Node = Node>( key: unknown, value: unknown, options: CreateNodeOptions = {} ) { const k = this.createNode(key, null, options) as K const v = this.createNode(value, null, options) as V return new Pair(k, v) } /** * Removes a value from the document. * @returns `true` if the item was found and removed. */ delete(key: unknown): boolean { return assertCollection(this.contents) ? this.contents.delete(key) : false } /** * Removes a value from the document. * @returns `true` if the item was found and removed. */ deleteIn(path: Iterable<unknown> | null): boolean { if (isEmptyPath(path)) { if (this.contents == null) return false this.contents = null return true } return assertCollection(this.contents) ? this.contents.deleteIn(path) : false } /** * Returns item at `key`, or `undefined` if not found. By default unwraps * scalar values from their surrounding node; to disable set `keepScalar` to * `true` (collections are always returned intact). */ get(key: unknown, keepScalar?: boolean): unknown { return isCollection(this.contents) ? this.contents.get(key, keepScalar) : undefined } /** * Returns item at `path`, or `undefined` if not found. 
By default unwraps * scalar values from their surrounding node; to disable set `keepScalar` to * `true` (collections are always returned intact). */ getIn(path: Iterable<unknown> | null, keepScalar?: boolean): unknown { if (isEmptyPath(path)) return !keepScalar && isScalar(this.contents) ? this.contents.value : this.contents return isCollection(this.contents) ? this.contents.getIn(path, keepScalar) : undefined } /** * Checks if the document includes a value with the key `key`. */ has(key: unknown): boolean { return isCollection(this.contents) ? this.contents.has(key) : false } /** * Checks if the document includes a value at `path`. */ hasIn(path: Iterable<unknown> | null): boolean { if (isEmptyPath(path)) return this.contents !== undefined return isCollection(this.contents) ? this.contents.hasIn(path) : false } /** * Sets a value in this document. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. */ set(key: any, value: unknown): void { if (this.contents == null) { this.contents = collectionFromPath( this.schema, [key], value ) as unknown as T } else if (assertCollection(this.contents)) { this.contents.set(key, value) } } /** * Sets a value in this document. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. */ setIn(path: Iterable<unknown> | null, value: unknown): void { if (isEmptyPath(path)) this.contents = value as T else if (this.contents == null) { this.contents = collectionFromPath( this.schema, Array.from(path), value ) as unknown as T } else if (assertCollection(this.contents)) { this.contents.setIn(path, value) } } /** * Change the YAML version and schema used by the document. * A `null` version disables support for directives, explicit tags, anchors, and aliases. * It also requires the `schema` option to be given as a `Schema` instance value. * * Overrides all previously set schema options. */ setSchema( version: '1.1' | '1.2' | 'next' | null, options: SchemaOptions = {} ) { if (typeof version === 'number') version = String(version) as '1.1' | '1.2' let opt: (SchemaOptions & { schema: string }) | null switch (version) { case '1.1': if (this.directives) this.directives.yaml.version = '1.1' else this.directives = new Directives({ version: '1.1' }) opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' } break case '1.2': case 'next': if (this.directives) this.directives.yaml.version = version else this.directives = new Directives({ version }) opt = { merge: false, resolveKnownTags: true, schema: 'core' } break case null: if (this.directives) delete this.directives opt = null break default: { const sv = JSON.stringify(version) throw new Error( `Expected '1.1', '1.2' or null as first argument, but found: ${sv}` ) } } // Not using `instanceof Schema` to allow for duck typing if (options.schema instanceof Object) this.schema = options.schema else if (opt) this.schema = new Schema(Object.assign(opt, options)) else throw new Error( `With a null YAML version, the { schema: Schema } option is required` ) } /** A plain JavaScript representation of the document `contents`. */ toJS(opt?: ToJSOptions & { [ignored: string]: unknown }): any // json & jsonArg are only used from toJSON() toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver }: ToJSOptions & { json?: boolean; jsonArg?: string | null } = {}): any { const ctx: ToJSContext = { anchors: new Map(), doc: this, keep: !json, mapAsMap: mapAsMap === true, mapKeyWarned: false, maxAliasCount: typeof maxAliasCount === 'number' ? 
maxAliasCount : 100, stringify } const res = toJS(this.contents, jsonArg ?? '', ctx) if (typeof onAnchor === 'function') for (const { count, res } of ctx.anchors.values()) onAnchor(res, count) return typeof reviver === 'function' ? applyReviver(reviver, { '': res }, '', res) : res } /** * A JSON representation of the document `contents`. * * @param jsonArg Used by `JSON.stringify` to indicate the array index or * property name. */ toJSON(jsonArg?: string | null, onAnchor?: ToJSOptions['onAnchor']): any { return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }) } /** A YAML representation of the document. */ toString(options: ToStringOptions = {}): string { if (this.errors.length > 0) throw new Error('Document with errors cannot be stringified') if ( 'indent' in options && (!Number.isInteger(options.indent) || Number(options.indent) <= 0) ) { const s = JSON.stringify(options.indent) throw new Error(`"indent" option must be a positive integer, not ${s}`) } return stringifyDocument(this, options) } }
128
function createStringifyContext( doc: Document, options: ToStringOptions ): StringifyContext { const opt = Object.assign( { blockQuote: true, commentString: stringifyComment, defaultKeyType: null, defaultStringType: 'PLAIN', directives: null, doubleQuotedAsJSON: false, doubleQuotedMinMultiLineLength: 40, falseStr: 'false', flowCollectionPadding: true, indentSeq: true, lineWidth: 80, minContentWidth: 20, nullStr: 'null', simpleKeys: false, singleQuote: null, trueStr: 'true', verifyAliasOrder: true }, doc.schema.toStringOptions, options ) let inFlow: boolean | null switch (opt.collectionStyle) { case 'block': inFlow = false break case 'flow': inFlow = true break default: inFlow = null } return { anchors: new Set(), doc, flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', indent: '', indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', inFlow, options: opt } }
type ToStringOptions = { /** * Use block quote styles for scalar values where applicable. * Set to `false` to disable block quotes completely. * * Default: `true` */ blockQuote?: boolean | 'folded' | 'literal' /** * Enforce `'block'` or `'flow'` style on maps and sequences. * Empty collections will always be stringified as `{}` or `[]`. * * Default: `'any'`, allowing each node to set its style separately * with its `flow: boolean` (default `false`) property. */ collectionStyle?: 'any' | 'block' | 'flow' /** * Comment stringifier. * Output should be valid for the current schema. * * By default, empty comment lines are left empty, * lines consisting of a single space are replaced by `#`, * and all other lines are prefixed with a `#`. */ commentString?: (comment: string) => string /** * The default type of string literal used to stringify implicit key values. * Output may use other types if required to fully represent the value. * * If `null`, the value of `defaultStringType` is used. * * Default: `null` */ defaultKeyType?: Scalar.Type | null /** * The default type of string literal used to stringify values in general. * Output may use other types if required to fully represent the value. * * Default: `'PLAIN'` */ defaultStringType?: Scalar.Type /** * Include directives in the output. * * - If `true`, at least the document-start marker `---` is always included. * This does not force the `%YAML` directive to be included. To do that, * set `doc.directives.yaml.explicit = true`. * - If `false`, no directives or marker is ever included. If using the `%TAG` * directive, you are expected to include it manually in the stream before * its use. * - If `null`, directives and marker may be included if required. * * Default: `null` */ directives?: boolean | null /** * Restrict double-quoted strings to use JSON-compatible syntax. * * Default: `false` */ doubleQuotedAsJSON?: boolean /** * Minimum length for double-quoted strings to use multiple lines to * represent the value. Ignored if `doubleQuotedAsJSON` is set. * * Default: `40` */ doubleQuotedMinMultiLineLength?: number /** * String representation for `false`. * With the core schema, use `'false'`, `'False'`, or `'FALSE'`. * * Default: `'false'` */ falseStr?: string /** * When true, a single space of padding will be added inside the delimiters * of non-empty single-line flow collections. * * Default: `true` */ flowCollectionPadding?: boolean /** * The number of spaces to use when indenting code. * * Default: `2` */ indent?: number /** * Whether block sequences should be indented. * * Default: `true` */ indentSeq?: boolean /** * Maximum line width (set to `0` to disable folding). * * This is a soft limit, as only double-quoted semantics allow for inserting * a line break in the middle of a word, as well as being influenced by the * `minContentWidth` option. * * Default: `80` */ lineWidth?: number /** * Minimum line width for highly-indented content (set to `0` to disable). * * Default: `20` */ minContentWidth?: number /** * String representation for `null`. * With the core schema, use `'null'`, `'Null'`, `'NULL'`, `'~'`, or an empty * string `''`. * * Default: `'null'` */ nullStr?: string /** * Require keys to be scalars and to use implicit rather than explicit notation. * * Default: `false` */ simpleKeys?: boolean /** * Use 'single quote' rather than "double quote" where applicable. * Set to `false` to disable single quotes completely. * * Default: `null` */ singleQuote?: boolean | null /** * String representation for `true`. 
* With the core schema, use `'true'`, `'True'`, or `'TRUE'`. * * Default: `'true'` */ trueStr?: string /** * The anchor used by an alias must be defined before the alias node. As it's * possible for the document to be modified manually, the order may be * verified during stringification. * * Default: `true` */ verifyAliasOrder?: boolean }
129
function stringifyProps(
  node: Node,
  tagObj: ScalarTag | CollectionTag,
  { anchors, doc }: StringifyContext
) {
  if (!doc.directives) return ''
  const props = []
  const anchor = (isScalar(node) || isCollection(node)) && node.anchor
  if (anchor && anchorIsValid(anchor)) {
    anchors.add(anchor)
    props.push(`&${anchor}`)
  }
  const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag
  if (tag) props.push(doc.directives.tagString(tag))
  return props.join(' ')
}
type StringifyContext = { actualString?: boolean allNullValues?: boolean anchors: Set<string> doc: Document forceBlockIndent?: boolean implicitKey?: boolean indent: string indentStep: string indentAtStart?: number inFlow: boolean | null inStringifyKey?: boolean flowCollectionPadding: string options: Readonly< Required<Omit<ToStringOptions, 'collectionStyle' | 'indent'>> > resolvedAliases?: Set<Alias> }
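A hedged sketch of the anchor-plus-tag prefix that stringifyProps produces, exercised indirectly through Document.toString(); the 'defaults' anchor name is an assumption.

import { Document, Scalar } from 'yaml'

const doc = new Document({ port: 8080 })
const node = doc.get('port', true) as Scalar

// Give the scalar an anchor and an explicit tag; both are emitted as
// node properties ahead of the value, in that order.
node.anchor = 'defaults'
node.tag = 'tag:yaml.org,2002:int'

console.log(String(doc))
// port: &defaults !!int 8080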
130
function stringifyProps(
  node: Node,
  tagObj: ScalarTag | CollectionTag,
  { anchors, doc }: StringifyContext
) {
  if (!doc.directives) return ''
  const props = []
  const anchor = (isScalar(node) || isCollection(node)) && node.anchor
  if (anchor && anchorIsValid(anchor)) {
    anchors.add(anchor)
    props.push(`&${anchor}`)
  }
  const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag
  if (tag) props.push(doc.directives.tagString(tag))
  return props.join(' ')
}
type Node<T = unknown> =
  | Alias
  | Scalar<T>
  | YAMLMap<unknown, T>
  | YAMLSeq<T>
131
(ctx: StringifyContext): FoldOptions => ({
  indentAtStart: ctx.indentAtStart,
  lineWidth: ctx.options.lineWidth,
  minContentWidth: ctx.options.minContentWidth
})
type StringifyContext = { actualString?: boolean allNullValues?: boolean anchors: Set<string> doc: Document forceBlockIndent?: boolean implicitKey?: boolean indent: string indentStep: string indentAtStart?: number inFlow: boolean | null inStringifyKey?: boolean flowCollectionPadding: string options: Readonly< Required<Omit<ToStringOptions, 'collectionStyle' | 'indent'>> > resolvedAliases?: Set<Alias> }
132
function blockString( { comment, type, value }: StringifyScalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void ) { const { blockQuote, commentString, lineWidth } = ctx.options // 1. Block can't end in whitespace unless the last line is non-empty. // 2. Strings consisting of only whitespace are best rendered explicitly. if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { return quotedString(value, ctx) } const indent = ctx.indent || (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '') const literal = blockQuote === 'literal' ? true : blockQuote === 'folded' || type === Scalar.BLOCK_FOLDED ? false : type === Scalar.BLOCK_LITERAL ? true : !lineLengthOverLimit(value, lineWidth, indent.length) if (!value) return literal ? '|\n' : '>\n' // determine chomping from whitespace at value end let chomp: '' | '-' | '+' let endStart: number for (endStart = value.length; endStart > 0; --endStart) { const ch = value[endStart - 1] if (ch !== '\n' && ch !== '\t' && ch !== ' ') break } let end = value.substring(endStart) const endNlPos = end.indexOf('\n') if (endNlPos === -1) { chomp = '-' // strip } else if (value === end || endNlPos !== end.length - 1) { chomp = '+' // keep if (onChompKeep) onChompKeep() } else { chomp = '' // clip } if (end) { value = value.slice(0, -end.length) if (end[end.length - 1] === '\n') end = end.slice(0, -1) end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`) } // determine indent indicator from whitespace at value start let startWithSpace = false let startEnd: number let startNlPos = -1 for (startEnd = 0; startEnd < value.length; ++startEnd) { const ch = value[startEnd] if (ch === ' ') startWithSpace = true else if (ch === '\n') startNlPos = startEnd else break } let start = value.substring( 0, startNlPos < startEnd ? startNlPos + 1 : startEnd ) if (start) { value = value.substring(start.length) start = start.replace(/\n+/g, `$&${indent}`) } const indentSize = indent ? '2' : '1' // root is at -1 let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp if (comment) { header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')) if (onComment) onComment() } if (literal) { value = value.replace(/\n+/g, `$&${indent}`) return `${header}\n${indent}${start}${value}${end}` } value = value .replace(/\n+/g, '\n$&') .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent .replace(/\n+/g, `$&${indent}`) const body = foldFlowLines( `${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx) ) return `${header}\n${indent}${body}` }
type StringifyContext = { actualString?: boolean allNullValues?: boolean anchors: Set<string> doc: Document forceBlockIndent?: boolean implicitKey?: boolean indent: string indentStep: string indentAtStart?: number inFlow: boolean | null inStringifyKey?: boolean flowCollectionPadding: string options: Readonly< Required<Omit<ToStringOptions, 'collectionStyle' | 'indent'>> > resolvedAliases?: Set<Alias> }
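A sketch of the chomping indicators that blockString selects, visible through the public stringify function; the key name and values are arbitrary, and the expected indicators follow the trailing-newline logic in the code above.

import { stringify } from 'yaml'

// Trailing newlines decide the chomping indicator:
console.log(stringify({ log: 'one\ntwo\n' }))   // ends in one newline -> clip:  log: |
console.log(stringify({ log: 'one\ntwo' }))     // no trailing newline -> strip: log: |-
console.log(stringify({ log: 'one\ntwo\n\n' })) // extra newlines      -> keep:  log: |+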
133
function blockString( { comment, type, value }: StringifyScalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void ) { const { blockQuote, commentString, lineWidth } = ctx.options // 1. Block can't end in whitespace unless the last line is non-empty. // 2. Strings consisting of only whitespace are best rendered explicitly. if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { return quotedString(value, ctx) } const indent = ctx.indent || (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '') const literal = blockQuote === 'literal' ? true : blockQuote === 'folded' || type === Scalar.BLOCK_FOLDED ? false : type === Scalar.BLOCK_LITERAL ? true : !lineLengthOverLimit(value, lineWidth, indent.length) if (!value) return literal ? '|\n' : '>\n' // determine chomping from whitespace at value end let chomp: '' | '-' | '+' let endStart: number for (endStart = value.length; endStart > 0; --endStart) { const ch = value[endStart - 1] if (ch !== '\n' && ch !== '\t' && ch !== ' ') break } let end = value.substring(endStart) const endNlPos = end.indexOf('\n') if (endNlPos === -1) { chomp = '-' // strip } else if (value === end || endNlPos !== end.length - 1) { chomp = '+' // keep if (onChompKeep) onChompKeep() } else { chomp = '' // clip } if (end) { value = value.slice(0, -end.length) if (end[end.length - 1] === '\n') end = end.slice(0, -1) end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`) } // determine indent indicator from whitespace at value start let startWithSpace = false let startEnd: number let startNlPos = -1 for (startEnd = 0; startEnd < value.length; ++startEnd) { const ch = value[startEnd] if (ch === ' ') startWithSpace = true else if (ch === '\n') startNlPos = startEnd else break } let start = value.substring( 0, startNlPos < startEnd ? startNlPos + 1 : startEnd ) if (start) { value = value.substring(start.length) start = start.replace(/\n+/g, `$&${indent}`) } const indentSize = indent ? '2' : '1' // root is at -1 let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp if (comment) { header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')) if (onComment) onComment() } if (literal) { value = value.replace(/\n+/g, `$&${indent}`) return `${header}\n${indent}${start}${value}${end}` } value = value .replace(/\n+/g, '\n$&') .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent .replace(/\n+/g, `$&${indent}`) const body = foldFlowLines( `${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx) ) return `${header}\n${indent}${body}` }
interface StringifyScalar {
  value: string
  comment?: string | null
  type?: string
}
134
function plainString( item: StringifyScalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void ) { const { type, value } = item const { actualString, implicitKey, indent, indentStep, inFlow } = ctx if ( (implicitKey && /[\n[\]{},]/.test(value)) || (inFlow && /[[\]{},]/.test(value)) ) { return quotedString(value, ctx) } if ( !value || /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test( value ) ) { // not allowed: // - empty string, '-' or '?' // - start with an indicator character (except [?:-]) or /[?-] / // - '\n ', ': ' or ' \n' anywhere // - '#' not preceded by a non-space char // - end with ' ' or ':' return implicitKey || inFlow || !value.includes('\n') ? quotedString(value, ctx) : blockString(item, ctx, onComment, onChompKeep) } if ( !implicitKey && !inFlow && type !== Scalar.PLAIN && value.includes('\n') ) { // Where allowed & type not set explicitly, prefer block style for multiline strings return blockString(item, ctx, onComment, onChompKeep) } if (containsDocumentMarker(value)) { if (indent === '') { ctx.forceBlockIndent = true return blockString(item, ctx, onComment, onChompKeep) } else if (implicitKey && indent === indentStep) { return quotedString(value, ctx) } } const str = value.replace(/\n+/g, `$&\n${indent}`) // Verify that output will be parsed as a string, as e.g. plain numbers and // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), // and others in v1.1. if (actualString) { const test = (tag: CollectionTag | ScalarTag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str) const { compat, tags } = ctx.doc.schema if (tags.some(test) || compat?.some(test)) return quotedString(value, ctx) } return implicitKey ? str : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx)) }
type StringifyContext = { actualString?: boolean allNullValues?: boolean anchors: Set<string> doc: Document forceBlockIndent?: boolean implicitKey?: boolean indent: string indentStep: string indentAtStart?: number inFlow: boolean | null inStringifyKey?: boolean flowCollectionPadding: string options: Readonly< Required<Omit<ToStringOptions, 'collectionStyle' | 'indent'>> > resolvedAliases?: Set<Alias> }
135
function plainString( item: StringifyScalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void ) { const { type, value } = item const { actualString, implicitKey, indent, indentStep, inFlow } = ctx if ( (implicitKey && /[\n[\]{},]/.test(value)) || (inFlow && /[[\]{},]/.test(value)) ) { return quotedString(value, ctx) } if ( !value || /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test( value ) ) { // not allowed: // - empty string, '-' or '?' // - start with an indicator character (except [?:-]) or /[?-] / // - '\n ', ': ' or ' \n' anywhere // - '#' not preceded by a non-space char // - end with ' ' or ':' return implicitKey || inFlow || !value.includes('\n') ? quotedString(value, ctx) : blockString(item, ctx, onComment, onChompKeep) } if ( !implicitKey && !inFlow && type !== Scalar.PLAIN && value.includes('\n') ) { // Where allowed & type not set explicitly, prefer block style for multiline strings return blockString(item, ctx, onComment, onChompKeep) } if (containsDocumentMarker(value)) { if (indent === '') { ctx.forceBlockIndent = true return blockString(item, ctx, onComment, onChompKeep) } else if (implicitKey && indent === indentStep) { return quotedString(value, ctx) } } const str = value.replace(/\n+/g, `$&\n${indent}`) // Verify that output will be parsed as a string, as e.g. plain numbers and // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), // and others in v1.1. if (actualString) { const test = (tag: CollectionTag | ScalarTag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str) const { compat, tags } = ctx.doc.schema if (tags.some(test) || compat?.some(test)) return quotedString(value, ctx) } return implicitKey ? str : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx)) }
interface StringifyScalar { value: string comment?: string | null type?: string }
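Both plainString rows encode the same decision: keep a plain scalar when it is unambiguous, otherwise fall back to quotedString or blockString. A hedged illustration via the assumed public stringify entry point of the yaml package:

import { stringify } from 'yaml'

console.log(stringify({ a: 'plain words' }))          // stays plain: a: plain words
console.log(stringify({ a: '- looks like a list' }))  // leading indicator character, so it is quoted
console.log(stringify({ a: 'true' }))                 // quoted so it still parses back as a string
console.log(stringify({ a: 'one\ntwo' }))             // multiline value prefers a block scalar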
136
function _normalizeComponentEncoding(components:URIComponents, protocol:URIRegExps) { function decodeUnreserved(str:string):string { const decStr = pctDecChars(str); return (!decStr.match(protocol.UNRESERVED) ? str : decStr); } if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, ""); if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace((components.scheme ? protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME), pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); return components; }
interface URIComponents { scheme?:string; userinfo?:string; host?:string; port?:number|string; path?:string; query?:string; fragment?:string; reference?:string; error?:string; }
137
function _normalizeComponentEncoding(components:URIComponents, protocol:URIRegExps) { function decodeUnreserved(str:string):string { const decStr = pctDecChars(str); return (!decStr.match(protocol.UNRESERVED) ? str : decStr); } if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, ""); if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace((components.scheme ? protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME), pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); return components; }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
138
function _normalizeComponentEncoding(components:URIComponents, protocol:URIRegExps) { function decodeUnreserved(str:string):string { const decStr = pctDecChars(str); return (!decStr.match(protocol.UNRESERVED) ? str : decStr); } if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, ""); if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace((components.scheme ? protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME), pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); return components; }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
139
function _normalizeComponentEncoding(components:URIComponents, protocol:URIRegExps) { function decodeUnreserved(str:string):string { const decStr = pctDecChars(str); return (!decStr.match(protocol.UNRESERVED) ? str : decStr); } if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, ""); if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace((components.scheme ? protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME), pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); return components; }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
140
function _normalizeComponentEncoding(components:URIComponents, protocol:URIRegExps) { function decodeUnreserved(str:string):string { const decStr = pctDecChars(str); return (!decStr.match(protocol.UNRESERVED) ? str : decStr); } if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, ""); if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace((components.scheme ? protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME), pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); return components; }
interface URIRegExps { NOT_SCHEME : RegExp, NOT_USERINFO : RegExp, NOT_HOST : RegExp, NOT_PATH : RegExp, NOT_PATH_NOSCHEME : RegExp, NOT_QUERY : RegExp, NOT_FRAGMENT : RegExp, ESCAPE : RegExp, UNRESERVED : RegExp, OTHER_CHARS : RegExp, PCT_ENCODED : RegExp, IPV4ADDRESS : RegExp, IPV6ADDRESS : RegExp, }
141
function _normalizeComponentEncoding(components:URIComponents, protocol:URIRegExps) { function decodeUnreserved(str:string):string { const decStr = pctDecChars(str); return (!decStr.match(protocol.UNRESERVED) ? str : decStr); } if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, ""); if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace((components.scheme ? protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME), pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); return components; }
interface URIRegExps { NOT_SCHEME: RegExp; NOT_USERINFO: RegExp; NOT_HOST: RegExp; NOT_PATH: RegExp; NOT_PATH_NOSCHEME: RegExp; NOT_QUERY: RegExp; NOT_FRAGMENT: RegExp; ESCAPE: RegExp; UNRESERVED: RegExp; OTHER_CHARS: RegExp; PCT_ENCODED: RegExp; IPV4ADDRESS: RegExp; IPV6ADDRESS: RegExp; }
142
function _normalizeComponentEncoding(components:URIComponents, protocol:URIRegExps) { function decodeUnreserved(str:string):string { const decStr = pctDecChars(str); return (!decStr.match(protocol.UNRESERVED) ? str : decStr); } if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, ""); if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace((components.scheme ? protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME), pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); return components; }
interface URIRegExps { NOT_SCHEME: RegExp; NOT_USERINFO: RegExp; NOT_HOST: RegExp; NOT_PATH: RegExp; NOT_PATH_NOSCHEME: RegExp; NOT_QUERY: RegExp; NOT_FRAGMENT: RegExp; ESCAPE: RegExp; UNRESERVED: RegExp; OTHER_CHARS: RegExp; PCT_ENCODED: RegExp; IPV4ADDRESS: RegExp; IPV6ADDRESS: RegExp; }
143
function _normalizeComponentEncoding(components:URIComponents, protocol:URIRegExps) { function decodeUnreserved(str:string):string { const decStr = pctDecChars(str); return (!decStr.match(protocol.UNRESERVED) ? str : decStr); } if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, ""); if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace((components.scheme ? protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME), pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase); return components; }
interface URIRegExps { NOT_SCHEME: RegExp; NOT_USERINFO: RegExp; NOT_HOST: RegExp; NOT_PATH: RegExp; NOT_PATH_NOSCHEME: RegExp; NOT_QUERY: RegExp; NOT_FRAGMENT: RegExp; ESCAPE: RegExp; UNRESERVED: RegExp; OTHER_CHARS: RegExp; PCT_ENCODED: RegExp; IPV4ADDRESS: RegExp; IPV6ADDRESS: RegExp; }
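_normalizeComponentEncoding decodes percent-escapes for unreserved characters, re-encodes characters that are invalid in each component, and upper-cases the escapes that remain. The function is private, so this sketch observes the effect through the public normalize entry point of uri-js (an assumption about the surrounding package); the output comment is approximate:

import { normalize } from 'uri-js'

// %7e is the unreserved character "~" and is decoded; %3a stays encoded
// (":" is not unreserved) but is upper-cased to %3A; scheme and host are lower-cased.
console.log(normalize('HTTP://www.Example.com/%7euser/%3a'))
// roughly: http://www.example.com/~user/%3A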
144
function _recomposeAuthority(components:URIComponents, options:URIOptions):string|undefined { const protocol = (options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; if (components.userinfo !== undefined) { uriTokens.push(components.userinfo); uriTokens.push("@"); } if (components.host !== undefined) { //normalize IP hosts, add brackets and escape zone separator for IPv6 uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, (_, $1, $2) => "[" + $1 + ($2 ? "%25" + $2 : "") + "]")); } if (typeof components.port === "number" || typeof components.port === "string") { uriTokens.push(":"); uriTokens.push(String(components.port)); } return uriTokens.length ? uriTokens.join("") : undefined; }
interface URIComponents { scheme?:string; userinfo?:string; host?:string; port?:number|string; path?:string; query?:string; fragment?:string; reference?:string; error?:string; }
145
function _recomposeAuthority(components:URIComponents, options:URIOptions):string|undefined { const protocol = (options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; if (components.userinfo !== undefined) { uriTokens.push(components.userinfo); uriTokens.push("@"); } if (components.host !== undefined) { //normalize IP hosts, add brackets and escape zone separator for IPv6 uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, (_, $1, $2) => "[" + $1 + ($2 ? "%25" + $2 : "") + "]")); } if (typeof components.port === "number" || typeof components.port === "string") { uriTokens.push(":"); uriTokens.push(String(components.port)); } return uriTokens.length ? uriTokens.join("") : undefined; }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
146
function _recomposeAuthority(components:URIComponents, options:URIOptions):string|undefined { const protocol = (options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; if (components.userinfo !== undefined) { uriTokens.push(components.userinfo); uriTokens.push("@"); } if (components.host !== undefined) { //normalize IP hosts, add brackets and escape zone separator for IPv6 uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, (_, $1, $2) => "[" + $1 + ($2 ? "%25" + $2 : "") + "]")); } if (typeof components.port === "number" || typeof components.port === "string") { uriTokens.push(":"); uriTokens.push(String(components.port)); } return uriTokens.length ? uriTokens.join("") : undefined; }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
147
function _recomposeAuthority(components:URIComponents, options:URIOptions):string|undefined { const protocol = (options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; if (components.userinfo !== undefined) { uriTokens.push(components.userinfo); uriTokens.push("@"); } if (components.host !== undefined) { //normalize IP hosts, add brackets and escape zone separator for IPv6 uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, (_, $1, $2) => "[" + $1 + ($2 ? "%25" + $2 : "") + "]")); } if (typeof components.port === "number" || typeof components.port === "string") { uriTokens.push(":"); uriTokens.push(String(components.port)); } return uriTokens.length ? uriTokens.join("") : undefined; }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
148
function _recomposeAuthority(components:URIComponents, options:URIOptions):string|undefined { const protocol = (options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; if (components.userinfo !== undefined) { uriTokens.push(components.userinfo); uriTokens.push("@"); } if (components.host !== undefined) { //normalize IP hosts, add brackets and escape zone separator for IPv6 uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, (_, $1, $2) => "[" + $1 + ($2 ? "%25" + $2 : "") + "]")); } if (typeof components.port === "number" || typeof components.port === "string") { uriTokens.push(":"); uriTokens.push(String(components.port)); } return uriTokens.length ? uriTokens.join("") : undefined; }
interface URIOptions { scheme?:string; reference?:string; tolerant?:boolean; absolutePath?:boolean; iri?:boolean; unicodeSupport?:boolean; domainHost?:boolean; }
149
function _recomposeAuthority(components:URIComponents, options:URIOptions):string|undefined { const protocol = (options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; if (components.userinfo !== undefined) { uriTokens.push(components.userinfo); uriTokens.push("@"); } if (components.host !== undefined) { //normalize IP hosts, add brackets and escape zone separator for IPv6 uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, (_, $1, $2) => "[" + $1 + ($2 ? "%25" + $2 : "") + "]")); } if (typeof components.port === "number" || typeof components.port === "string") { uriTokens.push(":"); uriTokens.push(String(components.port)); } return uriTokens.length ? uriTokens.join("") : undefined; }
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
150
function _recomposeAuthority(components:URIComponents, options:URIOptions):string|undefined { const protocol = (options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; if (components.userinfo !== undefined) { uriTokens.push(components.userinfo); uriTokens.push("@"); } if (components.host !== undefined) { //normalize IP hosts, add brackets and escape zone separator for IPv6 uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, (_, $1, $2) => "[" + $1 + ($2 ? "%25" + $2 : "") + "]")); } if (typeof components.port === "number" || typeof components.port === "string") { uriTokens.push(":"); uriTokens.push(String(components.port)); } return uriTokens.length ? uriTokens.join("") : undefined; }
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
151
function _recomposeAuthority(components:URIComponents, options:URIOptions):string|undefined { const protocol = (options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; if (components.userinfo !== undefined) { uriTokens.push(components.userinfo); uriTokens.push("@"); } if (components.host !== undefined) { //normalize IP hosts, add brackets and escape zone separator for IPv6 uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, (_, $1, $2) => "[" + $1 + ($2 ? "%25" + $2 : "") + "]")); } if (typeof components.port === "number" || typeof components.port === "string") { uriTokens.push(":"); uriTokens.push(String(components.port)); } return uriTokens.length ? uriTokens.join("") : undefined; }
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
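_recomposeAuthority assembles userinfo@host:port and is also where IPv6 hosts gain their brackets. Since it is private, the sketch below goes through the public serialize export of uri-js (assumed); the exact output strings are illustrative:

import { serialize } from 'uri-js'

console.log(serialize({ scheme: 'https', userinfo: 'user', host: 'example.com', port: 8443, path: '/x' }))
// https://user@example.com:8443/x

console.log(serialize({ scheme: 'http', host: 'fe80::1', path: '/' }))
// IPv6 hosts are bracketed by the IPV6ADDRESS replace above: http://[fe80::1]/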
152
function serialize(components:URIComponents, options:URIOptions = {}):string { const protocol = (options.iri ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; //find scheme handler const schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; //perform scheme specific serialization if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options); if (components.host) { //if host component is an IPv6 address if (protocol.IPV6ADDRESS.test(components.host)) { //TODO: normalize IPv6 address as per RFC 5952 } //if host component is a domain name else if (options.domainHost || (schemeHandler && schemeHandler.domainHost)) { //convert IDN via punycode try { components.host = (!options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host)); } catch (e) { components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e; } } } //normalize encoding _normalizeComponentEncoding(components, protocol); if (options.reference !== "suffix" && components.scheme) { uriTokens.push(components.scheme); uriTokens.push(":"); } const authority = _recomposeAuthority(components, options); if (authority !== undefined) { if (options.reference !== "suffix") { uriTokens.push("//"); } uriTokens.push(authority); if (components.path && components.path.charAt(0) !== "/") { uriTokens.push("/"); } } if (components.path !== undefined) { let s = components.path; if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) { s = removeDotSegments(s); } if (authority === undefined) { s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//" } uriTokens.push(s); } if (components.query !== undefined) { uriTokens.push("?"); uriTokens.push(components.query); } if (components.fragment !== undefined) { uriTokens.push("#"); uriTokens.push(components.fragment); } return uriTokens.join(""); //merge tokens into a string }
interface URIComponents { scheme?:string; userinfo?:string; host?:string; port?:number|string; path?:string; query?:string; fragment?:string; reference?:string; error?:string; }
153
function serialize(components:URIComponents, options:URIOptions = {}):string { const protocol = (options.iri ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; //find scheme handler const schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; //perform scheme specific serialization if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options); if (components.host) { //if host component is an IPv6 address if (protocol.IPV6ADDRESS.test(components.host)) { //TODO: normalize IPv6 address as per RFC 5952 } //if host component is a domain name else if (options.domainHost || (schemeHandler && schemeHandler.domainHost)) { //convert IDN via punycode try { components.host = (!options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host)); } catch (e) { components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e; } } } //normalize encoding _normalizeComponentEncoding(components, protocol); if (options.reference !== "suffix" && components.scheme) { uriTokens.push(components.scheme); uriTokens.push(":"); } const authority = _recomposeAuthority(components, options); if (authority !== undefined) { if (options.reference !== "suffix") { uriTokens.push("//"); } uriTokens.push(authority); if (components.path && components.path.charAt(0) !== "/") { uriTokens.push("/"); } } if (components.path !== undefined) { let s = components.path; if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) { s = removeDotSegments(s); } if (authority === undefined) { s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//" } uriTokens.push(s); } if (components.query !== undefined) { uriTokens.push("?"); uriTokens.push(components.query); } if (components.fragment !== undefined) { uriTokens.push("#"); uriTokens.push(components.fragment); } return uriTokens.join(""); //merge tokens into a string }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
154
function serialize(components:URIComponents, options:URIOptions = {}):string { const protocol = (options.iri ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; //find scheme handler const schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; //perform scheme specific serialization if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options); if (components.host) { //if host component is an IPv6 address if (protocol.IPV6ADDRESS.test(components.host)) { //TODO: normalize IPv6 address as per RFC 5952 } //if host component is a domain name else if (options.domainHost || (schemeHandler && schemeHandler.domainHost)) { //convert IDN via punycode try { components.host = (!options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host)); } catch (e) { components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e; } } } //normalize encoding _normalizeComponentEncoding(components, protocol); if (options.reference !== "suffix" && components.scheme) { uriTokens.push(components.scheme); uriTokens.push(":"); } const authority = _recomposeAuthority(components, options); if (authority !== undefined) { if (options.reference !== "suffix") { uriTokens.push("//"); } uriTokens.push(authority); if (components.path && components.path.charAt(0) !== "/") { uriTokens.push("/"); } } if (components.path !== undefined) { let s = components.path; if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) { s = removeDotSegments(s); } if (authority === undefined) { s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//" } uriTokens.push(s); } if (components.query !== undefined) { uriTokens.push("?"); uriTokens.push(components.query); } if (components.fragment !== undefined) { uriTokens.push("#"); uriTokens.push(components.fragment); } return uriTokens.join(""); //merge tokens into a string }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
155
function serialize(components:URIComponents, options:URIOptions = {}):string { const protocol = (options.iri ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; //find scheme handler const schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; //perform scheme specific serialization if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options); if (components.host) { //if host component is an IPv6 address if (protocol.IPV6ADDRESS.test(components.host)) { //TODO: normalize IPv6 address as per RFC 5952 } //if host component is a domain name else if (options.domainHost || (schemeHandler && schemeHandler.domainHost)) { //convert IDN via punycode try { components.host = (!options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host)); } catch (e) { components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e; } } } //normalize encoding _normalizeComponentEncoding(components, protocol); if (options.reference !== "suffix" && components.scheme) { uriTokens.push(components.scheme); uriTokens.push(":"); } const authority = _recomposeAuthority(components, options); if (authority !== undefined) { if (options.reference !== "suffix") { uriTokens.push("//"); } uriTokens.push(authority); if (components.path && components.path.charAt(0) !== "/") { uriTokens.push("/"); } } if (components.path !== undefined) { let s = components.path; if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) { s = removeDotSegments(s); } if (authority === undefined) { s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//" } uriTokens.push(s); } if (components.query !== undefined) { uriTokens.push("?"); uriTokens.push(components.query); } if (components.fragment !== undefined) { uriTokens.push("#"); uriTokens.push(components.fragment); } return uriTokens.join(""); //merge tokens into a string }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
156
function serialize(components:URIComponents, options:URIOptions = {}):string { const protocol = (options.iri ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; //find scheme handler const schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; //perform scheme specific serialization if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options); if (components.host) { //if host component is an IPv6 address if (protocol.IPV6ADDRESS.test(components.host)) { //TODO: normalize IPv6 address as per RFC 5952 } //if host component is a domain name else if (options.domainHost || (schemeHandler && schemeHandler.domainHost)) { //convert IDN via punycode try { components.host = (!options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host)); } catch (e) { components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e; } } } //normalize encoding _normalizeComponentEncoding(components, protocol); if (options.reference !== "suffix" && components.scheme) { uriTokens.push(components.scheme); uriTokens.push(":"); } const authority = _recomposeAuthority(components, options); if (authority !== undefined) { if (options.reference !== "suffix") { uriTokens.push("//"); } uriTokens.push(authority); if (components.path && components.path.charAt(0) !== "/") { uriTokens.push("/"); } } if (components.path !== undefined) { let s = components.path; if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) { s = removeDotSegments(s); } if (authority === undefined) { s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//" } uriTokens.push(s); } if (components.query !== undefined) { uriTokens.push("?"); uriTokens.push(components.query); } if (components.fragment !== undefined) { uriTokens.push("#"); uriTokens.push(components.fragment); } return uriTokens.join(""); //merge tokens into a string }
interface URIOptions { scheme?:string; reference?:string; tolerant?:boolean; absolutePath?:boolean; iri?:boolean; unicodeSupport?:boolean; domainHost?:boolean; }
157
function serialize(components:URIComponents, options:URIOptions = {}):string { const protocol = (options.iri ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; //find scheme handler const schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; //perform scheme specific serialization if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options); if (components.host) { //if host component is an IPv6 address if (protocol.IPV6ADDRESS.test(components.host)) { //TODO: normalize IPv6 address as per RFC 5952 } //if host component is a domain name else if (options.domainHost || (schemeHandler && schemeHandler.domainHost)) { //convert IDN via punycode try { components.host = (!options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host)); } catch (e) { components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e; } } } //normalize encoding _normalizeComponentEncoding(components, protocol); if (options.reference !== "suffix" && components.scheme) { uriTokens.push(components.scheme); uriTokens.push(":"); } const authority = _recomposeAuthority(components, options); if (authority !== undefined) { if (options.reference !== "suffix") { uriTokens.push("//"); } uriTokens.push(authority); if (components.path && components.path.charAt(0) !== "/") { uriTokens.push("/"); } } if (components.path !== undefined) { let s = components.path; if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) { s = removeDotSegments(s); } if (authority === undefined) { s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//" } uriTokens.push(s); } if (components.query !== undefined) { uriTokens.push("?"); uriTokens.push(components.query); } if (components.fragment !== undefined) { uriTokens.push("#"); uriTokens.push(components.fragment); } return uriTokens.join(""); //merge tokens into a string }
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
158
function serialize(components:URIComponents, options:URIOptions = {}):string { const protocol = (options.iri ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; //find scheme handler const schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; //perform scheme specific serialization if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options); if (components.host) { //if host component is an IPv6 address if (protocol.IPV6ADDRESS.test(components.host)) { //TODO: normalize IPv6 address as per RFC 5952 } //if host component is a domain name else if (options.domainHost || (schemeHandler && schemeHandler.domainHost)) { //convert IDN via punycode try { components.host = (!options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host)); } catch (e) { components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e; } } } //normalize encoding _normalizeComponentEncoding(components, protocol); if (options.reference !== "suffix" && components.scheme) { uriTokens.push(components.scheme); uriTokens.push(":"); } const authority = _recomposeAuthority(components, options); if (authority !== undefined) { if (options.reference !== "suffix") { uriTokens.push("//"); } uriTokens.push(authority); if (components.path && components.path.charAt(0) !== "/") { uriTokens.push("/"); } } if (components.path !== undefined) { let s = components.path; if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) { s = removeDotSegments(s); } if (authority === undefined) { s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//" } uriTokens.push(s); } if (components.query !== undefined) { uriTokens.push("?"); uriTokens.push(components.query); } if (components.fragment !== undefined) { uriTokens.push("#"); uriTokens.push(components.fragment); } return uriTokens.join(""); //merge tokens into a string }
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
159
function serialize(components:URIComponents, options:URIOptions = {}):string { const protocol = (options.iri ? IRI_PROTOCOL : URI_PROTOCOL); const uriTokens:Array<string> = []; //find scheme handler const schemeHandler = SCHEMES[(options.scheme || components.scheme || "").toLowerCase()]; //perform scheme specific serialization if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options); if (components.host) { //if host component is an IPv6 address if (protocol.IPV6ADDRESS.test(components.host)) { //TODO: normalize IPv6 address as per RFC 5952 } //if host component is a domain name else if (options.domainHost || (schemeHandler && schemeHandler.domainHost)) { //convert IDN via punycode try { components.host = (!options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host)); } catch (e) { components.error = components.error || "Host's domain name can not be converted to " + (!options.iri ? "ASCII" : "Unicode") + " via punycode: " + e; } } } //normalize encoding _normalizeComponentEncoding(components, protocol); if (options.reference !== "suffix" && components.scheme) { uriTokens.push(components.scheme); uriTokens.push(":"); } const authority = _recomposeAuthority(components, options); if (authority !== undefined) { if (options.reference !== "suffix") { uriTokens.push("//"); } uriTokens.push(authority); if (components.path && components.path.charAt(0) !== "/") { uriTokens.push("/"); } } if (components.path !== undefined) { let s = components.path; if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) { s = removeDotSegments(s); } if (authority === undefined) { s = s.replace(/^\/\//, "/%2F"); //don't allow the path to start with "//" } uriTokens.push(s); } if (components.query !== undefined) { uriTokens.push("?"); uriTokens.push(components.query); } if (components.fragment !== undefined) { uriTokens.push("#"); uriTokens.push(components.fragment); } return uriTokens.join(""); //merge tokens into a string }
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
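serialize stitches the components back together in RFC 3986 order: scheme, "//" plus authority, path (with dot segments removed unless the scheme handler opts out), query, fragment. A hedged end-to-end example using the assumed public export:

import { serialize } from 'uri-js'

console.log(serialize({ scheme: 'https', host: 'example.com', path: '/a/b/../c', query: 'q=1', fragment: 'top' }))
// dot segments removed: https://example.com/a/c?q=1#top

// Without an authority, a path that starts with "//" is rewritten to start with "/%2F"
// so the result cannot be re-parsed as having an authority.
console.log(serialize({ scheme: 'foo', path: '//not-a-host/readme' }))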
160
function resolveComponents(base:URIComponents, relative:URIComponents, options:URIOptions = {}, skipNormalization?:boolean):URIComponents { const target:URIComponents = {}; if (!skipNormalization) { base = parse(serialize(base, options), options); //normalize base components relative = parse(serialize(relative, options), options); //normalize relative components } options = options || {}; if (!options.tolerant && relative.scheme) { target.scheme = relative.scheme; //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) { //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (!relative.path) { target.path = base.path; if (relative.query !== undefined) { target.query = relative.query; } else { target.query = base.query; } } else { if (relative.path.charAt(0) === "/") { target.path = removeDotSegments(relative.path); } else { if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) { target.path = "/" + relative.path; } else if (!base.path) { target.path = relative.path; } else { target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path; } target.path = removeDotSegments(target.path); } target.query = relative.query; } //target.authority = base.authority; target.userinfo = base.userinfo; target.host = base.host; target.port = base.port; } target.scheme = base.scheme; } target.fragment = relative.fragment; return target; }
interface URIComponents { scheme?:string; userinfo?:string; host?:string; port?:number|string; path?:string; query?:string; fragment?:string; reference?:string; error?:string; }
161
function resolveComponents(base:URIComponents, relative:URIComponents, options:URIOptions = {}, skipNormalization?:boolean):URIComponents { const target:URIComponents = {}; if (!skipNormalization) { base = parse(serialize(base, options), options); //normalize base components relative = parse(serialize(relative, options), options); //normalize relative components } options = options || {}; if (!options.tolerant && relative.scheme) { target.scheme = relative.scheme; //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) { //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (!relative.path) { target.path = base.path; if (relative.query !== undefined) { target.query = relative.query; } else { target.query = base.query; } } else { if (relative.path.charAt(0) === "/") { target.path = removeDotSegments(relative.path); } else { if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) { target.path = "/" + relative.path; } else if (!base.path) { target.path = relative.path; } else { target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path; } target.path = removeDotSegments(target.path); } target.query = relative.query; } //target.authority = base.authority; target.userinfo = base.userinfo; target.host = base.host; target.port = base.port; } target.scheme = base.scheme; } target.fragment = relative.fragment; return target; }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
162
function resolveComponents(base:URIComponents, relative:URIComponents, options:URIOptions = {}, skipNormalization?:boolean):URIComponents { const target:URIComponents = {}; if (!skipNormalization) { base = parse(serialize(base, options), options); //normalize base components relative = parse(serialize(relative, options), options); //normalize relative components } options = options || {}; if (!options.tolerant && relative.scheme) { target.scheme = relative.scheme; //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) { //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (!relative.path) { target.path = base.path; if (relative.query !== undefined) { target.query = relative.query; } else { target.query = base.query; } } else { if (relative.path.charAt(0) === "/") { target.path = removeDotSegments(relative.path); } else { if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) { target.path = "/" + relative.path; } else if (!base.path) { target.path = relative.path; } else { target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path; } target.path = removeDotSegments(target.path); } target.query = relative.query; } //target.authority = base.authority; target.userinfo = base.userinfo; target.host = base.host; target.port = base.port; } target.scheme = base.scheme; } target.fragment = relative.fragment; return target; }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
163
function resolveComponents(base:URIComponents, relative:URIComponents, options:URIOptions = {}, skipNormalization?:boolean):URIComponents { const target:URIComponents = {}; if (!skipNormalization) { base = parse(serialize(base, options), options); //normalize base components relative = parse(serialize(relative, options), options); //normalize relative components } options = options || {}; if (!options.tolerant && relative.scheme) { target.scheme = relative.scheme; //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) { //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (!relative.path) { target.path = base.path; if (relative.query !== undefined) { target.query = relative.query; } else { target.query = base.query; } } else { if (relative.path.charAt(0) === "/") { target.path = removeDotSegments(relative.path); } else { if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) { target.path = "/" + relative.path; } else if (!base.path) { target.path = relative.path; } else { target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path; } target.path = removeDotSegments(target.path); } target.query = relative.query; } //target.authority = base.authority; target.userinfo = base.userinfo; target.host = base.host; target.port = base.port; } target.scheme = base.scheme; } target.fragment = relative.fragment; return target; }
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
164
function resolveComponents(base:URIComponents, relative:URIComponents, options:URIOptions = {}, skipNormalization?:boolean):URIComponents { const target:URIComponents = {}; if (!skipNormalization) { base = parse(serialize(base, options), options); //normalize base components relative = parse(serialize(relative, options), options); //normalize relative components } options = options || {}; if (!options.tolerant && relative.scheme) { target.scheme = relative.scheme; //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) { //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (!relative.path) { target.path = base.path; if (relative.query !== undefined) { target.query = relative.query; } else { target.query = base.query; } } else { if (relative.path.charAt(0) === "/") { target.path = removeDotSegments(relative.path); } else { if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) { target.path = "/" + relative.path; } else if (!base.path) { target.path = relative.path; } else { target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path; } target.path = removeDotSegments(target.path); } target.query = relative.query; } //target.authority = base.authority; target.userinfo = base.userinfo; target.host = base.host; target.port = base.port; } target.scheme = base.scheme; } target.fragment = relative.fragment; return target; }
interface URIOptions { scheme?:string; reference?:string; tolerant?:boolean; absolutePath?:boolean; iri?:boolean; unicodeSupport?:boolean; domainHost?:boolean; }
165
function resolveComponents(base:URIComponents, relative:URIComponents, options:URIOptions = {}, skipNormalization?:boolean):URIComponents { const target:URIComponents = {}; if (!skipNormalization) { base = parse(serialize(base, options), options); //normalize base components relative = parse(serialize(relative, options), options); //normalize relative components } options = options || {}; if (!options.tolerant && relative.scheme) { target.scheme = relative.scheme; //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) { //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (!relative.path) { target.path = base.path; if (relative.query !== undefined) { target.query = relative.query; } else { target.query = base.query; } } else { if (relative.path.charAt(0) === "/") { target.path = removeDotSegments(relative.path); } else { if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) { target.path = "/" + relative.path; } else if (!base.path) { target.path = relative.path; } else { target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path; } target.path = removeDotSegments(target.path); } target.query = relative.query; } //target.authority = base.authority; target.userinfo = base.userinfo; target.host = base.host; target.port = base.port; } target.scheme = base.scheme; } target.fragment = relative.fragment; return target; }
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
166
function resolveComponents(base:URIComponents, relative:URIComponents, options:URIOptions = {}, skipNormalization?:boolean):URIComponents { const target:URIComponents = {}; if (!skipNormalization) { base = parse(serialize(base, options), options); //normalize base components relative = parse(serialize(relative, options), options); //normalize relative components } options = options || {}; if (!options.tolerant && relative.scheme) { target.scheme = relative.scheme; //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) { //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (!relative.path) { target.path = base.path; if (relative.query !== undefined) { target.query = relative.query; } else { target.query = base.query; } } else { if (relative.path.charAt(0) === "/") { target.path = removeDotSegments(relative.path); } else { if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) { target.path = "/" + relative.path; } else if (!base.path) { target.path = relative.path; } else { target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path; } target.path = removeDotSegments(target.path); } target.query = relative.query; } //target.authority = base.authority; target.userinfo = base.userinfo; target.host = base.host; target.port = base.port; } target.scheme = base.scheme; } target.fragment = relative.fragment; return target; }
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
167
function resolveComponents(base:URIComponents, relative:URIComponents, options:URIOptions = {}, skipNormalization?:boolean):URIComponents { const target:URIComponents = {}; if (!skipNormalization) { base = parse(serialize(base, options), options); //normalize base components relative = parse(serialize(relative, options), options); //normalize relative components } options = options || {}; if (!options.tolerant && relative.scheme) { target.scheme = relative.scheme; //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) { //target.authority = relative.authority; target.userinfo = relative.userinfo; target.host = relative.host; target.port = relative.port; target.path = removeDotSegments(relative.path || ""); target.query = relative.query; } else { if (!relative.path) { target.path = base.path; if (relative.query !== undefined) { target.query = relative.query; } else { target.query = base.query; } } else { if (relative.path.charAt(0) === "/") { target.path = removeDotSegments(relative.path); } else { if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) { target.path = "/" + relative.path; } else if (!base.path) { target.path = relative.path; } else { target.path = base.path.slice(0, base.path.lastIndexOf("/") + 1) + relative.path; } target.path = removeDotSegments(target.path); } target.query = relative.query; } //target.authority = base.authority; target.userinfo = base.userinfo; target.host = base.host; target.port = base.port; } target.scheme = base.scheme; } target.fragment = relative.fragment; return target; }
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
168
function normalize(uri:URIComponents, options?:URIOptions):URIComponents;
interface URIComponents { scheme?:string; userinfo?:string; host?:string; port?:number|string; path?:string; query?:string; fragment?:string; reference?:string; error?:string; }
169
function normalize(uri:URIComponents, options?:URIOptions):URIComponents;
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
170
function normalize(uri:URIComponents, options?:URIOptions):URIComponents;
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
171
function normalize(uri:URIComponents, options?:URIOptions):URIComponents;
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
172
function normalize(uri:URIComponents, options?:URIOptions):URIComponents;
interface URIOptions { scheme?:string; reference?:string; tolerant?:boolean; absolutePath?:boolean; iri?:boolean; unicodeSupport?:boolean; domainHost?:boolean; }
173
function normalize(uri:URIComponents, options?:URIOptions):URIComponents;
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
174
function normalize(uri:URIComponents, options?:URIOptions):URIComponents;
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
175
function normalize(uri:URIComponents, options?:URIOptions):URIComponents;
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
176
function equal(uriA:URIComponents, uriB:URIComponents, options?:URIOptions):boolean;
interface URIComponents { scheme?:string; userinfo?:string; host?:string; port?:number|string; path?:string; query?:string; fragment?:string; reference?:string; error?:string; }
177
function equal(uriA:URIComponents, uriB:URIComponents, options?:URIOptions):boolean;
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
178
function equal(uriA:URIComponents, uriB:URIComponents, options?:URIOptions):boolean;
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
179
function equal(uriA:URIComponents, uriB:URIComponents, options?:URIOptions):boolean;
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
180
function equal(uriA:URIComponents, uriB:URIComponents, options?:URIOptions):boolean;
interface URIOptions { scheme?:string; reference?:string; tolerant?:boolean; absolutePath?:boolean; iri?:boolean; unicodeSupport?:boolean; domainHost?:boolean; }
181
function equal(uriA:URIComponents, uriB:URIComponents, options?:URIOptions):boolean;
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
182
function equal(uriA:URIComponents, uriB:URIComponents, options?:URIOptions):boolean;
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
183
function equal(uriA:URIComponents, uriB:URIComponents, options?:URIOptions):boolean;
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
184
function (urnComponents:URNComponents, options:URIOptions):UUIDComponents {
  const uuidComponents = urnComponents as UUIDComponents;
  uuidComponents.uuid = uuidComponents.nss;
  uuidComponents.nss = undefined;
  if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {
    uuidComponents.error = uuidComponents.error || "UUID is not valid.";
  }
  return uuidComponents;
}
interface URIOptions { scheme?:string; reference?:string; tolerant?:boolean; absolutePath?:boolean; iri?:boolean; unicodeSupport?:boolean; domainHost?:boolean; }
185
function (urnComponents:URNComponents, options:URIOptions):UUIDComponents {
  const uuidComponents = urnComponents as UUIDComponents;
  uuidComponents.uuid = uuidComponents.nss;
  uuidComponents.nss = undefined;
  if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {
    uuidComponents.error = uuidComponents.error || "UUID is not valid.";
  }
  return uuidComponents;
}
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
186
function (urnComponents:URNComponents, options:URIOptions):UUIDComponents {
  const uuidComponents = urnComponents as UUIDComponents;
  uuidComponents.uuid = uuidComponents.nss;
  uuidComponents.nss = undefined;
  if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {
    uuidComponents.error = uuidComponents.error || "UUID is not valid.";
  }
  return uuidComponents;
}
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
187
function (urnComponents:URNComponents, options:URIOptions):UUIDComponents {
  const uuidComponents = urnComponents as UUIDComponents;
  uuidComponents.uuid = uuidComponents.nss;
  uuidComponents.nss = undefined;
  if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {
    uuidComponents.error = uuidComponents.error || "UUID is not valid.";
  }
  return uuidComponents;
}
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
188
function (urnComponents:URNComponents, options:URIOptions):UUIDComponents {
  const uuidComponents = urnComponents as UUIDComponents;
  uuidComponents.uuid = uuidComponents.nss;
  uuidComponents.nss = undefined;
  if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {
    uuidComponents.error = uuidComponents.error || "UUID is not valid.";
  }
  return uuidComponents;
}
interface URNComponents extends URIComponents { nid?:string; nss?:string; }
189
function (urnComponents:URNComponents, options:URIOptions):UUIDComponents {
  const uuidComponents = urnComponents as UUIDComponents;
  uuidComponents.uuid = uuidComponents.nss;
  uuidComponents.nss = undefined;
  if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {
    uuidComponents.error = uuidComponents.error || "UUID is not valid.";
  }
  return uuidComponents;
}
interface URNComponents extends URIComponents { nid?: string; nss?: string; }
190
function (uuidComponents:UUIDComponents, options:URIOptions):URNComponents {
  const urnComponents = uuidComponents as URNComponents;
  //normalize UUID
  urnComponents.nss = (uuidComponents.uuid || "").toLowerCase();
  return urnComponents;
}
interface URIOptions { scheme?:string; reference?:string; tolerant?:boolean; absolutePath?:boolean; iri?:boolean; unicodeSupport?:boolean; domainHost?:boolean; }
191
function (uuidComponents:UUIDComponents, options:URIOptions):URNComponents {
  const urnComponents = uuidComponents as URNComponents;
  //normalize UUID
  urnComponents.nss = (uuidComponents.uuid || "").toLowerCase();
  return urnComponents;
}
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
192
function (uuidComponents:UUIDComponents, options:URIOptions):URNComponents {
  const urnComponents = uuidComponents as URNComponents;
  //normalize UUID
  urnComponents.nss = (uuidComponents.uuid || "").toLowerCase();
  return urnComponents;
}
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
193
function (uuidComponents:UUIDComponents, options:URIOptions):URNComponents {
  const urnComponents = uuidComponents as URNComponents;
  //normalize UUID
  urnComponents.nss = (uuidComponents.uuid || "").toLowerCase();
  return urnComponents;
}
interface URIOptions { scheme?: string; reference?: string; tolerant?: boolean; absolutePath?: boolean; iri?: boolean; unicodeSupport?: boolean; domainHost?: boolean; }
194
function (uuidComponents:UUIDComponents, options:URIOptions):URNComponents {
  const urnComponents = uuidComponents as URNComponents;
  //normalize UUID
  urnComponents.nss = (uuidComponents.uuid || "").toLowerCase();
  return urnComponents;
}
interface UUIDComponents extends URNComponents { uuid?: string; }
195
function (uuidComponents:UUIDComponents, options:URIOptions):URNComponents {
  const urnComponents = uuidComponents as URNComponents;
  //normalize UUID
  urnComponents.nss = (uuidComponents.uuid || "").toLowerCase();
  return urnComponents;
}
interface UUIDComponents extends URNComponents { uuid?: string; }
196
function (components:URIComponents, options:URNOptions):URNComponents {
  const matches = components.path && components.path.match(URN_PARSE);
  let urnComponents = components as URNComponents;
  if (matches) {
    const scheme = options.scheme || urnComponents.scheme || "urn";
    const nid = matches[1].toLowerCase();
    const nss = matches[2];
    const urnScheme = `${scheme}:${options.nid || nid}`;
    const schemeHandler = SCHEMES[urnScheme];
    urnComponents.nid = nid;
    urnComponents.nss = nss;
    urnComponents.path = undefined;
    if (schemeHandler) {
      urnComponents = schemeHandler.parse(urnComponents, options) as URNComponents;
    }
  } else {
    urnComponents.error = urnComponents.error || "URN can not be parsed.";
  }
  return urnComponents;
}
interface URIComponents { scheme?:string; userinfo?:string; host?:string; port?:number|string; path?:string; query?:string; fragment?:string; reference?:string; error?:string; }
197
function (components:URIComponents, options:URNOptions):URNComponents {
  const matches = components.path && components.path.match(URN_PARSE);
  let urnComponents = components as URNComponents;
  if (matches) {
    const scheme = options.scheme || urnComponents.scheme || "urn";
    const nid = matches[1].toLowerCase();
    const nss = matches[2];
    const urnScheme = `${scheme}:${options.nid || nid}`;
    const schemeHandler = SCHEMES[urnScheme];
    urnComponents.nid = nid;
    urnComponents.nss = nss;
    urnComponents.path = undefined;
    if (schemeHandler) {
      urnComponents = schemeHandler.parse(urnComponents, options) as URNComponents;
    }
  } else {
    urnComponents.error = urnComponents.error || "URN can not be parsed.";
  }
  return urnComponents;
}
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
198
function (components:URIComponents, options:URNOptions):URNComponents {
  const matches = components.path && components.path.match(URN_PARSE);
  let urnComponents = components as URNComponents;
  if (matches) {
    const scheme = options.scheme || urnComponents.scheme || "urn";
    const nid = matches[1].toLowerCase();
    const nss = matches[2];
    const urnScheme = `${scheme}:${options.nid || nid}`;
    const schemeHandler = SCHEMES[urnScheme];
    urnComponents.nid = nid;
    urnComponents.nss = nss;
    urnComponents.path = undefined;
    if (schemeHandler) {
      urnComponents = schemeHandler.parse(urnComponents, options) as URNComponents;
    }
  } else {
    urnComponents.error = urnComponents.error || "URN can not be parsed.";
  }
  return urnComponents;
}
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }
199
function (components:URIComponents, options:URNOptions):URNComponents {
  const matches = components.path && components.path.match(URN_PARSE);
  let urnComponents = components as URNComponents;
  if (matches) {
    const scheme = options.scheme || urnComponents.scheme || "urn";
    const nid = matches[1].toLowerCase();
    const nss = matches[2];
    const urnScheme = `${scheme}:${options.nid || nid}`;
    const schemeHandler = SCHEMES[urnScheme];
    urnComponents.nid = nid;
    urnComponents.nss = nss;
    urnComponents.path = undefined;
    if (schemeHandler) {
      urnComponents = schemeHandler.parse(urnComponents, options) as URNComponents;
    }
  } else {
    urnComponents.error = urnComponents.error || "URN can not be parsed.";
  }
  return urnComponents;
}
interface URIComponents { scheme?: string; userinfo?: string; host?: string; port?: number | string; path?: string; query?: string; fragment?: string; reference?: string; error?: string; }