diff --git a/doc/api/cli.md b/doc/api/cli.md index 8e26ae16ae6233..39033269885ad7 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -2319,7 +2319,8 @@ on unsupported platforms will not be fixed. ### `NODE_TEST_CONTEXT=value` If `value` equals `'child'`, test reporter options will be overridden and test -output will be sent to stdout in the TAP format. +output will be sent to stdout in the TAP format. If any other value is provided, +Node.js makes no guarantees about the reporter format used or its stability. ### `NODE_TLS_REJECT_UNAUTHORIZED=value` diff --git a/lib/internal/error_serdes.js b/lib/internal/error_serdes.js index 13f3f8b35fdab0..94fab6422716ce 100644 --- a/lib/internal/error_serdes.js +++ b/lib/internal/error_serdes.js @@ -13,19 +13,24 @@ const { ObjectGetOwnPropertyNames, ObjectGetPrototypeOf, ObjectKeys, + ObjectPrototypeHasOwnProperty, ObjectPrototypeToString, RangeError, ReferenceError, SafeSet, SymbolToStringTag, SyntaxError, + SymbolFor, TypeError, URIError, } = primordials; +const { inspect: { custom: customInspectSymbol } } = require('util'); const kSerializedError = 0; const kSerializedObject = 1; const kInspectedError = 2; +const kInspectedSymbol = 3; +const kCustomInspecteObject = 4; const errors = { Error, TypeError, RangeError, URIError, SyntaxError, ReferenceError, EvalError, @@ -52,7 +57,13 @@ function TryGetAllProperties(object, target = object) { // Continue regardless of error. } } - if ('value' in descriptor && typeof descriptor.value !== 'function') { + if (key === 'cause') { + delete descriptor.get; + delete descriptor.set; + descriptor.value = serializeError(descriptor.value); + all[key] = descriptor; + } else if ('value' in descriptor && + typeof descriptor.value !== 'function' && typeof descriptor.value !== 'symbol') { delete descriptor.get; delete descriptor.set; all[key] = descriptor; @@ -95,6 +106,10 @@ function inspect(...args) { let serialize; function serializeError(error) { if (!serialize) serialize = require('v8').serialize; + if (typeof error === 'symbol') { + return Buffer.concat([Buffer.from([kInspectedSymbol]), + Buffer.from(inspect(error), 'utf8')]); + } try { if (typeof error === 'object' && ObjectPrototypeToString(error) === '[object Error]') { @@ -113,6 +128,15 @@ function serializeError(error) { } catch { // Continue regardless of error. } + try { + if (error != null && + ObjectPrototypeHasOwnProperty(error, customInspectSymbol)) { + return Buffer.concat([Buffer.from([kCustomInspecteObject]), + Buffer.from(inspect(error), 'utf8')]); + } + } catch { + // Continue regardless of error. 
+ } try { const serialized = serialize(error); return Buffer.concat([Buffer.from([kSerializedObject]), serialized]); @@ -123,6 +147,12 @@ function serializeError(error) { Buffer.from(inspect(error), 'utf8')]); } +function fromBuffer(error) { + return Buffer.from(error.buffer, + error.byteOffset + 1, + error.byteLength - 1); +} + let deserialize; function deserializeError(error) { if (!deserialize) deserialize = require('v8').deserialize; @@ -132,19 +162,27 @@ function deserializeError(error) { const ctor = errors[constructor]; ObjectDefineProperty(properties, SymbolToStringTag, { __proto__: null, - value: { value: 'Error', configurable: true }, + value: { __proto__: null, value: 'Error', configurable: true }, enumerable: true, }); + if ('cause' in properties && 'value' in properties.cause) { + properties.cause.value = deserializeError(properties.cause.value); + } return ObjectCreate(ctor.prototype, properties); } case kSerializedObject: return deserialize(error.subarray(1)); - case kInspectedError: { - const buf = Buffer.from(error.buffer, - error.byteOffset + 1, - error.byteLength - 1); - return buf.toString('utf8'); + case kInspectedError: + return fromBuffer(error).toString('utf8'); + case kInspectedSymbol: { + const buf = fromBuffer(error); + return SymbolFor(buf.toString('utf8').substring('Symbol('.length, buf.length - 1)); } + case kCustomInspecteObject: + return { + __proto__: null, + [customInspectSymbol]: () => fromBuffer(error).toString('utf8'), + }; } require('assert').fail('This should not happen'); } diff --git a/lib/internal/test_runner/reporter/v8.js b/lib/internal/test_runner/reporter/v8.js new file mode 100644 index 00000000000000..031b3bde01a529 --- /dev/null +++ b/lib/internal/test_runner/reporter/v8.js @@ -0,0 +1,36 @@ +'use strict'; + +const { DefaultSerializer } = require('v8'); +const { Buffer } = require('buffer'); +const { serializeError } = require('internal/error_serdes'); + + +module.exports = async function* v8Reporter(source) { + const serializer = new DefaultSerializer(); + + for await (const item of source) { + const originalError = item.data.details?.error; + if (originalError) { + item.data.details.error = serializeError(originalError); + } + // Add 4 bytes, to later populate with message length + serializer.writeRawBytes(Buffer.allocUnsafe(4)); + serializer.writeHeader(); + serializer.writeValue(item); + + if (originalError) { + item.data.details.error = originalError; + } + + const serializedMessage = serializer.releaseBuffer(); + const serializedMessageLength = serializedMessage.length - 4; + + serializedMessage.set([ + serializedMessageLength >> 24 & 0xFF, + serializedMessageLength >> 16 & 0xFF, + serializedMessageLength >> 8 & 0xFF, + serializedMessageLength & 0xFF, + ], 0); + yield serializedMessage; + } +}; diff --git a/lib/internal/test_runner/runner.js b/lib/internal/test_runner/runner.js index 45fb830a982fd8..5f3071412e3669 100644 --- a/lib/internal/test_runner/runner.js +++ b/lib/internal/test_runner/runner.js @@ -12,22 +12,25 @@ const { ArrayPrototypeSort, ObjectAssign, PromisePrototypeThen, - SafePromiseAll, SafePromiseAllReturnVoid, SafePromiseAllSettledReturnVoid, PromiseResolve, SafeMap, SafeSet, + String, StringPrototypeIndexOf, StringPrototypeSlice, StringPrototypeStartsWith, + TypedArrayPrototypeSubarray, } = primordials; const { spawn } = require('child_process'); const { readdirSync, statSync } = require('fs'); -const { finished } = require('internal/streams/end-of-stream'); +const { DefaultDeserializer, DefaultSerializer } = 
require('v8'); // TODO(aduh95): switch to internal/readline/interface when backporting to Node.js 16.x is no longer a concern. const { createInterface } = require('readline'); +const { deserializeError } = require('internal/error_serdes'); +const { Buffer } = require('buffer'); const { FilesWatcher } = require('internal/watch_mode/files_watcher'); const console = require('internal/console/global'); const { @@ -40,6 +43,7 @@ const { validateArray, validateBoolean, validateFunction } = require('internal/v const { getInspectPort, isUsingInspector, isInspectorMessage } = require('internal/util/inspector'); const { isRegExp } = require('internal/util/types'); const { kEmptyObject } = require('internal/util'); +const { kEmitMessage } = require('internal/test_runner/tests_stream'); const { createTestTree } = require('internal/test_runner/harness'); const { kAborted, @@ -49,9 +53,6 @@ const { kTestTimeoutFailure, Test, } = require('internal/test_runner/test'); -const { TapParser } = require('internal/test_runner/tap_parser'); -const { YAMLToJs } = require('internal/test_runner/yaml_to_js'); -const { TokenKind } = require('internal/test_runner/tap_lexer'); const { convertStringToRegExp, @@ -153,92 +154,62 @@ function getRunArgs({ path, inspectPort, testNamePatterns }) { return argv; } +const serializer = new DefaultSerializer(); +serializer.writeHeader(); +const v8Header = serializer.releaseBuffer(); +const v8HeaderAndSize = 4 + v8Header.length; + class FileTest extends Test { #buffer = []; + #messageBuffer = []; + #messageBufferSize = 0; #reportedChildren = 0; failedSubtests = false; #skipReporting() { return this.#reportedChildren > 0 && (!this.error || this.error.failureType === kSubtestsFailed); } - #checkNestedComment({ comment }) { + #checkNestedComment(comment) { const firstSpaceIndex = StringPrototypeIndexOf(comment, ' '); if (firstSpaceIndex === -1) return false; const secondSpaceIndex = StringPrototypeIndexOf(comment, ' ', firstSpaceIndex + 1); return secondSpaceIndex === -1 && ArrayPrototypeIncludes(kDiagnosticsFilterArgs, StringPrototypeSlice(comment, 0, firstSpaceIndex)); } - #handleReportItem({ kind, node, comments, nesting = 0 }) { - if (comments) { - ArrayPrototypeForEach(comments, (comment) => this.reporter.diagnostic(nesting, this.name, comment)); - } - switch (kind) { - case TokenKind.TAP_VERSION: - // TODO(manekinekko): handle TAP version coming from the parser. - // this.reporter.version(node.version); - break; - - case TokenKind.TAP_PLAN: - if (nesting === 0 && this.#skipReporting()) { - break; - } - this.reporter.plan(nesting, this.name, node.end - node.start + 1); - break; - - case TokenKind.TAP_SUBTEST_POINT: - this.reporter.start(nesting, this.name, node.name); - break; - - case TokenKind.TAP_TEST_POINT: { - - const { todo, skip, pass } = node.status; - - let directive; - - if (skip) { - directive = this.reporter.getSkip(node.reason || true); - } else if (todo) { - directive = this.reporter.getTodo(node.reason || true); - } else { - directive = kEmptyObject; - } - - const diagnostics = YAMLToJs(node.diagnostics); - const cancelled = kCanceledTests.has(diagnostics.error?.failureType); - const testNumber = nesting === 0 ? (this.root.harness.counters.topLevel + 1) : node.id; - const method = pass ? 
'ok' : 'fail'; - this.reporter[method](nesting, this.name, testNumber, node.description, diagnostics, directive); - countCompletedTest({ - name: node.description, - finished: true, - skipped: skip, - isTodo: todo, - passed: pass, - cancelled, - nesting, - reportedType: diagnostics.type, - }, this.root.harness); - break; - + #handleReportItem(item) { + const isTopLevel = item.data.nesting === 0; + if (isTopLevel) { + if (item.type === 'test:plan' && this.#skipReporting()) { + return; } - case TokenKind.COMMENT: - if (nesting === 0 && this.#checkNestedComment(node)) { - // Ignore file top level diagnostics - break; - } - this.reporter.diagnostic(nesting, this.name, node.comment); - break; - - case TokenKind.UNKNOWN: - this.reporter.diagnostic(nesting, this.name, node.value); - break; + if (item.type === 'test:diagnostic' && this.#checkNestedComment(item.data.message)) { + return; + } + } + if (item.data.details?.error) { + item.data.details.error = deserializeError(item.data.details.error); } + if (item.type === 'test:pass' || item.type === 'test:fail') { + item.data.testNumber = isTopLevel ? (this.root.harness.counters.topLevel + 1) : item.data.testNumber; + countCompletedTest({ + __proto__: null, + name: item.data.name, + finished: true, + skipped: item.data.skip !== undefined, + isTodo: item.data.todo !== undefined, + passed: item.type === 'test:pass', + cancelled: kCanceledTests.has(item.data.details?.error?.failureType), + nesting: item.data.nesting, + reportedType: item.data.details?.type, + }, this.root.harness); + } + this.reporter[kEmitMessage](item.type, item.data); } - #accumulateReportItem({ kind, node, comments, nesting = 0 }) { - if (kind !== TokenKind.TAP_TEST_POINT) { + #accumulateReportItem(item) { + if (item.type !== 'test:pass' && item.type !== 'test:fail') { return; } this.#reportedChildren++; - if (nesting === 0 && !node.status.pass) { + if (item.data.nesting === 0 && item.type === 'test:fail') { this.failedSubtests = true; } } @@ -248,14 +219,65 @@ class FileTest extends Test { this.#buffer = []; } } - addToReport(ast) { - this.#accumulateReportItem(ast); + addToReport(item) { + this.#accumulateReportItem(item); if (!this.isClearToSend()) { - ArrayPrototypePush(this.#buffer, ast); + ArrayPrototypePush(this.#buffer, item); return; } this.#drainBuffer(); - this.#handleReportItem(ast); + this.#handleReportItem(item); + } + parseMessage(readData) { + if (readData.length === 0) return; + + ArrayPrototypePush(this.#messageBuffer, readData); + this.#messageBufferSize += readData.length; + + // Index 0 should always be present because we just pushed data into it. + let messageBufferHead = this.#messageBuffer[0]; + + while (messageBufferHead.length >= 4) { + const isSerializedMessage = messageBufferHead.length >= v8HeaderAndSize && + v8Header.compare(messageBufferHead, 4, v8HeaderAndSize) === 0; + if (!isSerializedMessage) { + const message = Buffer.concat(this.#messageBuffer, this.#messageBufferSize); + this.#messageBufferSize = 0; + this.#messageBuffer = []; + this.addToReport({ + __proto__: null, + type: 'test:diagnostic', + data: { __proto__: null, nesting: 0, file: this.name, message: String(message) }, + }); + return; + } + + // We call `readUInt32BE` manually here, because this is faster than first converting + // it to a buffer and using `readUInt32BE` on that. 
+ const fullMessageSize = ( + messageBufferHead[0] << 24 | + messageBufferHead[1] << 16 | + messageBufferHead[2] << 8 | + messageBufferHead[3] + ) + 4; + + if (this.#messageBufferSize < fullMessageSize) break; + + const concatenatedBuffer = this.#messageBuffer.length === 1 ? + this.#messageBuffer[0] : Buffer.concat(this.#messageBuffer, this.#messageBufferSize); + + const deserializer = new DefaultDeserializer( + TypedArrayPrototypeSubarray(concatenatedBuffer, 4, fullMessageSize), + ); + + messageBufferHead = TypedArrayPrototypeSubarray(concatenatedBuffer, fullMessageSize); + this.#messageBufferSize = messageBufferHead.length; + this.#messageBuffer = this.#messageBufferSize !== 0 ? [messageBufferHead] : []; + + deserializer.readHeader(); + const item = deserializer.readValue(); + this.addToReport(item); + } } reportStarted() {} report() { @@ -275,7 +297,7 @@ function runTestFile(path, root, inspectPort, filesWatcher, testNamePatterns) { const subtest = root.createSubtest(FileTest, path, async (t) => { const args = getRunArgs({ path, inspectPort, testNamePatterns }); const stdio = ['pipe', 'pipe', 'pipe']; - const env = { ...process.env, NODE_TEST_CONTEXT: 'child' }; + const env = { ...process.env, NODE_TEST_CONTEXT: 'child-v8' }; if (filesWatcher) { stdio.push('ipc'); env.WATCH_REPORT_DEPENDENCIES = '1'; @@ -292,6 +314,10 @@ function runTestFile(path, root, inspectPort, filesWatcher, testNamePatterns) { err = error; }); + child.stdout.on('data', (data) => { + subtest.parseMessage(data); + }); + const rl = createInterface({ input: child.stderr }); rl.on('line', (line) => { if (isInspectorMessage(line)) { @@ -303,26 +329,14 @@ function runTestFile(path, root, inspectPort, filesWatcher, testNamePatterns) { // surface stderr lines as TAP diagnostics to improve the DX. Inject // each line into the test output as an unknown token as if it came // from the TAP parser. - const node = { - kind: TokenKind.UNKNOWN, - node: { - value: line, - }, - }; - - subtest.addToReport(node); - }); - - const parser = new TapParser(); - - child.stdout.pipe(parser).on('data', (ast) => { - subtest.addToReport(ast); + subtest.addToReport({ + __proto__: null, + type: 'test:diagnostic', + data: { __proto__: null, nesting: 0, file: path, message: line }, + }); }); - const { 0: { 0: code, 1: signal } } = await SafePromiseAll([ - once(child, 'exit', { signal: t.signal }), - finished(parser, { signal: t.signal }), - ]); + const { 0: code, 1: signal } = await once(child, 'exit', { signal: t.signal }); runningProcesses.delete(path); runningSubtests.delete(path); diff --git a/lib/internal/test_runner/tap_checker.js b/lib/internal/test_runner/tap_checker.js deleted file mode 100644 index 1b9945c5485a38..00000000000000 --- a/lib/internal/test_runner/tap_checker.js +++ /dev/null @@ -1,155 +0,0 @@ -'use strict'; - -const { - ArrayPrototypeFilter, - ArrayPrototypeFind, - NumberParseInt, -} = primordials; -const { - codes: { ERR_TAP_VALIDATION_ERROR }, -} = require('internal/errors'); -const { TokenKind } = require('internal/test_runner/tap_lexer'); - -// TODO(@manekinekko): add more validation rules based on the TAP14 spec. 
-// See https://testanything.org/tap-version-14-specification.html -class TAPValidationStrategy { - validate(ast) { - this.#validateVersion(ast); - this.#validatePlan(ast); - this.#validateTestPoints(ast); - - return true; - } - - #validateVersion(ast) { - const entry = ArrayPrototypeFind( - ast, - (node) => node.kind === TokenKind.TAP_VERSION, - ); - - if (!entry) { - throw new ERR_TAP_VALIDATION_ERROR('missing TAP version'); - } - - const { version } = entry.node; - - // TAP14 specification is compatible with observed behavior of existing TAP13 consumers and producers - if (version !== '14' && version !== '13') { - throw new ERR_TAP_VALIDATION_ERROR('TAP version should be 13 or 14'); - } - } - - #validatePlan(ast) { - const entry = ArrayPrototypeFind( - ast, - (node) => node.kind === TokenKind.TAP_PLAN, - ); - - if (!entry) { - throw new ERR_TAP_VALIDATION_ERROR('missing TAP plan'); - } - - const plan = entry.node; - - if (!plan.start) { - throw new ERR_TAP_VALIDATION_ERROR('missing plan start'); - } - - if (!plan.end) { - throw new ERR_TAP_VALIDATION_ERROR('missing plan end'); - } - - const planStart = NumberParseInt(plan.start, 10); - const planEnd = NumberParseInt(plan.end, 10); - - if (planEnd !== 0 && planStart > planEnd) { - throw new ERR_TAP_VALIDATION_ERROR( - `plan start ${planStart} is greater than plan end ${planEnd}`, - ); - } - } - - // TODO(@manekinekko): since we are dealing with a flat AST, we need to - // validate test points grouped by their "nesting" level. This is because a set of - // Test points belongs to a TAP document. Each new subtest block creates a new TAP document. - // https://testanything.org/tap-version-14-specification.html#subtests - #validateTestPoints(ast) { - const bailoutEntry = ArrayPrototypeFind( - ast, - (node) => node.kind === TokenKind.TAP_BAIL_OUT, - ); - const planEntry = ArrayPrototypeFind( - ast, - (node) => node.kind === TokenKind.TAP_PLAN, - ); - const testPointEntries = ArrayPrototypeFilter( - ast, - (node) => node.kind === TokenKind.TAP_TEST_POINT, - ); - - const plan = planEntry.node; - - const planStart = NumberParseInt(plan.start, 10); - const planEnd = NumberParseInt(plan.end, 10); - - if (planEnd === 0 && testPointEntries.length > 0) { - throw new ERR_TAP_VALIDATION_ERROR( - `found ${testPointEntries.length} Test Point${ - testPointEntries.length > 1 ? 
's' : '' - } but plan is ${planStart}..0`, - ); - } - - if (planEnd > 0) { - if (testPointEntries.length === 0) { - throw new ERR_TAP_VALIDATION_ERROR('missing Test Points'); - } - - if (!bailoutEntry && testPointEntries.length !== planEnd) { - throw new ERR_TAP_VALIDATION_ERROR( - `test Points count ${testPointEntries.length} does not match plan count ${planEnd}`, - ); - } - - for (let i = 0; i < testPointEntries.length; i++) { - const test = testPointEntries[i].node; - const testId = NumberParseInt(test.id, 10); - - if (testId < planStart || testId > planEnd) { - throw new ERR_TAP_VALIDATION_ERROR( - `test ${testId} is out of plan range ${planStart}..${planEnd}`, - ); - } - } - } - } -} - -// TAP14 and TAP13 are compatible with each other -class TAP13ValidationStrategy extends TAPValidationStrategy {} -class TAP14ValidationStrategy extends TAPValidationStrategy {} - -class TapChecker { - static TAP13 = '13'; - static TAP14 = '14'; - - constructor({ specs }) { - switch (specs) { - case TapChecker.TAP13: - this.strategy = new TAP13ValidationStrategy(); - break; - default: - this.strategy = new TAP14ValidationStrategy(); - } - } - - check(ast) { - return this.strategy.validate(ast); - } -} - -module.exports = { - TapChecker, - TAP14ValidationStrategy, - TAP13ValidationStrategy, -}; diff --git a/lib/internal/test_runner/tap_lexer.js b/lib/internal/test_runner/tap_lexer.js deleted file mode 100644 index 8af5453b283a38..00000000000000 --- a/lib/internal/test_runner/tap_lexer.js +++ /dev/null @@ -1,540 +0,0 @@ -'use strict'; - -const { - ArrayPrototypePop, - ArrayPrototypePush, - MathMax, - SafeSet, - StringPrototypeCodePointAt, - StringPrototypeTrim, -} = primordials; -const { - codes: { ERR_TAP_LEXER_ERROR }, -} = require('internal/errors'); - -const { isZeroWidthCodePoint } = require('internal/util/inspect'); - -const kEOL = ''; -const kEOF = ''; - -const TokenKind = { - EOF: 'EOF', - EOL: 'EOL', - NEWLINE: 'NewLine', - NUMERIC: 'Numeric', - LITERAL: 'Literal', - KEYWORD: 'Keyword', - WHITESPACE: 'Whitespace', - COMMENT: 'Comment', - DASH: 'Dash', - PLUS: 'Plus', - HASH: 'Hash', - ESCAPE: 'Escape', - UNKNOWN: 'Unknown', - - // TAP tokens - TAP: 'TAPKeyword', - TAP_VERSION: 'VersionKeyword', - TAP_PLAN: 'PlanKeyword', - TAP_TEST_POINT: 'TestPointKeyword', - TAP_SUBTEST_POINT: 'SubTestPointKeyword', - TAP_TEST_OK: 'TestOkKeyword', - TAP_TEST_NOTOK: 'TestNotOkKeyword', - TAP_YAML_START: 'YamlStartKeyword', - TAP_YAML_END: 'YamlEndKeyword', - TAP_YAML_BLOCK: 'YamlKeyword', - TAP_PRAGMA: 'PragmaKeyword', - TAP_BAIL_OUT: 'BailOutKeyword', -}; - -class Token { - constructor({ kind, value, stream }) { - const valueLength = ('' + value).length; - this.kind = kind; - this.value = value; - this.location = { - line: stream.line, - column: MathMax(stream.column - valueLength + 1, 1), // 1 based - start: MathMax(stream.pos - valueLength, 0), // zero based - end: stream.pos - (value === '' ? 
0 : 1), // zero based - }; - - // EOF is a special case - if (value === TokenKind.EOF) { - const eofPosition = stream.input.length + 1; // We consider EOF to be outside the stream - this.location.start = eofPosition; - this.location.end = eofPosition; - this.location.column = stream.column + 1; // 1 based - } - } -} - -class InputStream { - constructor(input) { - this.input = input; - this.pos = 0; - this.column = 0; - this.line = 1; - } - - eof() { - return this.peek() === undefined; - } - - peek(offset = 0) { - return this.input[this.pos + offset]; - } - - next() { - const char = this.peek(); - if (char === undefined) { - return undefined; - } - - this.pos++; - this.column++; - if (char === '\n') { - this.line++; - this.column = 0; - } - - return char; - } -} - -class TapLexer { - static Keywords = new SafeSet([ - 'TAP', - 'version', - 'ok', - 'not', - '...', - '---', - '..', - 'pragma', - '-', - '+', - - // NOTE: "Skip", "Todo" and "Bail out!" literals are deferred to the parser - ]); - - #isComment = false; - #source = null; - #line = 1; - #column = 0; - #escapeStack = []; - #lastScannedToken = null; - - constructor(source) { - this.#source = new InputStream(source); - this.#lastScannedToken = new Token({ - kind: TokenKind.EOL, - value: kEOL, - stream: this.#source, - }); - } - - scan() { - const tokens = []; - let chunk = []; - while (!this.eof()) { - const token = this.#scanToken(); - - // Remember the last scanned token (except for whitespace) - if (token.kind !== TokenKind.WHITESPACE) { - this.#lastScannedToken = token; - } - - ArrayPrototypePush(chunk, token); - if (token.kind === TokenKind.NEWLINE) { - // Store the current chunk + NEWLINE token - ArrayPrototypePush(tokens, chunk); - chunk = []; - } - } - - if (chunk.length > 0) { - ArrayPrototypePush(chunk, this.#scanEOL()); - ArrayPrototypePush(tokens, chunk); - } - - // send EOF as a separate chunk - ArrayPrototypePush(tokens, [this.#scanEOF()]); - - return tokens; - } - - next() { - return this.#source.next(); - } - - eof() { - return this.#source.eof(); - } - - error(message, token, expected = '') { - this.#source.error(message, token, expected); - } - - #scanToken() { - const char = this.next(); - - if (this.#isEOFSymbol(char)) { - return this.#scanEOF(); - } else if (this.#isNewLineSymbol(char)) { - return this.#scanNewLine(char); - } else if (this.#isNumericSymbol(char)) { - return this.#scanNumeric(char); - } else if (this.#isDashSymbol(char)) { - return this.#scanDash(char); - } else if (this.#isPlusSymbol(char)) { - return this.#scanPlus(char); - } else if (this.#isHashSymbol(char)) { - return this.#scanHash(char); - } else if (this.#isEscapeSymbol(char)) { - return this.#scanEscapeSymbol(char); - } else if (this.#isWhitespaceSymbol(char)) { - return this.#scanWhitespace(char); - } else if (this.#isLiteralSymbol(char)) { - return this.#scanLiteral(char); - } - - throw new ERR_TAP_LEXER_ERROR( - `Unexpected character: ${char} at line ${this.#line}, column ${ - this.#column - }`, - ); - } - - #scanNewLine(char) { - // In case of odd number of ESCAPE symbols, we need to clear the remaining - // escape chars from the stack and start fresh for the next line. 
- this.#escapeStack = []; - - // We also need to reset the comment flag - this.#isComment = false; - - return new Token({ - kind: TokenKind.NEWLINE, - value: char, - stream: this.#source, - }); - } - - #scanEOL() { - return new Token({ - kind: TokenKind.EOL, - value: kEOL, - stream: this.#source, - }); - } - - #scanEOF() { - this.#isComment = false; - - return new Token({ - kind: TokenKind.EOF, - value: kEOF, - stream: this.#source, - }); - } - - #scanEscapeSymbol(char) { - // If the escape symbol has been escaped (by previous symbol), - // or if the next symbol is a whitespace symbol, - // then consume it as a literal. - if ( - this.#hasTheCurrentCharacterBeenEscaped() || - this.#source.peek(1) === TokenKind.WHITESPACE - ) { - ArrayPrototypePop(this.#escapeStack); - return new Token({ - kind: TokenKind.LITERAL, - value: char, - stream: this.#source, - }); - } - - // Otherwise, consume the escape symbol as an escape symbol that should be ignored by the parser - // we also need to push the escape symbol to the escape stack - // and consume the next character as a literal (done in the next turn) - ArrayPrototypePush(this.#escapeStack, char); - return new Token({ - kind: TokenKind.ESCAPE, - value: char, - stream: this.#source, - }); - } - - #scanWhitespace(char) { - return new Token({ - kind: TokenKind.WHITESPACE, - value: char, - stream: this.#source, - }); - } - - #scanDash(char) { - // Peek next 3 characters and check if it's a YAML start marker - const marker = char + this.#source.peek() + this.#source.peek(1); - - if (this.#isYamlStartSymbol(marker)) { - this.next(); // consume second - - this.next(); // consume third - - - return new Token({ - kind: TokenKind.TAP_YAML_START, - value: marker, - stream: this.#source, - }); - } - - return new Token({ - kind: TokenKind.DASH, - value: char, - stream: this.#source, - }); - } - - #scanPlus(char) { - return new Token({ - kind: TokenKind.PLUS, - value: char, - stream: this.#source, - }); - } - - #scanHash(char) { - const lastCharacter = this.#source.peek(-2); - const nextToken = this.#source.peek(); - - // If we encounter a hash symbol at the beginning of a line, - // we consider it as a comment - if (!lastCharacter || this.#isNewLineSymbol(lastCharacter)) { - this.#isComment = true; - return new Token({ - kind: TokenKind.COMMENT, - value: char, - stream: this.#source, - }); - } - - // The only valid case where a hash symbol is considered as a hash token - // is when it's preceded by a whitespace symbol and followed by a non-hash symbol - if ( - this.#isWhitespaceSymbol(lastCharacter) && - !this.#isHashSymbol(nextToken) - ) { - return new Token({ - kind: TokenKind.HASH, - value: char, - stream: this.#source, - }); - } - - const charHasBeenEscaped = this.#hasTheCurrentCharacterBeenEscaped(); - if (this.#isComment || charHasBeenEscaped) { - if (charHasBeenEscaped) { - ArrayPrototypePop(this.#escapeStack); - } - - return new Token({ - kind: TokenKind.LITERAL, - value: char, - stream: this.#source, - }); - } - - // As a fallback, we consume the hash symbol as a literal - return new Token({ - kind: TokenKind.LITERAL, - value: char, - stream: this.#source, - }); - } - - #scanLiteral(char) { - let word = char; - while (!this.#source.eof()) { - const nextChar = this.#source.peek(); - if (this.#isLiteralSymbol(nextChar)) { - word += this.#source.next(); - } else { - break; - } - } - - word = StringPrototypeTrim(word); - - if (TapLexer.Keywords.has(word)) { - const token = this.#scanTAPKeyword(word); - if (token) { - return token; - } - } - - if 
(this.#isYamlEndSymbol(word)) { - return new Token({ - kind: TokenKind.TAP_YAML_END, - value: word, - stream: this.#source, - }); - } - - return new Token({ - kind: TokenKind.LITERAL, - value: word, - stream: this.#source, - }); - } - - #scanTAPKeyword(word) { - const isLastScannedTokenEOLorNewLine = - TokenKind.EOL === this.#lastScannedToken.kind || - TokenKind.NEWLINE === this.#lastScannedToken.kind; - - if (word === 'TAP' && isLastScannedTokenEOLorNewLine) { - return new Token({ - kind: TokenKind.TAP, - value: word, - stream: this.#source, - }); - } - - if (word === 'version' && this.#lastScannedToken.kind === TokenKind.TAP) { - return new Token({ - kind: TokenKind.TAP_VERSION, - value: word, - stream: this.#source, - }); - } - - if (word === '..' && this.#lastScannedToken.kind === TokenKind.NUMERIC) { - return new Token({ - kind: TokenKind.TAP_PLAN, - value: word, - stream: this.#source, - }); - } - - if (word === 'not' && isLastScannedTokenEOLorNewLine) { - return new Token({ - kind: TokenKind.TAP_TEST_NOTOK, - value: word, - stream: this.#source, - }); - } - - if ( - word === 'ok' && - (this.#lastScannedToken.kind === TokenKind.TAP_TEST_NOTOK || - isLastScannedTokenEOLorNewLine) - ) { - return new Token({ - kind: TokenKind.TAP_TEST_OK, - value: word, - stream: this.#source, - }); - } - - if (word === 'pragma' && isLastScannedTokenEOLorNewLine) { - return new Token({ - kind: TokenKind.TAP_PRAGMA, - value: word, - stream: this.#source, - }); - } - - return null; - } - - #scanNumeric(char) { - let number = char; - while (!this.#source.eof()) { - const nextChar = this.#source.peek(); - if (this.#isNumericSymbol(nextChar)) { - number += nextChar; - this.#source.next(); - } else { - break; - } - } - return new Token({ - kind: TokenKind.NUMERIC, - value: number, - stream: this.#source, - }); - } - - #hasTheCurrentCharacterBeenEscaped() { - // Use the escapeStack to keep track of the escape characters - return this.#escapeStack.length > 0; - } - - #isNumericSymbol(char) { - return char >= '0' && char <= '9'; - } - - #isLiteralSymbol(char) { - if (typeof char !== 'string') return false; - const charCode = StringPrototypeCodePointAt(char); - - if (isZeroWidthCodePoint(charCode)) return false; - if (this.#isWhitespaceSymbol(char)) return false; - const MAX_ASCII_CHAR_CODE = 0b111_1111; // ASCII is 7-bit long - // Allow all non-latin characters. 
- if (charCode > MAX_ASCII_CHAR_CODE) return true; - const ZERO = 48; // 0 - const NINE = 58; // 9 - // Disallow numeric values - if (charCode >= ZERO && char <= NINE) return false; - - // Disallow characters with special meaning in TAP - const HASH = 35; // # - const BACKSLASH = 92; // \ - const PLUS = 43; // + - const DASH = 45; // - - - // Disallow characters with special meaning in TAP - return charCode !== HASH && charCode !== BACKSLASH && - charCode !== PLUS && charCode !== DASH; - } - - #isWhitespaceSymbol(char) { - return char === ' ' || char === '\t'; - } - - #isEOFSymbol(char) { - return char === undefined; - } - - #isNewLineSymbol(char) { - return char === '\n' || char === '\r'; - } - - #isHashSymbol(char) { - return char === '#'; - } - - #isDashSymbol(char) { - return char === '-'; - } - - #isPlusSymbol(char) { - return char === '+'; - } - - #isEscapeSymbol(char) { - return char === '\\' || char === '\x1b'; - } - - #isYamlStartSymbol(char) { - return char === '---'; - } - - #isYamlEndSymbol(char) { - return char === '...'; - } -} - -module.exports = { TapLexer, TokenKind }; diff --git a/lib/internal/test_runner/tap_parser.js b/lib/internal/test_runner/tap_parser.js deleted file mode 100644 index 5bf483ad2b2ec1..00000000000000 --- a/lib/internal/test_runner/tap_parser.js +++ /dev/null @@ -1,989 +0,0 @@ -'use strict'; - -const { - ArrayPrototypeFilter, - ArrayPrototypeForEach, - ArrayPrototypeIncludes, - ArrayPrototypeJoin, - ArrayPrototypeMap, - ArrayPrototypePop, - ArrayPrototypePush, - Boolean, - Number, - RegExpPrototypeExec, - String, - StringPrototypeEndsWith, - StringPrototypeReplaceAll, - StringPrototypeSlice, - StringPrototypeSplit, - StringPrototypeTrim, -} = primordials; -const Transform = require('internal/streams/transform'); -const { TapLexer, TokenKind } = require('internal/test_runner/tap_lexer'); -const { TapChecker } = require('internal/test_runner/tap_checker'); -const { - codes: { ERR_TAP_VALIDATION_ERROR, ERR_TAP_PARSER_ERROR }, -} = require('internal/errors'); -const { kEmptyObject } = require('internal/util'); -/** - * - * TAP14 specifications - * - * See https://testanything.org/tap-version-14-specification.html - * - * Note that the following grammar is intended as a rough "pseudocode" guidance. - * It is not strict EBNF: - * - * TAPDocument := Version Plan Body | Version Body Plan - * Version := "TAP version 14\n" - * Plan := "1.." (Number) (" # " Reason)? "\n" - * Body := (TestPoint | BailOut | Pragma | Comment | Anything | Empty | Subtest)* - * TestPoint := ("not ")? "ok" (" " Number)? ((" -")? (" " Description) )? (" " Directive)? "\n" (YAMLBlock)? - * Directive := " # " ("todo" | "skip") (" " Reason)? - * YAMLBlock := " ---\n" (YAMLLine)* " ...\n" - * YAMLLine := " " (YAML)* "\n" - * BailOut := "Bail out!" (" " Reason)? "\n" - * Reason := [^\n]+ - * Pragma := "pragma " [+-] PragmaKey "\n" - * PragmaKey := ([a-zA-Z0-9_-])+ - * Subtest := ("# Subtest" (": " SubtestName)?)? "\n" SubtestDocument TestPoint - * Comment := ^ (" ")* "#" [^\n]* "\n" - * Empty := [\s\t]* "\n" - * Anything := [^\n]+ "\n" - * - */ - -/** - * An LL(1) parser for TAP14/TAP13. 
- */ -class TapParser extends Transform { - #checker = null; - #lexer = null; - #currentToken = null; - - #input = ''; - #currentChunkAsString = ''; - #lastLine = ''; - - #tokens = [[]]; - #flatAST = []; - #bufferedComments = []; - #bufferedTestPoints = []; - #lastTestPointDetails = {}; - #yamlBlockBuffer = []; - - #currentTokenIndex = 0; - #currentTokenChunk = 0; - #subTestNestingLevel = 0; - #yamlCurrentIndentationLevel = 0; - #kSubtestBlockIndentationFactor = 4; - - #isYAMLBlock = false; - #isSyncParsingEnabled = false; - - constructor({ specs = TapChecker.TAP13 } = kEmptyObject) { - super({ __proto__: null, readableObjectMode: true }); - - this.#checker = new TapChecker({ specs }); - } - - // ----------------------------------------------------------------------// - // ----------------------------- Public API -----------------------------// - // ----------------------------------------------------------------------// - - parse(chunkAsString = '', callback = null) { - this.#isSyncParsingEnabled = false; - this.#currentTokenChunk = 0; - this.#currentTokenIndex = 0; - // Note: we are overwriting the input on each stream call - // This is fine because we don't want to parse previous chunks - this.#input = chunkAsString; - this.#lexer = new TapLexer(chunkAsString); - - try { - this.#tokens = this.#scanTokens(); - this.#parseTokens(callback); - } catch (error) { - callback(null, error); - } - } - - parseSync(input = '', callback = null) { - if (typeof input !== 'string' || input === '') { - return []; - } - - this.#isSyncParsingEnabled = true; - this.#input = input; - this.#lexer = new TapLexer(input); - this.#tokens = this.#scanTokens(); - - this.#parseTokens(callback); - - if (this.#isYAMLBlock) { - // Looks like we have a non-ending YAML block - this.#error('Expected end of YAML block'); - } - - // Manually flush the remaining buffered comments and test points - this._flush(); - - return this.#flatAST; - } - - // Check if the TAP content is semantically valid - // Note: Validating the TAP content requires the whole AST to be available. - check() { - if (this.#isSyncParsingEnabled) { - return this.#checker.check(this.#flatAST); - } - - // TODO(@manekinekko): when running in async mode, it doesn't make sense to - // validate the current chunk. Validation needs to whole AST to be available. 
- throw new ERR_TAP_VALIDATION_ERROR( - 'TAP validation is not supported for async parsing', - ); - } - // ----------------------------------------------------------------------// - // --------------------------- Transform API ----------------------------// - // ----------------------------------------------------------------------// - - processChunk(chunk) { - const str = this.#lastLine + chunk.toString('utf8'); - const lines = StringPrototypeSplit(str, '\n'); - this.#lastLine = ArrayPrototypePop(lines); - - let chunkAsString = ArrayPrototypeJoin(lines, '\n'); - // Special case where chunk is emitted by a child process - chunkAsString = StringPrototypeReplaceAll( - chunkAsString, - '[out] ', - '', - ); - chunkAsString = StringPrototypeReplaceAll( - chunkAsString, - '[err] ', - '', - ); - if (StringPrototypeEndsWith(chunkAsString, '\n')) { - chunkAsString = StringPrototypeSlice(chunkAsString, 0, -1); - } - if (StringPrototypeEndsWith(chunkAsString, 'EOF')) { - chunkAsString = StringPrototypeSlice(chunkAsString, 0, -3); - } - - return chunkAsString; - } - - _transform(chunk, _encoding, next) { - const chunkAsString = this.processChunk(chunk); - - if (!chunkAsString) { - // Ignore empty chunks - next(); - return; - } - - this.parse(chunkAsString, (node, error) => { - if (error) { - next(error); - return; - } - - if (node.kind === TokenKind.EOF) { - // Emit when the current chunk is fully processed and consumed - next(); - } - }); - } - - // Flush the remaining buffered comments and test points - // This will be called automatically when the stream is closed - // We also call this method manually when we reach the end of the sync parsing - _flush(next = null) { - if (!this.#lastLine) { - this.#__flushPendingTestPointsAndComments(); - next?.(); - return; - } - // Parse the remaining line - this.parse(this.#lastLine, (node, error) => { - this.#lastLine = ''; - - if (error) { - next?.(error); - return; - } - - if (node.kind === TokenKind.EOF) { - this.#__flushPendingTestPointsAndComments(); - next?.(); - } - }); - } - - #__flushPendingTestPointsAndComments() { - ArrayPrototypeForEach(this.#bufferedTestPoints, (node) => { - this.#emit(node); - }); - ArrayPrototypeForEach(this.#bufferedComments, (node) => { - this.#emit(node); - }); - - // Clean up - this.#bufferedTestPoints = []; - this.#bufferedComments = []; - } - - // ----------------------------------------------------------------------// - // ----------------------------- Private API ----------------------------// - // ----------------------------------------------------------------------// - - #scanTokens() { - return this.#lexer.scan(); - } - - #parseTokens(callback = null) { - for (let index = 0; index < this.#tokens.length; index++) { - const chunk = this.#tokens[index]; - this.#parseChunk(chunk); - } - - callback?.({ kind: TokenKind.EOF }); - } - - #parseChunk(chunk) { - this.#subTestNestingLevel = this.#getCurrentIndentationLevel(chunk); - // We compute the current index of the token in the chunk - // based on the indentation level (number of spaces). - // We also need to take into account if we are in a YAML block or not. - // If we are in a YAML block, we compute the current index of the token - // based on the indentation level of the YAML block (start block). 
- - if (this.#isYAMLBlock) { - this.#currentTokenIndex = - this.#yamlCurrentIndentationLevel * - this.#kSubtestBlockIndentationFactor; - } else { - this.#currentTokenIndex = - this.#subTestNestingLevel * this.#kSubtestBlockIndentationFactor; - this.#yamlCurrentIndentationLevel = this.#subTestNestingLevel; - } - - let node; - - // Parse current chunk - try { - node = this.#TAPDocument(chunk); - } catch { - node = { - kind: TokenKind.UNKNOWN, - node: { - value: this.#currentChunkAsString, - }, - }; - } - - // Emit the parsed node to both the stream and the AST - this.#emitOrBufferCurrentNode(node); - - // Move pointers to the next chunk and reset the current token index - this.#currentTokenChunk++; - this.#currentTokenIndex = 0; - } - - #error(message) { - const token = this.#currentToken || { value: '', kind: '' }; - // Escape NewLine characters - if (token.value === '\n') { - token.value = '\\n'; - } - - throw new ERR_TAP_PARSER_ERROR( - message, - `, received "${token.value}" (${token.kind})`, - token, - this.#input, - ); - } - - #peek(shouldSkipBlankTokens = true) { - if (shouldSkipBlankTokens) { - this.#skip(TokenKind.WHITESPACE); - } - - return this.#tokens[this.#currentTokenChunk][this.#currentTokenIndex]; - } - - #next(shouldSkipBlankTokens = true) { - if (shouldSkipBlankTokens) { - this.#skip(TokenKind.WHITESPACE); - } - - if (this.#tokens[this.#currentTokenChunk]) { - this.#currentToken = - this.#tokens[this.#currentTokenChunk][this.#currentTokenIndex++]; - } else { - this.#currentToken = null; - } - - return this.#currentToken; - } - - // Skip the provided tokens in the current chunk - #skip(...tokensToSkip) { - let token = this.#tokens[this.#currentTokenChunk][this.#currentTokenIndex]; - while (token && ArrayPrototypeIncludes(tokensToSkip, token.kind)) { - // pre-increment to skip current tokens but make sure we don't advance index on the last iteration - token = this.#tokens[this.#currentTokenChunk][++this.#currentTokenIndex]; - } - } - - #readNextLiterals() { - const literals = []; - let nextToken = this.#peek(false); - - // Read all literal, numeric, whitespace and escape tokens until we hit a different token - // or reach end of current chunk - while ( - nextToken && - ArrayPrototypeIncludes( - [ - TokenKind.LITERAL, - TokenKind.NUMERIC, - TokenKind.DASH, - TokenKind.PLUS, - TokenKind.WHITESPACE, - TokenKind.ESCAPE, - ], - nextToken.kind, - ) - ) { - const word = this.#next(false).value; - - // Don't output escaped characters - if (nextToken.kind !== TokenKind.ESCAPE) { - ArrayPrototypePush(literals, word); - } - - nextToken = this.#peek(false); - } - - return ArrayPrototypeJoin(literals, ''); - } - - #countLeadingSpacesInCurrentChunk(chunk) { - // Count the number of whitespace tokens in the chunk, starting from the first token - let whitespaceCount = 0; - while (chunk?.[whitespaceCount]?.kind === TokenKind.WHITESPACE) { - whitespaceCount++; - } - return whitespaceCount; - } - - #addDiagnosticsToLastTestPoint(currentNode) { - const { length, [length - 1]: lastTestPoint } = this.#bufferedTestPoints; - - // Diagnostic nodes are only added to Test points of the same nesting level - if (lastTestPoint && lastTestPoint.nesting === currentNode.nesting) { - lastTestPoint.node.time = this.#lastTestPointDetails.duration; - - // TODO(@manekinekko): figure out where to put the other diagnostic properties - // See https://github.com/nodejs/node/pull/44952 - lastTestPoint.node.diagnostics ||= []; - - ArrayPrototypeForEach(currentNode.node.diagnostics, (diagnostic) => { - // Avoid adding 
empty diagnostics - if (diagnostic) { - ArrayPrototypePush(lastTestPoint.node.diagnostics, diagnostic); - } - }); - - this.#bufferedTestPoints = []; - } - - return lastTestPoint; - } - - #flushBufferedTestPointNode(shouldClearBuffer = true) { - if (this.#bufferedTestPoints.length > 0) { - this.#emit(this.#bufferedTestPoints[0]); - - if (shouldClearBuffer) { - this.#bufferedTestPoints = []; - } - } - } - - #addCommentsToCurrentNode(currentNode) { - if (this.#bufferedComments.length > 0) { - currentNode.comments = ArrayPrototypeMap( - this.#bufferedComments, - (c) => c.node.comment, - ); - this.#bufferedComments = []; - } - - return currentNode; - } - - #flushBufferedComments(shouldClearBuffer = true) { - if (this.#bufferedComments.length > 0) { - ArrayPrototypeForEach(this.#bufferedComments, (node) => { - this.#emit(node); - }); - - if (shouldClearBuffer) { - this.#bufferedComments = []; - } - } - } - - #getCurrentIndentationLevel(chunk) { - const whitespaceCount = this.#countLeadingSpacesInCurrentChunk(chunk); - return (whitespaceCount / this.#kSubtestBlockIndentationFactor) | 0; - } - - #emit(node) { - if (node.kind !== TokenKind.EOF) { - ArrayPrototypePush(this.#flatAST, node); - this.push({ - __proto__: null, - ...node, - }); - } - } - - #emitOrBufferCurrentNode(currentNode) { - currentNode = { - ...currentNode, - nesting: this.#subTestNestingLevel, - lexeme: this.#currentChunkAsString, - }; - - switch (currentNode.kind) { - // Emit these nodes - case TokenKind.UNKNOWN: - if (!currentNode.node.value) { - // Ignore unrecognized and empty nodes - break; - } - // falls through - - case TokenKind.TAP_PLAN: - case TokenKind.TAP_PRAGMA: - case TokenKind.TAP_VERSION: - case TokenKind.TAP_BAIL_OUT: - case TokenKind.TAP_SUBTEST_POINT: - // Check if we have a buffered test point, and if so, emit it - this.#flushBufferedTestPointNode(); - - // If we have buffered comments, add them to the current node - currentNode = this.#addCommentsToCurrentNode(currentNode); - - // Emit the current node - this.#emit(currentNode); - break; - - // By default, we buffer the next test point node in case we have a diagnostic - // to add to it in the next iteration - // Note: in case we hit and EOF, we flush the comments buffer (see _flush()) - case TokenKind.TAP_TEST_POINT: - // In case of an already buffered test point, we flush it and buffer the current one - // Because diagnostic nodes are only added to the last processed test point - this.#flushBufferedTestPointNode(); - - // Buffer this node (and also add any pending comments to it) - ArrayPrototypePush( - this.#bufferedTestPoints, - this.#addCommentsToCurrentNode(currentNode), - ); - break; - - // Keep buffering comments until we hit a non-comment node, then add them to the that node - // Note: in case we hit and EOF, we flush the comments buffer (see _flush()) - case TokenKind.COMMENT: - ArrayPrototypePush(this.#bufferedComments, currentNode); - break; - - // Diagnostic nodes are added to Test points of the same nesting level - case TokenKind.TAP_YAML_END: - // Emit either the last updated test point (w/ diagnostics) or the current diagnostics node alone - this.#emit( - this.#addDiagnosticsToLastTestPoint(currentNode) || currentNode, - ); - break; - - // In case we hit an EOF, we emit it to indicate the end of the stream - case TokenKind.EOF: - this.#emit(currentNode); - break; - } - } - - #serializeChunk(chunk) { - return ArrayPrototypeJoin( - ArrayPrototypeMap( - // Exclude NewLine and EOF tokens - ArrayPrototypeFilter( - chunk, - (token) => - token.kind 
!== TokenKind.NEWLINE && token.kind !== TokenKind.EOF, - ), - (token) => token.value, - ), - '', - ); - } - - // --------------------------------------------------------------------------// - // ------------------------------ Parser rules ------------------------------// - // --------------------------------------------------------------------------// - - // TAPDocument := Version Plan Body | Version Body Plan - #TAPDocument(tokenChunks) { - this.#currentChunkAsString = this.#serializeChunk(tokenChunks); - const firstToken = this.#peek(false); - - if (firstToken) { - const { kind } = firstToken; - - switch (kind) { - case TokenKind.TAP: - return this.#Version(); - case TokenKind.NUMERIC: - return this.#Plan(); - case TokenKind.TAP_TEST_OK: - case TokenKind.TAP_TEST_NOTOK: - return this.#TestPoint(); - case TokenKind.COMMENT: - case TokenKind.HASH: - return this.#Comment(); - case TokenKind.TAP_PRAGMA: - return this.#Pragma(); - case TokenKind.WHITESPACE: - return this.#YAMLBlock(); - case TokenKind.LITERAL: - // Check for "Bail out!" literal (case insensitive) - if ( - RegExpPrototypeExec(/^Bail\s+out!/i, this.#currentChunkAsString) - ) { - return this.#Bailout(); - } else if (this.#isYAMLBlock) { - return this.#YAMLBlock(); - } - - // Read token because error needs the last token details - this.#next(false); - this.#error('Expected a valid token'); - - break; - case TokenKind.EOF: - return firstToken; - - case TokenKind.NEWLINE: - // Consume and ignore NewLine token - return this.#next(false); - default: - // Read token because error needs the last token details - this.#next(false); - this.#error('Expected a valid token'); - } - } - - const node = { - kind: TokenKind.UNKNOWN, - node: { - value: this.#currentChunkAsString, - }, - }; - - // We make sure the emitted node has the same shape - // both in sync and async parsing (for the stream interface) - return node; - } - - // ----------------Version---------------- - // Version := "TAP version Number\n" - #Version() { - const tapToken = this.#peek(); - - if (tapToken.kind === TokenKind.TAP) { - this.#next(); // Consume the TAP token - } else { - this.#error('Expected "TAP" keyword'); - } - - const versionToken = this.#peek(); - if (versionToken?.kind === TokenKind.TAP_VERSION) { - this.#next(); // Consume the version token - } else { - this.#error('Expected "version" keyword'); - } - - const numberToken = this.#peek(); - if (numberToken?.kind === TokenKind.NUMERIC) { - const version = this.#next().value; - const node = { kind: TokenKind.TAP_VERSION, node: { version } }; - return node; - } - this.#error('Expected a version number'); - } - - // ----------------Plan---------------- - // Plan := "1.." (Number) (" # " Reason)? "\n" - #Plan() { - // Even if specs mention plan starts at 1, we need to make sure we read the plan start value - // in case of a missing or invalid plan start value - const planStart = this.#next(); - - if (planStart.kind !== TokenKind.NUMERIC) { - this.#error('Expected a plan start count'); - } - - const planToken = this.#next(); - if (planToken?.kind !== TokenKind.TAP_PLAN) { - this.#error('Expected ".." 
symbol'); - } - - const planEnd = this.#next(); - if (planEnd?.kind !== TokenKind.NUMERIC) { - this.#error('Expected a plan end count'); - } - - const plan = { - start: planStart.value, - end: planEnd.value, - }; - - // Read optional reason - const hashToken = this.#peek(); - if (hashToken) { - if (hashToken.kind === TokenKind.HASH) { - this.#next(); // skip hash - plan.reason = StringPrototypeTrim(this.#readNextLiterals()); - } else if (hashToken.kind === TokenKind.LITERAL) { - this.#error('Expected "#" symbol before a reason'); - } - } - - const node = { - kind: TokenKind.TAP_PLAN, - node: plan, - }; - - return node; - } - - // ----------------TestPoint---------------- - // TestPoint := ("not ")? "ok" (" " Number)? ((" -")? (" " Description) )? (" " Directive)? "\n" (YAMLBlock)? - // Directive := " # " ("todo" | "skip") (" " Reason)? - // YAMLBlock := " ---\n" (YAMLLine)* " ...\n" - // YAMLLine := " " (YAML)* "\n" - - // Test Status: ok/not ok (required) - // Test number (recommended) - // Description (recommended, prefixed by " - ") - // Directive (only when necessary) - #TestPoint() { - const notToken = this.#peek(); - let isTestFailed = false; - - if (notToken.kind === TokenKind.TAP_TEST_NOTOK) { - this.#next(); // skip "not" token - isTestFailed = true; - } - - const okToken = this.#next(); - if (okToken.kind !== TokenKind.TAP_TEST_OK) { - this.#error('Expected "ok" or "not ok" keyword'); - } - - // Read optional test number - let numberToken = this.#peek(); - if (numberToken && numberToken.kind === TokenKind.NUMERIC) { - numberToken = this.#next().value; - } else { - numberToken = ''; // Set an empty ID to indicate that the test hasn't provider an ID - } - - const test = { - // Output both failed and passed properties to make it easier for the checker to detect the test status - status: { - fail: isTestFailed, - pass: !isTestFailed, - todo: false, - skip: false, - }, - id: numberToken, - description: '', - reason: '', - time: 0, - diagnostics: [], - }; - - // Read optional description prefix " - " - const descriptionDashToken = this.#peek(); - if (descriptionDashToken && descriptionDashToken.kind === TokenKind.DASH) { - this.#next(); // skip dash - } - - // Read optional description - if (this.#peek()) { - const description = StringPrototypeTrim(this.#readNextLiterals()); - if (description) { - test.description = description; - } - } - - // Read optional directive and reason - const hashToken = this.#peek(); - if (hashToken && hashToken.kind === TokenKind.HASH) { - this.#next(); // skip hash - } - - let todoOrSkipToken = this.#peek(); - if (todoOrSkipToken && todoOrSkipToken.kind === TokenKind.LITERAL) { - if (RegExpPrototypeExec(/todo/i, todoOrSkipToken.value)) { - todoOrSkipToken = 'todo'; - this.#next(); // skip token - } else if (RegExpPrototypeExec(/skip/i, todoOrSkipToken.value)) { - todoOrSkipToken = 'skip'; - this.#next(); // skip token - } - } - - const reason = StringPrototypeTrim(this.#readNextLiterals()); - if (todoOrSkipToken) { - if (reason) { - test.reason = reason; - } - - test.status.todo = todoOrSkipToken === 'todo'; - test.status.skip = todoOrSkipToken === 'skip'; - } - - const node = { - kind: TokenKind.TAP_TEST_POINT, - node: test, - }; - - return node; - } - - // ----------------Bailout---------------- - // BailOut := "Bail out!" (" " Reason)? "\n" - #Bailout() { - this.#next(); // skip "Bail" - this.#next(); // skip "out!" 
- - // Read optional reason - const hashToken = this.#peek(); - if (hashToken && hashToken.kind === TokenKind.HASH) { - this.#next(); // skip hash - } - - const reason = StringPrototypeTrim(this.#readNextLiterals()); - - const node = { - kind: TokenKind.TAP_BAIL_OUT, - node: { bailout: true, reason }, - }; - - return node; - } - - // ----------------Comment---------------- - // Comment := ^ (" ")* "#" [^\n]* "\n" - #Comment() { - const commentToken = this.#next(); - if ( - commentToken.kind !== TokenKind.COMMENT && - commentToken.kind !== TokenKind.HASH - ) { - this.#error('Expected "#" symbol'); - } - - const commentContent = this.#peek(); - if (commentContent) { - if (RegExpPrototypeExec(/^Subtest:/i, commentContent.value) !== null) { - this.#next(); // skip subtest keyword - const name = StringPrototypeTrim(this.#readNextLiterals()); - const node = { - kind: TokenKind.TAP_SUBTEST_POINT, - node: { - name, - }, - }; - - return node; - } - - const comment = StringPrototypeTrim(this.#readNextLiterals()); - const node = { - kind: TokenKind.COMMENT, - node: { comment }, - }; - - return node; - } - - // If there is no comment content, then we ignore the current node - } - - // ----------------YAMLBlock---------------- - // YAMLBlock := " ---\n" (YAMLLine)* " ...\n" - #YAMLBlock() { - const space1 = this.#peek(false); - if (space1 && space1.kind === TokenKind.WHITESPACE) { - this.#next(false); // skip 1st space - } - - const space2 = this.#peek(false); - if (space2 && space2.kind === TokenKind.WHITESPACE) { - this.#next(false); // skip 2nd space - } - - const yamlBlockSymbol = this.#peek(false); - - if (yamlBlockSymbol.kind === TokenKind.WHITESPACE) { - if (this.#isYAMLBlock === false) { - this.#next(false); // skip 3rd space - this.#error('Expected valid YAML indentation (2 spaces)'); - } - } - - if (yamlBlockSymbol.kind === TokenKind.TAP_YAML_START) { - if (this.#isYAMLBlock) { - // Looks like we have another YAML start block, but we didn't close the previous one - this.#error('Unexpected YAML start marker'); - } - - this.#isYAMLBlock = true; - this.#yamlCurrentIndentationLevel = this.#subTestNestingLevel; - this.#lastTestPointDetails = {}; - - // Consume the YAML start marker - this.#next(false); // skip "---" - - // No need to pass this token to the stream interface - return; - } else if (yamlBlockSymbol.kind === TokenKind.TAP_YAML_END) { - this.#next(false); // skip "..." 
- - if (!this.#isYAMLBlock) { - // Looks like we have an YAML end block, but we didn't encounter any YAML start marker - this.#error('Unexpected YAML end marker'); - } - - this.#isYAMLBlock = false; - - const diagnostics = this.#yamlBlockBuffer; - this.#yamlBlockBuffer = []; // Free the buffer for the next YAML block - - const node = { - kind: TokenKind.TAP_YAML_END, - node: { - diagnostics, - }, - }; - - return node; - } - - if (this.#isYAMLBlock) { - this.#YAMLLine(); - } else { - return { - kind: TokenKind.UNKNOWN, - node: { - value: yamlBlockSymbol.value, - }, - }; - } - } - - // ----------------YAMLLine---------------- - // YAMLLine := " " (YAML)* "\n" - #YAMLLine() { - const yamlLiteral = this.#readNextLiterals(); - const { 0: key, 1: value } = StringPrototypeSplit(yamlLiteral, ':', 2); - - // Note that this.#lastTestPointDetails has been cleared when we encounter a YAML start marker - - switch (key) { - case 'duration_ms': - this.#lastTestPointDetails.duration = Number(value); - break; - // Below are diagnostic properties introduced in https://github.com/nodejs/node/pull/44952 - case 'expected': - this.#lastTestPointDetails.expected = Boolean(value); - break; - case 'actual': - this.#lastTestPointDetails.actual = Boolean(value); - break; - case 'operator': - this.#lastTestPointDetails.operator = String(value); - break; - } - - ArrayPrototypePush(this.#yamlBlockBuffer, yamlLiteral); - } - - // ----------------PRAGMA---------------- - // Pragma := "pragma " [+-] PragmaKey "\n" - // PragmaKey := ([a-zA-Z0-9_-])+ - // TODO(@manekinekko): pragmas are parsed but not used yet! TapChecker() should take care of that. - #Pragma() { - const pragmaToken = this.#next(); - if (pragmaToken.kind !== TokenKind.TAP_PRAGMA) { - this.#error('Expected "pragma" keyword'); - } - - const pragmas = {}; - - let nextToken = this.#peek(); - while ( - nextToken && - ArrayPrototypeIncludes( - [TokenKind.NEWLINE, TokenKind.EOF, TokenKind.EOL], - nextToken.kind, - ) === false - ) { - let isEnabled = true; - const pragmaKeySign = this.#next(); - if (pragmaKeySign.kind === TokenKind.PLUS) { - isEnabled = true; - } else if (pragmaKeySign.kind === TokenKind.DASH) { - isEnabled = false; - } else { - this.#error('Expected "+" or "-" before pragma keys'); - } - - const pragmaKeyToken = this.#peek(); - if (pragmaKeyToken.kind !== TokenKind.LITERAL) { - this.#error('Expected pragma key'); - } - - let pragmaKey = this.#next().value; - - // In some cases, pragma key can be followed by a comma separator, - // so we need to remove it - pragmaKey = StringPrototypeReplaceAll(pragmaKey, ',', ''); - - pragmas[pragmaKey] = isEnabled; - nextToken = this.#peek(); - } - - const node = { - kind: TokenKind.TAP_PRAGMA, - node: { - pragmas, - }, - }; - - return node; - } -} - -module.exports = { TapParser }; diff --git a/lib/internal/test_runner/tests_stream.js b/lib/internal/test_runner/tests_stream.js index 68379fed11dda4..7640e6742c19ce 100644 --- a/lib/internal/test_runner/tests_stream.js +++ b/lib/internal/test_runner/tests_stream.js @@ -2,9 +2,11 @@ const { ArrayPrototypePush, ArrayPrototypeShift, + Symbol, } = primordials; const Readable = require('internal/streams/readable'); +const kEmitMessage = Symbol('kEmitMessage'); class TestsStream extends Readable { #buffer; #canPush; @@ -28,15 +30,15 @@ class TestsStream extends Readable { } fail(nesting, file, testNumber, name, details, directive) { - this.#emit('test:fail', { __proto__: null, name, nesting, file, testNumber, details, ...directive }); + this[kEmitMessage]('test:fail', { 
__proto__: null, name, nesting, file, testNumber, details, ...directive }); } ok(nesting, file, testNumber, name, details, directive) { - this.#emit('test:pass', { __proto__: null, name, nesting, file, testNumber, details, ...directive }); + this[kEmitMessage]('test:pass', { __proto__: null, name, nesting, file, testNumber, details, ...directive }); } plan(nesting, file, count) { - this.#emit('test:plan', { __proto__: null, nesting, file, count }); + this[kEmitMessage]('test:plan', { __proto__: null, nesting, file, count }); } getSkip(reason = undefined) { @@ -48,22 +50,22 @@ class TestsStream extends Readable { } start(nesting, file, name) { - this.#emit('test:start', { __proto__: null, nesting, file, name }); + this[kEmitMessage]('test:start', { __proto__: null, nesting, file, name }); } diagnostic(nesting, file, message) { - this.#emit('test:diagnostic', { __proto__: null, nesting, file, message }); + this[kEmitMessage]('test:diagnostic', { __proto__: null, nesting, file, message }); } coverage(nesting, file, summary) { - this.#emit('test:coverage', { __proto__: null, nesting, file, summary }); + this[kEmitMessage]('test:coverage', { __proto__: null, nesting, file, summary }); } end() { this.#tryPush(null); } - #emit(type, data) { + [kEmitMessage](type, data) { this.emit(type, data); this.#tryPush({ type, data }); } @@ -79,4 +81,4 @@ class TestsStream extends Readable { } } -module.exports = { TestsStream }; +module.exports = { TestsStream, kEmitMessage }; diff --git a/lib/internal/test_runner/utils.js b/lib/internal/test_runner/utils.js index 0b7cbc1de73cab..1713cd99d0c707 100644 --- a/lib/internal/test_runner/utils.js +++ b/lib/internal/test_runner/utils.js @@ -100,6 +100,7 @@ const kBuiltinReporters = new SafeMap([ ['spec', 'internal/test_runner/reporter/spec'], ['dot', 'internal/test_runner/reporter/dot'], ['tap', 'internal/test_runner/reporter/tap'], + ['v8', 'internal/test_runner/reporter/v8'], ]); const kDefaultReporter = process.stdout.isTTY ? 
'spec' : 'tap'; @@ -171,13 +172,17 @@ function parseCommandLine() { const isTestRunner = getOptionValue('--test'); const coverage = getOptionValue('--experimental-test-coverage'); const isChildProcess = process.env.NODE_TEST_CONTEXT === 'child'; + const isChildProcessV8 = process.env.NODE_TEST_CONTEXT === 'child-v8'; let destinations; let reporters; let testNamePatterns; let testOnlyFlag; - if (isChildProcess) { - reporters = [kDefaultReporter]; + if (isChildProcessV8) { + reporters = ['v8']; + destinations = [kDefaultDestination]; + } else if (isChildProcess) { + reporters = ['tap']; destinations = [kDefaultDestination]; } else { destinations = getOptionValue('--test-reporter-destination'); diff --git a/lib/internal/test_runner/yaml_to_js.js b/lib/internal/test_runner/yaml_to_js.js deleted file mode 100644 index 724e71baaa67db..00000000000000 --- a/lib/internal/test_runner/yaml_to_js.js +++ /dev/null @@ -1,134 +0,0 @@ -'use strict'; -const { - codes: { - ERR_TEST_FAILURE, - }, -} = require('internal/errors'); -const AssertionError = require('internal/assert/assertion_error'); -const { - ArrayPrototypeJoin, - ArrayPrototypePush, - Error, - Number, - NumberIsNaN, - RegExpPrototypeExec, - StringPrototypeEndsWith, - StringPrototypeRepeat, - StringPrototypeSlice, - StringPrototypeStartsWith, - StringPrototypeSubstring, -} = primordials; - -const kYamlKeyRegex = /^(\s+)?(\w+):(\s)*([>|][-+])?(.*)$/; -const kStackDelimiter = ' at '; - -function reConstructError(parsedYaml) { - if (!('error' in parsedYaml)) { - return parsedYaml; - } - const isAssertionError = parsedYaml.code === 'ERR_ASSERTION' || - 'actual' in parsedYaml || 'expected' in parsedYaml || 'operator' in parsedYaml; - const isTestFailure = parsedYaml.code === 'ERR_TEST_FAILURE' || 'failureType' in parsedYaml; - const stack = parsedYaml.stack ? kStackDelimiter + ArrayPrototypeJoin(parsedYaml.stack, `\n${kStackDelimiter}`) : ''; - let error, cause; - - if (isAssertionError) { - cause = new AssertionError({ - message: parsedYaml.error, - actual: parsedYaml.actual, - expected: parsedYaml.expected, - operator: parsedYaml.operator, - }); - } else { - // eslint-disable-next-line no-restricted-syntax - cause = new Error(parsedYaml.error); - } - const name = parsedYaml.name ?? 'Error'; - cause.stack = `${name}: ${parsedYaml.error}\n${stack}`; - - if (!isAssertionError && !isTestFailure) { - cause.code = parsedYaml.code; - } - - if (isTestFailure) { - error = new ERR_TEST_FAILURE(cause, parsedYaml.failureType); - error.stack = stack; - } - - parsedYaml.error = error ?? cause; - delete parsedYaml.stack; - delete parsedYaml.code; - delete parsedYaml.failureType; - delete parsedYaml.actual; - delete parsedYaml.expected; - delete parsedYaml.operator; - - return parsedYaml; -} - -function getYamlValue(value) { - if (StringPrototypeStartsWith(value, "'") && StringPrototypeEndsWith(value, "'")) { - return StringPrototypeSlice(value, 1, -1); - } - if (value === 'true') { - return true; - } - if (value === 'false') { - return false; - } - if (value !== '') { - const valueAsNumber = Number(value); - return NumberIsNaN(valueAsNumber) ? value : valueAsNumber; - } - return value; -} - -// This parses the YAML generated by the built-in TAP reporter, -// which is a subset of the full YAML spec. There are some -// YAML features that won't be parsed here. This function should not be exposed publicly. 
-function YAMLToJs(lines) { - if (lines == null) { - return undefined; - } - const result = { __proto__: null }; - let context = { __proto__: null, object: result, indent: 0, currentKey: null }; - let isInYamlBlock = false; - for (let i = 0; i < lines.length; i++) { - const line = lines[i]; - if (isInYamlBlock && !StringPrototypeStartsWith(line, StringPrototypeRepeat(' ', isInYamlBlock.indent))) { - context.object[isInYamlBlock.key] = isInYamlBlock.key === 'stack' ? - context.object[isInYamlBlock.key] : ArrayPrototypeJoin(context.object[isInYamlBlock.key], '\n'); - isInYamlBlock = false; - } - if (isInYamlBlock) { - const blockLine = StringPrototypeSubstring(line, isInYamlBlock.indent); - ArrayPrototypePush(context.object[isInYamlBlock.key], blockLine); - continue; - } - const match = RegExpPrototypeExec(kYamlKeyRegex, line); - if (match !== null) { - const { 1: leadingSpaces, 2: key, 4: block, 5: value } = match; - const indent = leadingSpaces?.length ?? 0; - if (block) { - isInYamlBlock = { key, indent: indent + 2 }; - context.object[key] = []; - continue; - } - - if (indent > context.indent) { - context.object[context.currentKey] ||= {}; - context = { __proto__: null, parent: context, object: context.object[context.currentKey], indent }; - } else if (indent < context.indent) { - context = context.parent; - } - - context.currentKey = key; - context.object[key] = getYamlValue(value); - } - } - return reConstructError(result); -} - -module.exports = { - YAMLToJs, -}; diff --git a/test/fixtures/test-runner/output/output_cli.snapshot b/test/fixtures/test-runner/output/output_cli.snapshot index a4d54b29cd7778..fe192625e1f8b6 100644 --- a/test/fixtures/test-runner/output/output_cli.snapshot +++ b/test/fixtures/test-runner/output/output_cli.snapshot @@ -647,7 +647,7 @@ not ok 65 - assertion errors display actual and expected properly name: 'AssertionError' expected: bar: 2 - c: '' + c: actual: foo: 1 bar: 1 diff --git a/test/fixtures/test-runner/output/spec_reporter_cli.snapshot b/test/fixtures/test-runner/output/spec_reporter_cli.snapshot index ffbf636b748d81..e4e08764fd4925 100644 --- a/test/fixtures/test-runner/output/spec_reporter_cli.snapshot +++ b/test/fixtures/test-runner/output/spec_reporter_cli.snapshot @@ -57,7 +57,7 @@ * async assertion fail (*ms) - AssertionError: Expected values to be strictly equal: + AssertionError [ERR_ASSERTION]: Expected values to be strictly equal: true !== false @@ -68,7 +68,7 @@ * * * { - generatedMessage: false, + generatedMessage: true, code: 'ERR_ASSERTION', actual: true, expected: false, @@ -109,8 +109,7 @@ subtest sync throw fail (*ms) sync throw non-error fail (*ms) - [Error: Symbol(thrown symbol from sync throw non-error fail) - ] + Symbol(thrown symbol from sync throw non-error fail) level 0a level 1a (*ms) @@ -121,8 +120,7 @@ top level +long running (*ms) - [Error: test did not finish before its parent and was cancelled - ] + 'test did not finish before its parent and was cancelled' +short running ++short running (*ms) @@ -151,7 +149,8 @@ (*ms) # SKIP test with a name and options provided (*ms) # SKIP functionAndOptions (*ms) # SKIP - escaped description \ # \#\ \n \t \f \v \b \r (*ms) + escaped description \ # \#\ +  (*ms) escaped skip message (*ms) # SKIP escaped todo message (*ms) escaped diagnostic (*ms) @@ -166,8 +165,7 @@ async t is this in test (*ms) callback t is this in test (*ms) callback also returns a Promise (*ms) - [Error: passed a callback but also returned a Promise - ] + 'passed a callback but also returned a Promise' callback 
throw (*ms) Error: thrown from callback throw @@ -180,14 +178,16 @@ * callback called twice (*ms) - Error: callback invoked multiple times - * - * + 'callback invoked multiple times' callback called twice in different ticks (*ms) callback called twice in future tick (*ms) - Error: callback invoked multiple times - * + Error [ERR_TEST_FAILURE]: callback invoked multiple times + * { + failureType: 'multipleCallbackInvocations', + cause: 'callback invoked multiple times', + code: 'ERR_TEST_FAILURE' + } callback async throw (*ms) Error: thrown from callback async throw @@ -206,15 +206,10 @@ 'only' and 'runOnly' require the --test-only command-line option. custom inspect symbol fail (*ms) - [Error: customized - ] + customized custom inspect symbol that throws fail (*ms) - [Error: { - foo: 1, - [Symbol(nodejs.util.inspect.custom)]: [Function: [nodejs.util.inspect.custom]] - } - ] + { foo: 1 } subtest sync throw fails sync throw fails at first (*ms) @@ -246,19 +241,16 @@ subtest sync throw fails (*ms) timed out async test (*ms) - [Error: test timed out after *ms - ] + 'test timed out after *ms' timed out callback test (*ms) - [Error: test timed out after *ms - ] + 'test timed out after *ms' large timeout async test is ok (*ms) large timeout callback test is ok (*ms) successful thenable (*ms) rejected thenable (*ms) - [Error: custom error - ] + 'custom error' unfinished test with uncaughtException (*ms) Error: foo @@ -273,7 +265,7 @@ * assertion errors display actual and expected properly (*ms) - AssertionError: Expected values to be loosely deep-equal: + AssertionError [ERR_ASSERTION]: Expected values to be loosely deep-equal: { bar: 1, @@ -287,16 +279,15 @@ c: [Circular *1] } * { - generatedMessage: false, + generatedMessage: true, code: 'ERR_ASSERTION', - actual: [Object], - expected: [Object], + actual: { foo: 1, bar: 1 }, + expected: { bar: 2, c: [Circular *1] }, operator: 'deepEqual' } invalid subtest fail (*ms) - Error: test could not be started because its parent finished - * + 'test could not be started because its parent finished' Warning: Test "unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. Warning: Test "async unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. 
@@ -367,7 +358,7 @@ * async assertion fail (*ms) - AssertionError: Expected values to be strictly equal: + AssertionError [ERR_ASSERTION]: Expected values to be strictly equal: true !== false @@ -378,7 +369,7 @@ * * * { - generatedMessage: false, + generatedMessage: true, code: 'ERR_ASSERTION', actual: true, expected: false, @@ -409,12 +400,10 @@ * sync throw non-error fail (*ms) - [Error: Symbol(thrown symbol from sync throw non-error fail) - ] + Symbol(thrown symbol from sync throw non-error fail) +long running (*ms) - [Error: test did not finish before its parent and was cancelled - ] + 'test did not finish before its parent and was cancelled' sync skip option is false fail (*ms) Error: this should be executed @@ -432,8 +421,7 @@ * callback also returns a Promise (*ms) - [Error: passed a callback but also returned a Promise - ] + 'passed a callback but also returned a Promise' callback throw (*ms) Error: thrown from callback throw @@ -446,13 +434,15 @@ * callback called twice (*ms) - Error: callback invoked multiple times - * - * + 'callback invoked multiple times' callback called twice in future tick (*ms) - Error: callback invoked multiple times - * + Error [ERR_TEST_FAILURE]: callback invoked multiple times + * { + failureType: 'multipleCallbackInvocations', + cause: 'callback invoked multiple times', + code: 'ERR_TEST_FAILURE' + } callback async throw (*ms) Error: thrown from callback async throw @@ -460,15 +450,10 @@ * custom inspect symbol fail (*ms) - [Error: customized - ] + customized custom inspect symbol that throws fail (*ms) - [Error: { - foo: 1, - [Symbol(nodejs.util.inspect.custom)]: [Function: [nodejs.util.inspect.custom]] - } - ] + { foo: 1 } sync throw fails at first (*ms) Error: thrown from subtest sync throw fails at first @@ -497,16 +482,13 @@ * timed out async test (*ms) - [Error: test timed out after *ms - ] + 'test timed out after *ms' timed out callback test (*ms) - [Error: test timed out after *ms - ] + 'test timed out after *ms' rejected thenable (*ms) - [Error: custom error - ] + 'custom error' unfinished test with uncaughtException (*ms) Error: foo @@ -521,7 +503,7 @@ * assertion errors display actual and expected properly (*ms) - AssertionError: Expected values to be loosely deep-equal: + AssertionError [ERR_ASSERTION]: Expected values to be loosely deep-equal: { bar: 1, @@ -535,13 +517,12 @@ c: [Circular *1] } * { - generatedMessage: false, + generatedMessage: true, code: 'ERR_ASSERTION', - actual: [Object], - expected: [Object], + actual: { foo: 1, bar: 1 }, + expected: { bar: 2, c: [Circular *1] }, operator: 'deepEqual' } invalid subtest fail (*ms) - Error: test could not be started because its parent finished - * + 'test could not be started because its parent finished' diff --git a/test/parallel/test-error-serdes.js b/test/parallel/test-error-serdes.js index 92d0864348a831..139663fd667656 100644 --- a/test/parallel/test-error-serdes.js +++ b/test/parallel/test-error-serdes.js @@ -66,3 +66,8 @@ assert.strictEqual(cycle(Function), '[Function: Function]'); serializeError(new DynamicError()); assert.strictEqual(called, true); } + +// TODO(MoLow): add more tests for the added cases: +// - Error.cause +// - Serialize a thrown symbol +// - Object with util.inspect.custom diff --git a/test/parallel/test-runner-tap-checker.js b/test/parallel/test-runner-tap-checker.js deleted file mode 100644 index d437b025180c78..00000000000000 --- a/test/parallel/test-runner-tap-checker.js +++ /dev/null @@ -1,119 +0,0 @@ -'use strict'; -// Flags: --expose-internals - 
-require('../common'); -const assert = require('assert'); - -const { TapParser } = require('internal/test_runner/tap_parser'); -const { TapChecker } = require('internal/test_runner/tap_checker'); - -function TAPChecker(input) { - // parse - const parser = new TapParser({ specs: TapChecker.TAP14 }); - parser.parseSync(input); - parser.check(); -} - -[ - ['TAP version 14', 'missing TAP plan'], - [` -TAP version 14 -1..1 - `, 'missing Test Points'], - [` -TAP version 14 -1..1 -ok 2 - `, 'test 2 is out of plan range 1..1'], - [` -TAP version 14 -3..1 -ok 2 - `, 'plan start 3 is greater than plan end 1'], - [` -TAP version 14 -2..3 -ok 1 -ok 2 -ok 3 - `, 'test 1 is out of plan range 2..3'], - -].forEach(([str, message]) => { - assert.throws(() => TAPChecker(str), { - code: 'ERR_TAP_VALIDATION_ERROR', - message, - }); -}); - -// Valid TAP14 should not throw -TAPChecker(` -TAP version 14 -1..1 -ok -`); - -// Valid comment line shout not throw. -TAPChecker(` -TAP version 14 -1..5 -ok 1 - approved operating system -# $^0 is solaris -ok 2 - # SKIP no /sys directory -ok 3 - # SKIP no /sys directory -ok 4 - # SKIP no /sys directory -ok 5 - # SKIP no /sys directory -`); - -// Valid empty test plan should not throw. -TAPChecker(` -TAP version 14 -1..0 # skip because English-to-French translator isn't installed -`); - -// Valid test plan count should not throw. -TAPChecker(` -TAP version 14 -1..4 -ok 1 - Creating test program -ok 2 - Test program runs, no error -not ok 3 - infinite loop # TODO halting problem unsolved -not ok 4 - infinite loop 2 # TODO halting problem unsolved -`); - -// Valid YAML diagnostic should not throw. -TAPChecker(` -TAP version 14 -ok - created Board -ok -ok -ok -ok -ok -ok -ok - --- - message: "Board layout" - severity: comment - dump: - board: - - ' 16G 05C ' - - ' G N C C C G ' - - ' G C + ' - - '10C 01G 03C ' - - 'R N G G A G C C C ' - - ' R G C + ' - - ' 01G 17C 00C ' - - ' G A G G N R R N R ' - - ' G R G ' - ... -ok - board has 7 tiles + starter tile -1..9 -`); - -// Valid Bail out should not throw. -TAPChecker(` -TAP version 14 -1..573 -not ok 1 - database handle -Bail out! Couldn't connect to database. -`); diff --git a/test/parallel/test-runner-tap-lexer.js b/test/parallel/test-runner-tap-lexer.js deleted file mode 100644 index 96c27fecdea7d0..00000000000000 --- a/test/parallel/test-runner-tap-lexer.js +++ /dev/null @@ -1,494 +0,0 @@ -'use strict'; -// Flags: --expose-internals - -require('../common'); -const assert = require('assert'); - -const { TapLexer, TokenKind } = require('internal/test_runner/tap_lexer'); - -function TAPLexer(input) { - const lexer = new TapLexer(input); - return lexer.scan().flat(); -} - -{ - const tokens = TAPLexer(''); - - assert.strictEqual(tokens[0].kind, TokenKind.EOF); - assert.strictEqual(tokens[0].value, ''); -} - -{ - const tokens = TAPLexer('TAP version 14'); - - [ - { kind: TokenKind.TAP, value: 'TAP' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.TAP_VERSION, value: 'version' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '14' }, - { kind: TokenKind.EOL, value: '' }, - { kind: TokenKind.EOF, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('1..5 # reason'); - - [ - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.TAP_PLAN, value: '..' 
}, - { kind: TokenKind.NUMERIC, value: '5' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.HASH, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'reason' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer( - '1..5 # reason "\\ !"\\#$%&\'()*+,\\-./:;<=>?@[]^_`{|}~' - ); - - [ - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.TAP_PLAN, value: '..' }, - { kind: TokenKind.NUMERIC, value: '5' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.HASH, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'reason' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: '"' }, - { kind: TokenKind.ESCAPE, value: '\\' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: '!"' }, - { kind: TokenKind.LITERAL, value: '\\' }, - { kind: TokenKind.LITERAL, value: '#' }, - { kind: TokenKind.LITERAL, value: "$%&'()*" }, - { kind: TokenKind.PLUS, value: '+' }, - { kind: TokenKind.LITERAL, value: ',' }, - { kind: TokenKind.ESCAPE, value: '\\' }, - { kind: TokenKind.DASH, value: '-' }, - { kind: TokenKind.LITERAL, value: './:;<=>?@[]^_`{|}~' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('ok'); - - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('not ok'); - - [ - { kind: TokenKind.TAP_TEST_NOTOK, value: 'not' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('ok 1'); - - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer(` -ok 1 -not ok 2 -`); - - [ - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.TAP_TEST_NOTOK, value: 'not' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '2' }, - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.EOF, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer(` -ok 1 - ok 1 -`); - - [ - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: 
TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.EOF, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('ok 1 description'); - - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'description' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('ok 1 - description'); - - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.DASH, value: '-' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'description' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('ok 1 - description # todo'); - - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.DASH, value: '-' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'description' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.HASH, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'todo' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('ok 1 - description \\# todo'); - - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.DASH, value: '-' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'description' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.ESCAPE, value: '\\' }, - { kind: TokenKind.LITERAL, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'todo' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('ok 1 - description \\ # todo'); - - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: 
TokenKind.DASH, value: '-' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'description' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.ESCAPE, value: '\\' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.HASH, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'todo' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer( - 'ok 1 description \\# \\\\ world # TODO escape \\# characters with \\\\' - ); - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'description' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.ESCAPE, value: '\\' }, - { kind: TokenKind.LITERAL, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.ESCAPE, value: '\\' }, - { kind: TokenKind.LITERAL, value: '\\' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'world' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.HASH, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'TODO' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'escape' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.ESCAPE, value: '\\' }, - { kind: TokenKind.LITERAL, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'characters' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'with' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.ESCAPE, value: '\\' }, - { kind: TokenKind.LITERAL, value: '\\' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('ok 1 - description # ##'); - - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.DASH, value: '-' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'description' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.HASH, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: '#' }, - { kind: TokenKind.LITERAL, value: '#' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('# comment'); - [ - { kind: TokenKind.COMMENT, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'comment' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('#'); - - [ - { kind: TokenKind.COMMENT, value: '#' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - 
assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer(` - --- - message: "description" - severity: fail - ... -`); - - [ - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.TAP_YAML_START, value: '---' }, - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'message:' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: '"description"' }, - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'severity:' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'fail' }, - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.TAP_YAML_END, value: '...' }, - { kind: TokenKind.NEWLINE, value: '\n' }, - { kind: TokenKind.EOF, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('pragma +strict -warnings'); - - [ - { kind: TokenKind.TAP_PRAGMA, value: 'pragma' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.PLUS, value: '+' }, - { kind: TokenKind.LITERAL, value: 'strict' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.DASH, value: '-' }, - { kind: TokenKind.LITERAL, value: 'warnings' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('Bail out! Error'); - - [ - { kind: TokenKind.LITERAL, value: 'Bail' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'out!' 
}, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'Error' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -// Test isLiteralSymbol method -{ - const tokens = TAPLexer('ok 1 - description أتث讲演講👍🔥'); - - [ - { kind: TokenKind.TAP_TEST_OK, value: 'ok' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.NUMERIC, value: '1' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.DASH, value: '-' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'description' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'أتث讲演講👍🔥' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('# comment أتث讲演講👍🔥'); - [ - { kind: TokenKind.COMMENT, value: '#' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'comment' }, - { kind: TokenKind.WHITESPACE, value: ' ' }, - { kind: TokenKind.LITERAL, value: 'أتث讲演講👍🔥' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} - -{ - const tokens = TAPLexer('\x1b'); - - [ - { kind: TokenKind.ESCAPE, value: '\x1b' }, - { kind: TokenKind.EOL, value: '' }, - ].forEach((token, index) => { - assert.strictEqual(tokens[index].kind, token.kind); - assert.strictEqual(tokens[index].value, token.value); - }); -} diff --git a/test/parallel/test-runner-tap-parser-stream.js b/test/parallel/test-runner-tap-parser-stream.js deleted file mode 100644 index 80be92c121b73d..00000000000000 --- a/test/parallel/test-runner-tap-parser-stream.js +++ /dev/null @@ -1,816 +0,0 @@ -// Flags: --expose-internals -'use strict'; -const common = require('../common'); -const assert = require('node:assert'); -const { TapParser } = require('internal/test_runner/tap_parser'); -const { TapChecker } = require('internal/test_runner/tap_checker'); - -const cases = [ - { - input: 'TAP version 13', - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - ], - }, - { - input: '123', - expected: [ - { - kind: 'Unknown', - node: { value: '123' }, - nesting: 0, - lexeme: '123', - }, - ], - }, - { - input: '# 123', - expected: [ - { - kind: 'Comment', - node: { comment: '123' }, - nesting: 0, - lexeme: '# 123', - }, - ], - }, - { - input: '1..', - expected: [ - { - kind: 'Unknown', - node: { value: '1..' 
}, - nesting: 0, - lexeme: '1..', - }, - ], - }, - { - input: '1..abc', - expected: [ - { - kind: 'Unknown', - node: { value: '1..abc' }, - nesting: 0, - lexeme: '1..abc', - }, - ], - }, - { - input: '1..-1', - expected: [ - { - kind: 'Unknown', - node: { value: '1..-1' }, - nesting: 0, - lexeme: '1..-1', - }, - ], - }, - { - input: '1.1', - expected: [ - { - kind: 'Unknown', - node: { value: '1.1' }, - nesting: 0, - lexeme: '1.1', - }, - ], - }, - { - input: '1.....4', - expected: [ - { - kind: 'Unknown', - node: { value: '1.....4' }, - nesting: 0, - lexeme: '1.....4', - }, - ], - }, - { - input: 'TAP 12', - expected: [ - { - kind: 'Unknown', - node: { value: 'TAP 12' }, - nesting: 0, - lexeme: 'TAP 12', - }, - ], - }, - { - input: 'TAP version', - expected: [ - { - kind: 'Unknown', - node: { value: 'TAP version' }, - nesting: 0, - lexeme: 'TAP version', - }, - ], - }, - { - input: 'TAP version v14', - expected: [ - { - kind: 'Unknown', - node: { value: 'TAP version v14' }, - nesting: 0, - lexeme: 'TAP version v14', - }, - ], - }, - { - input: 'TAP TAP TAP', - expected: [ - { - kind: 'Unknown', - node: { value: 'TAP TAP TAP' }, - nesting: 0, - lexeme: 'TAP TAP TAP', - }, - ], - }, - { - input: '--- yaml', - expected: [ - { - kind: 'Unknown', - node: { value: '--- yaml' }, - nesting: 0, - lexeme: '--- yaml', - }, - ], - }, - { - input: '... ... yaml', - expected: [ - { - kind: 'Unknown', - node: { value: '... ... yaml' }, - nesting: 0, - lexeme: '... ... yaml', - }, - ], - }, - { - input: 'ook 1', - expected: [ - { - kind: 'Unknown', - node: { value: 'ook 1' }, - nesting: 0, - lexeme: 'ook 1', - }, - ], - }, - { - input: ' ok 98', - expected: [ - { - kind: 'Unknown', - node: { value: ' ok 98' }, - nesting: 0, - lexeme: ' ok 98', - }, - ], - }, - { - input: 'pragma ++++++', - expected: [ - { - kind: 'Unknown', - node: { value: 'pragma ++++++' }, - nesting: 0, - lexeme: 'pragma ++++++', - }, - ], - }, - { - input: 'Bailout!', - expected: [ - { - kind: 'Unknown', - node: { value: 'Bailout!' 
}, - nesting: 0, - lexeme: 'Bailout!', - }, - ], - }, - { - input: 'invalid tap', - expected: [ - { - nesting: 0, - kind: 'Unknown', - node: { value: 'invalid tap' }, - lexeme: 'invalid tap', - }, - ], - }, - { - input: 'TAP version 13\ninvalid tap after harness', - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - { - nesting: 0, - kind: 'Unknown', - node: { value: 'invalid tap after harness' }, - lexeme: 'invalid tap after harness', - }, - ], - }, - { - input: `TAP version 13 - # nested diagnostic -# diagnostic comment`, - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - { - nesting: 1, - kind: 'Comment', - node: { comment: 'nested diagnostic' }, - lexeme: ' # nested diagnostic', - }, - { - nesting: 0, - kind: 'Comment', - node: { comment: 'diagnostic comment' }, - lexeme: '# diagnostic comment', - }, - ], - }, - { - input: `TAP version 13 - 1..5 -1..3 -2..2`, - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - { - nesting: 1, - kind: 'PlanKeyword', - node: { start: '1', end: '5' }, - lexeme: ' 1..5', - }, - { - nesting: 0, - kind: 'PlanKeyword', - node: { start: '1', end: '3' }, - lexeme: '1..3', - }, - { - nesting: 0, - kind: 'PlanKeyword', - node: { start: '2', end: '2' }, - lexeme: '2..2', - }, - ], - }, - { - input: `TAP version 13 -ok 1 - test -ok 2 - test # SKIP -not ok 3 - test # TODO reason`, - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '1', - description: 'test', - reason: '', - time: 0, - diagnostics: [], - }, - lexeme: 'ok 1 - test', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: true }, - id: '2', - description: 'test', - reason: '', - time: 0, - diagnostics: [], - }, - lexeme: 'ok 2 - test # SKIP', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: true, pass: false, todo: true, skip: false }, - id: '3', - description: 'test', - reason: 'reason', - time: 0, - diagnostics: [], - }, - lexeme: 'not ok 3 - test # TODO reason', - }, - ], - }, - { - input: `TAP version 13 -# Subtest: test -ok 1 - test -ok 2 - test`, - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - { - nesting: 0, - kind: 'SubTestPointKeyword', - node: { name: 'test' }, - lexeme: '# Subtest: test', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '1', - description: 'test', - reason: '', - time: 0, - diagnostics: [], - }, - lexeme: 'ok 1 - test', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '2', - description: 'test', - reason: '', - time: 0, - diagnostics: [], - }, - lexeme: 'ok 2 - test', - }, - ], - }, - { - input: `TAP version 13 -# Subtest: test -ok 1 - test - --- - foo: bar - duration_ms: 0.0001 - prop: |- - multiple - lines - ...`, - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - { - nesting: 0, - kind: 'SubTestPointKeyword', - node: { name: 'test' }, - lexeme: '# Subtest: test', - }, - { - nesting: 0, - 
kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '1', - description: 'test', - reason: '', - time: 0.0001, - diagnostics: [ - 'foo: bar', - 'duration_ms: 0.0001', - 'prop: |-', - ' multiple', - ' lines', - ], - }, - lexeme: 'ok 1 - test', - }, - ], - }, - { - input: `TAP version 13 -# Subtest: test/fixtures/test-runner/index.test.js - # Subtest: this should pass - ok 1 - this should pass - --- - duration_ms: 0.0001 - ... - 1..1`, - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - { - kind: 'SubTestPointKeyword', - lexeme: '# Subtest: test/fixtures/test-runner/index.test.js', - nesting: 0, - node: { - name: 'test/fixtures/test-runner/index.test.js', - }, - }, - { - kind: 'SubTestPointKeyword', - lexeme: ' # Subtest: this should pass', - nesting: 1, - node: { - name: 'this should pass', - }, - }, - { - kind: 'TestPointKeyword', - lexeme: ' ok 1 - this should pass', - nesting: 1, - node: { - description: 'this should pass', - diagnostics: ['duration_ms: 0.0001'], - id: '1', - reason: '', - status: { - fail: false, - pass: true, - skip: false, - todo: false, - }, - time: 0.0001, - }, - }, - { - kind: 'PlanKeyword', - lexeme: ' 1..1', - nesting: 1, - node: { - end: '1', - start: '1', - }, - }, - ], - }, - { - input: `TAP version 13 -# Subtest: test 1 -ok 1 - test 1 - --- - foo: bar - duration_ms: 1.00 - prop: |- - multiple - lines - ... -# Subtest: test 2 -ok 2 - test 2 - --- - duration_ms: 2.00 - ... -# Subtest: test 3 -ok 3 - test 3 - --- - foo: bar - duration_ms: 3.00 - prop: |- - multiple - lines - ...`, - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - { - nesting: 0, - kind: 'SubTestPointKeyword', - node: { name: 'test 1' }, - lexeme: '# Subtest: test 1', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '1', - description: 'test 1', - reason: '', - time: 1.0, - diagnostics: [ - 'foo: bar', - 'duration_ms: 1.00', - 'prop: |-', - ' multiple', - ' lines', - ], - }, - lexeme: 'ok 1 - test 1', - }, - { - nesting: 0, - kind: 'SubTestPointKeyword', - node: { name: 'test 2' }, - lexeme: '# Subtest: test 2', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '2', - description: 'test 2', - reason: '', - time: 2.0, - diagnostics: ['duration_ms: 2.00'], - }, - lexeme: 'ok 2 - test 2', - }, - { - nesting: 0, - kind: 'SubTestPointKeyword', - node: { name: 'test 3' }, - lexeme: '# Subtest: test 3', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '3', - description: 'test 3', - reason: '', - time: 3.0, - diagnostics: [ - 'foo: bar', - 'duration_ms: 3.00', - 'prop: |-', - ' multiple', - ' lines', - ], - }, - lexeme: 'ok 3 - test 3', - }, - ], - }, - { - input: `TAP version 13 -# Subtest: test 1 -ok 1 - test 1 - --- - foo: bar - duration_ms: 1.00 - prop: |- - multiple - lines - ... - # Subtest: test 11 - ok 11 - test 11 - --- - duration_ms: 11.00 - ... 
- # Subtest: test 111 - ok 111 - test 111 - --- - foo: bar - duration_ms: 111.00 - prop: |- - multiple - lines - ...`, - expected: [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '13' }, - lexeme: 'TAP version 13', - }, - { - nesting: 0, - kind: 'SubTestPointKeyword', - node: { name: 'test 1' }, - lexeme: '# Subtest: test 1', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '1', - description: 'test 1', - reason: '', - time: 1.0, - diagnostics: [ - 'foo: bar', - 'duration_ms: 1.00', - 'prop: |-', - ' multiple', - ' lines', - ], - }, - lexeme: 'ok 1 - test 1', - }, - { - nesting: 1, - kind: 'SubTestPointKeyword', - node: { name: 'test 11' }, - lexeme: ' # Subtest: test 11', - }, - { - nesting: 1, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '11', - description: 'test 11', - reason: '', - time: 11.0, - diagnostics: ['duration_ms: 11.00'], - }, - lexeme: ' ok 11 - test 11', - }, - { - nesting: 2, - kind: 'SubTestPointKeyword', - node: { name: 'test 111' }, - lexeme: ' # Subtest: test 111', - }, - { - nesting: 2, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '111', - description: 'test 111', - reason: '', - time: 111.0, - diagnostics: [ - 'foo: bar', - 'duration_ms: 111.00', - 'prop: |-', - ' multiple', - ' lines', - ], - }, - lexeme: ' ok 111 - test 111', - }, - ], - }, -]; - -(async () => { - for (const { input, expected } of cases) { - const parser = new TapParser(); - parser.write(input); - parser.end(); - const actual = await parser.toArray(); - assert.deepStrictEqual( - actual, - expected.map((item) => ({ __proto__: null, ...item })) - ); - } -})().then(common.mustCall()); - -(async () => { - const expected = [ - { - kind: 'PlanKeyword', - node: { start: '1', end: '3' }, - nesting: 0, - lexeme: '1..3', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '1', - description: 'Input file opened', - reason: '', - time: 0, - diagnostics: [], - }, - lexeme: 'ok 1 - Input file opened', - }, - { - kind: 'TestPointKeyword', - node: { - status: { fail: true, pass: false, todo: false, skip: false }, - id: '2', - description: '', - reason: '', - time: 0, - diagnostics: [], - }, - nesting: 0, - lexeme: 'not ok 2 ', - }, - { - kind: 'SubTestPointKeyword', - node: { name: 'foobar' }, - nesting: 1, - lexeme: ' # Subtest: foobar', - }, - { - __proto__: null, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: true, skip: false }, - id: '3', - description: '', - reason: '', - time: 0.0001, - diagnostics: [ - 'foo: bar', - 'duration_ms: 0.0001', - 'prop: |-', - ' foo', - ' bar', - ], - }, - nesting: 0, - lexeme: 'ok 3 # TODO', - }, - ]; - - const parser = new TapParser({ specs: TapChecker.TAP14 }); - parser.write('\n'); - parser.write('1'); - parser.write('.'); - parser.write('.'); - parser.write('3'); - parser.write('\n'); - parser.write('ok 1 '); - parser.write('- Input file opened\n'); - parser.write('not'); - parser.write(' ok'); - parser.write(' 2 \n'); - parser.write('\n'); - parser.write(' # '); - parser.write('Subtest: foo'); - parser.write('bar'); - parser.write('\n'); - parser.write(''); - parser.write('ok'); - parser.write(' 3 #'); - parser.write(' TODO'); - parser.write('\n'); - parser.write(' ---\n'); - parser.write(' foo: bar\n'); - parser.write(' duration_ms: 
'); - parser.write(' 0.0001\n'); - parser.write(' prop: |-\n'); - parser.write(' foo\n'); - parser.write(' bar\n'); - parser.write(' ...\n'); - parser.end(); - const actual = await parser.toArray(); - assert.deepStrictEqual( - actual, - expected.map((item) => ({ __proto__: null, ...item })) - ); -})().then(common.mustCall()); diff --git a/test/parallel/test-runner-tap-parser.js b/test/parallel/test-runner-tap-parser.js deleted file mode 100644 index b14f7a9a6b089b..00000000000000 --- a/test/parallel/test-runner-tap-parser.js +++ /dev/null @@ -1,1179 +0,0 @@ -'use strict'; -// Flags: --expose-internals - -require('../common'); -const assert = require('assert'); - -const { TapParser } = require('internal/test_runner/tap_parser'); - -function TAPParser(input) { - const parser = new TapParser(); - const ast = parser.parseSync(input); - return ast; -} - -// Comment - -{ - const ast = TAPParser('# comment'); - assert.deepStrictEqual(ast, [ - { - nesting: 0, - kind: 'Comment', - node: { comment: 'comment' }, - lexeme: '# comment', - }, - ]); -} - -{ - const ast = TAPParser('#'); - assert.deepStrictEqual(ast, [ - { - kind: 'Comment', - nesting: 0, - node: { - comment: '', - }, - lexeme: '#', - }, - ]); -} - -{ - const ast = TAPParser('####'); - assert.deepStrictEqual(ast, [ - { - nesting: 0, - kind: 'Comment', - node: { comment: '###' }, - lexeme: '####', - }, - ]); -} - -// Empty input - -{ - const ast = TAPParser(''); - assert.deepStrictEqual(ast, []); -} - -// TAP version - -{ - const ast = TAPParser('TAP version 14'); - assert.deepStrictEqual(ast, [ - { - nesting: 0, - kind: 'VersionKeyword', - node: { version: '14' }, - lexeme: 'TAP version 14', - }, - ]); -} - -// Test plan - -{ - const ast = TAPParser('1..5 # reason'); - assert.deepStrictEqual(ast, [ - { - nesting: 0, - kind: 'PlanKeyword', - node: { start: '1', end: '5', reason: 'reason' }, - lexeme: '1..5 # reason', - }, - ]); -} - -{ - const ast = TAPParser( - '1..5 # reason "\\ !"\\#$%&\'()*+,\\-./:;<=>?@[]^_`{|}~' - ); - assert.deepStrictEqual(ast, [ - { - nesting: 0, - kind: 'PlanKeyword', - node: { - start: '1', - end: '5', - reason: 'reason " !"\\#$%&\'()*+,-./:;<=>?@[]^_`{|}~', - }, - lexeme: '1..5 # reason "\\ !"\\#$%&\'()*+,\\-./:;<=>?@[]^_`{|}~', - }, - ]); -} - -// Test point - -{ - const ast = TAPParser('ok'); - assert.deepStrictEqual(ast, [ - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '', - description: '', - reason: '', - time: 0, - diagnostics: [], - }, - lexeme: 'ok', - }, - ]); -} - -{ - const ast = TAPParser('not ok'); - assert.deepStrictEqual(ast, [ - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: true, pass: false, todo: false, skip: false }, - id: '', - description: '', - reason: '', - time: 0, - diagnostics: [], - }, - lexeme: 'not ok', - }, - ]); -} - -{ - const ast = TAPParser('ok 1'); - assert.deepStrictEqual(ast, [ - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '1', - description: '', - reason: '', - time: 0, - diagnostics: [], - }, - lexeme: 'ok 1', - }, - ]); -} - -{ - const ast = TAPParser(` -ok 111 -not ok 222 -`); - assert.deepStrictEqual(ast, [ - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - status: { fail: false, pass: true, todo: false, skip: false }, - id: '111', - description: '', - reason: '', - time: 0, - diagnostics: [], - }, - lexeme: 'ok 111', - }, - { - nesting: 0, - kind: 'TestPointKeyword', - node: { - 
-        status: { fail: true, pass: false, todo: false, skip: false },
-        id: '222',
-        description: '',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'not ok 222',
-    },
-  ]);
-}
-
-{
-  // Nested tests
-  const ast = TAPParser(`
-ok 1 - parent
-    ok 2 - child
-`);
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: 'parent',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 1 - parent',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '2',
-        description: 'child',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 2 - child',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser(`
-# Subtest: nested1
-    ok 1
-
-    # Subtest: nested2
-    ok 2 - nested2
-
-    # Subtest: nested3
-    ok 3 - nested3
-
-    # Subtest: nested4
-    ok 4 - nested4
-
-ok 1 - nested1
-`);
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'SubTestPointKeyword',
-      node: { name: 'nested1' },
-      lexeme: '# Subtest: nested1',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: '',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 1',
-    },
-    {
-      nesting: 1,
-      kind: 'SubTestPointKeyword',
-      node: { name: 'nested2' },
-      lexeme: '    # Subtest: nested2',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '2',
-        description: 'nested2',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 2 - nested2',
-    },
-    {
-      nesting: 1,
-      kind: 'SubTestPointKeyword',
-      node: { name: 'nested3' },
-      lexeme: '    # Subtest: nested3',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '3',
-        description: 'nested3',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 3 - nested3',
-    },
-    {
-      nesting: 1,
-      kind: 'SubTestPointKeyword',
-      node: { name: 'nested4' },
-      lexeme: '    # Subtest: nested4',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '4',
-        description: 'nested4',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 4 - nested4',
-    },
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: 'nested1',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 1 - nested1',
-    },
-  ]);
-}
-
-// Nested tests as comment
-
-{
-  const ast = TAPParser(`
-# Subtest: nested1
-    ok 1 - test nested1
-
-    # Subtest: nested2
-        ok 2 - test nested2
-
-    ok 3 - nested2
-
-ok 4 - nested1
-`);
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'SubTestPointKeyword',
-      node: { name: 'nested1' },
-      lexeme: '# Subtest: nested1',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: 'test nested1',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 1 - test nested1',
-    },
-    {
-      nesting: 1,
-      kind: 'SubTestPointKeyword',
-      node: { name: 'nested2' },
-      lexeme: '    # Subtest: nested2',
-    },
-    {
-      nesting: 2,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '2',
-        description: 'test nested2',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '        ok 2 - test nested2',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '3',
-        description: 'nested2',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 3 - nested2',
-    },
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '4',
-        description: 'nested1',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 4 - nested1',
-    },
-  ]);
-}
-
-// Multiple nested tests as comment
-
-{
-  const ast = TAPParser(`
-# Subtest: nested1
-    ok 1 - test nested1
-
-    # Subtest: nested2a
-        ok 2 - test nested2a
-
-    ok 3 - nested2a
-
-    # Subtest: nested2b
-        ok 4 - test nested2b
-
-    ok 5 - nested2b
-
-ok 6 - nested1
-`);
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'SubTestPointKeyword',
-      node: { name: 'nested1' },
-      lexeme: '# Subtest: nested1',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: 'test nested1',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 1 - test nested1',
-    },
-    {
-      nesting: 1,
-      kind: 'SubTestPointKeyword',
-      node: { name: 'nested2a' },
-      lexeme: '    # Subtest: nested2a',
-    },
-    {
-      nesting: 2,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '2',
-        description: 'test nested2a',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '        ok 2 - test nested2a',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '3',
-        description: 'nested2a',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 3 - nested2a',
-    },
-    {
-      nesting: 1,
-      kind: 'SubTestPointKeyword',
-      node: { name: 'nested2b' },
-      lexeme: '    # Subtest: nested2b',
-    },
-    {
-      nesting: 2,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '4',
-        description: 'test nested2b',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '        ok 4 - test nested2b',
-    },
-    {
-      nesting: 1,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '5',
-        description: 'nested2b',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: '    ok 5 - nested2b',
-    },
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '6',
-        description: 'nested1',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 6 - nested1',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok 1 description');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: 'description',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 1 description',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok 1 - description');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: 'description',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 1 - description',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok 1 - description # todo');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: true, skip: false },
-        id: '1',
-        description: 'description',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 1 - description # todo',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok 1 - description \\# todo');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: 'description # todo',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 1 - description \\# todo',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok 1 - description \\ # todo');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: true, skip: false },
-        id: '1',
-        description: 'description',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 1 - description \\ # todo',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser(
-    'ok 1 description \\# \\\\ world # TODO escape \\# characters with \\\\'
-  );
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: true, skip: false },
-        id: '1',
-        description: 'description # \\ world',
-        reason: 'escape # characters with \\',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme:
-        'ok 1 description \\# \\\\ world # TODO escape \\# characters with \\\\',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok 1 - description # ##');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: 'description',
-        reason: '##',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 1 - description # ##',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser(
-    'ok 2 not skipped: https://example.com/page.html#skip is a url'
-  );
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '2',
-        description: 'not skipped: https://example.com/page.html#skip is a url',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 2 not skipped: https://example.com/page.html#skip is a url',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok 3 - #SkIp case insensitive, so this is skipped');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: true },
-        id: '3',
-        description: '',
-        reason: 'case insensitive, so this is skipped',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 3 - #SkIp case insensitive, so this is skipped',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok ok ok');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '',
-        description: 'ok ok',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok ok ok',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok not ok');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '',
-        description: 'not ok',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok not ok',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser('ok 1..1');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: '',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      lexeme: 'ok 1..1',
-    },
-  ]);
-}
-
-// Diagnostic
-
-{
-  // Note the leading 2 valid spaces
-  const ast = TAPParser(`
-  ---
-  message: 'description'
-  property: 'value'
-  ...
-`);
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'YamlEndKeyword',
-      node: {
-        diagnostics: ["message: 'description'", "property: 'value'"],
-      },
-      lexeme: '  ...',
-    },
-  ]);
-}
-
-{
-  // Note the leading 2 valid spaces
-  const ast = TAPParser(`
-  ---
-  message: "Board layout"
-  severity: comment
-  dump:
-    board:
-      - ' 16G 05C '
-      - ' G N C C C G '
-      - ' G C + '
-      - '10C 01G 03C '
-      - 'R N G G A G C C C '
-      - ' R G C + '
-      - ' 01G 17C 00C '
-      - ' G A G G N R R N R '
-      - ' G R G '
-  ...
-`);
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'YamlEndKeyword',
-      node: {
-        diagnostics: [
-          'message: "Board layout"',
-          'severity: comment',
-          'dump:',
-          '  board:',
-          "    - ' 16G 05C '",
-          "    - ' G N C C C G '",
-          "    - ' G C + '",
-          "    - '10C 01G 03C '",
-          "    - 'R N G G A G C C C '",
-          "    - ' R G C + '",
-          "    - ' 01G 17C 00C '",
-          "    - ' G A G G N R R N R '",
-          "    - ' G R G '",
-        ],
-      },
-      lexeme: '  ...',
-    },
-  ]);
-}
-
-{
-  const ast = TAPParser(`
-  ---
-  ...
-`);
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'YamlEndKeyword',
-      node: { diagnostics: [] },
-      lexeme: '  ...',
-    },
-  ]);
-}
-
-{
-  assert.throws(
-    () =>
-      TAPParser(
-        `
-  ---
-  message: 'description'
-  property: 'value'`
-      ),
-    {
-      name: 'SyntaxError',
-      code: 'ERR_TAP_PARSER_ERROR',
-      message:
-        'Expected end of YAML block, received "\'value\'" (Literal) at line 4, column 13 (start 44, end 50)',
-    }
-  );
-}
-
-{
-  assert.throws(
-    () =>
-      // Note the leading 5 spaces before ---
-      // This is a special case because the YAML block is indented by 1 space
-      // the extra 4 spaces are those of the subtest nesting level.
-      // However, the remaining content of the YAML block is indented by 2 spaces
-      // making it belong to the parent level.
-      TAPParser(
-        `
-     ---
-  message: 'description'
-  property: 'value'
-     ...
-  `
-      ),
-    {
-      name: 'SyntaxError',
-      code: 'ERR_TAP_PARSER_ERROR',
-      message:
-        'Expected end of YAML block, received "\'value\'" (Literal) at line 4, column 13 (start 47, end 53)',
-    }
-  );
-}
-
-{
-  assert.throws(
-    () =>
-      // Note the leading 4 spaces before ...
-      TAPParser(
-        `
-  ---
-  message: 'description'
-  property: 'value'
-    ...
-  `
-      ),
-    {
-      name: 'SyntaxError',
-      code: 'ERR_TAP_PARSER_ERROR',
-      message:
-        'Expected end of YAML block, received " " (Whitespace) at line 6, column 2 (start 61, end 61)',
-    }
-  );
-}
-
-// Pragma
-
-{
-  const ast = TAPParser('pragma +strict, -warnings');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'PragmaKeyword',
-      node: {
-        pragmas: { strict: true, warnings: false },
-      },
-      lexeme: 'pragma +strict, -warnings',
-    },
-  ]);
-}
-
-// Bail out
-
-{
-  const ast = TAPParser('Bail out! Error');
-  assert.deepStrictEqual(ast, [
-    {
-      nesting: 0,
-      kind: 'BailOutKeyword',
-      node: { bailout: true, reason: 'Error' },
-      lexeme: 'Bail out! Error',
-    },
-  ]);
-}
-
-// TAP document (with diagnostics)
-
-{
-  const ast = TAPParser(`
-# Comment on version 13
-# Another comment on version 13
-
-TAP version 13
-
-# Subtest: /test.js
-    # Subtest: level 0a
-        # Subtest: level 1a
-            # Comment test point 1a
-            # Comment test point 1aa
-            ok 1 - level 1a
-              ---
-              duration_ms: 1.676996
-              ...
-            # Comment plan 1a
-            # Comment plan 1aa
-            1..1
-        # Comment closing test point 1a
-        # Comment closing test point 1aa
-        not ok 1 - level 1a
-          ---
-          duration_ms: 0.122839
-          failureType: 'testCodeFailure'
-          error: 'level 0b error'
-          code: 'ERR_TEST_FAILURE'
-          stack: |-
-            TestContext. (/test.js:23:9)
-          ...
-        1..1
-    not ok 1 - level 0a
-      ---
-      duration_ms: 84.920487
-      failureType: 'subtestsFailed'
-      exitCode: 1
-      error: '3 subtests failed'
-      code: 'ERR_TEST_FAILURE'
-      ...
-    # Comment plan 0a
-    # Comment plan 0aa
-    1..1
-
-# Comment closing test point 0a
-
-# Comment closing test point 0aa
-
-not ok 1 - /test.js
-# tests 1
-# pass 0
-# fail 1
-# cancelled 0
-# skipped 0
-# todo 0
-# duration_ms 87.077507
-`);
-
-  assert.deepStrictEqual(ast, [
-    {
-      kind: 'VersionKeyword',
-      node: { version: '13' },
-      nesting: 0,
-      comments: ['Comment on version 13', 'Another comment on version 13'],
-      lexeme: 'TAP version 13',
-    },
-    {
-      kind: 'SubTestPointKeyword',
-      node: { name: '/test.js' },
-      nesting: 0,
-      lexeme: '# Subtest: /test.js',
-    },
-    {
-      kind: 'SubTestPointKeyword',
-      node: { name: 'level 0a' },
-      nesting: 1,
-      lexeme: '    # Subtest: level 0a',
-    },
-    {
-      kind: 'SubTestPointKeyword',
-      node: { name: 'level 1a' },
-      nesting: 2,
-      lexeme: '        # Subtest: level 1a',
-    },
-    {
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: false, pass: true, todo: false, skip: false },
-        id: '1',
-        description: 'level 1a',
-        reason: '',
-        time: 1.676996,
-        diagnostics: ['duration_ms: 1.676996'],
-      },
-      nesting: 3,
-      comments: ['Comment test point 1a', 'Comment test point 1aa'],
-      lexeme: '            ok 1 - level 1a',
-    },
-    {
-      kind: 'PlanKeyword',
-      node: { start: '1', end: '1' },
-      nesting: 3,
-      comments: ['Comment plan 1a', 'Comment plan 1aa'],
-      lexeme: '            1..1',
-    },
-    {
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: true, pass: false, todo: false, skip: false },
-        id: '1',
-        description: 'level 1a',
-        reason: '',
-        time: 0.122839,
-        diagnostics: [
-          'duration_ms: 0.122839',
-          "failureType: 'testCodeFailure'",
-          "error: 'level 0b error'",
-          "code: 'ERR_TEST_FAILURE'",
-          'stack: |-',
-          '  TestContext. (/test.js:23:9)',
-        ],
-      },
-      nesting: 2,
-      comments: [
-        'Comment closing test point 1a',
-        'Comment closing test point 1aa',
-      ],
-      lexeme: '        not ok 1 - level 1a',
-    },
-    {
-      kind: 'PlanKeyword',
-      node: { start: '1', end: '1' },
-      nesting: 2,
-      lexeme: '        1..1',
-    },
-    {
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: true, pass: false, todo: false, skip: false },
-        id: '1',
-        description: 'level 0a',
-        reason: '',
-        time: 84.920487,
-        diagnostics: [
-          'duration_ms: 84.920487',
-          "failureType: 'subtestsFailed'",
-          'exitCode: 1',
-          "error: '3 subtests failed'",
-          "code: 'ERR_TEST_FAILURE'",
-        ],
-      },
-      nesting: 1,
-      lexeme: '    not ok 1 - level 0a',
-    },
-    {
-      kind: 'PlanKeyword',
-      node: { start: '1', end: '1' },
-      nesting: 1,
-      comments: ['Comment plan 0a', 'Comment plan 0aa'],
-      lexeme: '    1..1',
-    },
-    {
-      kind: 'TestPointKeyword',
-      node: {
-        status: { fail: true, pass: false, todo: false, skip: false },
-        id: '1',
-        description: '/test.js',
-        reason: '',
-        time: 0,
-        diagnostics: [],
-      },
-      nesting: 0,
-      comments: [
-        'Comment closing test point 0a',
-        'Comment closing test point 0aa',
-      ],
-      lexeme: 'not ok 1 - /test.js',
-    },
-    {
-      kind: 'Comment',
-      node: { comment: 'tests 1' },
-      nesting: 0,
-      lexeme: '# tests 1',
-    },
-    {
-      kind: 'Comment',
-      node: { comment: 'pass 0' },
-      nesting: 0,
-      lexeme: '# pass 0',
-    },
-    {
-      kind: 'Comment',
-      node: { comment: 'fail 1' },
-      nesting: 0,
-      lexeme: '# fail 1',
-    },
-    {
-      kind: 'Comment',
-      node: { comment: 'cancelled 0' },
-      nesting: 0,
-      lexeme: '# cancelled 0',
-    },
-    {
-      kind: 'Comment',
-      node: { comment: 'skipped 0' },
-      nesting: 0,
-      lexeme: '# skipped 0',
-    },
-    {
-      kind: 'Comment',
-      node: { comment: 'todo 0' },
-      nesting: 0,
-      lexeme: '# todo 0',
-    },
-    {
-      kind: 'Comment',
-      node: { comment: 'duration_ms 87.077507' },
-      nesting: 0,
-      lexeme: '# duration_ms 87.077507',
-    },
-  ]);
-}