2022-08-19 15:59:36 +02:00
|
|
|
// Copyright (C) 2019 The Qt Company Ltd.
|
2022-12-21 10:12:09 +01:00
|
|
|
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only WITH Qt-GPL-exception-1.0
|
2018-12-07 10:00:23 +01:00
|
|
|
|
|
|
|
|
#include "boosttestoutputreader.h"
|
2022-07-13 18:31:56 +02:00
|
|
|
|
2018-12-07 10:00:23 +01:00
|
|
|
#include "boosttestsettings.h"
|
|
|
|
|
#include "boosttestresult.h"
|
TestRunner: Reuse TaskTree
Get rid of QFutureInterface argument from
ITestConfiguration::createOutputReader() and from
TestOutputReader c'tor.
The fine-grained progress reporting was broken anyway:
1. The assumption was that testCaseCount was meant to be
the total number of test functions executed. It didn't
include the initTestCase() and cleanupTestCase(),
while those were reported on runtime apparently
(and exceeding the max progress by 2).
2. In case of tst_qtcprocess, when the whole test was run,
the testCaseCount reported 41, while the real
number of functions was 26 (+2 = 28 for init/cleanup).
3. While the max progress was set to testCaseCount initially,
the corresponding FutureProgress rendered the progress
always in 0-100 range, what didn't match the reality.
Instead, rely on TaskTree progress, which resolution
is per test as a whole. So, when executing a series
of tests this should scale fine. In addition, the
progress advances fluently according to the expected
run time - with 10 seconds hardcoded.
The original code locations, where progress was bumped,
are left with a TODO comment for any possible future tweaks.
Like in case of result reporting, fine-grained progress
reporting may be implemented by providing additional signal,
so there is no need for QFutureInterface inside
TestOutputReader.
Change-Id: Idc11d55e3a49dac8d1788948b9a82f68199203c6
Reviewed-by: <github-actions-qt-creator@cristianadam.eu>
Reviewed-by: Christian Stenger <christian.stenger@qt.io>
2023-01-17 00:45:50 +01:00
|
|
|
|
2022-07-13 18:31:56 +02:00
|
|
|
#include "../autotesttr.h"
|
|
|
|
|
#include "../testtreeitem.h"
|
2018-12-07 10:00:23 +01:00
|
|
|
|
2023-05-03 17:05:35 +02:00
|
|
|
#include <utils/process.h>
|
2018-12-07 10:00:23 +01:00
|
|
|
#include <utils/qtcassert.h>
|
|
|
|
|
|
|
|
|
|
#include <QLoggingCategory>
|
|
|
|
|
#include <QRegularExpression>
|
|
|
|
|
|
2023-01-16 15:00:15 +01:00
|
|
|
using namespace Utils;
|
|
|
|
|
|
2018-12-07 10:00:23 +01:00
|
|
|
namespace Autotest {
|
|
|
|
|
namespace Internal {
|
|
|
|
|
|
|
|
|
|
static Q_LOGGING_CATEGORY(orLog, "qtc.autotest.boost.outputreader", QtWarningMsg)
|
|
|
|
|
|
2023-05-03 16:00:22 +02:00
|
|
|
// Constructs a reader attached to a running Boost.Test executable.
// \a log and \a report mirror the --log_level / --report_level options the
// test was started with; they steer how output lines can be attributed to
// individual test cases later on.
BoostTestOutputReader::BoostTestOutputReader(Process *testApplication,
                                             const FilePath &buildDirectory,
                                             const FilePath &projectFile,
                                             LogLevel log, ReportLevel report)
    : TestOutputReader(testApplication, buildDirectory)
    , m_projectFile(projectFile)
    , m_logLevel(log)
    , m_reportLevel(report)
{
}
|
|
|
|
|
|
|
|
|
|
// content of "error:..." / "info:..." / ... messages
|
|
|
|
|
static QString caseFromContent(const QString &content)
|
|
|
|
|
{
|
|
|
|
|
const int length = content.length();
|
|
|
|
|
if (content.startsWith("last checkpoint:")) {
|
|
|
|
|
int index = content.indexOf('"');
|
|
|
|
|
if (index != 17 || length <= 18) {
|
|
|
|
|
qCDebug(orLog) << "double quote position" << index << " or content length" << length
|
|
|
|
|
<< "wrong on content" << content;
|
2023-01-14 16:25:51 +01:00
|
|
|
return {};
|
2018-12-07 10:00:23 +01:00
|
|
|
}
|
|
|
|
|
index = content.indexOf('"', 18);
|
|
|
|
|
if (index == -1) {
|
|
|
|
|
qCDebug(orLog) << "no closing double quote" << content;
|
2023-01-14 16:25:51 +01:00
|
|
|
return {};
|
2018-12-07 10:00:23 +01:00
|
|
|
}
|
|
|
|
|
return content.mid(18, index - 1);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
int index = content.indexOf(": in ");
|
|
|
|
|
if (index == -1) // "info: check true has passed"
|
2023-01-14 16:25:51 +01:00
|
|
|
return {};
|
2018-12-07 10:00:23 +01:00
|
|
|
|
|
|
|
|
if (index <= 4 || length < index + 4) {
|
|
|
|
|
qCDebug(orLog) << "unexpected position" << index << "for info" << content;
|
2023-01-14 16:25:51 +01:00
|
|
|
return {};
|
2018-12-07 10:00:23 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
QString result = content.mid(index + 5);
|
2019-05-24 14:51:56 +02:00
|
|
|
static QRegularExpression functionName("\"(.+)\":.*");
|
2018-12-07 10:00:23 +01:00
|
|
|
const QRegularExpressionMatch matcher = functionName.match(result);
|
|
|
|
|
if (!matcher.hasMatch()) {
|
|
|
|
|
qCDebug(orLog) << "got no match";
|
2023-01-14 16:25:51 +01:00
|
|
|
return {};
|
2018-12-07 10:00:23 +01:00
|
|
|
}
|
|
|
|
|
return matcher.captured(1);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void BoostTestOutputReader::sendCompleteInformation()
|
|
|
|
|
{
|
|
|
|
|
QTC_ASSERT(m_result != ResultType::Invalid, return);
|
2023-01-14 16:25:51 +01:00
|
|
|
BoostTestResult result(id(), m_currentModule, m_projectFile, m_currentTest, m_currentSuite);
|
2018-12-07 10:00:23 +01:00
|
|
|
if (m_lineNumber) {
|
2023-01-14 16:25:51 +01:00
|
|
|
result.setLine(m_lineNumber);
|
|
|
|
|
result.setFileName(m_fileName);
|
|
|
|
|
} else if (const ITestTreeItem *it = result.findTestTreeItem()) {
|
|
|
|
|
result.setLine(it->line());
|
|
|
|
|
result.setFileName(it->filePath());
|
2019-06-03 13:17:55 +02:00
|
|
|
}
|
2018-12-07 10:00:23 +01:00
|
|
|
|
2023-01-14 16:25:51 +01:00
|
|
|
result.setDescription(m_description);
|
|
|
|
|
result.setResult(m_result);
|
|
|
|
|
reportResult(result);
|
2018-12-07 10:00:23 +01:00
|
|
|
m_result = ResultType::Invalid;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void BoostTestOutputReader::handleMessageMatch(const QRegularExpressionMatch &match)
|
|
|
|
|
{
|
|
|
|
|
m_fileName = constructSourceFilePath(m_buildDir, match.captured(1));
|
|
|
|
|
m_lineNumber = match.captured(2).toInt();
|
|
|
|
|
|
|
|
|
|
const QString &content = match.captured(3);
|
|
|
|
|
if (content.startsWith("info:")) {
|
|
|
|
|
if (m_currentTest.isEmpty() || m_logLevel > LogLevel::UnitScope) {
|
|
|
|
|
QString tmp = caseFromContent(content);
|
|
|
|
|
if (!tmp.isEmpty())
|
|
|
|
|
m_currentTest = tmp;
|
|
|
|
|
}
|
|
|
|
|
m_result = ResultType::Pass;
|
|
|
|
|
m_description = content;
|
|
|
|
|
} else if (content.startsWith("error:")) {
|
|
|
|
|
if (m_currentTest.isEmpty() || m_logLevel > LogLevel::UnitScope)
|
|
|
|
|
m_currentTest = caseFromContent(content);
|
|
|
|
|
m_result = ResultType::Fail;
|
2019-05-27 13:31:32 +02:00
|
|
|
if (m_reportLevel == ReportLevel::No)
|
|
|
|
|
++m_summary[ResultType::Fail];
|
2018-12-07 10:00:23 +01:00
|
|
|
m_description = content;
|
|
|
|
|
} else if (content.startsWith("fatal error:")) {
|
|
|
|
|
if (m_currentTest.isEmpty() || m_logLevel > LogLevel::UnitScope)
|
|
|
|
|
m_currentTest = caseFromContent(content);
|
|
|
|
|
m_result = ResultType::MessageFatal;
|
2020-09-23 09:17:39 +02:00
|
|
|
++m_summary[ResultType::MessageFatal];
|
2018-12-07 10:00:23 +01:00
|
|
|
m_description = content;
|
|
|
|
|
} else if (content.startsWith("last checkpoint:")) {
|
|
|
|
|
if (m_currentTest.isEmpty() || m_logLevel > LogLevel::UnitScope)
|
|
|
|
|
m_currentTest = caseFromContent(content);
|
|
|
|
|
m_result = ResultType::MessageInfo;
|
|
|
|
|
m_description = content;
|
|
|
|
|
} else if (content.startsWith("Entering")) {
|
|
|
|
|
m_result = ResultType::TestStart;
|
|
|
|
|
const QString type = match.captured(8);
|
|
|
|
|
if (type == "case") {
|
|
|
|
|
m_currentTest = match.captured(9);
|
2022-07-13 18:31:56 +02:00
|
|
|
m_description = Tr::tr("Executing test case %1").arg(m_currentTest);
|
2018-12-07 10:00:23 +01:00
|
|
|
} else if (type == "suite") {
|
2020-09-23 09:11:25 +02:00
|
|
|
if (m_currentSuite.isEmpty())
|
|
|
|
|
m_currentSuite = match.captured(9);
|
|
|
|
|
else
|
|
|
|
|
m_currentSuite.append("/").append(match.captured(9));
|
|
|
|
|
m_currentTest.clear();
|
2022-07-13 18:31:56 +02:00
|
|
|
m_description = Tr::tr("Executing test suite %1").arg(m_currentSuite);
|
2018-12-07 10:00:23 +01:00
|
|
|
}
|
|
|
|
|
} else if (content.startsWith("Leaving")) {
|
|
|
|
|
const QString type = match.captured(10);
|
|
|
|
|
if (type == "case") {
|
|
|
|
|
if (m_currentTest != match.captured(11) && m_currentTest.isEmpty())
|
|
|
|
|
m_currentTest = match.captured(11);
|
|
|
|
|
m_result = ResultType::TestEnd;
|
2023-06-20 08:00:49 +02:00
|
|
|
m_description = Tr::tr("Test execution took %1.").arg(match.captured(12));
|
2018-12-07 10:00:23 +01:00
|
|
|
} else if (type == "suite") {
|
2020-09-23 09:11:25 +02:00
|
|
|
if (!m_currentSuite.isEmpty()) {
|
|
|
|
|
int index = m_currentSuite.lastIndexOf('/');
|
|
|
|
|
if (QTC_GUARD(m_currentSuite.mid(index + 1) == match.captured(11))) {
|
|
|
|
|
if (index == -1)
|
|
|
|
|
m_currentSuite.clear();
|
|
|
|
|
else
|
|
|
|
|
m_currentSuite = m_currentSuite.left(index);
|
|
|
|
|
}
|
|
|
|
|
} else if (match.capturedLength(11)) { // FIXME remove this branch?
|
|
|
|
|
QTC_CHECK(false);
|
2018-12-07 10:00:23 +01:00
|
|
|
m_currentSuite = match.captured(11);
|
2020-09-23 09:11:25 +02:00
|
|
|
}
|
2018-12-07 10:00:23 +01:00
|
|
|
m_currentTest.clear();
|
|
|
|
|
m_result = ResultType::TestEnd;
|
2023-06-20 08:00:49 +02:00
|
|
|
m_description = Tr::tr("Test suite execution took %1.").arg(match.captured(12));
|
2018-12-07 10:00:23 +01:00
|
|
|
}
|
|
|
|
|
} else if (content.startsWith("Test case ")) {
|
|
|
|
|
m_currentTest = match.captured(4);
|
|
|
|
|
m_result = ResultType::Skip;
|
2019-05-27 13:31:32 +02:00
|
|
|
if (m_reportLevel == ReportLevel::Confirm || m_reportLevel == ReportLevel::No)
|
|
|
|
|
++m_summary[ResultType::Skip];
|
2018-12-07 10:00:23 +01:00
|
|
|
m_description = content;
|
|
|
|
|
}
|
2019-05-27 09:07:40 +02:00
|
|
|
|
|
|
|
|
if (m_result != ResultType::Invalid) // we got a new result
|
|
|
|
|
sendCompleteInformation();
|
2018-12-07 10:00:23 +01:00
|
|
|
}
|
|
|
|
|
|
2019-11-11 08:03:16 +01:00
|
|
|
void BoostTestOutputReader::processOutputLine(const QByteArray &outputLine)
|
2018-12-07 10:00:23 +01:00
|
|
|
{
|
|
|
|
|
static QRegularExpression newTestStart("^Running (\\d+) test cases?\\.\\.\\.$");
|
|
|
|
|
static QRegularExpression dependency("^Including test case (.+) as a dependency of "
|
|
|
|
|
"test case (.+)$");
|
|
|
|
|
static QRegularExpression messages("^(.+)\\((\\d+)\\): (info: (.+)|error: (.+)|"
|
|
|
|
|
"fatal error: (.+)|last checkpoint: (.+)"
|
|
|
|
|
"|Entering test (case|suite) \"(.+)\""
|
|
|
|
|
"|Leaving test (case|suite) \"(.+)\"; testing time: (\\d+.+)"
|
|
|
|
|
"|Test case \"(.+)\" is skipped because .+$)$");
|
|
|
|
|
static QRegularExpression moduleMssg("^(Entering test module \"(.+)\"|"
|
|
|
|
|
"Leaving test module \"(.+)\"; testing time: (\\d+.+))$");
|
|
|
|
|
static QRegularExpression noAssertion("^Test case (.*) did not check any assertions$");
|
|
|
|
|
|
|
|
|
|
static QRegularExpression summaryPreamble("^\\s*Test (module|suite|case) \"(.*)\" has "
|
2019-05-27 13:31:32 +02:00
|
|
|
"(failed|passed)( with:)?$");
|
2018-12-07 10:00:23 +01:00
|
|
|
static QRegularExpression summarySkip("^\\s+Test case \"(.*)\" was skipped$");
|
|
|
|
|
static QRegularExpression summaryDetail("^\\s+(\\d+) test cases? out of (\\d+) "
|
2019-05-27 13:31:32 +02:00
|
|
|
"(failed|passed|skipped)$");
|
2018-12-07 10:00:23 +01:00
|
|
|
static QRegularExpression summaryAssertion("^\\s+(\\d+) assertions? out of (\\d+) "
|
|
|
|
|
"(failed|passed)$");
|
|
|
|
|
|
|
|
|
|
static QRegularExpression finish("^\\*{3} (\\d+) failure(s are| is) detected in the "
|
|
|
|
|
"test module \"(.*)\"$");
|
|
|
|
|
QString noErrors("*** No errors detected");
|
|
|
|
|
|
2019-11-06 14:26:40 +01:00
|
|
|
const QString line = removeCommandlineColors(QString::fromUtf8(outputLine));
|
2018-12-07 10:00:23 +01:00
|
|
|
if (line.trimmed().isEmpty())
|
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
QRegularExpressionMatch match = messages.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
handleMessageMatch(match);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match = dependency.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
if (m_result != ResultType::Invalid)
|
|
|
|
|
sendCompleteInformation();
|
2023-01-14 16:25:51 +01:00
|
|
|
BoostTestResult result(id(), m_currentModule, m_projectFile);
|
|
|
|
|
result.setDescription(match.captured(0));
|
|
|
|
|
result.setResult(ResultType::MessageInfo);
|
|
|
|
|
reportResult(result);
|
2018-12-07 10:00:23 +01:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match = newTestStart.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
if (m_result != ResultType::Invalid)
|
|
|
|
|
sendCompleteInformation();
|
|
|
|
|
m_testCaseCount = match.captured(1).toInt();
|
|
|
|
|
m_description.clear();
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match = moduleMssg.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
if (m_result != ResultType::Invalid)
|
|
|
|
|
sendCompleteInformation();
|
|
|
|
|
if (match.captured(1).startsWith("Entering")) {
|
|
|
|
|
m_currentModule = match.captured(2);
|
2023-01-14 16:25:51 +01:00
|
|
|
BoostTestResult result(id(), m_currentModule, m_projectFile);
|
|
|
|
|
result.setDescription(Tr::tr("Executing test module %1").arg(m_currentModule));
|
|
|
|
|
result.setResult(ResultType::TestStart);
|
|
|
|
|
reportResult(result);
|
2018-12-07 10:00:23 +01:00
|
|
|
m_description.clear();
|
|
|
|
|
} else {
|
|
|
|
|
QTC_CHECK(m_currentModule == match.captured(3));
|
2023-01-14 16:25:51 +01:00
|
|
|
BoostTestResult result(id(), m_currentModule, m_projectFile);
|
2023-06-20 08:00:49 +02:00
|
|
|
result.setDescription(Tr::tr("Test module execution took %1.").arg(match.captured(4)));
|
2023-01-14 16:25:51 +01:00
|
|
|
result.setResult(ResultType::TestEnd);
|
|
|
|
|
reportResult(result);
|
2018-12-07 10:00:23 +01:00
|
|
|
|
|
|
|
|
m_currentTest.clear();
|
|
|
|
|
m_currentSuite.clear();
|
|
|
|
|
m_currentModule.clear();
|
|
|
|
|
m_description.clear();
|
|
|
|
|
}
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match = noAssertion.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
if (m_result != ResultType::Invalid)
|
|
|
|
|
sendCompleteInformation();
|
|
|
|
|
const QString caseWithOptionalSuite = match.captured(1);
|
|
|
|
|
int index = caseWithOptionalSuite.lastIndexOf('/');
|
|
|
|
|
if (index == -1) {
|
|
|
|
|
QTC_CHECK(caseWithOptionalSuite == m_currentTest);
|
|
|
|
|
} else {
|
|
|
|
|
QTC_CHECK(caseWithOptionalSuite.mid(index + 1) == m_currentTest);
|
|
|
|
|
int sIndex = caseWithOptionalSuite.lastIndexOf('/', index - 1);
|
|
|
|
|
if (sIndex == -1) {
|
|
|
|
|
QTC_CHECK(caseWithOptionalSuite.left(index) == m_currentSuite);
|
|
|
|
|
m_currentSuite = caseWithOptionalSuite.left(index); // FIXME should not be necessary - but we currently do not care for the whole suite path
|
|
|
|
|
} else {
|
|
|
|
|
QTC_CHECK(caseWithOptionalSuite.mid(sIndex + 1, index - sIndex - 1) == m_currentSuite);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
createAndReportResult(match.captured(0), ResultType::MessageWarn);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match = summaryPreamble.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
createAndReportResult(match.captured(0), ResultType::MessageInfo);
|
2019-05-27 13:31:32 +02:00
|
|
|
if (m_reportLevel == ReportLevel::Detailed || match.captured(4).isEmpty()) {
|
|
|
|
|
if (match.captured(1) == "case") {
|
|
|
|
|
if (match.captured(3) == "passed")
|
|
|
|
|
++m_summary[ResultType::Pass];
|
|
|
|
|
else
|
|
|
|
|
++m_summary[ResultType::Fail];
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-12-07 10:00:23 +01:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match = summaryDetail.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
createAndReportResult(match.captured(0), ResultType::MessageInfo);
|
2019-05-27 13:31:32 +02:00
|
|
|
int report = match.captured(1).toInt();
|
|
|
|
|
QString type = match.captured(3);
|
|
|
|
|
if (m_reportLevel != ReportLevel::Detailed) {
|
|
|
|
|
if (type == "passed")
|
|
|
|
|
m_summary[ResultType::Pass] += report;
|
|
|
|
|
else if (type == "failed")
|
|
|
|
|
m_summary[ResultType::Fail] += report;
|
|
|
|
|
else if (type == "skipped")
|
|
|
|
|
m_summary[ResultType::Skip] += report;
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-07 10:00:23 +01:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match = summaryAssertion.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
createAndReportResult(match.captured(0), ResultType::MessageInfo);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match = summarySkip.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
createAndReportResult(match.captured(0), ResultType::MessageInfo);
|
2019-05-27 13:31:32 +02:00
|
|
|
if (m_reportLevel == ReportLevel::Detailed)
|
|
|
|
|
++m_summary[ResultType::Skip];
|
2018-12-07 10:00:23 +01:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match = finish.match(line);
|
|
|
|
|
if (match.hasMatch()) {
|
|
|
|
|
if (m_result != ResultType::Invalid)
|
|
|
|
|
sendCompleteInformation();
|
2023-01-14 16:25:51 +01:00
|
|
|
BoostTestResult result(id(), {}, m_projectFile);
|
|
|
|
|
const int failed = match.captured(1).toInt();
|
|
|
|
|
const int fatals = m_summary.value(ResultType::MessageFatal);
|
2022-07-13 18:31:56 +02:00
|
|
|
QString txt = Tr::tr("%1 failures detected in %2.").arg(failed).arg(match.captured(3));
|
2023-01-14 16:25:51 +01:00
|
|
|
const int passed = qMax(0, m_testCaseCount - failed);
|
2018-12-07 10:00:23 +01:00
|
|
|
if (m_testCaseCount != -1)
|
2022-07-13 18:31:56 +02:00
|
|
|
txt.append(' ').append(Tr::tr("%1 tests passed.").arg(passed));
|
2023-01-14 16:25:51 +01:00
|
|
|
result.setDescription(txt);
|
|
|
|
|
result.setResult(ResultType::MessageInfo);
|
|
|
|
|
reportResult(result);
|
2019-05-27 13:31:32 +02:00
|
|
|
if (m_reportLevel == ReportLevel::Confirm) { // for the final summary
|
|
|
|
|
m_summary[ResultType::Pass] += passed;
|
2020-09-23 09:17:39 +02:00
|
|
|
m_summary[ResultType::Fail] += failed - fatals;
|
2019-05-27 13:31:32 +02:00
|
|
|
}
|
2018-12-07 10:00:23 +01:00
|
|
|
m_testCaseCount = -1;
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (line == noErrors) {
|
|
|
|
|
if (m_result != ResultType::Invalid)
|
|
|
|
|
sendCompleteInformation();
|
2023-01-14 16:25:51 +01:00
|
|
|
BoostTestResult result(id(), {}, m_projectFile);
|
2022-07-13 18:31:56 +02:00
|
|
|
QString txt = Tr::tr("No errors detected.");
|
2018-12-07 10:00:23 +01:00
|
|
|
if (m_testCaseCount != -1)
|
2022-07-13 18:31:56 +02:00
|
|
|
txt.append(' ').append(Tr::tr("%1 tests passed.").arg(m_testCaseCount));
|
2023-01-14 16:25:51 +01:00
|
|
|
result.setDescription(txt);
|
|
|
|
|
result.setResult(ResultType::MessageInfo);
|
|
|
|
|
reportResult(result);
|
2019-05-27 13:31:32 +02:00
|
|
|
if (m_reportLevel == ReportLevel::Confirm) // for the final summary
|
|
|
|
|
m_summary.insert(ResultType::Pass, m_testCaseCount);
|
2018-12-07 10:00:23 +01:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// some plain output...
|
|
|
|
|
if (!m_description.isEmpty())
|
|
|
|
|
m_description.append('\n');
|
|
|
|
|
m_description.append(line);
|
|
|
|
|
}
|
|
|
|
|
|
2019-11-11 08:03:16 +01:00
|
|
|
void BoostTestOutputReader::processStdError(const QByteArray &outputLine)
|
2018-12-07 10:00:23 +01:00
|
|
|
{
|
|
|
|
|
// we need to process the output, Boost UTF uses both out streams
|
2020-07-27 17:52:59 +02:00
|
|
|
checkForSanitizerOutput(outputLine);
|
2019-11-11 08:03:16 +01:00
|
|
|
processOutputLine(outputLine);
|
2019-11-06 14:25:16 +01:00
|
|
|
emit newOutputLineAvailable(outputLine, OutputChannel::StdErr);
|
2018-12-07 10:00:23 +01:00
|
|
|
}
|
|
|
|
|
|
2023-01-14 16:25:51 +01:00
|
|
|
// Creates a result pre-filled with the reader's current module/suite/test
// context, for use by generic reporting helpers.
TestResult BoostTestOutputReader::createDefaultResult() const
{
    return BoostTestResult(id(), m_currentModule, m_projectFile, m_currentTest, m_currentSuite);
}
|
|
|
|
|
|
2023-01-16 22:10:10 +01:00
|
|
|
void BoostTestOutputReader::onDone(int exitCode)
|
|
|
|
|
{
|
2019-05-27 13:31:32 +02:00
|
|
|
if (m_reportLevel == ReportLevel::No && m_testCaseCount != -1) {
|
2023-01-16 22:10:10 +01:00
|
|
|
const int reportedFailsAndSkips = m_summary[ResultType::Fail] + m_summary[ResultType::Skip];
|
2019-05-27 13:31:32 +02:00
|
|
|
m_summary.insert(ResultType::Pass, m_testCaseCount - reportedFailsAndSkips);
|
|
|
|
|
}
|
2018-12-07 10:00:23 +01:00
|
|
|
// boost::exit_success (0), boost::exit_test_failure (201)
|
|
|
|
|
// or boost::exit_exception_failure (200)
|
|
|
|
|
// be graceful and do not add a fatal for exit_test_failure
|
|
|
|
|
if (m_logLevel == LogLevel::Nothing && m_reportLevel == ReportLevel::No) {
|
|
|
|
|
switch (exitCode) {
|
|
|
|
|
case 0:
|
2023-06-20 08:00:49 +02:00
|
|
|
reportNoOutputFinish(Tr::tr("Running tests exited with %1.").arg("boost::exit_success"),
|
2018-12-07 10:00:23 +01:00
|
|
|
ResultType::Pass);
|
|
|
|
|
break;
|
|
|
|
|
case 200:
|
|
|
|
|
reportNoOutputFinish(
|
2023-06-20 08:00:49 +02:00
|
|
|
Tr::tr("Running tests exited with %1.").arg("boost::exit_test_exception"),
|
2018-12-07 10:00:23 +01:00
|
|
|
ResultType::MessageFatal);
|
|
|
|
|
break;
|
|
|
|
|
case 201:
|
2023-06-20 08:00:49 +02:00
|
|
|
reportNoOutputFinish(Tr::tr("Running tests exited with %1.")
|
|
|
|
|
.arg("boost::exit_test_failure"), ResultType::Fail);
|
2018-12-07 10:00:23 +01:00
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
} else if (exitCode != 0 && exitCode != 201 && !m_description.isEmpty()) {
|
|
|
|
|
if (m_description.startsWith("Test setup error:")) {
|
2022-07-13 18:31:56 +02:00
|
|
|
createAndReportResult(m_description + '\n' + Tr::tr("Executable: %1")
|
2018-12-07 10:00:23 +01:00
|
|
|
.arg(id()), ResultType::MessageWarn);
|
|
|
|
|
} else {
|
2022-07-13 18:31:56 +02:00
|
|
|
createAndReportResult(Tr::tr("Running tests failed.\n%1\nExecutable: %2")
|
2018-12-07 10:00:23 +01:00
|
|
|
.arg(m_description).arg(id()), ResultType::MessageFatal);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void BoostTestOutputReader::reportNoOutputFinish(const QString &description, ResultType type)
|
|
|
|
|
{
|
2023-01-14 16:25:51 +01:00
|
|
|
BoostTestResult result(id(), m_currentModule, m_projectFile,
|
|
|
|
|
Tr::tr("Running tests without output."));
|
|
|
|
|
result.setDescription(description);
|
|
|
|
|
result.setResult(type);
|
|
|
|
|
reportResult(result);
|
2018-12-07 10:00:23 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
} // namespace Internal
|
|
|
|
|
} // namespace Autotest
|