diff --git a/plugins/autotest/testnavigationwidget.cpp b/plugins/autotest/testnavigationwidget.cpp
index dea7b97874d..df2f0dcdd04 100644
--- a/plugins/autotest/testnavigationwidget.cpp
+++ b/plugins/autotest/testnavigationwidget.cpp
@@ -196,13 +196,13 @@ void TestNavigationWidget::onParsingFinished()
void TestNavigationWidget::initializeFilterMenu()
{
QAction *action = new QAction(m_filterMenu);
- action->setText(tr("Show init and cleanup functions"));
+ action->setText(tr("Show Init and Cleanup Functions"));
action->setCheckable(true);
action->setChecked(false);
action->setData(TestTreeSortFilterModel::ShowInitAndCleanup);
m_filterMenu->addAction(action);
action = new QAction(m_filterMenu);
- action->setText(tr("Show data functions"));
+ action->setText(tr("Show Data Functions"));
action->setCheckable(true);
action->setChecked(false);
action->setData(TestTreeSortFilterModel::ShowTestData);
diff --git a/plugins/autotest/testresultspane.cpp b/plugins/autotest/testresultspane.cpp
index a19daad9d8e..feace48f7ce 100644
--- a/plugins/autotest/testresultspane.cpp
+++ b/plugins/autotest/testresultspane.cpp
@@ -324,7 +324,7 @@ void TestResultsPane::initializeFilterMenu()
void TestResultsPane::updateSummaryLabel()
{
-    QString labelText = QString::fromLatin1("<p>Test Summary: %1 %2, %3 %4")
+    QString labelText = QString::fromLatin1("<p>Test summary: %1 %2, %3 %4")
.arg(QString::number(m_model->resultTypeCount(Result::PASS)), tr("passes"),
QString::number(m_model->resultTypeCount(Result::FAIL)), tr("fails"));
int count = m_model->resultTypeCount(Result::UNEXPECTED_PASS);
diff --git a/plugins/autotest/testrunner.cpp b/plugins/autotest/testrunner.cpp
index 6b1c68a9d4e..29e5476ce35 100644
--- a/plugins/autotest/testrunner.cpp
+++ b/plugins/autotest/testrunner.cpp
@@ -139,7 +139,7 @@ void performTestRun(QFutureInterface &futureInterface,
QString commandFilePath = executableFilePath(testConfiguration->targetFile(), environment);
if (commandFilePath.isEmpty()) {
emitTestResultCreated(FaultyTestResult(Result::MESSAGE_FATAL,
- QObject::tr("*** Could not find command '%1' ***").arg(testConfiguration->targetFile())));
+ QObject::tr("Could not find command \"%1\".").arg(testConfiguration->targetFile())));
continue;
}
@@ -166,7 +166,7 @@ void performTestRun(QFutureInterface &futureInterface,
testProcess.kill();
testProcess.waitForFinished();
emitTestResultCreated(FaultyTestResult(Result::MESSAGE_FATAL,
- QObject::tr("*** Test Run canceled by user ***")));
+ QObject::tr("Test run canceled by user.")));
}
qApp->processEvents();
}
@@ -177,7 +177,7 @@ void performTestRun(QFutureInterface &futureInterface,
testProcess.kill();
testProcess.waitForFinished();
emitTestResultCreated(FaultyTestResult(Result::MESSAGE_FATAL, QObject::tr(
- "*** Test Case canceled due to timeout ***\nMaybe raise the timeout?")));
+ "Test case canceled due to timeout. \nMaybe raise the timeout?")));
}
}
}
@@ -200,13 +200,13 @@ void TestRunner::runTests()
if (!config->project()) {
toBeRemoved.append(config);
TestResultsPane::instance()->addTestResult(FaultyTestResult(Result::MESSAGE_WARN,
- tr("*** Project is null for '%1' - removing from Test Run ***\n"
- "This might be the case for a faulty environment or similar."
+ tr("Project is null for \"%1\". Removing from test run.\n"
+ "Check the test environment."
).arg(config->displayName())));
}
if (displayRunConfigWarnings && config->guessedConfiguration()) {
TestResultsPane::instance()->addTestResult(FaultyTestResult(Result::MESSAGE_WARN,
- tr("*** Project's run configuration was guessed for '%1' ***\n"
+ tr("Project's run configuration was guessed for \"%1\".\n"
"This might cause trouble during execution.").arg(config->displayName())));
}
}
@@ -217,16 +217,16 @@ void TestRunner::runTests()
if (m_selectedTests.empty()) {
TestResultsPane::instance()->addTestResult(FaultyTestResult(Result::MESSAGE_WARN,
- tr("*** No tests selected - canceling Test Run ***")));
+ tr("No tests selected. Canceling test run.")));
return;
}
ProjectExplorer::Project *project = m_selectedTests.at(0)->project();
if (!project) {
TestResultsPane::instance()->addTestResult(FaultyTestResult(Result::MESSAGE_WARN,
- tr("*** Project is null - canceling Test Run ***\n"
- "Actually only Desktop kits are supported - make sure the "
- "current active kit is a Desktop kit.")));
+ tr("Project is null. Canceling test run.\n"
+ "Only desktop kits are supported. Make sure the "
+ "currently active kit is a desktop kit.")));
return;
}
@@ -235,7 +235,7 @@ void TestRunner::runTests()
if (projectExplorerSettings.buildBeforeDeploy) {
if (!project->hasActiveBuildSettings()) {
TestResultsPane::instance()->addTestResult(FaultyTestResult(Result::MESSAGE_FATAL,
- tr("*** Project is not configured - canceling Test Run ***")));
+ tr("Project is not configured. Canceling test run.")));
return;
}
buildProject(project);
@@ -245,7 +245,7 @@ void TestRunner::runTests()
if (!m_buildSucceeded) {
TestResultsPane::instance()->addTestResult(FaultyTestResult(Result::MESSAGE_FATAL,
- tr("*** Build failed - canceling Test Run ***")));
+ tr("Build failed. Canceling test run.")));
return;
}
}
diff --git a/plugins/autotest/testsettingspage.ui b/plugins/autotest/testsettingspage.ui
index 164f9f7f5c1..b7154268835 100644
--- a/plugins/autotest/testsettingspage.ui
+++ b/plugins/autotest/testsettingspage.ui
@@ -32,7 +32,7 @@
-        <string>Timeout used when executing test cases. This will apply for each test case on its own, not the whole project.</string>
+        <string>Timeout used when executing each test case.</string>
Timeout:
@@ -83,10 +83,10 @@
-        <string>If checked Internal Messages won't be shown by default. (You can still enable them on the test results filter)</string>
+        <string>Hides internal messages by default. You can still enable them by using the test results filter.</string>
-        <string>Omit Internal Messages</string>
+        <string>Omit internal messages</string>
true
@@ -96,10 +96,10 @@
-        <string>If checked Warnings regarding a guessed Run Configuration won't be shown.</string>
+        <string>Hides warnings related to a guessed run configuration.</string>
-        <string>Omit Run Configuration Warnings</string>
+        <string>Omit run configuration warnings</string>
@@ -161,7 +161,7 @@
-        <string>Use Walltime metrics for executing benchmarks. (default)</string>
+        <string>Uses walltime metrics for executing benchmarks (default).</string>
Walltime
@@ -180,10 +180,10 @@
-        <string>Use tick counter for executing benchmarks.</string>
+        <string>Uses tick counter when executing benchmarks.</string>
-        <string>Tickcounter</string>
+        <string>Tick counter</string>
@@ -196,10 +196,10 @@
-        <string>Use event counter when executing benchmarks.</string>
+        <string>Uses event counter when executing benchmarks.</string>
-        <string>Eventcounter</string>
+        <string>Event counter</string>
@@ -215,7 +215,7 @@
-        <string>Use callgrind when executing benchmark. (valgrind must be installed)</string>
+        <string>Uses Valgrind Callgrind when executing benchmarks (it must be installed).</string>
Callgrind
@@ -234,7 +234,7 @@
-        <string>Use perf when executing benchmarks. (perf must be installed)</string>
+        <string>Uses Perf when executing benchmarks (it must be installed).</string>
Perf
diff --git a/plugins/autotest/testtreemodel.cpp b/plugins/autotest/testtreemodel.cpp
index 2ae5693febf..5bba6b7c3c3 100644
--- a/plugins/autotest/testtreemodel.cpp
+++ b/plugins/autotest/testtreemodel.cpp
@@ -213,9 +213,8 @@ QVariant TestTreeModel::data(const QModelIndex &index, int role) const
switch(role) {
case Qt::ToolTipRole:
if (item->type() == TestTreeItem::TEST_CLASS && item->name().isEmpty())
-            return tr("Unnamed test cases can't be (un)checked - avoid this by assigning a name."
-                      "<p>Having unnamed test cases invalidates the check state of named test "
-                      "cases with the same main.cpp when executing selected tests.</p>");
+            return tr("<p>Give all test cases a name to ensure correct behavior "
+                      "when running test cases and to be able to select them.</p>");
return item->filePath();
case Qt::DecorationRole:
return testTreeIcon(item->type());
diff --git a/plugins/autotest/testxmloutputreader.cpp b/plugins/autotest/testxmloutputreader.cpp
index 31d21b959ad..fb71f56881e 100644
--- a/plugins/autotest/testxmloutputreader.cpp
+++ b/plugins/autotest/testxmloutputreader.cpp
@@ -191,7 +191,7 @@ void TestXmlOutputReader::processOutput()
lineNumber = 0;
readingDescription = false;
testResultCreated(TestResult(QString(), QString(), QString(), Result::MESSAGE_CURRENT_TEST,
- QObject::tr("Entering Test Function %1::%2").arg(className).arg(testCase)));
+ QObject::tr("Entering test function %1::%2").arg(className).arg(testCase)));
continue;
}
        if (xmlStartsWith(line, QLatin1String("<Duration msecs=\""), duration))
            continue;
        if (line == QLatin1String("</TestFunction>") && !duration.isEmpty()) {
TestResult testResult(className, testCase, QString(), Result::MESSAGE_INTERNAL,
- QObject::tr("execution took %1ms").arg(duration));
+ QObject::tr("Execution took %1 ms.").arg(duration));
testResultCreated(testResult);
emit increaseProgress();
        } else if (line == QLatin1String("</TestCase>") && !duration.isEmpty()) {
TestResult testResult(className, QString(), QString(), Result::MESSAGE_INTERNAL,
- QObject::tr("Test execution took %1ms").arg(duration));
+ QObject::tr("Test execution took %1 ms.").arg(duration));
testResultCreated(testResult);
} else if (readingDescription) {
if (line.endsWith(QLatin1String("]]>"))) {
@@ -250,10 +250,10 @@ void TestXmlOutputReader::processOutput()
}
        } else if (xmlStartsWith(line, QLatin1String("<QtVersion>"), qtVersion)) {
testResultCreated(FaultyTestResult(Result::MESSAGE_INTERNAL,
- QObject::tr("Qt Version: %1").arg(qtVersion)));
+ QObject::tr("Qt version: %1").arg(qtVersion)));
        } else if (xmlStartsWith(line, QLatin1String("<QTestVersion>"), qtestVersion)) {
testResultCreated(FaultyTestResult(Result::MESSAGE_INTERNAL,
- QObject::tr("QTest Version: %1").arg(qtestVersion)));
+ QObject::tr("QTest version: %1").arg(qtestVersion)));
} else {
// qDebug() << "Unhandled line:" << line; // TODO remove
}