From d01b474b46a9408e76b490be4b78e93d8cb94092 Mon Sep 17 00:00:00 2001 From: Ivan Donchevskii Date: Tue, 6 Feb 2018 15:48:24 +0100 Subject: [PATCH] Clang: Refactor TokenInfos Remove code duplication. Change-Id: Ib1859f2c3a04f66d0f0b669b4e93a7fc06ab8e61 Reviewed-by: Nikolai Kosjar --- src/plugins/clangcodemodel/clangcodemodel.pro | 4 +- src/plugins/clangcodemodel/clangcodemodel.qbs | 4 +- .../clangcodemodelunittestfiles.pri | 4 +- .../clangeditordocumentprocessor.cpp | 4 +- ...pp => clanghighlightingresultreporter.cpp} | 16 +- ...er.h => clanghighlightingresultreporter.h} | 4 +- .../source/clangbackendclangipc-source.pri | 7 +- .../source/clangcodemodelserver.cpp | 2 +- .../clangbackend/source/clangdocuments.cpp | 2 +- .../source/clangtranslationunit.cpp | 25 +- .../source/clangtranslationunit.h | 14 +- ...clangupdateextradocumentannotationsjob.cpp | 1 - .../clangbackend/source/fulltokeninfos.cpp | 93 ----- .../clangbackend/source/fulltokeninfos.h | 59 ---- src/tools/clangbackend/source/tokeninfos.cpp | 111 ------ src/tools/clangbackend/source/tokeninfos.h | 72 ---- .../clangbackend/source/tokenprocessor.h | 137 ++++++++ ...fositerator.h => tokenprocessoriterator.h} | 34 +- tests/unit/unittest/clangdocument-test.cpp | 2 +- .../unit/unittest/gtest-creator-printing.cpp | 13 +- tests/unit/unittest/gtest-creator-printing.h | 13 +- ...pp => highlightingresultreporter-test.cpp} | 42 +-- ...infos-test.cpp => tokenprocessor-test.cpp} | 321 +++++++++--------- tests/unit/unittest/unittest.pro | 6 +- 24 files changed, 394 insertions(+), 596 deletions(-) rename src/plugins/clangcodemodel/{clangtokeninfosreporter.cpp => clanghighlightingresultreporter.cpp} (93%) rename src/plugins/clangcodemodel/{clangtokeninfosreporter.h => clanghighlightingresultreporter.h} (94%) delete mode 100644 src/tools/clangbackend/source/fulltokeninfos.cpp delete mode 100644 src/tools/clangbackend/source/fulltokeninfos.h delete mode 100644 src/tools/clangbackend/source/tokeninfos.cpp delete mode 100644 src/tools/clangbackend/source/tokeninfos.h create mode 100644 src/tools/clangbackend/source/tokenprocessor.h rename src/tools/clangbackend/source/{tokeninfositerator.h => tokenprocessoriterator.h} (68%) rename tests/unit/unittest/{tokeninfosreporter-test.cpp => highlightingresultreporter-test.cpp} (75%) rename tests/unit/unittest/{tokeninfos-test.cpp => tokenprocessor-test.cpp} (80%) diff --git a/src/plugins/clangcodemodel/clangcodemodel.pro b/src/plugins/clangcodemodel/clangcodemodel.pro index ad0d93f2e1c..6541f55f3cc 100644 --- a/src/plugins/clangcodemodel/clangcodemodel.pro +++ b/src/plugins/clangcodemodel/clangcodemodel.pro @@ -31,8 +31,8 @@ SOURCES += \ clangfixitoperationsextractor.cpp \ clangfollowsymbol.cpp \ clangfunctionhintmodel.cpp \ + clanghighlightingresultreporter.cpp \ clanghoverhandler.cpp \ - clangtokeninfosreporter.cpp \ clangmodelmanagersupport.cpp \ clangpreprocessorassistproposalitem.cpp \ clangprojectsettings.cpp \ @@ -69,6 +69,7 @@ HEADERS += \ clangfixitoperationsextractor.h \ clangfollowsymbol.h \ clangfunctionhintmodel.h \ + clanghighlightingresultreporter.h \ clanghoverhandler.h \ clangisdiagnosticrelatedtolocation.h \ clangmodelmanagersupport.h \ @@ -77,7 +78,6 @@ HEADERS += \ clangprojectsettingswidget.h \ clangrefactoringengine.h \ clangtextmark.h \ - clangtokeninfosreporter.h \ clanguiheaderondiskmanager.h \ clangutils.h diff --git a/src/plugins/clangcodemodel/clangcodemodel.qbs b/src/plugins/clangcodemodel/clangcodemodel.qbs index a1c3d4d6df9..e9519f37e42 100644 --- 
a/src/plugins/clangcodemodel/clangcodemodel.qbs +++ b/src/plugins/clangcodemodel/clangcodemodel.qbs @@ -82,10 +82,10 @@ QtcPlugin { "clangfollowsymbol.h", "clangfunctionhintmodel.cpp", "clangfunctionhintmodel.h", + "clanghighlightingresultreporter.cpp", + "clanghighlightingresultreporter.h", "clanghoverhandler.cpp", "clanghoverhandler.h", - "clangtokeninfosreporter.cpp", - "clangtokeninfosreporter.h", "clangisdiagnosticrelatedtolocation.h", "clangmodelmanagersupport.cpp", "clangmodelmanagersupport.h", diff --git a/src/plugins/clangcodemodel/clangcodemodelunittestfiles.pri b/src/plugins/clangcodemodel/clangcodemodelunittestfiles.pri index 50b9817fdfd..4d4bb226391 100644 --- a/src/plugins/clangcodemodel/clangcodemodelunittestfiles.pri +++ b/src/plugins/clangcodemodel/clangcodemodelunittestfiles.pri @@ -7,7 +7,7 @@ SOURCES += \ $$PWD/clangcompletioncontextanalyzer.cpp \ $$PWD/clangdiagnosticfilter.cpp \ $$PWD/clangfixitoperation.cpp \ - $$PWD/clangtokeninfosreporter.cpp + $$PWD/clanghighlightingresultreporter.cpp HEADERS += \ $$PWD/clangactivationsequencecontextprocessor.h \ @@ -16,5 +16,5 @@ HEADERS += \ $$PWD/clangcompletioncontextanalyzer.h \ $$PWD/clangdiagnosticfilter.h \ $$PWD/clangfixitoperation.h \ - $$PWD/clangtokeninfosreporter.h \ + $$PWD/clanghighlightingresultreporter.h \ $$PWD/clangisdiagnosticrelatedtolocation.h diff --git a/src/plugins/clangcodemodel/clangeditordocumentprocessor.cpp b/src/plugins/clangcodemodel/clangeditordocumentprocessor.cpp index 3fc9ed6baf7..d353d06dbc7 100644 --- a/src/plugins/clangcodemodel/clangeditordocumentprocessor.cpp +++ b/src/plugins/clangcodemodel/clangeditordocumentprocessor.cpp @@ -30,7 +30,7 @@ #include "clangfixitoperation.h" #include "clangfixitoperationsextractor.h" #include "clangmodelmanagersupport.h" -#include "clangtokeninfosreporter.h" +#include "clanghighlightingresultreporter.h" #include "clangprojectsettings.h" #include "clangutils.h" @@ -253,7 +253,7 @@ void ClangEditorDocumentProcessor::updateHighlighting( m_tokenInfos = tokenInfos; m_semanticHighlighter.setHighlightingRunner( [tokenInfos]() { - auto *reporter = new TokenInfosReporter(tokenInfos); + auto *reporter = new HighlightingResultReporter(tokenInfos); return reporter->start(); }); m_semanticHighlighter.run(); diff --git a/src/plugins/clangcodemodel/clangtokeninfosreporter.cpp b/src/plugins/clangcodemodel/clanghighlightingresultreporter.cpp similarity index 93% rename from src/plugins/clangcodemodel/clangtokeninfosreporter.cpp rename to src/plugins/clangcodemodel/clanghighlightingresultreporter.cpp index a7a116e3486..66a45c1c2d9 100644 --- a/src/plugins/clangcodemodel/clangtokeninfosreporter.cpp +++ b/src/plugins/clangcodemodel/clanghighlightingresultreporter.cpp @@ -23,7 +23,7 @@ ** ****************************************************************************/ -#include "clangtokeninfosreporter.h" +#include "clanghighlightingresultreporter.h" #include #include @@ -134,14 +134,14 @@ TextEditor::HighlightingResult toHighlightingResult( namespace ClangCodeModel { -TokenInfosReporter::TokenInfosReporter( +HighlightingResultReporter::HighlightingResultReporter( const QVector &tokenInfos) : m_tokenInfos(tokenInfos) { m_chunksToReport.reserve(m_chunkSize + 1); } -void TokenInfosReporter::reportChunkWise( +void HighlightingResultReporter::reportChunkWise( const TextEditor::HighlightingResult &highlightingResult) { if (m_chunksToReport.size() >= m_chunkSize) { @@ -156,7 +156,7 @@ void TokenInfosReporter::reportChunkWise( m_chunksToReport.append(highlightingResult); } -void 
TokenInfosReporter::reportAndClearCurrentChunks() +void HighlightingResultReporter::reportAndClearCurrentChunks() { m_flushRequested = false; m_flushLine = 0; @@ -167,18 +167,18 @@ void TokenInfosReporter::reportAndClearCurrentChunks() } } -void TokenInfosReporter::setChunkSize(int chunkSize) +void HighlightingResultReporter::setChunkSize(int chunkSize) { m_chunkSize = chunkSize; } -void TokenInfosReporter::run() +void HighlightingResultReporter::run() { run_internal(); reportFinished(); } -void TokenInfosReporter::run_internal() +void HighlightingResultReporter::run_internal() { if (isCanceled()) return; @@ -199,7 +199,7 @@ void TokenInfosReporter::run_internal() reportAndClearCurrentChunks(); } -QFuture TokenInfosReporter::start() +QFuture HighlightingResultReporter::start() { this->setRunnable(this); this->reportStarted(); diff --git a/src/plugins/clangcodemodel/clangtokeninfosreporter.h b/src/plugins/clangcodemodel/clanghighlightingresultreporter.h similarity index 94% rename from src/plugins/clangcodemodel/clangtokeninfosreporter.h rename to src/plugins/clangcodemodel/clanghighlightingresultreporter.h index d525ebc5984..0f57bc10d8e 100644 --- a/src/plugins/clangcodemodel/clangtokeninfosreporter.h +++ b/src/plugins/clangcodemodel/clanghighlightingresultreporter.h @@ -36,7 +36,7 @@ namespace ClangCodeModel { -class TokenInfosReporter: +class HighlightingResultReporter: public QObject, public QRunnable, public QFutureInterface @@ -44,7 +44,7 @@ class TokenInfosReporter: Q_OBJECT public: - TokenInfosReporter(const QVector &tokenInfos); + HighlightingResultReporter(const QVector &tokenInfos); void setChunkSize(int chunkSize); diff --git a/src/tools/clangbackend/source/clangbackendclangipc-source.pri b/src/tools/clangbackend/source/clangbackendclangipc-source.pri index df38df2b1f8..c0fc7f7b9e0 100644 --- a/src/tools/clangbackend/source/clangbackendclangipc-source.pri +++ b/src/tools/clangbackend/source/clangbackendclangipc-source.pri @@ -52,15 +52,14 @@ HEADERS += \ $$PWD/diagnosticsetiterator.h \ $$PWD/fixit.h \ $$PWD/fulltokeninfo.h \ - $$PWD/fulltokeninfos.h \ $$PWD/projectpart.h \ $$PWD/projects.h \ $$PWD/skippedsourceranges.h \ $$PWD/sourcelocation.h \ $$PWD/sourcerange.h \ $$PWD/tokeninfo.h \ - $$PWD/tokeninfos.h \ - $$PWD/tokeninfositerator.h \ + $$PWD/tokenprocessor.h \ + $$PWD/tokenprocessoriterator.h \ $$PWD/unsavedfile.h \ $$PWD/unsavedfiles.h \ $$PWD/utf8positionfromlinecolumn.h @@ -111,14 +110,12 @@ SOURCES += \ $$PWD/diagnosticset.cpp \ $$PWD/fixit.cpp \ $$PWD/fulltokeninfo.cpp \ - $$PWD/fulltokeninfos.cpp \ $$PWD/projectpart.cpp \ $$PWD/projects.cpp \ $$PWD/skippedsourceranges.cpp \ $$PWD/sourcelocation.cpp \ $$PWD/sourcerange.cpp \ $$PWD/tokeninfo.cpp \ - $$PWD/tokeninfos.cpp \ $$PWD/unsavedfile.cpp \ $$PWD/unsavedfiles.cpp \ $$PWD/utf8positionfromlinecolumn.cpp diff --git a/src/tools/clangbackend/source/clangcodemodelserver.cpp b/src/tools/clangbackend/source/clangcodemodelserver.cpp index 8cd029a0a61..354389ddb3c 100644 --- a/src/tools/clangbackend/source/clangcodemodelserver.cpp +++ b/src/tools/clangbackend/source/clangcodemodelserver.cpp @@ -30,7 +30,7 @@ #include "clangfilesystemwatcher.h" #include "codecompleter.h" #include "diagnosticset.h" -#include "tokeninfos.h" +#include "tokenprocessor.h" #include "clangexceptions.h" #include "skippedsourceranges.h" diff --git a/src/tools/clangbackend/source/clangdocuments.cpp b/src/tools/clangbackend/source/clangdocuments.cpp index 7a86baa3d49..ae8481cc563 100644 --- a/src/tools/clangbackend/source/clangdocuments.cpp +++ 
b/src/tools/clangbackend/source/clangdocuments.cpp @@ -26,7 +26,7 @@ #include "clangdocuments.h" #include -#include +#include #include #include #include diff --git a/src/tools/clangbackend/source/clangtranslationunit.cpp b/src/tools/clangbackend/source/clangtranslationunit.cpp index 94a342723ea..baabcaf50bf 100644 --- a/src/tools/clangbackend/source/clangtranslationunit.cpp +++ b/src/tools/clangbackend/source/clangtranslationunit.cpp @@ -31,13 +31,12 @@ #include "clangtranslationunitupdater.h" #include "clangfollowsymbol.h" #include "clangfollowsymboljob.h" +#include "tokenprocessor.h" #include #include #include #include -#include -#include #include #include #include @@ -201,34 +200,24 @@ Cursor TranslationUnit::cursor() const return clang_getTranslationUnitCursor(m_cxTranslationUnit); } -TokenInfos TranslationUnit::tokenInfos() const +TokenProcessor TranslationUnit::tokenInfos() const { return tokenInfosInRange(cursor().sourceRange()); } -TokenInfos TranslationUnit::tokenInfosInRange(const SourceRange &range) const +TokenProcessor TranslationUnit::tokenInfosInRange(const SourceRange &range) const { - CXToken *cxTokens = 0; - uint cxTokensCount = 0; - - clang_tokenize(m_cxTranslationUnit, range, &cxTokens, &cxTokensCount); - - return TokenInfos(m_cxTranslationUnit, cxTokens, cxTokensCount); + return TokenProcessor(m_cxTranslationUnit, range); } -FullTokenInfos TranslationUnit::fullTokenInfos() const +TokenProcessor TranslationUnit::fullTokenInfos() const { return fullTokenInfosInRange(cursor().sourceRange()); } -FullTokenInfos TranslationUnit::fullTokenInfosInRange(const SourceRange &range) const +TokenProcessor TranslationUnit::fullTokenInfosInRange(const SourceRange &range) const { - CXToken *cxTokens = 0; - uint cxTokensCount = 0; - - clang_tokenize(m_cxTranslationUnit, range, &cxTokens, &cxTokensCount); - - return FullTokenInfos(m_cxTranslationUnit, cxTokens, cxTokensCount); + return TokenProcessor(m_cxTranslationUnit, range); } SkippedSourceRanges TranslationUnit::skippedSourceRanges() const diff --git a/src/tools/clangbackend/source/clangtranslationunit.h b/src/tools/clangbackend/source/clangtranslationunit.h index b39763efc5d..70df60f5050 100644 --- a/src/tools/clangbackend/source/clangtranslationunit.h +++ b/src/tools/clangbackend/source/clangtranslationunit.h @@ -25,6 +25,9 @@ #pragma once +#include "fulltokeninfo.h" +#include "tokenprocessor.h" + #include #include @@ -34,9 +37,6 @@ namespace ClangBackEnd { class Cursor; class DiagnosticContainer; class DiagnosticSet; -class TokenInfoContainer; -class TokenInfos; -class FullTokenInfos; class ReferencesResult; class SkippedSourceRanges; class SourceLocation; @@ -101,11 +101,11 @@ public: Cursor cursorAt(const Utf8String &filePath, uint line, uint column) const; Cursor cursor() const; - TokenInfos tokenInfos() const; - TokenInfos tokenInfosInRange(const SourceRange &range) const; + TokenProcessor tokenInfos() const; + TokenProcessor tokenInfosInRange(const SourceRange &range) const; - FullTokenInfos fullTokenInfos() const; - FullTokenInfos fullTokenInfosInRange(const SourceRange &range) const; + TokenProcessor fullTokenInfos() const; + TokenProcessor fullTokenInfosInRange(const SourceRange &range) const; SkippedSourceRanges skippedSourceRanges() const; SourceRangeContainer followSymbol(uint line, uint column) const; diff --git a/src/tools/clangbackend/source/clangupdateextradocumentannotationsjob.cpp b/src/tools/clangbackend/source/clangupdateextradocumentannotationsjob.cpp index e2a52a6c618..d7713db748b 100644 --- 
a/src/tools/clangbackend/source/clangupdateextradocumentannotationsjob.cpp +++ b/src/tools/clangbackend/source/clangupdateextradocumentannotationsjob.cpp @@ -24,7 +24,6 @@ ****************************************************************************/ #include "clangupdateextradocumentannotationsjob.h" -#include "fulltokeninfos.h" #include #include diff --git a/src/tools/clangbackend/source/fulltokeninfos.cpp b/src/tools/clangbackend/source/fulltokeninfos.cpp deleted file mode 100644 index bdb9ef8f047..00000000000 --- a/src/tools/clangbackend/source/fulltokeninfos.cpp +++ /dev/null @@ -1,93 +0,0 @@ -/**************************************************************************** -** -** Copyright (C) 2018 The Qt Company Ltd. -** Contact: https://www.qt.io/licensing/ -** -** This file is part of Qt Creator. -** -** Commercial License Usage -** Licensees holding valid commercial Qt licenses may use this file in -** accordance with the commercial license agreement provided with the -** Software or, alternatively, in accordance with the terms contained in -** a written agreement between you and The Qt Company. For licensing terms -** and conditions see https://www.qt.io/terms-conditions. For further -** information use the contact form at https://www.qt.io/contact-us. -** -** GNU General Public License Usage -** Alternatively, this file may be used under the terms of the GNU -** General Public License version 3 as published by the Free Software -** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT -** included in the packaging of this file. Please review the following -** information to ensure the GNU General Public License requirements will -** be met: https://www.gnu.org/licenses/gpl-3.0.html. -** -****************************************************************************/ - -#include "fulltokeninfos.h" - -#include - -#include - -namespace ClangBackEnd { - -FullTokenInfos::FullTokenInfos(CXTranslationUnit cxTranslationUnit, CXToken *tokens, uint tokensCount) - : cxTranslationUnit(cxTranslationUnit), - cxTokens(tokens), - cxTokenCount(tokensCount) -{ - cxCursors.resize(tokensCount); - clang_annotateTokens(cxTranslationUnit, cxTokens, cxTokenCount, cxCursors.data()); -} - -FullTokenInfos::~FullTokenInfos() -{ - clang_disposeTokens(cxTranslationUnit, cxTokens, cxTokenCount); -} - -QVector FullTokenInfos::toTokenInfoContainers() const -{ - QVector containers; - containers.reserve(static_cast(size())); - - const auto isValidTokenInfo = [] (const TokenInfo &tokenInfo) { - // Do not exclude StringLiteral because it can be a filename for an #include - return !tokenInfo.hasInvalidMainType() - && !tokenInfo.hasMainType(HighlightingType::NumberLiteral) - && !tokenInfo.hasMainType(HighlightingType::Comment); - }; - for (size_t index = 0; index < cxCursors.size(); ++index) { - FullTokenInfo fullTokenInfo = (*this)[index]; - if (isValidTokenInfo(fullTokenInfo)) - containers.push_back(fullTokenInfo); - } - - return containers; -} - -bool FullTokenInfos::isEmpty() const -{ - return cxTokenCount == 0; -} - -bool FullTokenInfos::isNull() const -{ - return cxTokens == nullptr; -} - -size_t FullTokenInfos::size() const -{ - return cxTokenCount; -} - -FullTokenInfo FullTokenInfos::operator[](size_t index) const -{ - FullTokenInfo tokenInfo(cxCursors[index], - cxTokens + index, - cxTranslationUnit, - currentOutputArgumentRanges); - tokenInfo.evaluate(); - return tokenInfo; -} - -} // namespace ClangBackEnd diff --git a/src/tools/clangbackend/source/fulltokeninfos.h 
b/src/tools/clangbackend/source/fulltokeninfos.h deleted file mode 100644 index 970104d005c..00000000000 --- a/src/tools/clangbackend/source/fulltokeninfos.h +++ /dev/null @@ -1,59 +0,0 @@ -/**************************************************************************** -** -** Copyright (C) 2018 The Qt Company Ltd. -** Contact: https://www.qt.io/licensing/ -** -** This file is part of Qt Creator. -** -** Commercial License Usage -** Licensees holding valid commercial Qt licenses may use this file in -** accordance with the commercial license agreement provided with the -** Software or, alternatively, in accordance with the terms contained in -** a written agreement between you and The Qt Company. For licensing terms -** and conditions see https://www.qt.io/terms-conditions. For further -** information use the contact form at https://www.qt.io/contact-us. -** -** GNU General Public License Usage -** Alternatively, this file may be used under the terms of the GNU -** General Public License version 3 as published by the Free Software -** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT -** included in the packaging of this file. Please review the following -** information to ensure the GNU General Public License requirements will -** be met: https://www.gnu.org/licenses/gpl-3.0.html. -** -****************************************************************************/ - -#pragma once - -#include "fulltokeninfo.h" - -#include - -#include - -namespace ClangBackEnd { - -class FullTokenInfos -{ -public: - FullTokenInfos() = default; - FullTokenInfos(CXTranslationUnit cxTranslationUnit, CXToken *tokens, uint tokensCount); - ~FullTokenInfos(); - - bool isEmpty() const; - bool isNull() const; - size_t size() const; - - FullTokenInfo operator[](size_t index) const; - QVector toTokenInfoContainers() const; - -private: - mutable std::vector currentOutputArgumentRanges; - CXTranslationUnit cxTranslationUnit = nullptr; - CXToken *const cxTokens = nullptr; - const uint cxTokenCount = 0; - - std::vector cxCursors; -}; - -} // namespace ClangBackEnd diff --git a/src/tools/clangbackend/source/tokeninfos.cpp b/src/tools/clangbackend/source/tokeninfos.cpp deleted file mode 100644 index bb6107e95bc..00000000000 --- a/src/tools/clangbackend/source/tokeninfos.cpp +++ /dev/null @@ -1,111 +0,0 @@ -/**************************************************************************** -** -** Copyright (C) 2016 The Qt Company Ltd. -** Contact: https://www.qt.io/licensing/ -** -** This file is part of Qt Creator. -** -** Commercial License Usage -** Licensees holding valid commercial Qt licenses may use this file in -** accordance with the commercial license agreement provided with the -** Software or, alternatively, in accordance with the terms contained in -** a written agreement between you and The Qt Company. For licensing terms -** and conditions see https://www.qt.io/terms-conditions. For further -** information use the contact form at https://www.qt.io/contact-us. -** -** GNU General Public License Usage -** Alternatively, this file may be used under the terms of the GNU -** General Public License version 3 as published by the Free Software -** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT -** included in the packaging of this file. Please review the following -** information to ensure the GNU General Public License requirements will -** be met: https://www.gnu.org/licenses/gpl-3.0.html. 
-** -****************************************************************************/ - -#include "tokeninfos.h" - -#include "tokeninfocontainer.h" - -#include - -namespace ClangBackEnd { - -TokenInfos::TokenInfos(CXTranslationUnit cxTranslationUnit, CXToken *tokens, uint tokensCount) - : cxTranslationUnit(cxTranslationUnit), - cxTokens(tokens), - cxTokenCount(tokensCount) -{ - cxCursors.resize(tokensCount); - clang_annotateTokens(cxTranslationUnit, cxTokens, cxTokenCount, cxCursors.data()); -} - -TokenInfos::~TokenInfos() -{ - clang_disposeTokens(cxTranslationUnit, cxTokens, cxTokenCount); -} - -TokenInfos::const_iterator TokenInfos::begin() const -{ - return const_iterator(cxCursors.cbegin(), - cxTokens, - cxTranslationUnit, - currentOutputArgumentRanges); -} - -TokenInfos::const_iterator TokenInfos::end() const -{ - return const_iterator(cxCursors.cend(), - cxTokens + cxTokenCount, - cxTranslationUnit, - currentOutputArgumentRanges); -} - -QVector TokenInfos::toTokenInfoContainers() const -{ - QVector containers; - containers.reserve(size()); - - const auto isValidTokenInfo = [] (const TokenInfo &tokenInfo) { - return !tokenInfo.hasInvalidMainType() - && !tokenInfo.hasMainType(HighlightingType::NumberLiteral) - && !tokenInfo.hasMainType(HighlightingType::Comment); - }; - for (const TokenInfo &tokenInfo : *this) - if (isValidTokenInfo(tokenInfo)) - containers.push_back(tokenInfo); - - return containers; -} - -bool TokenInfos::currentOutputArgumentRangesAreEmpty() const -{ - return currentOutputArgumentRanges.empty(); -} - -bool TokenInfos::isEmpty() const -{ - return cxTokenCount == 0; -} - -bool ClangBackEnd::TokenInfos::isNull() const -{ - return cxTokens == nullptr; -} - -uint TokenInfos::size() const -{ - return cxTokenCount; -} - -TokenInfo TokenInfos::operator[](size_t index) const -{ - TokenInfo tokenInfo(cxCursors[index], - cxTokens + index, - cxTranslationUnit, - currentOutputArgumentRanges); - tokenInfo.evaluate(); - return tokenInfo; -} - -} // namespace ClangBackEnd diff --git a/src/tools/clangbackend/source/tokeninfos.h b/src/tools/clangbackend/source/tokeninfos.h deleted file mode 100644 index 4437ec55b32..00000000000 --- a/src/tools/clangbackend/source/tokeninfos.h +++ /dev/null @@ -1,72 +0,0 @@ -/**************************************************************************** -** -** Copyright (C) 2016 The Qt Company Ltd. -** Contact: https://www.qt.io/licensing/ -** -** This file is part of Qt Creator. -** -** Commercial License Usage -** Licensees holding valid commercial Qt licenses may use this file in -** accordance with the commercial license agreement provided with the -** Software or, alternatively, in accordance with the terms contained in -** a written agreement between you and The Qt Company. For licensing terms -** and conditions see https://www.qt.io/terms-conditions. For further -** information use the contact form at https://www.qt.io/contact-us. -** -** GNU General Public License Usage -** Alternatively, this file may be used under the terms of the GNU -** General Public License version 3 as published by the Free Software -** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT -** included in the packaging of this file. Please review the following -** information to ensure the GNU General Public License requirements will -** be met: https://www.gnu.org/licenses/gpl-3.0.html. 
-** -****************************************************************************/ - -#pragma once - -#include "tokeninfositerator.h" - -#include - -#include - -namespace ClangBackEnd { - -using uint = unsigned int; -class TokenInfoContainer; - -class TokenInfos -{ -public: - using const_iterator = TokenInfosIterator; - using value_type = TokenInfo; - -public: - TokenInfos() = default; - TokenInfos(CXTranslationUnit cxTranslationUnit, CXToken *tokens, uint tokensCount); - ~TokenInfos(); - - bool isEmpty() const; - bool isNull() const; - uint size() const; - - TokenInfo operator[](size_t index) const; - - const_iterator begin() const; - const_iterator end() const; - - QVector toTokenInfoContainers() const; - - bool currentOutputArgumentRangesAreEmpty() const; - -private: - mutable std::vector currentOutputArgumentRanges; - CXTranslationUnit cxTranslationUnit = nullptr; - CXToken *const cxTokens = nullptr; - const uint cxTokenCount = 0; - - std::vector cxCursors; -}; - -} // namespace ClangBackEnd diff --git a/src/tools/clangbackend/source/tokenprocessor.h b/src/tools/clangbackend/source/tokenprocessor.h new file mode 100644 index 00000000000..48cd9478a88 --- /dev/null +++ b/src/tools/clangbackend/source/tokenprocessor.h @@ -0,0 +1,137 @@ +/**************************************************************************** +** +** Copyright (C) 2016 The Qt Company Ltd. +** Contact: https://www.qt.io/licensing/ +** +** This file is part of Qt Creator. +** +** Commercial License Usage +** Licensees holding valid commercial Qt licenses may use this file in +** accordance with the commercial license agreement provided with the +** Software or, alternatively, in accordance with the terms contained in +** a written agreement between you and The Qt Company. For licensing terms +** and conditions see https://www.qt.io/terms-conditions. For further +** information use the contact form at https://www.qt.io/contact-us. +** +** GNU General Public License Usage +** Alternatively, this file may be used under the terms of the GNU +** General Public License version 3 as published by the Free Software +** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT +** included in the packaging of this file. Please review the following +** information to ensure the GNU General Public License requirements will +** be met: https://www.gnu.org/licenses/gpl-3.0.html. 
+** +****************************************************************************/ + +#pragma once + +#include "sourcerange.h" +#include "tokenprocessoriterator.h" +#include "tokeninfocontainer.h" + +#include + +#include + +#include + +namespace ClangBackEnd { + +using uint = unsigned int; + +template +class TokenProcessor +{ + static_assert (std::is_base_of::value, + "Use TokenProcessor only with classes derived from TokenInfo"); + +public: + using const_iterator = TokenProcessorIterator; + using value_type = T; + +public: + TokenProcessor() = default; + TokenProcessor(CXTranslationUnit cxTranslationUnit, const SourceRange &range) + : cxTranslationUnit(cxTranslationUnit) + { + uint cxTokensCount = 0; + clang_tokenize(cxTranslationUnit, range, &cxTokens, &cxTokensCount); + cxCursors.resize(cxTokensCount); + clang_annotateTokens(cxTranslationUnit, cxTokens, cxTokensCount, cxCursors.data()); + } + ~TokenProcessor() + { + clang_disposeTokens(cxTranslationUnit, cxTokens, cxCursors.size()); + } + + bool isEmpty() const + { + return cxCursors.empty(); + } + bool isNull() const + { + return cxTokens == nullptr; + } + uint size() const + { + return cxCursors.size(); + } + + const_iterator begin() const + { + return const_iterator(cxCursors.cbegin(), + cxTokens, + cxTranslationUnit, + currentOutputArgumentRanges); + } + + const_iterator end() const + { + return const_iterator(cxCursors.cend(), + cxTokens + cxCursors.size(), + cxTranslationUnit, + currentOutputArgumentRanges); + } + + + T operator[](size_t index) const + { + T tokenInfo(cxCursors[index], cxTokens + index, cxTranslationUnit, + currentOutputArgumentRanges); + tokenInfo.evaluate(); + return tokenInfo; + } + + QVector toTokenInfoContainers() const + { + QVector containers; + containers.reserve(size()); + + const auto isValidTokenInfo = [] (const T &tokenInfo) { + return !tokenInfo.hasInvalidMainType() + && !tokenInfo.hasMainType(HighlightingType::NumberLiteral) + && !tokenInfo.hasMainType(HighlightingType::Comment); + }; + for (size_t index = 0; index < cxCursors.size(); ++index) { + T tokenInfo = (*this)[index]; + if (isValidTokenInfo(tokenInfo)) + containers.push_back(tokenInfo); + } + + return containers; + } + + bool currentOutputArgumentRangesAreEmpty() const + { + return currentOutputArgumentRanges.empty(); + } + +private: + mutable std::vector currentOutputArgumentRanges; + CXTranslationUnit cxTranslationUnit = nullptr; + CXToken *cxTokens = nullptr; + + std::vector cxCursors; +}; + +} // namespace ClangBackEnd diff --git a/src/tools/clangbackend/source/tokeninfositerator.h b/src/tools/clangbackend/source/tokenprocessoriterator.h similarity index 68% rename from src/tools/clangbackend/source/tokeninfositerator.h rename to src/tools/clangbackend/source/tokenprocessoriterator.h index 9ecce3c2574..b12a28ded51 100644 --- a/src/tools/clangbackend/source/tokeninfositerator.h +++ b/src/tools/clangbackend/source/tokenprocessoriterator.h @@ -39,20 +39,21 @@ using uint = unsigned int; class DiagnosticSet; class Diagnostic; -class TokenInfosIterator : public std::iterator +template +class TokenProcessorIterator : public std::iterator { public: - TokenInfosIterator(std::vector::const_iterator cxCursorIterator, - CXToken *cxToken, - CXTranslationUnit cxTranslationUnit, - std::vector ¤tOutputArgumentRanges) + TokenProcessorIterator(std::vector::const_iterator cxCursorIterator, + CXToken *cxToken, + CXTranslationUnit cxTranslationUnit, + std::vector ¤tOutputArgumentRanges) : cxCursorIterator(cxCursorIterator), cxToken(cxToken), 
cxTranslationUnit(cxTranslationUnit), currentOutputArgumentRanges(currentOutputArgumentRanges) {} - TokenInfosIterator& operator++() + TokenProcessorIterator& operator++() { ++cxCursorIterator; ++cxToken; @@ -60,30 +61,27 @@ public: return *this; } - TokenInfosIterator operator++(int) + TokenProcessorIterator operator++(int) { - return TokenInfosIterator(cxCursorIterator++, - cxToken++, - cxTranslationUnit, - currentOutputArgumentRanges); + return TokenProcessorIterator(cxCursorIterator++, + cxToken++, + cxTranslationUnit, + currentOutputArgumentRanges); } - bool operator==(TokenInfosIterator other) const + bool operator==(TokenProcessorIterator other) const { return cxCursorIterator == other.cxCursorIterator; } - bool operator!=(TokenInfosIterator other) const + bool operator!=(TokenProcessorIterator other) const { return cxCursorIterator != other.cxCursorIterator; } - TokenInfo operator*() + T operator*() { - TokenInfo tokenInfo(*cxCursorIterator, - cxToken, - cxTranslationUnit, - currentOutputArgumentRanges); + T tokenInfo(*cxCursorIterator, cxToken, cxTranslationUnit, currentOutputArgumentRanges); tokenInfo.evaluate(); return tokenInfo; } diff --git a/tests/unit/unittest/clangdocument-test.cpp b/tests/unit/unittest/clangdocument-test.cpp index a830dbbe1a6..7ce6924e48e 100644 --- a/tests/unit/unittest/clangdocument-test.cpp +++ b/tests/unit/unittest/clangdocument-test.cpp @@ -32,7 +32,7 @@ #include #include #include -#include +#include #include #include #include diff --git a/tests/unit/unittest/gtest-creator-printing.cpp b/tests/unit/unittest/gtest-creator-printing.cpp index 5cb3330afc6..53ed6682501 100644 --- a/tests/unit/unittest/gtest-creator-printing.cpp +++ b/tests/unit/unittest/gtest-creator-printing.cpp @@ -37,12 +37,13 @@ #include #include #include +#include #include #include #include #include #include -#include +#include #include #include #include @@ -820,11 +821,12 @@ std::ostream &operator<<(std::ostream &os, const TokenInfo& tokenInfo) return os; } -std::ostream &operator<<(std::ostream &out, const TokenInfos &tokenInfos) +template +std::ostream &operator<<(std::ostream &out, const TokenProcessor &tokenInfos) { out << "["; - for (const TokenInfo &entry : tokenInfos) + for (const T &entry : tokenInfos) out << entry; out << "]"; @@ -832,6 +834,11 @@ std::ostream &operator<<(std::ostream &out, const TokenInfos &tokenInfos) return out; } +template +std::ostream &operator<<(std::ostream &out, const TokenProcessor &tokenInfos); +template +std::ostream &operator<<(std::ostream &out, const TokenProcessor &tokenInfos); + std::ostream &operator<<(std::ostream &out, const FilePath &filePath) { return out << "(" << filePath.path() << ", " << filePath.slashIndex() << ")"; diff --git a/tests/unit/unittest/gtest-creator-printing.h b/tests/unit/unittest/gtest-creator-printing.h index 8515d960f36..66ae0f8f2c4 100644 --- a/tests/unit/unittest/gtest-creator-printing.h +++ b/tests/unit/unittest/gtest-creator-printing.h @@ -98,6 +98,7 @@ class DynamicASTMatcherDiagnosticContextContainer; class DynamicASTMatcherDiagnosticMessageContainer; class FileContainer; class FixItContainer; +class FullTokenInfo; class HighlightingMarkContainer; class NativeFilePath; class PrecompiledHeadersUpdatedMessage; @@ -119,13 +120,14 @@ class SourceRangesAndDiagnosticsForQueryMessage; class SourceRangesContainer; class SourceRangesForQueryMessage; class SourceRangeWithTextContainer; +class TokenInfo; +template +class TokenProcessor; class UnregisterUnsavedFilesForEditorMessage; class UpdatePchProjectPartsMessage; 
class UpdateTranslationUnitsForEditorMessage; class UpdateVisibleTranslationUnitsMessage; class FilePath; -class TokenInfo; -class TokenInfos; template class AbstractFilePathView; using FilePathView = AbstractFilePathView<'/'>; @@ -193,7 +195,12 @@ std::ostream &operator<<(std::ostream &out, const UpdateVisibleTranslationUnitsM std::ostream &operator<<(std::ostream &out, const FilePath &filePath); std::ostream &operator<<(std::ostream &out, const FilePathId &filePathId); std::ostream &operator<<(std::ostream &out, const TokenInfo& tokenInfo); -std::ostream &operator<<(std::ostream &out, const TokenInfos &tokenInfos); +template +std::ostream &operator<<(std::ostream &out, const TokenProcessor &tokenInfos); +extern template +std::ostream &operator<<(std::ostream &out, const TokenProcessor &tokenInfos); +extern template +std::ostream &operator<<(std::ostream &out, const TokenProcessor &tokenInfos); std::ostream &operator<<(std::ostream &out, const FilePathView &filePathView); std::ostream &operator<<(std::ostream &out, const NativeFilePathView &nativeFilePathView); std::ostream &operator<<(std::ostream &out, const ProjectPartEntry &projectPartEntry); diff --git a/tests/unit/unittest/tokeninfosreporter-test.cpp b/tests/unit/unittest/highlightingresultreporter-test.cpp similarity index 75% rename from tests/unit/unittest/tokeninfosreporter-test.cpp rename to tests/unit/unittest/highlightingresultreporter-test.cpp index 189701e4179..e83a51ca23d 100644 --- a/tests/unit/unittest/tokeninfosreporter-test.cpp +++ b/tests/unit/unittest/highlightingresultreporter-test.cpp @@ -31,14 +31,14 @@ #include #include #include -#include -#include +#include +#include #include #include #include using ClangBackEnd::Cursor; -using ClangBackEnd::TokenInfos; +using ClangBackEnd::TokenProcessor; using ClangBackEnd::TokenInfoContainer; using ClangBackEnd::HighlightingType; using ClangBackEnd::Document; @@ -61,7 +61,7 @@ struct Data { documents}; }; -class TokenInfosReporter : public ::testing::Test +class HighlightingResultReporter : public ::testing::Test { public: static void SetUpTestCase(); @@ -88,9 +88,9 @@ QVector generateTokenInfos(uint count) return container; } -TEST_F(TokenInfosReporter, StartAndFinish) +TEST_F(HighlightingResultReporter, StartAndFinish) { - auto reporter = new ClangCodeModel::TokenInfosReporter(noTokenInfos()); + auto reporter = new ClangCodeModel::HighlightingResultReporter(noTokenInfos()); auto future = reporter->start(); @@ -98,9 +98,9 @@ TEST_F(TokenInfosReporter, StartAndFinish) ASSERT_THAT(future.isFinished(), true); } -TEST_F(TokenInfosReporter, ReportNothingIfNothingToReport) +TEST_F(HighlightingResultReporter, ReportNothingIfNothingToReport) { - auto reporter = new ClangCodeModel::TokenInfosReporter(generateTokenInfos(0)); + auto reporter = new ClangCodeModel::HighlightingResultReporter(generateTokenInfos(0)); auto future = reporter->start(); @@ -108,9 +108,9 @@ TEST_F(TokenInfosReporter, ReportNothingIfNothingToReport) ASSERT_THAT(monitor.resultsReadyCounter(), 0L); } -TEST_F(TokenInfosReporter, ReportSingleResultAsOneChunk) +TEST_F(HighlightingResultReporter, ReportSingleResultAsOneChunk) { - auto reporter = new ClangCodeModel::TokenInfosReporter(generateTokenInfos(1)); + auto reporter = new ClangCodeModel::HighlightingResultReporter(generateTokenInfos(1)); reporter->setChunkSize(1); auto future = reporter->start(); @@ -119,9 +119,9 @@ TEST_F(TokenInfosReporter, ReportSingleResultAsOneChunk) ASSERT_THAT(monitor.resultsReadyCounter(), 1L); } -TEST_F(TokenInfosReporter, 
ReportRestIfChunkSizeNotReached) +TEST_F(HighlightingResultReporter, ReportRestIfChunkSizeNotReached) { - auto reporter = new ClangCodeModel::TokenInfosReporter(generateTokenInfos(1)); + auto reporter = new ClangCodeModel::HighlightingResultReporter(generateTokenInfos(1)); const int notReachedChunkSize = 100; reporter->setChunkSize(notReachedChunkSize); @@ -131,9 +131,9 @@ TEST_F(TokenInfosReporter, ReportRestIfChunkSizeNotReached) ASSERT_THAT(monitor.resultsReadyCounter(), 1L); } -TEST_F(TokenInfosReporter, ReportChunksWithoutRest) +TEST_F(HighlightingResultReporter, ReportChunksWithoutRest) { - auto reporter = new ClangCodeModel::TokenInfosReporter(generateTokenInfos(4)); + auto reporter = new ClangCodeModel::HighlightingResultReporter(generateTokenInfos(4)); reporter->setChunkSize(1); auto future = reporter->start(); @@ -142,9 +142,9 @@ TEST_F(TokenInfosReporter, ReportChunksWithoutRest) ASSERT_THAT(monitor.resultsReadyCounter(), 2L); } -TEST_F(TokenInfosReporter, ReportSingleChunkAndRest) +TEST_F(HighlightingResultReporter, ReportSingleChunkAndRest) { - auto reporter = new ClangCodeModel::TokenInfosReporter(generateTokenInfos(5)); + auto reporter = new ClangCodeModel::HighlightingResultReporter(generateTokenInfos(5)); reporter->setChunkSize(2); auto future = reporter->start(); @@ -153,14 +153,14 @@ TEST_F(TokenInfosReporter, ReportSingleChunkAndRest) ASSERT_THAT(monitor.resultsReadyCounter(), 2L); } -TEST_F(TokenInfosReporter, ReportCompleteLines) +TEST_F(HighlightingResultReporter, ReportCompleteLines) { QVector tokenInfos { TokenInfoContainer(1, 1, 1, {HighlightingType::Type, {}}), TokenInfoContainer(1, 2, 1, {HighlightingType::Type, {}}), TokenInfoContainer(2, 1, 1, {HighlightingType::Type, {}}), }; - auto reporter = new ClangCodeModel::TokenInfosReporter(tokenInfos); + auto reporter = new ClangCodeModel::HighlightingResultReporter(tokenInfos); reporter->setChunkSize(1); auto future = reporter->start(); @@ -169,14 +169,14 @@ TEST_F(TokenInfosReporter, ReportCompleteLines) ASSERT_THAT(monitor.resultsReadyCounter(), 2L); } -Data *TokenInfosReporter::d; +Data *HighlightingResultReporter::d; -void TokenInfosReporter::SetUpTestCase() +void HighlightingResultReporter::SetUpTestCase() { d = new Data; } -void TokenInfosReporter::TearDownTestCase() +void HighlightingResultReporter::TearDownTestCase() { delete d; d = nullptr; diff --git a/tests/unit/unittest/tokeninfos-test.cpp b/tests/unit/unittest/tokenprocessor-test.cpp similarity index 80% rename from tests/unit/unittest/tokeninfos-test.cpp rename to tests/unit/unittest/tokenprocessor-test.cpp index bfd3bf22fa3..6aa03a5742d 100644 --- a/tests/unit/unittest/tokeninfos-test.cpp +++ b/tests/unit/unittest/tokenprocessor-test.cpp @@ -32,14 +32,13 @@ #include #include #include +#include #include #include #include #include #include -#include -#include -#include +#include #include #include @@ -47,7 +46,7 @@ using ClangBackEnd::Cursor; using ClangBackEnd::HighlightingTypes; using ClangBackEnd::TokenInfo; -using ClangBackEnd::TokenInfos; +using ClangBackEnd::TokenProcessor; using ClangBackEnd::HighlightingType; using ClangBackEnd::Document; using ClangBackEnd::Documents; @@ -144,7 +143,7 @@ struct Data { document.translationUnit().cxTranslationUnit()}; }; -class TokenInfos : public ::testing::Test +class TokenProcessor : public ::testing::Test { public: static void SetUpTestCase(); @@ -157,21 +156,21 @@ protected: const TranslationUnit &translationUnit = d->translationUnit; }; -TEST_F(TokenInfos, CreateNullInformations) +TEST_F(TokenProcessor, 
CreateNullInformations) { - ::TokenInfos infos; + ::TokenProcessor infos; ASSERT_TRUE(infos.isNull()); } -TEST_F(TokenInfos, NullInformationsAreEmpty) +TEST_F(TokenProcessor, NullInformationsAreEmpty) { - ::TokenInfos infos; + ::TokenProcessor infos; ASSERT_TRUE(infos.isEmpty()); } -TEST_F(TokenInfos, IsNotNull) +TEST_F(TokenProcessor, IsNotNull) { const auto aRange = translationUnit.sourceRange(3, 1, 5, 1); @@ -180,7 +179,7 @@ TEST_F(TokenInfos, IsNotNull) ASSERT_FALSE(infos.isNull()); } -TEST_F(TokenInfos, IteratorBeginEnd) +TEST_F(TokenProcessor, IteratorBeginEnd) { const auto aRange = translationUnit.sourceRange(3, 1, 5, 1); const auto infos = translationUnit.tokenInfosInRange(aRange); @@ -190,7 +189,7 @@ TEST_F(TokenInfos, IteratorBeginEnd) ASSERT_THAT(infos.end(), endIterator); } -TEST_F(TokenInfos, ForFullTranslationUnitRange) +TEST_F(TokenProcessor, ForFullTranslationUnitRange) { const auto infos = translationUnit.tokenInfos(); @@ -198,7 +197,7 @@ TEST_F(TokenInfos, ForFullTranslationUnitRange) Contains(IsHighlightingMark(277u, 5u, 15u, HighlightingType::Function)))); } -TEST_F(TokenInfos, Size) +TEST_F(TokenProcessor, Size) { const auto range = translationUnit.sourceRange(5, 5, 5, 10); @@ -207,616 +206,616 @@ TEST_F(TokenInfos, Size) ASSERT_THAT(infos.size(), 1); } -TEST_F(TokenInfos, DISABLED_Keyword) +TEST_F(TokenProcessor, DISABLED_Keyword) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(5, 12)); ASSERT_THAT(infos[0], IsHighlightingMark(5u, 5u, 6u, HighlightingType::Keyword)); } -TEST_F(TokenInfos, StringLiteral) +TEST_F(TokenProcessor, StringLiteral) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(1, 29)); ASSERT_THAT(infos[4], IsHighlightingMark(1u, 24u, 10u, HighlightingType::StringLiteral)); } -TEST_F(TokenInfos, Utf8StringLiteral) +TEST_F(TokenProcessor, Utf8StringLiteral) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(2, 33)); ASSERT_THAT(infos[4], IsHighlightingMark(2u, 24u, 12u, HighlightingType::StringLiteral)); } -TEST_F(TokenInfos, RawStringLiteral) +TEST_F(TokenProcessor, RawStringLiteral) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(3, 34)); ASSERT_THAT(infos[4], IsHighlightingMark(3u, 24u, 13u, HighlightingType::StringLiteral)); } -TEST_F(TokenInfos, CharacterLiteral) +TEST_F(TokenProcessor, CharacterLiteral) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(4, 28)); ASSERT_THAT(infos[3], IsHighlightingMark(4u, 24u, 3u, HighlightingType::StringLiteral)); } -TEST_F(TokenInfos, IntegerLiteral) +TEST_F(TokenProcessor, IntegerLiteral) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(23, 26)); ASSERT_THAT(infos[3], IsHighlightingMark(23u, 24u, 1u, HighlightingType::NumberLiteral)); } -TEST_F(TokenInfos, FloatLiteral) +TEST_F(TokenProcessor, FloatLiteral) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(24, 29)); ASSERT_THAT(infos[3], IsHighlightingMark(24u, 24u, 4u, HighlightingType::NumberLiteral)); } -TEST_F(TokenInfos, FunctionDefinition) +TEST_F(TokenProcessor, FunctionDefinition) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(45, 20)); ASSERT_THAT(infos[1], HasThreeTypes(HighlightingType::Function, HighlightingType::Declaration, HighlightingType::FunctionDefinition)); } -TEST_F(TokenInfos, MemberFunctionDefinition) +TEST_F(TokenProcessor, MemberFunctionDefinition) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(52, 29)); ASSERT_THAT(infos[1], HasThreeTypes(HighlightingType::Function, 
HighlightingType::Declaration, HighlightingType::FunctionDefinition)); } -TEST_F(TokenInfos, VirtualMemberFunctionDefinitionOutsideOfClassBody) +TEST_F(TokenProcessor, VirtualMemberFunctionDefinitionOutsideOfClassBody) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(586, 37)); ASSERT_THAT(infos[3], HasThreeTypes(HighlightingType::VirtualFunction, HighlightingType::Declaration, HighlightingType::FunctionDefinition)); } -TEST_F(TokenInfos, VirtualMemberFunctionDefinitionInsideOfClassBody) +TEST_F(TokenProcessor, VirtualMemberFunctionDefinitionInsideOfClassBody) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(589, 47)); ASSERT_THAT(infos[2], HasThreeTypes(HighlightingType::VirtualFunction, HighlightingType::Declaration, HighlightingType::FunctionDefinition)); } -TEST_F(TokenInfos, FunctionDeclaration) +TEST_F(TokenProcessor, FunctionDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(55, 32)); ASSERT_THAT(infos[1], HasTwoTypes(HighlightingType::Function, HighlightingType::Declaration)); } -TEST_F(TokenInfos, MemberFunctionDeclaration) +TEST_F(TokenProcessor, MemberFunctionDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(59, 27)); ASSERT_THAT(infos[1], HasTwoTypes(HighlightingType::Function, HighlightingType::Declaration)); } -TEST_F(TokenInfos, MemberFunctionReference) +TEST_F(TokenProcessor, MemberFunctionReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(104, 35)); ASSERT_THAT(infos[0], IsHighlightingMark(104u, 9u, 23u, HighlightingType::Function)); } -TEST_F(TokenInfos, FunctionCall) +TEST_F(TokenProcessor, FunctionCall) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(64, 16)); ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Function)); } -TEST_F(TokenInfos, TypeConversionFunction) +TEST_F(TokenProcessor, TypeConversionFunction) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(68, 20)); ASSERT_THAT(infos[1], IsHighlightingMark(68u, 14u, 3u, HighlightingType::Type)); } -TEST_F(TokenInfos, InbuiltTypeConversionFunction) +TEST_F(TokenProcessor, InbuiltTypeConversionFunction) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(69, 20)); ASSERT_THAT(infos[1], IsHighlightingMark(69u, 14u, 3u, HighlightingType::PrimitiveType)); } -TEST_F(TokenInfos, TypeReference) +TEST_F(TokenProcessor, TypeReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(74, 13)); ASSERT_THAT(infos[0], IsHighlightingMark(74u, 5u, 3u, HighlightingType::Type)); } -TEST_F(TokenInfos, LocalVariable) +TEST_F(TokenProcessor, LocalVariable) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(79, 13)); ASSERT_THAT(infos[1], IsHighlightingMark(79u, 9u, 3u, HighlightingType::LocalVariable)); } -TEST_F(TokenInfos, LocalVariableDeclaration) +TEST_F(TokenProcessor, LocalVariableDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(79, 13)); ASSERT_THAT(infos[1], IsHighlightingMark(79u, 9u, 3u, HighlightingType::LocalVariable)); } -TEST_F(TokenInfos, LocalVariableReference) +TEST_F(TokenProcessor, LocalVariableReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(81, 26)); ASSERT_THAT(infos[0], IsHighlightingMark(81u, 5u, 3u, HighlightingType::LocalVariable)); } -TEST_F(TokenInfos, LocalVariableFunctionArgumentDeclaration) +TEST_F(TokenProcessor, LocalVariableFunctionArgumentDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(84, 45)); 
ASSERT_THAT(infos[5], IsHighlightingMark(84u, 41u, 3u, HighlightingType::LocalVariable)); } -TEST_F(TokenInfos, LocalVariableFunctionArgumentReference) +TEST_F(TokenProcessor, LocalVariableFunctionArgumentReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(86, 26)); ASSERT_THAT(infos[0], IsHighlightingMark(86u, 5u, 3u, HighlightingType::LocalVariable)); } -TEST_F(TokenInfos, ClassVariableDeclaration) +TEST_F(TokenProcessor, ClassVariableDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(90, 21)); ASSERT_THAT(infos[1], IsHighlightingMark(90u, 9u, 11u, HighlightingType::Field)); } -TEST_F(TokenInfos, ClassVariableReference) +TEST_F(TokenProcessor, ClassVariableReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(94, 23)); ASSERT_THAT(infos[0], IsHighlightingMark(94u, 9u, 11u, HighlightingType::Field)); } -TEST_F(TokenInfos, StaticMethodDeclaration) +TEST_F(TokenProcessor, StaticMethodDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(110, 25)); ASSERT_THAT(infos[1], HasTwoTypes(HighlightingType::Function, HighlightingType::Declaration)); } -TEST_F(TokenInfos, StaticMethodReference) +TEST_F(TokenProcessor, StaticMethodReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(114, 30)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::Function)); } -TEST_F(TokenInfos, Enumeration) +TEST_F(TokenProcessor, Enumeration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(118, 17)); ASSERT_THAT(infos[1], HasThreeTypes(HighlightingType::Type, HighlightingType::Declaration, HighlightingType::Enum)); } -TEST_F(TokenInfos, Enumerator) +TEST_F(TokenProcessor, Enumerator) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(120, 15)); ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Enumeration)); } -TEST_F(TokenInfos, EnumerationReferenceDeclarationType) +TEST_F(TokenProcessor, EnumerationReferenceDeclarationType) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(125, 28)); ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::Enum)); } -TEST_F(TokenInfos, EnumerationReferenceDeclarationVariable) +TEST_F(TokenProcessor, EnumerationReferenceDeclarationVariable) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(125, 28)); ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::LocalVariable)); } -TEST_F(TokenInfos, EnumerationReference) +TEST_F(TokenProcessor, EnumerationReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(127, 30)); ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::LocalVariable)); } -TEST_F(TokenInfos, EnumeratorReference) +TEST_F(TokenProcessor, EnumeratorReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(127, 30)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::Enumeration)); } -TEST_F(TokenInfos, ClassForwardDeclaration) +TEST_F(TokenProcessor, ClassForwardDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(130, 12)); ASSERT_THAT(infos[1], HasThreeTypes(HighlightingType::Type, HighlightingType::Declaration, HighlightingType::Class)); } -TEST_F(TokenInfos, ConstructorDeclaration) +TEST_F(TokenProcessor, ConstructorDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(134, 13)); ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Function, HighlightingType::Declaration)); } -TEST_F(TokenInfos, DestructorDeclaration) +TEST_F(TokenProcessor, DestructorDeclaration) 
{ const auto infos = translationUnit.tokenInfosInRange(sourceRange(135, 15)); ASSERT_THAT(infos[1], HasTwoTypes(HighlightingType::Function, HighlightingType::Declaration)); } -TEST_F(TokenInfos, ClassForwardDeclarationReference) +TEST_F(TokenProcessor, ClassForwardDeclarationReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(138, 23)); ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::Class)); } -TEST_F(TokenInfos, ClassTypeReference) +TEST_F(TokenProcessor, ClassTypeReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(140, 32)); ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::Class)); } -TEST_F(TokenInfos, ConstructorReferenceVariable) +TEST_F(TokenProcessor, ConstructorReferenceVariable) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(140, 32)); ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::LocalVariable)); } -TEST_F(TokenInfos, UnionDeclaration) +TEST_F(TokenProcessor, UnionDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(145, 12)); ASSERT_THAT(infos[1], HasThreeTypes(HighlightingType::Type, HighlightingType::Declaration, HighlightingType::Union)); } -TEST_F(TokenInfos, UnionDeclarationReference) +TEST_F(TokenProcessor, UnionDeclarationReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(150, 33)); ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::Union)); } -TEST_F(TokenInfos, GlobalVariable) +TEST_F(TokenProcessor, GlobalVariable) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(150, 33)); ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::GlobalVariable)); } -TEST_F(TokenInfos, StructDeclaration) +TEST_F(TokenProcessor, StructDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(50, 11)); ASSERT_THAT(infos[1], HasThreeTypes(HighlightingType::Type, HighlightingType::Declaration, HighlightingType::Struct)); } -TEST_F(TokenInfos, NameSpace) +TEST_F(TokenProcessor, NameSpace) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(160, 22)); ASSERT_THAT(infos[1], HasThreeTypes(HighlightingType::Type, HighlightingType::Declaration, HighlightingType::Namespace)); } -TEST_F(TokenInfos, NameSpaceAlias) +TEST_F(TokenProcessor, NameSpaceAlias) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(164, 38)); ASSERT_THAT(infos[1], HasTwoTypes(HighlightingType::Type, HighlightingType::Namespace)); } -TEST_F(TokenInfos, UsingStructInNameSpace) +TEST_F(TokenProcessor, UsingStructInNameSpace) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(165, 36)); ASSERT_THAT(infos[3], HasOnlyType(HighlightingType::Type)); } -TEST_F(TokenInfos, NameSpaceReference) +TEST_F(TokenProcessor, NameSpaceReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(166, 35)); ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::Namespace)); } -TEST_F(TokenInfos, StructInNameSpaceReference) +TEST_F(TokenProcessor, StructInNameSpaceReference) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(166, 35)); ASSERT_THAT(infos[2], HasTwoTypes(HighlightingType::Type, HighlightingType::Struct)); } -TEST_F(TokenInfos, VirtualFunctionDeclaration) +TEST_F(TokenProcessor, VirtualFunctionDeclaration) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(170, 35)); ASSERT_THAT(infos[2], HasTwoTypes(HighlightingType::VirtualFunction, HighlightingType::Declaration)); } 
-TEST_F(TokenInfos, DISABLED_NonVirtualFunctionCall) +TEST_F(TokenProcessor, DISABLED_NonVirtualFunctionCall) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(177, 46)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::Function)); } -TEST_F(TokenInfos, DISABLED_NonVirtualFunctionCallPointer) +TEST_F(TokenProcessor, DISABLED_NonVirtualFunctionCallPointer) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(180, 54)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::Function)); } -TEST_F(TokenInfos, VirtualFunctionCallPointer) +TEST_F(TokenProcessor, VirtualFunctionCallPointer) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(192, 51)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::VirtualFunction)); } -TEST_F(TokenInfos, FinalVirtualFunctionCallPointer) +TEST_F(TokenProcessor, FinalVirtualFunctionCallPointer) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(202, 61)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::Function)); } -TEST_F(TokenInfos, NonFinalVirtualFunctionCallPointer) +TEST_F(TokenProcessor, NonFinalVirtualFunctionCallPointer) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(207, 61)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::VirtualFunction)); } -TEST_F(TokenInfos, PlusOperator) +TEST_F(TokenProcessor, PlusOperator) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(224, 49)); ASSERT_THAT(infos[6], HasOnlyType(HighlightingType::Operator)); } -TEST_F(TokenInfos, PlusAssignOperator) +TEST_F(TokenProcessor, PlusAssignOperator) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(226, 24)); ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::Operator)); } -TEST_F(TokenInfos, Comment) +TEST_F(TokenProcessor, Comment) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(229, 14)); ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Comment)); } -TEST_F(TokenInfos, PreprocessingDirective) +TEST_F(TokenProcessor, PreprocessingDirective) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(231, 37)); ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::Preprocessor)); } -TEST_F(TokenInfos, PreprocessorMacroDefinition) +TEST_F(TokenProcessor, PreprocessorMacroDefinition) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(231, 37)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::PreprocessorDefinition)); } -TEST_F(TokenInfos, PreprocessorFunctionMacroDefinition) +TEST_F(TokenProcessor, PreprocessorFunctionMacroDefinition) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(232, 47)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::PreprocessorDefinition)); } -TEST_F(TokenInfos, PreprocessorMacroExpansion) +TEST_F(TokenProcessor, PreprocessorMacroExpansion) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(236, 27)); ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::PreprocessorExpansion)); } -TEST_F(TokenInfos, PreprocessorMacroExpansionArgument) +TEST_F(TokenProcessor, PreprocessorMacroExpansionArgument) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(236, 27)); ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::NumberLiteral)); } -TEST_F(TokenInfos, PreprocessorInclusionDirective) +TEST_F(TokenProcessor, PreprocessorInclusionDirective) { const auto infos = translationUnit.tokenInfosInRange(sourceRange(239, 18)); ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::StringLiteral)); } -TEST_F(TokenInfos, GotoLabelStatement) 
+TEST_F(TokenProcessor, GotoLabelStatement)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(242, 12));

     ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Label));
 }

-TEST_F(TokenInfos, GotoLabelStatementReference)
+TEST_F(TokenProcessor, GotoLabelStatementReference)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(244, 21));

     ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::Label));
 }

-TEST_F(TokenInfos, TemplateReference)
+TEST_F(TokenProcessor, TemplateReference)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(254, 25));

     ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Function));
 }

-TEST_F(TokenInfos, TemplateTypeParameter)
+TEST_F(TokenProcessor, TemplateTypeParameter)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(265, 135));

     ASSERT_THAT(infos[3], HasOnlyType(HighlightingType::Type));
 }

-TEST_F(TokenInfos, TemplateDefaultParameter)
+TEST_F(TokenProcessor, TemplateDefaultParameter)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(265, 135));

     ASSERT_THAT(infos[5], HasTwoTypes(HighlightingType::Type, HighlightingType::Struct));
 }

-TEST_F(TokenInfos, NonTypeTemplateParameter)
+TEST_F(TokenProcessor, NonTypeTemplateParameter)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(265, 135));

     ASSERT_THAT(infos[8], HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, NonTypeTemplateParameterDefaultArgument)
+TEST_F(TokenProcessor, NonTypeTemplateParameterDefaultArgument)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(265, 135));

     ASSERT_THAT(infos[10], HasOnlyType(HighlightingType::NumberLiteral));
 }

-TEST_F(TokenInfos, TemplateTemplateParameter)
+TEST_F(TokenProcessor, TemplateTemplateParameter)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(265, 135));

     ASSERT_THAT(infos[17], HasOnlyType(HighlightingType::Type));
 }

-TEST_F(TokenInfos, TemplateTemplateParameterDefaultArgument)
+TEST_F(TokenProcessor, TemplateTemplateParameterDefaultArgument)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(265, 135));

     ASSERT_THAT(infos[19], HasTwoTypes(HighlightingType::Type, HighlightingType::Class));
 }

-TEST_F(TokenInfos, TemplateFunctionDeclaration)
+TEST_F(TokenProcessor, TemplateFunctionDeclaration)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(266, 63));

     ASSERT_THAT(infos[1], HasThreeTypes(HighlightingType::Function, HighlightingType::Declaration, HighlightingType::FunctionDefinition));
 }

-TEST_F(TokenInfos, TemplateTypeParameterReference)
+TEST_F(TokenProcessor, TemplateTypeParameterReference)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(268, 58));

     ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Type));
 }

-TEST_F(TokenInfos, TemplateTypeParameterDeclarationReference)
+TEST_F(TokenProcessor, TemplateTypeParameterDeclarationReference)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(268, 58));

     ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, NonTypeTemplateParameterReference)
+TEST_F(TokenProcessor, NonTypeTemplateParameterReference)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(269, 71));

     ASSERT_THAT(infos[3], HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, NonTypeTemplateParameterReferenceReference)
+TEST_F(TokenProcessor, NonTypeTemplateParameterReferenceReference)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(269, 71));

     ASSERT_THAT(infos[1],
                 HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, TemplateTemplateParameterReference)
+TEST_F(TokenProcessor, TemplateTemplateParameterReference)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(270, 89));

     ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Type));
 }

-TEST_F(TokenInfos, TemplateTemplateContainerParameterReference)
+TEST_F(TokenProcessor, TemplateTemplateContainerParameterReference)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(270, 89));

     ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::Type));
 }

-TEST_F(TokenInfos, TemplateTemplateParameterReferenceVariable)
+TEST_F(TokenProcessor, TemplateTemplateParameterReferenceVariable)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(270, 89));

     ASSERT_THAT(infos[4], HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, ClassFinalVirtualFunctionCallPointer)
+TEST_F(TokenProcessor, ClassFinalVirtualFunctionCallPointer)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(212, 61));

     ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::Function));
 }

-TEST_F(TokenInfos, ClassFinalVirtualFunctionCall)
+TEST_F(TokenProcessor, ClassFinalVirtualFunctionCall)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(277, 23));

     ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Function));
 }

-TEST_F(TokenInfos, HasFunctionArguments)
+TEST_F(TokenProcessor, HasFunctionArguments)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(286, 29));

     ASSERT_TRUE(infos[1].hasFunctionArguments());
 }

-TEST_F(TokenInfos, PreprocessorInclusionDirectiveWithAngleBrackets )
+TEST_F(TokenProcessor, PreprocessorInclusionDirectiveWithAngleBrackets )
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(289, 38));

     ASSERT_THAT(infos[3], HasOnlyType(HighlightingType::StringLiteral));
 }

-TEST_F(TokenInfos, ArgumentInMacroExpansionIsKeyword)
+TEST_F(TokenProcessor, ArgumentInMacroExpansionIsKeyword)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(302, 36));

     ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::PrimitiveType));
 }

-TEST_F(TokenInfos, DISABLED_FirstArgumentInMacroExpansionIsLocalVariable)
+TEST_F(TokenProcessor, DISABLED_FirstArgumentInMacroExpansionIsLocalVariable)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(302, 36));

     ASSERT_THAT(infos[3], HasOnlyType(HighlightingType::Invalid));
 }

-TEST_F(TokenInfos, DISABLED_SecondArgumentInMacroExpansionIsLocalVariable)
+TEST_F(TokenProcessor, DISABLED_SecondArgumentInMacroExpansionIsLocalVariable)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(302, 36));

     ASSERT_THAT(infos[5], HasOnlyType(HighlightingType::Invalid));
 }

-TEST_F(TokenInfos, DISABLED_SecondArgumentInMacroExpansionIsField)
+TEST_F(TokenProcessor, DISABLED_SecondArgumentInMacroExpansionIsField)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(310, 40));

@@ -824,28 +823,28 @@ TEST_F(TokenInfos, DISABLED_SecondArgumentInMacroExpansionIsField)
 }

-TEST_F(TokenInfos, EnumerationType)
+TEST_F(TokenProcessor, EnumerationType)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(316, 30));

     ASSERT_THAT(infos[3], HasThreeTypes(HighlightingType::Type, HighlightingType::Declaration, HighlightingType::Enum));
 }

-TEST_F(TokenInfos, TypeInStaticCast)
+TEST_F(TokenProcessor, TypeInStaticCast)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(328, 64));

     ASSERT_THAT(infos[4], HasOnlyType(HighlightingType::Type));
 }
-TEST_F(TokenInfos, StaticCastIsKeyword)
+TEST_F(TokenProcessor, StaticCastIsKeyword)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(328, 64));

     ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Keyword));
 }

-TEST_F(TokenInfos, StaticCastPunctationIsInvalid)
+TEST_F(TokenProcessor, StaticCastPunctationIsInvalid)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(328, 64));

@@ -854,154 +853,154 @@ TEST_F(TokenInfos, StaticCastPunctationIsInvalid)
     ASSERT_THAT(infos[3], HasOnlyType(HighlightingType::Invalid));
     ASSERT_THAT(infos[5], HasOnlyType(HighlightingType::Invalid));
 }

-TEST_F(TokenInfos, TypeInReinterpretCast)
+TEST_F(TokenProcessor, TypeInReinterpretCast)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(329, 69));

     ASSERT_THAT(infos[4], HasOnlyType(HighlightingType::Type));
 }

-TEST_F(TokenInfos, IntegerAliasDeclaration)
+TEST_F(TokenProcessor, IntegerAliasDeclaration)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(333, 41));

     ASSERT_THAT(infos[1], HasTwoTypes(HighlightingType::Type, HighlightingType::TypeAlias));
 }

-TEST_F(TokenInfos, IntegerAlias)
+TEST_F(TokenProcessor, IntegerAlias)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(341, 31));

     ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::TypeAlias));
 }

-TEST_F(TokenInfos, SecondIntegerAlias)
+TEST_F(TokenProcessor, SecondIntegerAlias)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(342, 43));

     ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::TypeAlias));
 }

-TEST_F(TokenInfos, IntegerTypedef)
+TEST_F(TokenProcessor, IntegerTypedef)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(343, 35));

     ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::Typedef));
 }

-TEST_F(TokenInfos, FunctionAlias)
+TEST_F(TokenProcessor, FunctionAlias)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(344, 16));

     ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::TypeAlias));
 }

-TEST_F(TokenInfos, FriendTypeDeclaration)
+TEST_F(TokenProcessor, FriendTypeDeclaration)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(350, 28));

     ASSERT_THAT(infos[2], HasTwoTypes(HighlightingType::Type, HighlightingType::Class));
 }

-TEST_F(TokenInfos, FriendArgumentTypeDeclaration)
+TEST_F(TokenProcessor, FriendArgumentTypeDeclaration)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(351, 65));

     ASSERT_THAT(infos[6], HasTwoTypes(HighlightingType::Type, HighlightingType::Class));
 }

-TEST_F(TokenInfos, FriendArgumentDeclaration)
+TEST_F(TokenProcessor, FriendArgumentDeclaration)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(351, 65));

     ASSERT_THAT(infos[8], HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, FieldInitialization)
+TEST_F(TokenProcessor, FieldInitialization)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(358, 18));

     ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Field));
 }

-TEST_F(TokenInfos, TemplateFunctionCall)
+TEST_F(TokenProcessor, TemplateFunctionCall)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(372, 29));

     ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Function));
 }

-TEST_F(TokenInfos, TemplatedType)
+TEST_F(TokenProcessor, TemplatedType)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(377, 21));

     ASSERT_THAT(infos[1], HasThreeTypes(HighlightingType::Type, HighlightingType::Declaration, HighlightingType::Class));
 }

-TEST_F(TokenInfos, TemplatedTypeDeclaration)
+TEST_F(TokenProcessor, TemplatedTypeDeclaration)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(384, 49));

     ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::Class));
 }

-TEST_F(TokenInfos, NoOperator)
+TEST_F(TokenProcessor, NoOperator)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(389, 24));

     ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::Invalid));
 }

-TEST_F(TokenInfos, ScopeOperator)
+TEST_F(TokenProcessor, ScopeOperator)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(400, 33));

     ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::Invalid));
 }

-TEST_F(TokenInfos, TemplateClassNamespace)
+TEST_F(TokenProcessor, TemplateClassNamespace)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(413, 78));

     ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::Namespace));
 }

-TEST_F(TokenInfos, TemplateClass)
+TEST_F(TokenProcessor, TemplateClass)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(413, 78));

     ASSERT_THAT(infos[2], HasTwoTypes(HighlightingType::Type, HighlightingType::Class));
 }

-TEST_F(TokenInfos, TemplateClassParameter)
+TEST_F(TokenProcessor, TemplateClassParameter)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(413, 78));

     ASSERT_THAT(infos[4], HasTwoTypes(HighlightingType::Type, HighlightingType::Class));
 }

-TEST_F(TokenInfos, TemplateClassDeclaration)
+TEST_F(TokenProcessor, TemplateClassDeclaration)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(413, 78));

     ASSERT_THAT(infos[6], HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, TypeDefDeclaration)
+TEST_F(TokenProcessor, TypeDefDeclaration)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(418, 36));

     ASSERT_THAT(infos[2], HasTwoTypes(HighlightingType::Type, HighlightingType::Typedef));
 }

-TEST_F(TokenInfos, TypeDefDeclarationUsage)
+TEST_F(TokenProcessor, TypeDefDeclarationUsage)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(419, 48));

     ASSERT_THAT(infos[0], HasTwoTypes(HighlightingType::Type, HighlightingType::Typedef));
 }

-TEST_F(TokenInfos, NonConstReferenceArgument)
+TEST_F(TokenProcessor, NonConstReferenceArgument)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(455, 35));

@@ -1011,7 +1010,7 @@ TEST_F(TokenInfos, NonConstReferenceArgument)
                 HasTwoTypes(HighlightingType::LocalVariable, HighlightingType::OutputArgument));
 }

-TEST_F(TokenInfos, ConstReferenceArgument)
+TEST_F(TokenProcessor, ConstReferenceArgument)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(464, 32));

@@ -1021,7 +1020,7 @@ TEST_F(TokenInfos, ConstReferenceArgument)
                 HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, RValueReferenceArgument)
+TEST_F(TokenProcessor, RValueReferenceArgument)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(473, 52));

@@ -1031,7 +1030,7 @@ TEST_F(TokenInfos, RValueReferenceArgument)
                 HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, NonConstPointerArgument)
+TEST_F(TokenProcessor, NonConstPointerArgument)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(482, 33));

@@ -1041,7 +1040,7 @@ TEST_F(TokenInfos, NonConstPointerArgument)
                 HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, PointerToConstArgument)
+TEST_F(TokenProcessor, PointerToConstArgument)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(490, 31));

@@ -1051,7 +1050,7 @@ TEST_F(TokenInfos, PointerToConstArgument)
                 HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, ConstPointerArgument)
+TEST_F(TokenProcessor, ConstPointerArgument)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(491, 30));

@@ -1061,7 +1060,7 @@ TEST_F(TokenInfos, ConstPointerArgument)
                 HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, NonConstPointerGetterAsArgument)
+TEST_F(TokenProcessor, NonConstPointerGetterAsArgument)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(580, 42));

@@ -1075,7 +1074,7 @@ TEST_F(TokenInfos, NonConstPointerGetterAsArgument)
     ASSERT_THAT(infos[7], Not(HasMixin(HighlightingType::OutputArgument)));
 }

-TEST_F(TokenInfos, NonConstReferenceArgumentCallInsideCall)
+TEST_F(TokenProcessor, NonConstReferenceArgumentCallInsideCall)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(501, 64));
     infos[1];
@@ -1086,7 +1085,7 @@ TEST_F(TokenInfos, NonConstReferenceArgumentCallInsideCall)
                 HasTwoTypes(HighlightingType::LocalVariable, HighlightingType::OutputArgument));
 }

-TEST_F(TokenInfos, OutputArgumentsAreEmptyAfterIteration)
+TEST_F(TokenProcessor, OutputArgumentsAreEmptyAfterIteration)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(501, 63));

@@ -1095,7 +1094,7 @@ TEST_F(TokenInfos, OutputArgumentsAreEmptyAfterIteration)
     ASSERT_TRUE(infos.currentOutputArgumentRangesAreEmpty());
 }

-TEST_F(TokenInfos, NonConstReferenceArgumentFromFunctionParameter)
+TEST_F(TokenProcessor, NonConstReferenceArgumentFromFunctionParameter)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(506, 42));

@@ -1105,7 +1104,7 @@ TEST_F(TokenInfos, NonConstReferenceArgumentFromFunctionParameter)
                 HasTwoTypes(HighlightingType::LocalVariable, HighlightingType::OutputArgument));
 }

-TEST_F(TokenInfos, NonConstPointerArgumentAsExpression)
+TEST_F(TokenProcessor, NonConstPointerArgumentAsExpression)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(513, 33));

@@ -1115,7 +1114,7 @@ TEST_F(TokenInfos, NonConstPointerArgumentAsExpression)
                 HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, NonConstPointerArgumentAsInstanceWithMember)
+TEST_F(TokenProcessor, NonConstPointerArgumentAsInstanceWithMember)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(525, 46));

@@ -1125,7 +1124,7 @@ TEST_F(TokenInfos, NonConstPointerArgumentAsInstanceWithMember)
                 HasTwoTypes(HighlightingType::LocalVariable, HighlightingType::OutputArgument));
 }

-TEST_F(TokenInfos, NonConstPointerArgumentAsMemberOfInstance)
+TEST_F(TokenProcessor, NonConstPointerArgumentAsMemberOfInstance)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(525, 46));

@@ -1136,7 +1135,7 @@ TEST_F(TokenInfos, NonConstPointerArgumentAsMemberOfInstance)
                 HasTwoTypes(HighlightingType::Field, HighlightingType::OutputArgument));
 }

-TEST_F(TokenInfos, DISABLED_NonConstReferenceArgumentConstructor)
+TEST_F(TokenProcessor, DISABLED_NonConstReferenceArgumentConstructor)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(540, 57));

@@ -1146,7 +1145,7 @@ TEST_F(TokenInfos, DISABLED_NonConstReferenceArgumentConstructor)
                 HasTwoTypes(HighlightingType::LocalVariable, HighlightingType::OutputArgument));
 }

-TEST_F(TokenInfos, DISABLED_NonConstReferenceMemberInitialization)
+TEST_F(TokenProcessor, DISABLED_NonConstReferenceMemberInitialization)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(546, 19));

@@ -1156,7 +1155,7 @@ TEST_F(TokenInfos, DISABLED_NonConstReferenceMemberInitialization)
                 HasTwoTypes(HighlightingType::LocalVariable, HighlightingType::OutputArgument));
 }

-TEST_F(TokenInfos, EnumerationTypeDef)
+TEST_F(TokenProcessor, EnumerationTypeDef)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(424, 41));

@@ -1164,35 +1163,35 @@ TEST_F(TokenInfos, EnumerationTypeDef)
 }

 // QTCREATORBUG-15473
-TEST_F(TokenInfos, DISABLED_ArgumentToUserDefinedIndexOperator)
+TEST_F(TokenProcessor, DISABLED_ArgumentToUserDefinedIndexOperator)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(434, 19));

     ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, ClassTemplateParticalSpecialization)
+TEST_F(TokenProcessor, ClassTemplateParticalSpecialization)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(553, 33));

     ASSERT_THAT(infos[6], HasThreeTypes(HighlightingType::Type, HighlightingType::Declaration, HighlightingType::Class));
 }

-TEST_F(TokenInfos, UsingFunction)
+TEST_F(TokenProcessor, UsingFunction)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(556, 27));

     ASSERT_THAT(infos[3], HasOnlyType(HighlightingType::Function));
 }

-TEST_F(TokenInfos, PreprocessorIfDirective)
+TEST_F(TokenProcessor, PreprocessorIfDirective)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(558, 6));

     ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::Preprocessor));
 }

-TEST_F(TokenInfos, PreprocessorInclusionDirectiveWithKeyword)
+TEST_F(TokenProcessor, PreprocessorInclusionDirectiveWithKeyword)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(561, 15));

@@ -1200,90 +1199,90 @@ TEST_F(TokenInfos, PreprocessorInclusionDirectiveWithKeyword)
 }

 // CLANG-UPGRADE-CHECK: Enable once https://bugs.llvm.org//show_bug.cgi?id=12972 is resolved.
-TEST_F(TokenInfos, DISABLED_VariableInOperatorFunctionCall)
+TEST_F(TokenProcessor, DISABLED_VariableInOperatorFunctionCall)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(566, 12));

     ASSERT_THAT(infos[2], HasOnlyType(HighlightingType::LocalVariable));
 }

-TEST_F(TokenInfos, UsingTemplateFunction)
+TEST_F(TokenProcessor, UsingTemplateFunction)
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(584, 17));

     ASSERT_THAT(infos[3], HasOnlyType(HighlightingType::Function));
 }

-TEST_F(TokenInfos, HeaderNameIsInclusion)
+TEST_F(TokenProcessor, HeaderNameIsInclusion)
 {
     const auto infos = translationUnit.fullTokenInfosInRange(sourceRange(239, 31));
     ClangBackEnd::TokenInfoContainer container(infos[2]);

     ASSERT_THAT(container.extraInfo().includeDirectivePath, true);
 }

-TEST_F(TokenInfos, HeaderNameIsInclusionWithAngleBrackets)
+TEST_F(TokenProcessor, HeaderNameIsInclusionWithAngleBrackets)
 {
     const auto infos = translationUnit.fullTokenInfosInRange(sourceRange(289, 31));
     ClangBackEnd::TokenInfoContainer container(infos[2]);

     ASSERT_THAT(container.extraInfo().includeDirectivePath, true);
 }

-TEST_F(TokenInfos, NotInclusion)
+TEST_F(TokenProcessor, NotInclusion)
 {
     const auto infos = translationUnit.fullTokenInfosInRange(sourceRange(241, 13));
     ClangBackEnd::TokenInfoContainer container(infos[1]);

     ASSERT_THAT(container.extraInfo().includeDirectivePath, false);
 }

-TEST_F(TokenInfos, MacroIsIdentifier)
+TEST_F(TokenProcessor, MacroIsIdentifier)
 {
     const auto infos = translationUnit.fullTokenInfosInRange(sourceRange(232, 30));
     ClangBackEnd::TokenInfoContainer container(infos[2]);

     ASSERT_THAT(container.extraInfo().identifier, true);
 }

-TEST_F(TokenInfos, DefineIsNotIdentifier)
+TEST_F(TokenProcessor, DefineIsNotIdentifier)
 {
     const auto infos = translationUnit.fullTokenInfosInRange(sourceRange(232, 30));
     ClangBackEnd::TokenInfoContainer container(infos[1]);

     ASSERT_THAT(container.extraInfo().includeDirectivePath, false);
 }

-TEST_F(TokenInfos, NamespaceTypeSpelling)
+TEST_F(TokenProcessor, NamespaceTypeSpelling)
 {
     const auto infos = translationUnit.fullTokenInfosInRange(sourceRange(592, 59));
     ClangBackEnd::TokenInfoContainer container(infos[10]);

     ASSERT_THAT(container.extraInfo().semanticParentTypeSpelling, Utf8StringLiteral("NFoo::NBar::NTest"));
 }

-TEST_F(TokenInfos, DISABLED_WITHOUT_INVALIDDECL_PATCH(TypeNameOfInvalidDeclarationIsInvalid))
+TEST_F(TokenProcessor, DISABLED_WITHOUT_INVALIDDECL_PATCH(TypeNameOfInvalidDeclarationIsInvalid))
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(592, 14));

     ASSERT_THAT(infos[0], HasOnlyType(HighlightingType::Invalid));
 }

-TEST_F(TokenInfos, DISABLED_WITHOUT_INVALIDDECL_PATCH(VariableNameOfInvalidDeclarationIsInvalid))
+TEST_F(TokenProcessor, DISABLED_WITHOUT_INVALIDDECL_PATCH(VariableNameOfInvalidDeclarationIsInvalid))
 {
     const auto infos = translationUnit.tokenInfosInRange(sourceRange(592, 14));

     ASSERT_THAT(infos[1], HasOnlyType(HighlightingType::Invalid));
 }

-Data *TokenInfos::d;
+Data *TokenProcessor::d;

-void TokenInfos::SetUpTestCase()
+void TokenProcessor::SetUpTestCase()
 {
     d = new Data;
 }

-void TokenInfos::TearDownTestCase()
+void TokenProcessor::TearDownTestCase()
 {
     delete d;
     d = nullptr;
 }

-ClangBackEnd::SourceRange TokenInfos::sourceRange(uint line, uint columnEnd) const
+ClangBackEnd::SourceRange TokenProcessor::sourceRange(uint line, uint columnEnd) const
 {
     return translationUnit.sourceRange(line, 1, line, columnEnd);
 }
diff --git a/tests/unit/unittest/unittest.pro b/tests/unit/unittest/unittest.pro
index b886e219efd..c40571bd0a2 100644
--- a/tests/unit/unittest/unittest.pro
+++ b/tests/unit/unittest/unittest.pro
@@ -90,7 +90,9 @@ SOURCES += \
     processcreator-test.cpp \
     nativefilepath-test.cpp \
     nativefilepathview-test.cpp \
-    mocktimer.cpp
+    mocktimer.cpp \
+    tokenprocessor-test.cpp \
+    highlightingresultreporter-test.cpp

 !isEmpty(LIBCLANG_LIBS) {
     SOURCES += \
@@ -146,8 +148,6 @@ SOURCES += \
     sqlitestatement-test.cpp \
     sqlitetable-test.cpp \
     sqlstatementbuilder-test.cpp \
-    tokeninfos-test.cpp \
-    tokeninfosreporter-test.cpp \
     translationunitupdater-test.cpp \
     unsavedfiles-test.cpp \
     unsavedfile-test.cpp \