/**************************************************************************
**
** This file is part of Qt Creator
**
** Copyright (c) 2012 Nokia Corporation and/or its subsidiary(-ies).
**
** Contact: Nokia Corporation (qt-info@nokia.com)
**
**
** GNU Lesser General Public License Usage
**
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this file.
** Please review the following information to ensure the GNU Lesser General
** Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** Other Usage
**
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
** If you have questions regarding the use of this file, please contact
** Nokia at qt-info@nokia.com.
**
**************************************************************************/
/*
  Copyright 2005 Roberto Raggi <roberto@kdevelop.org>

  Permission to use, copy, modify, distribute, and sell this software and its
  documentation for any purpose is hereby granted without fee, provided that
  the above copyright notice appear in all copies and that both that
  copyright notice and this permission notice appear in supporting
  documentation.

  The above copyright notice and this permission notice shall be included in
  all copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  KDEVELOP TEAM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
  AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
  CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

#include "pp.h"
#include "pp-cctype.h"

#include <Control.h>
#include <Lexer.h>
#include <Token.h>
#include <Literals.h>
#include <cctype>

#include <QtDebug>
#include <algorithm>
#include <QList>
#include <QDate>
#include <QTime>

#define NO_DEBUG

#ifndef NO_DEBUG
#  include <iostream>
#endif // NO_DEBUG

namespace {
enum {
    eagerExpansion = 1,
    MAX_TOKEN_EXPANSION_COUNT = 5000
};
}

namespace {
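// ScopedSwap is a small RAII helper: the constructor swaps the given
// variable with a new value, and the destructor swaps the old value back
// when the scope ends. ScopedBoolSwap is used throughout this file to set
// flags such as m_inPreprocessorDirective for the duration of one block.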
template<typename _T>
class ScopedSwap
{
    _T oldValue;
    _T &ref;

public:
    ScopedSwap(_T &var, _T newValue)
        : oldValue(newValue)
        , ref(var)
    {
        std::swap(ref, oldValue);
    }

    ~ScopedSwap()
    {
        std::swap(ref, oldValue);
    }
};

typedef ScopedSwap<bool> ScopedBoolSwap;
typedef ScopedSwap<unsigned> ScopedUnsignedSwap;
} // anonymous namespace

namespace CPlusPlus {

namespace Internal {
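// A TokenBuffer holds the tokens produced by one macro expansion (or a
// pushed-back token). Buffers form a singly-linked stack through `next', so
// nested expansions are consumed innermost-first. Each buffer also records
// which macro names are blocked inside it, which is how recursive
// self-expansion is prevented.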
struct TokenBuffer
{
    std::list<PPToken> tokens;
    const Macro *macro;
    TokenBuffer *next;
    QVector<QByteArray> blockedMacros;

    template <typename _Iterator>
    TokenBuffer(_Iterator firstToken, _Iterator lastToken, const Macro *macro, TokenBuffer *next)
        : tokens(firstToken, lastToken), macro(macro), next(next)
    {}

    bool isBlocked(const QByteArray &macroName) const {
        for (const TokenBuffer *it = this; it; it = it->next)
            if (it->blockedMacros.contains(macroName))
                return true;
        return false;
    }

    void blockMacro(const QByteArray &macroName)
    { blockedMacros.append(macroName); }
};
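// Value models the result of a preprocessor constant expression. It keeps
// either a signed long or an unsigned long, and an operation switches to
// unsigned arithmetic as soon as either operand is unsigned. The
// PP_DEFINE_BIN_OP macro below stamps out one operator per supported binary
// operation.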
struct Value
{
    enum Kind {
        Kind_Long,
        Kind_ULong
    };

    Kind kind;

    union {
        long l;
        unsigned long ul;
    };

    Value()
        : kind(Kind_Long), l(0)
    { }

    inline bool is_ulong () const
    { return kind == Kind_ULong; }

    inline void set_ulong (unsigned long v)
    {
        ul = v;
        kind = Kind_ULong;
    }

    inline void set_long (long v)
    {
        l = v;
        kind = Kind_Long;
    }

    inline bool is_zero () const
    { return l == 0; }

#define PP_DEFINE_BIN_OP(name, op) \
    inline Value operator op(const Value &other) const \
    { \
        Value v = *this; \
        if (v.is_ulong () || other.is_ulong ()) \
            v.set_ulong (v.ul op other.ul); \
        else \
            v.set_long (v.l op other.l); \
        return v; \
    }

    PP_DEFINE_BIN_OP(op_add, +)
    PP_DEFINE_BIN_OP(op_sub, -)
    PP_DEFINE_BIN_OP(op_mult, *)
    PP_DEFINE_BIN_OP(op_div, /)
    PP_DEFINE_BIN_OP(op_mod, %)
    PP_DEFINE_BIN_OP(op_lhs, <<)
    PP_DEFINE_BIN_OP(op_rhs, >>)
    PP_DEFINE_BIN_OP(op_lt, <)
    PP_DEFINE_BIN_OP(op_gt, >)
    PP_DEFINE_BIN_OP(op_le, <=)
    PP_DEFINE_BIN_OP(op_ge, >=)
    PP_DEFINE_BIN_OP(op_eq, ==)
    PP_DEFINE_BIN_OP(op_ne, !=)
    PP_DEFINE_BIN_OP(op_bit_and, &)
    PP_DEFINE_BIN_OP(op_bit_or, |)
    PP_DEFINE_BIN_OP(op_bit_xor, ^)
    PP_DEFINE_BIN_OP(op_and, &&)
    PP_DEFINE_BIN_OP(op_or, ||)

#undef PP_DEFINE_BIN_OP
};

} // namespace Internal
} // namespace CPlusPlus

using namespace CPlusPlus;
using namespace CPlusPlus::Internal;

namespace {

inline bool isValidToken(const PPToken &tk)
{
    return tk.isNot(T_EOF_SYMBOL) && (! tk.newline() || tk.joined());
}

Macro *macroDefinition(const QByteArray &name, unsigned offset, Environment *env, Client *client)
{
    Macro *m = env->resolve(name);
    if (client) {
        if (m)
            client->passedMacroDefinitionCheck(offset, *m);
        else
            client->failedMacroDefinitionCheck(offset, name);
    }
    return m;
}
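// RangeLexer is a cursor over a [first, last) token range. Dereferencing a
// cursor that has reached the end yields a trivial EOF-like token instead of
// running past the buffer, which keeps the expression evaluator free of
// explicit bounds checks.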
class RangeLexer
{
    const Token *first;
    const Token *last;
    Token trivial;

public:
    inline RangeLexer(const Token *first, const Token *last)
        : first(first), last(last)
    {
        // WARN: `last' must be a valid iterator.
        trivial.offset = last->offset;
    }

    inline operator bool() const
    { return first != last; }

    inline bool isValid() const
    { return first != last; }

    inline int size() const
    { return std::distance(first, last); }

    inline const Token *dot() const
    { return first; }

    inline const Token &operator*() const
    {
        if (first != last)
            return *first;

        return trivial;
    }

    inline const Token *operator->() const
    {
        if (first != last)
            return first;

        return &trivial;
    }

    inline RangeLexer &operator++()
    {
        ++first;
        return *this;
    }
};
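// ExpressionEvaluator computes the value of a preprocessed #if/#elif
// condition. It walks the token range with a RangeLexer, treats unknown
// identifiers as 0, understands the `defined' operator, and combines
// operands with precedence climbing (see
// process_expression_with_operator_precedence below).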
class ExpressionEvaluator
{
    ExpressionEvaluator(const ExpressionEvaluator &other);
    void operator = (const ExpressionEvaluator &other);

public:
    ExpressionEvaluator(Client *client, Environment *env)
        : client(client), env(env), _lex(0)
    { }

    Value operator()(const Token *firstToken, const Token *lastToken,
                     const QByteArray &source)
    {
        this->source = source;
        const Value previousValue = switchValue(Value());
        RangeLexer tmp(firstToken, lastToken);
        RangeLexer *previousLex = _lex;
        _lex = &tmp;
        process_expression();
        _lex = previousLex;
        return switchValue(previousValue);
    }

protected:
    Value switchValue(const Value &value)
    {
        Value previousValue = _value;
        _value = value;
        return previousValue;
    }

    bool isTokenDefined() const
    {
        if ((*_lex)->isNot(T_IDENTIFIER))
            return false;
        const QByteArray spell = tokenSpell();
        if (spell.size() != 7)
            return false;
        return spell == "defined";
    }

    QByteArray tokenSpell() const
    {
        const QByteArray text = QByteArray::fromRawData(source.constData() + (*_lex)->offset,
                                                        (*_lex)->f.length);
        return text;
    }

    inline void process_expression()
    { process_constant_expression(); }

    void process_primary()
    {
        if ((*_lex)->is(T_NUMERIC_LITERAL)) {
            int base = 10;
            QByteArray spell = tokenSpell();
            if (spell.at(0) == '0') {
                if (spell.size() > 1 && (spell.at(1) == 'x' || spell.at(1) == 'X'))
                    base = 16;
                else
                    base = 8;
            }

            while (! spell.isEmpty()) {
                const QChar ch = spell.at(spell.length() - 1);

                if (! (ch == QLatin1Char('u') || ch == QLatin1Char('U') ||
                       ch == QLatin1Char('l') || ch == QLatin1Char('L')))
                    break;
                spell.chop(1);
            }

            _value.set_long(spell.toLong(0, base));
            ++(*_lex);
        } else if (isTokenDefined()) {
            ++(*_lex);
            if ((*_lex)->is(T_IDENTIFIER)) {
                _value.set_long(macroDefinition(tokenSpell(), (*_lex)->offset, env, client) != 0);
                ++(*_lex);
            } else if ((*_lex)->is(T_LPAREN)) {
                ++(*_lex);
                if ((*_lex)->is(T_IDENTIFIER)) {
                    _value.set_long(macroDefinition(tokenSpell(), (*_lex)->offset, env, client) != 0);
                    ++(*_lex);
                    if ((*_lex)->is(T_RPAREN)) {
                        ++(*_lex);
                    }
                }
            }
        } else if ((*_lex)->is(T_IDENTIFIER)) {
            _value.set_long(0);
            ++(*_lex);
        } else if ((*_lex)->is(T_MINUS)) {
            ++(*_lex);
            process_primary();
            _value.set_long(- _value.l);
        } else if ((*_lex)->is(T_PLUS)) {
            ++(*_lex);
            process_primary();
        } else if ((*_lex)->is(T_TILDE)) {
            ++(*_lex);
            process_primary();
            _value.set_long(~ _value.l);
        } else if ((*_lex)->is(T_EXCLAIM)) {
            ++(*_lex);
            process_primary();
            _value.set_long(_value.is_zero());
        } else if ((*_lex)->is(T_LPAREN)) {
            ++(*_lex);
            process_expression();
            if ((*_lex)->is(T_RPAREN))
                ++(*_lex);
        }
    }
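    // Precedence-climbing parser for binary operators: each call consumes
    // operators whose precedence is at least minPrecedence, recursing with a
    // higher threshold whenever the lookahead operator binds more tightly
    // than the current one.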
    Value process_expression_with_operator_precedence(const Value &lhs, int minPrecedence)
    {
        Value result = lhs;

        while (precedence((*_lex)->kind()) >= minPrecedence) {
            const int oper = (*_lex)->kind();
            const int operPrecedence = precedence(oper);
            ++(*_lex);
            process_primary();
            Value rhs = _value;

            for (int LA_token_kind = (*_lex)->kind(), LA_precedence = precedence(LA_token_kind);
                 LA_precedence > operPrecedence && isBinaryOperator(LA_token_kind);
                 LA_token_kind = (*_lex)->kind(), LA_precedence = precedence(LA_token_kind)) {
                rhs = process_expression_with_operator_precedence(rhs, LA_precedence);
            }

            result = evaluate_expression(oper, result, rhs);
        }

        return result;
    }

    void process_constant_expression()
    {
        process_primary();
        _value = process_expression_with_operator_precedence(_value, precedence(T_PIPE_PIPE));

        if ((*_lex)->is(T_QUESTION)) {
            const Value cond = _value;
            ++(*_lex);
            process_constant_expression();
            Value left = _value, right;
            if ((*_lex)->is(T_COLON)) {
                ++(*_lex);
                process_constant_expression();
                right = _value;
            }
            _value = ! cond.is_zero() ? left : right;
        }
    }

private:
    inline int precedence(int tokenKind) const
    {
        switch (tokenKind) {
        case T_PIPE_PIPE:       return 0;
        case T_AMPER_AMPER:     return 1;
        case T_PIPE:            return 2;
        case T_CARET:           return 3;
        case T_AMPER:           return 4;
        case T_EQUAL_EQUAL:
        case T_EXCLAIM_EQUAL:   return 5;
        case T_GREATER:
        case T_LESS:
        case T_LESS_EQUAL:
        case T_GREATER_EQUAL:   return 6;
        case T_LESS_LESS:
        case T_GREATER_GREATER: return 7;
        case T_PLUS:
        case T_MINUS:           return 8;
        case T_STAR:
        case T_SLASH:
        case T_PERCENT:         return 9;

        default:
            return -1;
        }
    }

    static inline bool isBinaryOperator(int tokenKind)
    {
        switch (tokenKind) {
        case T_PIPE_PIPE:
        case T_AMPER_AMPER:
        case T_PIPE:
        case T_CARET:
        case T_AMPER:
        case T_EQUAL_EQUAL:
        case T_EXCLAIM_EQUAL:
        case T_GREATER:
        case T_LESS:
        case T_LESS_EQUAL:
        case T_GREATER_EQUAL:
        case T_LESS_LESS:
        case T_GREATER_GREATER:
        case T_PLUS:
        case T_MINUS:
        case T_STAR:
        case T_SLASH:
        case T_PERCENT:
            return true;

        default:
            return false;
        }
    }

    static inline Value evaluate_expression(int tokenKind, const Value &lhs, const Value &rhs)
    {
        switch (tokenKind) {
        case T_PIPE_PIPE:       return lhs || rhs;
        case T_AMPER_AMPER:     return lhs && rhs;
        case T_PIPE:            return lhs | rhs;
        case T_CARET:           return lhs ^ rhs;
        case T_AMPER:           return lhs & rhs;
        case T_EQUAL_EQUAL:     return lhs == rhs;
        case T_EXCLAIM_EQUAL:   return lhs != rhs;
        case T_GREATER:         return lhs > rhs;
        case T_LESS:            return lhs < rhs;
        case T_LESS_EQUAL:      return lhs <= rhs;
        case T_GREATER_EQUAL:   return lhs >= rhs;
        case T_LESS_LESS:       return lhs << rhs;
        case T_GREATER_GREATER: return lhs >> rhs;
        case T_PLUS:            return lhs + rhs;
        case T_MINUS:           return lhs - rhs;
        case T_STAR:            return lhs * rhs;
        case T_SLASH:           return rhs.is_zero() ? Value() : lhs / rhs;
        case T_PERCENT:         return rhs.is_zero() ? Value() : lhs % rhs;

        default:
            return Value();
        }
    }

private:
    Client *client;
    Environment *env;
    QByteArray source;
    RangeLexer *_lex;
    Value _value;
};

} // end of anonymous namespace

Preprocessor::State::State()
    : m_lexer(0)
    , m_skipping(MAX_LEVEL)
    , m_trueTest(MAX_LEVEL)
    , m_ifLevel(0)
    , m_tokenBuffer(0)
    , m_inPreprocessorDirective(false)
    , m_result(0)
    , m_markGeneratedTokens(true)
    , m_noLines(false)
    , m_inCondition(false)
    , m_inDefine(false)
{
    m_skipping[m_ifLevel] = false;
    m_trueTest[m_ifLevel] = false;
}

Preprocessor::Preprocessor(Client *client, Environment *env)
    : m_client(client)
    , m_env(env)
    , m_expandMacros(true)
    , m_keepComments(false)
{
}

void Preprocessor::pushState(const State &newState)
{
    m_savedStates.append(m_state);
    m_state = newState;
}

void Preprocessor::popState()
{
    const State &s = m_savedStates.last();
    delete m_state.m_lexer;
    m_state = s;
    m_savedStates.removeLast();
}

QByteArray Preprocessor::operator()(const QString &fileName, const QString &source)
{
    const QString previousOriginalSource = m_originalSource;
    m_originalSource = source;
    const QByteArray bytes = source.toLatin1();
    const QByteArray preprocessedCode = operator()(fileName, bytes);
    m_originalSource = previousOriginalSource;
    return preprocessedCode;
}

QByteArray Preprocessor::operator()(const QString &fileName,
                                    const QByteArray &source,
                                    bool noLines,
                                    bool markGeneratedTokens)
{
    QByteArray preprocessed;
//    qDebug()<<"running" << fileName<<"with"<<source.count('\n')<<"lines...";
    preprocess(fileName, source, &preprocessed, noLines, markGeneratedTokens, false);
    return preprocessed;
}

bool Preprocessor::expandMacros() const
{
    return m_expandMacros;
}

void Preprocessor::setExpandMacros(bool expandMacros)
{
    m_expandMacros = expandMacros;
}

bool Preprocessor::keepComments() const
{
    return m_keepComments;
}

void Preprocessor::setKeepComments(bool keepComments)
{
    m_keepComments = keepComments;
}

Preprocessor::State Preprocessor::createStateFromSource(const QString &fileName,
                                                        const QByteArray &source,
                                                        QByteArray *result,
                                                        bool noLines,
                                                        bool markGeneratedTokens,
                                                        bool inCondition) const
{
    State state;
    state.m_currentFileName = fileName;
    state.m_source = source;
    state.m_lexer = new Lexer(source.constBegin(), source.constEnd());
    state.m_lexer->setScanKeywords(false);
    state.m_lexer->setScanAngleStringLiteralTokens(false);
    if (m_keepComments)
        state.m_lexer->setScanCommentTokens(true);
    state.m_result = result;
    state.m_noLines = noLines;
    state.m_markGeneratedTokens = markGeneratedTokens;
    state.m_inCondition = inCondition;
    return state;
}

void Preprocessor::genLine(unsigned lineno, const QByteArray &fileName) const
{
    startNewOutputLine();
    out("# ");
    out(QByteArray::number(lineno));
    out(" \"");
    out(fileName);
    out("\"\n");
}
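// Replaces a `defined' / `defined(NAME)' operator in a condition with the
// literal "1" or "0", depending on whether NAME resolves in the current
// Environment. Identifiers glued together with ## are concatenated first.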
void Preprocessor::handleDefined(PPToken *tk)
{
    unsigned lineno = tk->lineno;
    lex(tk); // consume "defined" token
    bool lparenSeen = tk->is(T_LPAREN);
    if (lparenSeen)
        lex(tk); // consume "(" token
    if (tk->isNot(T_IDENTIFIER))
        //### TODO: generate error message
        return;
    PPToken idToken = *tk;
    do {
        lex(tk);
        if (tk->isNot(T_POUND_POUND))
            break;
        lex(tk);
        if (tk->is(T_IDENTIFIER))
            idToken = generateConcatenated(idToken, *tk);
        else
            break;
    } while (isValidToken(*tk));
    pushToken(tk);
    QByteArray result(1, '0');
    if (m_env->resolve(idToken.asByteArrayRef()))
        result[0] = '1';
    *tk = generateToken(T_NUMERIC_LITERAL, ByteArrayRef(&result), lineno, false);
}

void Preprocessor::pushToken(Preprocessor::PPToken *tk)
{
    const PPToken currentTokenBuffer[] = { *tk };
    m_state.m_tokenBuffer = new TokenBuffer(currentTokenBuffer,
                                            currentTokenBuffer + 1,
                                            /*macro */ 0,
                                            m_state.m_tokenBuffer);
}
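// lex() is the central token pump: it prefers tokens queued in the
// TokenBuffer stack (pending macro expansions and pushed-back tokens) and
// falls back to the Lexer otherwise. Outside of a directive it also
// classifies what it reads, dispatching '#' at the start of a line to
// handlePreprocessorDirective() and identifiers to handleIdentifier().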
void Preprocessor::lex(PPToken *tk)
{
_Lagain:
    if (m_state.m_tokenBuffer) {
        if (m_state.m_tokenBuffer->tokens.empty()) {
            TokenBuffer *r = m_state.m_tokenBuffer;
            m_state.m_tokenBuffer = m_state.m_tokenBuffer->next;
            delete r;
            goto _Lagain;
        }
        *tk = m_state.m_tokenBuffer->tokens.front();
        m_state.m_tokenBuffer->tokens.pop_front();
    } else {
        tk->setSource(m_state.m_source);
        m_state.m_lexer->scan(tk);
    }

//    if (tk->isValid() && !tk->generated() && !tk->is(T_EOF_SYMBOL))
//        m_env->currentLine = tk->lineno;

_Lclassify:
    if (! m_state.m_inPreprocessorDirective) {
        if (tk->newline() && tk->is(T_POUND)) {
            handlePreprocessorDirective(tk);
            goto _Lclassify;
        } else if (tk->newline() && skipping()) {
            ScopedBoolSwap s(m_state.m_inPreprocessorDirective, true);
            do {
                lex(tk);
            } while (isValidToken(*tk));
            goto _Lclassify;
        } else if (tk->is(T_IDENTIFIER) && !isQtReservedWord(tk->asByteArrayRef())) {
            static const QByteArray ppDefined("defined");
            if (m_state.m_inCondition && tk->asByteArrayRef() == ppDefined)
                handleDefined(tk);
            else if (handleIdentifier(tk))
                goto _Lagain;
        }
    }
}

void Preprocessor::skipPreprocesorDirective(PPToken *tk)
{
    ScopedBoolSwap s(m_state.m_inPreprocessorDirective, true);

    while (isValidToken(*tk)) {
        lex(tk);
    }
}
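// Decides whether an identifier starts a macro expansion. The dynamic
// built-ins __LINE__, __FILE__, __DATE__ and __TIME__ are rewritten
// directly; otherwise the name is looked up in the Environment, blocked
// names are skipped to avoid recursive expansion, and the macro body is
// pushed onto the TokenBuffer stack. Returns true when the caller should
// re-lex to pick up the expansion.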
bool Preprocessor::handleIdentifier(PPToken *tk)
{
    ScopedBoolSwap s(m_state.m_inPreprocessorDirective, true);

    static const QByteArray ppLine("__LINE__");
    static const QByteArray ppFile("__FILE__");
    static const QByteArray ppDate("__DATE__");
    static const QByteArray ppTime("__TIME__");

    ByteArrayRef macroNameRef = tk->asByteArrayRef();
    bool newline = tk->newline();

    if (!m_state.m_inDefine && macroNameRef.size() == 8 && macroNameRef[0] == '_' && macroNameRef[1] == '_') {
        PPToken newTk;
        if (macroNameRef == ppLine) {
            QByteArray txt = QByteArray::number(tk->lineno);
            newTk = generateToken(T_STRING_LITERAL, &txt, tk->lineno, false);
        } else if (macroNameRef == ppFile) {
            QByteArray txt;
            txt.append('"');
            txt.append(m_env->currentFile.toUtf8());
            txt.append('"');
            newTk = generateToken(T_STRING_LITERAL, &txt, tk->lineno, false);
        } else if (macroNameRef == ppDate) {
            QByteArray txt;
            txt.append('"');
            txt.append(QDate::currentDate().toString().toUtf8());
            txt.append('"');
            newTk = generateToken(T_STRING_LITERAL, &txt, tk->lineno, false);
        } else if (macroNameRef == ppTime) {
            QByteArray txt;
            txt.append('"');
            txt.append(QTime::currentTime().toString().toUtf8());
            txt.append('"');
            newTk = generateToken(T_STRING_LITERAL, &txt, tk->lineno, false);
        }

        if (newTk.isValid()) {
            newTk.f.newline = newline;
            newTk.f.whitespace = tk->whitespace();
            *tk = newTk;
            return false;
        }
    }

    const QByteArray macroName = macroNameRef.toByteArray();
    if (tk->generated() && m_state.m_tokenBuffer && m_state.m_tokenBuffer->isBlocked(macroName))
        return false;

    Macro *macro = m_env->resolve(macroName);
    if (!macro)
        return false;
//    qDebug() << "expanding" << macro->name() << "on line" << tk->lineno;

    if (m_client)
        m_client->startExpandingMacro(tk->offset, *macro, macroName);
    QVector<PPToken> body = macro->definitionTokens();

    if (macro->isFunctionLike()) {
        if (!expandMacros() || !handleFunctionLikeMacro(tk, macro, body, !m_state.m_inDefine))
            // the call is not function like or expandMacros() returns false, so stop
            return false;
    }

    if (body.isEmpty()) {
        if (!m_state.m_inDefine) {
            // macro expanded to empty, so characters disappeared, hence force a re-indent.
            PPToken forceWhitespacingToken;
            // special case: for a macro that expanded to empty, we do not want
            // to generate a new #line and re-indent, but just generate the
            // amount of spaces that the macro name took up.
            forceWhitespacingToken.f.length = tk->length() + (tk->whitespace() ? 1 : 0);
            body.push_front(forceWhitespacingToken);
        }
    } else {
        PPToken &firstNewTk = body.first();
        firstNewTk.f.newline = newline;
        firstNewTk.f.whitespace = true; // the macro call is removed, so space the first token correctly.
    }

    m_state.m_tokenBuffer = new TokenBuffer(body.begin(), body.end(),
                                            macro, m_state.m_tokenBuffer);
    m_state.m_tokenBuffer->blockMacro(macroName);

    if (m_client)
        m_client->stopExpandingMacro(tk->offset, *macro);

    return true;
}
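// Expands one call of a function-like macro: collects the actual arguments,
// substitutes them for the formal parameters in the body (including
// __VA_ARGS__ for variadic macros), performs # stringization and ## token
// pasting, and caps the result at MAX_TOKEN_EXPANSION_COUNT tokens. On
// failure the consumed tokens are pushed back and false is returned.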
bool Preprocessor::handleFunctionLikeMacro(PPToken *tk, const Macro *macro, QVector<PPToken> &body, bool addWhitespaceMarker)
{
    static const QByteArray ppVaArgs("__VA_ARGS__");

    QVector<QVector<PPToken> > actuals;
    PPToken idToken = *tk;
    if (!collectActualArguments(tk, &actuals)) {
        pushToken(tk);
        *tk = idToken;
        return false;
    }

    QVector<PPToken> expanded;
    for (size_t i = 0, bodySize = body.size(); i < bodySize && expanded.size() < MAX_TOKEN_EXPANSION_COUNT; ++i) {
        int expandedSize = expanded.size();
        const PPToken &token = body[i];

        if (token.is(T_IDENTIFIER)) {
            const ByteArrayRef id = token.asByteArrayRef();
            const QVector<QByteArray> &formals = macro->formals();
            int j = 0;
            for (; j < formals.size() && expanded.size() < MAX_TOKEN_EXPANSION_COUNT; ++j) {
                if (formals[j] == id) {
                    if (actuals.size() <= j) {
                        // too few actual parameters
                        //### TODO: error message
                        goto exitNicely;
                    }

                    QVector<PPToken> actualsForThisParam = actuals[j];
                    if (id == ppVaArgs || (macro->isVariadic() && j + 1 == formals.size())) {
                        unsigned lineno = 0;
                        QByteArray comma(",");
                        ByteArrayRef commaRef(&comma);
                        for (int k = j + 1; k < actuals.size(); ++k) {
                            if (!actualsForThisParam.isEmpty())
                                lineno = actualsForThisParam.last().lineno;
                            actualsForThisParam.append(generateToken(T_COMMA, commaRef, lineno, true));
                            actualsForThisParam += actuals[k];
                        }
                    }

                    if (i > 0 && body[i - 1].is(T_POUND)) {
                        QByteArray newText;
                        newText.reserve(256);
                        unsigned lineno = 0;
                        for (int i = 0, ei = actualsForThisParam.size(); i < ei; ++i) {
                            const PPToken &t = actualsForThisParam.at(i);
                            if (i == 0)
                                lineno = t.lineno;
                            else if (t.whitespace())
                                newText.append(' ');
                            newText.append(t.start(), t.length());
                        }
                        newText.replace("\\", "\\\\");
                        newText.replace("\"", "\\\"");
                        expanded.push_back(generateToken(T_STRING_LITERAL, ByteArrayRef(&newText), lineno, true));
                    } else {
                        expanded += actualsForThisParam;
                    }
                    break;
                }
            }

            if (j == formals.size())
                expanded.push_back(token);
        } else if (token.isNot(T_POUND) && token.isNot(T_POUND_POUND)) {
            expanded.push_back(token);
        }

        if (i > 1 && body[i - 1].is(T_POUND_POUND)) {
            if (expandedSize < 1 || expanded.size() == expandedSize) //### TODO: [cpp.concat] placemarkers
                continue;
            const PPToken &leftTk = expanded[expandedSize - 1];
            const PPToken &rightTk = expanded[expandedSize];
            expanded[expandedSize - 1] = generateConcatenated(leftTk, rightTk);
            expanded.remove(expandedSize);
        }
    }

exitNicely:
    pushToken(tk);
    if (addWhitespaceMarker) {
        PPToken forceWhitespacingToken;
        expanded.push_front(forceWhitespacingToken);
    }
    body = expanded;
    return true;
}
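// preprocess() drives one translation unit: it pushes a fresh State for the
// given source, emits "# <line> "<file>"" markers (and #gen true/false pairs
// around generated tokens when requested), keeps the output line count in
// sync with the input, and writes every remaining token to the result
// buffer.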
/// invalid pp-tokens are used as markers to force whitespace checks.
void Preprocessor::preprocess(const QString &fileName, const QByteArray &source,
                              QByteArray *result, bool noLines,
                              bool markGeneratedTokens, bool inCondition)
{
    if (source.isEmpty())
        return;

    pushState(createStateFromSource(fileName, source, result, noLines, markGeneratedTokens, inCondition));

    const QString previousFileName = m_env->currentFile;
    m_env->currentFile = fileName;

    const unsigned previousCurrentLine = m_env->currentLine;
    m_env->currentLine = 1;

    const QByteArray fn = fileName.toUtf8();
    if (!m_state.m_noLines)
        genLine(1, fn);

    PPToken tk(m_state.m_source), prevTk;
    do {
_Lrestart:
        bool forceLine = false;
        lex(&tk);

        if (!tk.isValid()) {
            bool wasGenerated = prevTk.generated();
            prevTk = tk;
            prevTk.f.generated = wasGenerated;
            goto _Lrestart;
        }

        if (m_state.m_markGeneratedTokens && tk.generated() && !prevTk.generated()) {
            startNewOutputLine();
            out("#gen true\n");
            ++m_env->currentLine;
            forceLine = true;
        } else if (m_state.m_markGeneratedTokens && !tk.generated() && prevTk.generated()) {
            startNewOutputLine();
            out("#gen false\n");
            ++m_env->currentLine;
            forceLine = true;
        }

        if (forceLine || m_env->currentLine != tk.lineno) {
            if (forceLine || m_env->currentLine > tk.lineno || tk.lineno - m_env->currentLine > 3) {
                if (m_state.m_noLines) {
                    if (!m_state.m_markGeneratedTokens)
                        out(' ');
                } else {
                    genLine(tk.lineno, fn);
                }
            } else {
                for (unsigned i = m_env->currentLine; i < tk.lineno; ++i)
                    out('\n');
            }
        } else {
            if (tk.newline() && prevTk.isValid())
                out('\n');
        }

        if (tk.whitespace() || prevTk.generated() != tk.generated() || !prevTk.isValid()) {
            if (prevTk.generated() && tk.generated()) {
                out(' ');
            } else if (tk.isValid() && !prevTk.isValid() && tk.lineno == m_env->currentLine) {
                out(QByteArray(prevTk.length() + (tk.whitespace() ? 1 : 0), ' '));
            } else if (prevTk.generated() != tk.generated() || !prevTk.isValid()) {
                const char *begin = tk.source().constBegin();
                const char *end = begin + tk.offset;
                const char *it = end - 1;
                for (; it >= begin; --it)
                    if (*it == '\n')
                        break;
                ++it;
                for (; it < end; ++it)
                    out(' ');
            } else {
                const char *begin = tk.source().constBegin();
                const char *end = begin + tk.offset;
                const char *it = end - 1;
                for (; it >= begin; --it)
                    if (!pp_isspace(*it) || *it == '\n')
                        break;
                ++it;
                for (; it < end; ++it)
                    out(*it);
            }
        }

        const ByteArrayRef tkBytes = tk.asByteArrayRef();
        out(tkBytes);
        m_env->currentLine = tk.lineno;
        if (tk.is(T_COMMENT) || tk.is(T_DOXY_COMMENT))
            m_env->currentLine += tkBytes.count('\n');
        prevTk = tk;
    } while (tk.isNot(T_EOF_SYMBOL));

    popState();

    m_env->currentFile = previousFileName;
    m_env->currentLine = previousCurrentLine;
}
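// Argument collection for a function-like macro call: the identifier must be
// followed by '(', each argument is scanned up to a top-level ',' or the
// closing ')', and nested parentheses are tracked so commas inside them do
// not split an argument.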
bool Preprocessor::collectActualArguments(PPToken *tk, QVector<QVector<PPToken> > *actuals)
{
    Q_ASSERT(tk);
    Q_ASSERT(actuals);

    lex(tk); // consume the identifier

    if (tk->isNot(T_LPAREN))
        //### TODO: error message
        return false;

    QVector<PPToken> tokens;
    lex(tk);
    scanActualArgument(tk, &tokens);

    actuals->append(tokens);

    while (tk->is(T_COMMA)) {
        lex(tk);

        QVector<PPToken> tokens;
        scanActualArgument(tk, &tokens);
        actuals->append(tokens);
    }

    if (tk->is(T_RPAREN))
        lex(tk);
    //###TODO: else error message
    return true;
}

void Preprocessor::scanActualArgument(PPToken *tk, QVector<PPToken> *tokens)
{
    Q_ASSERT(tokens);

    int count = 0;

    while (tk->isNot(T_EOF_SYMBOL)) {
        if (tk->is(T_LPAREN)) {
            ++count;
        } else if (tk->is(T_RPAREN)) {
            if (! count)
                break;
            --count;
        } else if (! count && tk->is(T_COMMA)) {
            break;
        }

        tokens->append(*tk);
        lex(tk);
    }
}
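// Dispatches a '#' line to the matching handler. define/undef/include are
// ignored while a false conditional branch is being skipped, whereas the
// conditional directives themselves (if/ifdef/ifndef/elif/else/endif) are
// always processed so the skipping state stays balanced.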
void Preprocessor::handlePreprocessorDirective(PPToken *tk)
{
    ScopedBoolSwap s(m_state.m_inPreprocessorDirective, true);

    PPToken poundToken = *tk;
    lex(tk); // scan the directive

    if (tk->newline() && ! tk->joined())
        return; // nothing to do.

    static const QByteArray ppDefine("define");
    static const QByteArray ppIf("if");
    static const QByteArray ppIfDef("ifdef");
    static const QByteArray ppIfNDef("ifndef");
    static const QByteArray ppEndIf("endif");
    static const QByteArray ppElse("else");
    static const QByteArray ppUndef("undef");
    static const QByteArray ppElif("elif");
    static const QByteArray ppInclude("include");
    static const QByteArray ppIncludeNext("include_next");
    static const QByteArray ppImport("import");
    //### TODO:
    // line
    // error
    // pragma

    if (tk->is(T_IDENTIFIER)) {
        const ByteArrayRef directive = tk->asByteArrayRef();

        if (!skipping() && directive == ppDefine)
            handleDefineDirective(tk);
        else if (!skipping() && directive == ppUndef)
            handleUndefDirective(tk);
        else if (!skipping() && (directive == ppInclude
                                 || directive == ppIncludeNext
                                 || directive == ppImport))
            handleIncludeDirective(tk);
        else if (directive == ppIf)
            handleIfDirective(tk);
        else if (directive == ppIfDef)
            handleIfDefDirective(false, tk);
        else if (directive == ppIfNDef)
            handleIfDefDirective(true, tk);
        else if (directive == ppEndIf)
            handleEndIfDirective(tk, poundToken);
        else if (directive == ppElse)
            handleElseDirective(tk, poundToken);
        else if (directive == ppElif)
            handleElifDirective(tk, poundToken);

        skipPreprocesorDirective(tk);
    }
}

void Preprocessor::handleIncludeDirective(PPToken *tk)
{
    m_state.m_lexer->setScanAngleStringLiteralTokens(true);
    lex(tk); // consume "include" token
    m_state.m_lexer->setScanAngleStringLiteralTokens(false);
    const unsigned line = tk->lineno;
    QByteArray included;

    if (tk->is(T_STRING_LITERAL) || tk->is(T_ANGLE_STRING_LITERAL)) {
        included = tk->asByteArrayRef().toByteArray();
        lex(tk); // consume string token
    } else {
        included = expand(tk);
    }
    included = included.trimmed();

    if (included.isEmpty()) {
        //### TODO: error message
        return;
    }

//    qDebug("include [[%s]]", included.toUtf8().constData());
    Client::IncludeType mode;
    if (included.at(0) == '"')
        mode = Client::IncludeLocal;
    else if (included.at(0) == '<')
        mode = Client::IncludeGlobal;
    else
        return; //### TODO: add error message?

    included = included.mid(1, included.size() - 2);
    QString inc = QString::fromUtf8(included.constData());
    if (m_client)
        m_client->sourceNeeded(inc, mode, line);
}
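// Parses a #define: the macro name, an optional parenthesised parameter list
// (including "..." for variadic macros), and the replacement list, whose
// tokens are recorded both as trimmed text and as pre-lexed PPTokens. Macros
// whose names are Qt reserved words keep only a synthetic signature as their
// definition text and an empty token body.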
void Preprocessor::handleDefineDirective(PPToken *tk)
{
    const unsigned defineOffset = tk->offset;
    lex(tk); // consume "define" token

    bool hasIdentifier = false;
    if (tk->isNot(T_IDENTIFIER))
        return;

    ScopedBoolSwap inDefine(m_state.m_inDefine, true);

    Macro macro;
    macro.setFileName(m_env->currentFile);
    macro.setLine(m_env->currentLine);
    QByteArray macroName = tk->asByteArrayRef().toByteArray();
    macro.setName(macroName);
    macro.setOffset(tk->offset);

    lex(tk);

    if (isValidToken(*tk) && tk->is(T_LPAREN) && ! tk->whitespace()) {
        macro.setFunctionLike(true);

        lex(tk); // skip `('

        if (isValidToken(*tk) && tk->is(T_IDENTIFIER)) {
            hasIdentifier = true;
            macro.addFormal(tk->asByteArrayRef().toByteArray());

            lex(tk);

            while (isValidToken(*tk) && tk->is(T_COMMA)) {
                lex(tk);

                if (isValidToken(*tk) && tk->is(T_IDENTIFIER)) {
                    macro.addFormal(tk->asByteArrayRef().toByteArray());
                    lex(tk);
                } else {
                    hasIdentifier = false;
                }
            }
        }

        if (tk->is(T_DOT_DOT_DOT)) {
            macro.setVariadic(true);
            if (!hasIdentifier)
                macro.addFormal("__VA_ARGS__");
            lex(tk); // consume ellipsis token
        }
        if (isValidToken(*tk) && tk->is(T_RPAREN))
            lex(tk); // consume ")" token
    }

    QVector<PPToken> bodyTokens;
    PPToken firstBodyToken = *tk;
    while (isValidToken(*tk)) {
        tk->f.generated = true;
        bodyTokens.push_back(*tk);
        lex(tk);
        if (eagerExpansion)
            while (tk->is(T_IDENTIFIER) && !isQtReservedWord(tk->asByteArrayRef()) && handleIdentifier(tk))
                lex(tk);
    }

    if (isQtReservedWord(ByteArrayRef(&macroName))) {
        QByteArray macroId = macro.name();

        if (macro.isFunctionLike()) {
            macroId += '(';
            bool fst = true;
            foreach (const QByteArray &formal, macro.formals()) {
                if (! fst)
                    macroId += ", ";
                fst = false;
                macroId += formal;
            }
            macroId += ')';
        }

        bodyTokens.clear();
        macro.setDefinition(macroId, bodyTokens);
    } else {
        int start = firstBodyToken.offset;
        int len = tk->offset - start;
        QByteArray bodyText = firstBodyToken.source().mid(start, len).trimmed();
        for (int i = 0, count = bodyTokens.size(); i < count; ++i) {
            PPToken &t = bodyTokens[i];
            if (t.isValid())
                t.squeeze();
        }
        macro.setDefinition(bodyText, bodyTokens);
    }

    macro.setLength(tk->offset - defineOffset);
    m_env->bind(macro);

//    qDebug() << "adding macro" << macro.name() << "defined at" << macro.fileName() << ":"<<macro.line();

    if (m_client)
        m_client->macroAdded(macro);
}

QByteArray Preprocessor::expand(PPToken *tk, PPToken *lastConditionToken)
{
    QByteArray condition;
    condition.reserve(256);
    while (isValidToken(*tk)) {
        const ByteArrayRef s = tk->asByteArrayRef();
        condition.append(s.start(), s.length());
        condition += ' ';
        if (lastConditionToken)
            *lastConditionToken = *tk;
        lex(tk);
    }
//    qDebug("*** Condition before: [%s]", condition.constData());

    QByteArray result;
    result.reserve(256);

    preprocess(m_state.m_currentFileName, condition, &result, true, false, true);
    result.squeeze();
//    qDebug("*** Condition after: [%s]", result.constData());
    return result;
}
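// Evaluates a conditional expression: the remaining directive tokens are
// macro-expanded via expand(), re-lexed into plain Tokens, and fed to the
// ExpressionEvaluator. The last token of the original condition is returned
// so callers can report where a skipped block starts.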
const PPToken Preprocessor::evalExpression(PPToken *tk, Value &result)
|
|
|
|
|
{
|
|
|
|
|
PPToken lastConditionToken;
|
|
|
|
|
const QByteArray expanded = expand(tk, &lastConditionToken);
|
|
|
|
|
Lexer lexer(expanded.constData(), expanded.constData() + expanded.size());
|
|
|
|
|
std::vector<Token> buf;
|
|
|
|
|
Token t;
|
|
|
|
|
do {
|
|
|
|
|
lexer.scan(&t);
|
|
|
|
|
buf.push_back(t);
|
|
|
|
|
} while (t.isNot(T_EOF_SYMBOL));
|
|
|
|
|
ExpressionEvaluator eval(m_client, m_env);
|
|
|
|
|
result = eval(&buf[0], &buf[buf.size() - 1], expanded);
|
|
|
|
|
return lastConditionToken;
|
|
|
|
|
}
|
2008-12-02 12:01:29 +01:00
|
|
|
|
2012-03-26 15:18:01 +02:00
|
|
|
void Preprocessor::handleIfDirective(PPToken *tk)
|
|
|
|
|
{
|
|
|
|
|
lex(tk); // consume "if" token
|
|
|
|
|
Value result;
|
|
|
|
|
const PPToken lastExpressionToken = evalExpression(tk, result);
|
|
|
|
|
const bool value = !result.is_zero();
|
|
|
|
|
|
|
|
|
|
const bool wasSkipping = m_state.m_skipping[m_state.m_ifLevel];
|
|
|
|
|
++m_state.m_ifLevel;
|
|
|
|
|
m_state.m_trueTest[m_state.m_ifLevel] = value;
|
|
|
|
|
if (wasSkipping) {
|
|
|
|
|
m_state.m_skipping[m_state.m_ifLevel] = wasSkipping;
|
|
|
|
|
} else {
|
|
|
|
|
bool startSkipping = !value;
|
|
|
|
|
m_state.m_skipping[m_state.m_ifLevel] = startSkipping;
|
|
|
|
|
if (startSkipping && m_client)
|
|
|
|
|
startSkippingBlocks(lastExpressionToken);
|
2008-12-02 12:01:29 +01:00
|
|
|
}
|
2012-03-26 15:18:01 +02:00
|
|
|
|
2008-12-02 12:01:29 +01:00
|
|
|
}
|
|
|
|
|
|
2012-03-26 15:18:01 +02:00
|
|
|
void Preprocessor::handleElifDirective(PPToken *tk, const PPToken £Token)
|
2008-12-02 12:01:29 +01:00
|
|
|
{
|
2012-03-26 15:18:01 +02:00
|
|
|
if (m_state.m_ifLevel == 0) {
|
|
|
|
|
// std::cerr << "*** WARNING #elif without #if" << std::endl;
|
|
|
|
|
handleIfDirective(tk);
|
2008-12-02 12:01:29 +01:00
|
|
|
} else {
|
2012-03-26 15:18:01 +02:00
|
|
|
lex(tk); // consume "elif" token
|
|
|
|
|
if (m_state.m_skipping[m_state.m_ifLevel - 1]) {
|
|
|
|
|
// we keep on skipping because we are nested in a skipped block
|
|
|
|
|
m_state.m_skipping[m_state.m_ifLevel] = true;
|
|
|
|
|
} else if (m_state.m_trueTest[m_state.m_ifLevel]) {
|
|
|
|
|
if (!m_state.m_skipping[m_state.m_ifLevel]) {
|
|
|
|
|
// start skipping because the preceeding then-part was not skipped
|
|
|
|
|
m_state.m_skipping[m_state.m_ifLevel] = true;
|
|
|
|
|
if (m_client)
|
|
|
|
|
startSkippingBlocks(poundToken);
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
// preceeding then-part was skipped, so calculate if we should start
|
|
|
|
|
// skipping, depending on the condition
|
|
|
|
|
Value result;
|
|
|
|
|
evalExpression(tk, result);
|
|
|
|
|
|
|
|
|
|
bool startSkipping = result.is_zero();
|
|
|
|
|
m_state.m_trueTest[m_state.m_ifLevel] = !startSkipping;
|
|
|
|
|
m_state.m_skipping[m_state.m_ifLevel] = startSkipping;
|
|
|
|
|
if (m_client && !startSkipping)
|
|
|
|
|
m_client->stopSkippingBlocks(poundToken.offset - 1);
|
|
|
|
|
}
|
2008-12-02 12:01:29 +01:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2012-03-26 15:18:01 +02:00
|
|
|
void Preprocessor::handleElseDirective(PPToken *tk, const PPToken £Token)
|
2008-12-02 12:01:29 +01:00
|
|
|
{
|
2012-03-26 15:18:01 +02:00
|
|
|
lex(tk); // consume "else" token
|
2009-01-19 20:06:39 +01:00
|
|
|
|
2012-03-26 15:18:01 +02:00
|
|
|
if (m_state.m_ifLevel != 0) {
|
|
|
|
|
if (m_state.m_skipping[m_state.m_ifLevel - 1]) {
|
|
|
|
|
// we keep on skipping because we are nested in a skipped block
|
|
|
|
|
m_state.m_skipping[m_state.m_ifLevel] = true;
|
|
|
|
|
} else {
|
|
|
|
|
bool wasSkipping = m_state.m_skipping[m_state.m_ifLevel];
|
|
|
|
|
bool startSkipping = m_state.m_trueTest[m_state.m_ifLevel];
|
|
|
|
|
m_state.m_skipping[m_state.m_ifLevel] = startSkipping;
|
|
|
|
|
|
|
|
|
|
if (m_client && wasSkipping && !startSkipping)
|
|
|
|
|
m_client->stopSkippingBlocks(poundToken.offset - 1);
|
|
|
|
|
else if (m_client && !wasSkipping && startSkipping)
|
|
|
|
|
startSkippingBlocks(poundToken);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
#ifndef NO_DEBUG
|
|
|
|
|
else {
|
|
|
|
|
std::cerr << "*** WARNING #else without #if" << std::endl;
|
2008-12-02 12:01:29 +01:00
|
|
|
}
|
2012-03-26 15:18:01 +02:00
|
|
|
#endif // NO_DEBUG
|
2008-12-02 12:01:29 +01:00
|
|
|
}
|
|
|
|
|
|
2012-03-26 15:18:01 +02:00
|
|
|
void Preprocessor::handleEndIfDirective(PPToken *tk, const PPToken &poundToken)
{
    if (m_state.m_ifLevel == 0) {
#ifndef NO_DEBUG
        std::cerr << "*** WARNING #endif without #if";
        if (!tk->generated())
            std::cerr << " on line " << tk->lineno << " of file " << m_state.m_currentFileName.toUtf8().constData();
        std::cerr << std::endl;
#endif // NO_DEBUG
    } else {
        bool wasSkipping = m_state.m_skipping[m_state.m_ifLevel];
        m_state.m_skipping[m_state.m_ifLevel] = false;
        m_state.m_trueTest[m_state.m_ifLevel] = false;
        --m_state.m_ifLevel;
        if (m_client && wasSkipping && !m_state.m_skipping[m_state.m_ifLevel])
            m_client->stopSkippingBlocks(poundToken.offset - 1);
    }

    lex(tk); // consume "endif" token
}

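// Handle an #ifdef directive, or an #ifndef when checkUndefined is true. A
// macro counts as defined if it is in the environment, is a built-in, or is
// Q_CREATOR_RUN. As a special case, QT_NO_XXX macros coming from the project
// configuration ("<configuration>") are treated as undefined here, so the
// branch guarded by #ifndef QT_NO_XXX is still taken.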
void Preprocessor::handleIfDefDirective(bool checkUndefined, PPToken *tk)
{
    static const QByteArray qCreatorRun("Q_CREATOR_RUN");

    lex(tk); // consume "ifdef" token
    if (tk->is(T_IDENTIFIER)) {
        bool value = false;
        const ByteArrayRef macroName = tk->asByteArrayRef();
        if (Macro *macro = macroDefinition(macroName.toByteArray(), tk->offset, m_env, m_client)) {
            value = true;

            // the macro is a feature constraint (e.g. QT_NO_XXX)
            if (checkUndefined && macroName.startsWith("QT_NO_")) {
                if (macro->fileName() == QLatin1String("<configuration>")) {
                    // and it's defined in a .pro file (e.g. DEFINES += QT_NO_QOBJECT)
                    value = false; // take the branch
                }
            }
        } else if (m_env->isBuiltinMacro(macroName)) {
            value = true;
        } else if (macroName == qCreatorRun) {
            value = true;
        }

        if (checkUndefined)
            value = !value;

        const bool wasSkipping = m_state.m_skipping[m_state.m_ifLevel];
        ++m_state.m_ifLevel;
        m_state.m_trueTest[m_state.m_ifLevel] = value;
        m_state.m_skipping[m_state.m_ifLevel] = wasSkipping ? wasSkipping : !value;

        if (m_client && !wasSkipping && !value)
            startSkippingBlocks(*tk);

        lex(tk); // consume the identifier
    }
#ifndef NO_DEBUG
    else {
        std::cerr << "*** WARNING #ifdef without identifier" << std::endl;
    }
#endif // NO_DEBUG
}

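// Handle an #undef directive: remove the macro from the environment and pass
// the removed macro on to the client, if one is attached.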
void Preprocessor::handleUndefDirective(PPToken *tk)
{
    lex(tk); // consume "undef" token
    if (tk->is(T_IDENTIFIER)) {
        const ByteArrayRef macroName = tk->asByteArrayRef();
        const Macro *macro = m_env->remove(macroName.toByteArray());

        if (m_client && macro)
            m_client->macroAdded(*macro);
        lex(tk); // consume macro name
    }
#ifndef NO_DEBUG
    else {
        std::cerr << "*** WARNING #undef without identifier" << std::endl;
    }
#endif // NO_DEBUG
}

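// Returns true if the identifier is one of the Qt keyword-like macros checked
// below (Q_SIGNALS, Q_SLOTS, Q_PROPERTY, SIGNAL, SLOT, foreach, ...). The size
// and first-character tests are cheap pre-filters before the full comparison.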
bool Preprocessor::isQtReservedWord(const ByteArrayRef &macroId)
{
    const int size = macroId.size();
    if (size == 9 && macroId.at(0) == 'Q' && macroId == "Q_SIGNALS")
        return true;
    else if (size == 9 && macroId.at(0) == 'Q' && macroId == "Q_FOREACH")
        return true;
    else if (size == 7 && macroId.at(0) == 'Q' && macroId == "Q_SLOTS")
        return true;
    else if (size == 8 && macroId.at(0) == 'Q' && macroId == "Q_SIGNAL")
        return true;
    else if (size == 6 && macroId.at(0) == 'Q' && macroId == "Q_SLOT")
        return true;
    else if (size == 3 && macroId.at(0) == 'Q' && macroId == "Q_D")
        return true;
    else if (size == 3 && macroId.at(0) == 'Q' && macroId == "Q_Q")
        return true;
    else if (size == 10 && macroId.at(0) == 'Q' && macroId == "Q_PROPERTY")
        return true;
    else if (size == 18 && macroId.at(0) == 'Q' && macroId == "Q_PRIVATE_PROPERTY")
        return true;
    else if (size == 7 && macroId.at(0) == 'Q' && macroId == "Q_ENUMS")
        return true;
    else if (size == 7 && macroId.at(0) == 'Q' && macroId == "Q_FLAGS")
        return true;
    else if (size == 12 && macroId.at(0) == 'Q' && macroId == "Q_INTERFACES")
        return true;
    else if (size == 11 && macroId.at(0) == 'Q' && macroId == "Q_INVOKABLE")
        return true;
    else if (size == 6 && macroId.at(0) == 'S' && macroId == "SIGNAL")
        return true;
    else if (size == 4 && macroId.at(0) == 'S' && macroId == "SLOT")
        return true;
    else if (size == 7 && macroId.at(0) == 's' && macroId == "signals")
        return true;
    else if (size == 7 && macroId.at(0) == 'f' && macroId == "foreach")
        return true;
    else if (size == 5 && macroId.at(0) == 's' && macroId == "slots")
        return true;
    return false;
}

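// Return the text for a byte range of the working source as a QString. When an
// original source string is available, the substring is taken from it at the
// corresponding offset; otherwise the bytes are decoded as UTF-8.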
QString Preprocessor::string(const char *first, int length) const
{
    if (m_originalSource.isEmpty())
        return QString::fromUtf8(first, length);

    const int position = first - m_state.m_source.constData();
    return m_originalSource.mid(position, length);
}

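// Build a synthesized token of the given kind. Its text is appended to the
// scratch buffer (string literals get surrounding quotes when addQuotes is
// set) and, when a Control is available, the literal or identifier is interned
// through it. The token is marked as generated.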
PPToken Preprocessor::generateToken(enum Kind kind, const ByteArrayRef &content, unsigned lineno, bool addQuotes)
{
    size_t len = content.size();
    const size_t pos = m_scratchBuffer.size();

    if (kind == T_STRING_LITERAL && addQuotes)
        m_scratchBuffer.append('"');
    m_scratchBuffer.append(content.start(), content.length());
    if (kind == T_STRING_LITERAL && addQuotes) {
        m_scratchBuffer.append('"');
        len += 2;
    }

    PPToken tok(m_scratchBuffer);
    tok.f.kind = kind;
    if (m_state.m_lexer->control()) {
        if (kind == T_STRING_LITERAL)
            tok.string = m_state.m_lexer->control()->stringLiteral(m_scratchBuffer.constData() + pos, len);
        else if (kind == T_IDENTIFIER)
            tok.identifier = m_state.m_lexer->control()->identifier(m_scratchBuffer.constData() + pos, len);
        else if (kind == T_NUMERIC_LITERAL)
            tok.number = m_state.m_lexer->control()->numericLiteral(m_scratchBuffer.constData() + pos, len);
    }
    tok.offset = pos;
    tok.f.length = len;
    tok.f.generated = true;
    tok.lineno = lineno;
    return tok;
}

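// Glue the spellings of two tokens together and return the result as a single
// generated identifier token.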
PPToken Preprocessor::generateConcatenated(const PPToken &leftTk, const PPToken &rightTk)
{
    QByteArray newText;
    newText.reserve(leftTk.length() + rightTk.length());
    newText.append(leftTk.start(), leftTk.length());
    newText.append(rightTk.start(), rightTk.length());
    return generateToken(T_IDENTIFIER, ByteArrayRef(&newText), leftTk.lineno, true);
}

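// Report to the client that a block is being skipped; the skipped region is
// taken to start at the beginning of the line following the given token.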
void Preprocessor::startSkippingBlocks(const Preprocessor::PPToken &tk) const
{
    if (!m_client)
        return;

    int iter = tk.end();
    const QByteArray &txt = tk.source();
    for (; iter < txt.size(); ++iter) {
        if (txt.at(iter) == '\n') {
            m_client->startSkippingBlocks(iter + 1);
            return;
        }
    }
}