/**************************************************************************
** This file is part of Qt Creator
**
** Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
**
** Contact: Qt Software Information (qt-info@nokia.com)
**
** Commercial Usage
**
** Licensees holding valid Qt Commercial licenses may use this file in
** accordance with the Qt Commercial License Agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Nokia.
**
** GNU Lesser General Public License Usage
**
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** If you are unsure which license is appropriate for your use, please
** contact the sales department at qt-sales@nokia.com.
**
**************************************************************************/

/*
  Copyright 2005 Roberto Raggi <roberto@kdevelop.org>

  Permission to use, copy, modify, distribute, and sell this software and its
  documentation for any purpose is hereby granted without fee, provided that
  the above copyright notice appear in all copies and that both that
  copyright notice and this permission notice appear in supporting
  documentation.

  The above copyright notice and this permission notice shall be included in
  all copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  KDEVELOP TEAM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
  AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
  CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

#include "pp.h"

#include <Lexer.h>
#include <Token.h>
#include <Literals.h>
#include <cctype>

#include <QtDebug>
#include <algorithm>

namespace CPlusPlus {

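// `Value' models the result of a preprocessor #if/#elif constant expression
// as either a signed or an unsigned long.  The PP_DEFINE_BIN_OP operators
// below switch to unsigned arithmetic as soon as either operand is unsigned,
// roughly mirroring the usual arithmetic conversions.  Illustrative use
// (hypothetical snippet, not part of the original file):
//
//     Value a, b;
//     a.set_long(6);
//     b.set_ulong(7);
//     const Value c = a * b;   // unsigned arithmetic, c.ul == 42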
struct Value
{
    enum Kind {
        Kind_Long,
        Kind_ULong
    };

    Kind kind;

    union {
        long l;
        unsigned long ul;
    };

    Value()
        : kind(Kind_Long), l(0)
    { }

    inline bool is_ulong () const
    { return kind == Kind_ULong; }

    inline void set_ulong (unsigned long v)
    {
        ul = v;
        kind = Kind_ULong;
    }

    inline void set_long (long v)
    {
        l = v;
        kind = Kind_Long;
    }

    inline bool is_zero () const
    { return l == 0; }

#define PP_DEFINE_BIN_OP(name, op) \
    inline Value operator op(const Value &other) const \
    { \
        Value v = *this; \
        if (v.is_ulong () || other.is_ulong ()) \
            v.set_ulong (v.ul op other.ul); \
        else \
            v.set_long (v.l op other.l); \
        return v; \
    }

    PP_DEFINE_BIN_OP(op_add, +)
    PP_DEFINE_BIN_OP(op_sub, -)
    PP_DEFINE_BIN_OP(op_mult, *)
    PP_DEFINE_BIN_OP(op_div, /)
    PP_DEFINE_BIN_OP(op_mod, %)
    PP_DEFINE_BIN_OP(op_lhs, <<)
    PP_DEFINE_BIN_OP(op_rhs, >>)
    PP_DEFINE_BIN_OP(op_lt, <)
    PP_DEFINE_BIN_OP(op_gt, >)
    PP_DEFINE_BIN_OP(op_le, <=)
    PP_DEFINE_BIN_OP(op_ge, >=)
    PP_DEFINE_BIN_OP(op_eq, ==)
    PP_DEFINE_BIN_OP(op_ne, !=)
    PP_DEFINE_BIN_OP(op_bit_and, &)
    PP_DEFINE_BIN_OP(op_bit_or, |)
    PP_DEFINE_BIN_OP(op_bit_xor, ^)
    PP_DEFINE_BIN_OP(op_and, &&)
    PP_DEFINE_BIN_OP(op_or, ||)

#undef PP_DEFINE_BIN_OP
};

} // end of namespace CPlusPlus

using namespace CPlusPlus;

namespace {

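// RangeLexer is a small bounds-checked cursor over a token range
// [first, last).  Dereferencing past the end yields the `trivial' token
// (positioned at the end offset) instead of reading out of bounds, which
// lets the recursive-descent code below peek freely without extra checks.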
class RangeLexer
{
    const Token *first;
    const Token *last;
    Token trivial;

public:
    inline RangeLexer(const Token *first, const Token *last)
        : first(first), last(last)
    {
        // WARN: `last' must be a valid iterator.
        trivial.offset = last->offset;
    }

    inline operator bool() const
    { return first != last; }

    inline bool isValid() const
    { return first != last; }

    inline int size() const
    { return std::distance(first, last); }

    inline const Token *dot() const
    { return first; }

    inline const Token &operator*() const
    {
        if (first != last)
            return *first;

        return trivial;
    }

    inline const Token *operator->() const
    {
        if (first != last)
            return first;

        return &trivial;
    }

    inline RangeLexer &operator++()
    {
        ++first;
        return *this;
    }
};

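// ExpressionEvaluator evaluates a preprocessed #if/#elif condition by
// recursive descent over its token range.  Operator precedence is encoded
// in the call chain process_primary -> process_multiplicative -> ... ->
// process_constant_expression, and the running result is kept in `_value'.
// As the preprocessor rules require, identifiers that are still unresolved
// after macro expansion evaluate to 0, and `defined(NAME)' tests the
// environment directly.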
class ExpressionEvaluator
{
    ExpressionEvaluator(const ExpressionEvaluator &other);
    void operator = (const ExpressionEvaluator &other);

public:
    ExpressionEvaluator(Environment *env)
        : env(env), _lex(0)
    { }

    Value operator()(const Token *firstToken, const Token *lastToken,
                     const QByteArray &source)
    {
        this->source = source;
        const Value previousValue = switchValue(Value());
        RangeLexer tmp(firstToken, lastToken);
        RangeLexer *previousLex = _lex;
        _lex = &tmp;
        process_expression();
        _lex = previousLex;
        return switchValue(previousValue);
    }

protected:
    Value switchValue(const Value &value)
    {
        Value previousValue = _value;
        _value = value;
        return previousValue;
    }

    bool isTokenDefined() const
    {
        if ((*_lex)->isNot(T_IDENTIFIER))
            return false;
        const QByteArray spell = tokenSpell();
        if (spell.size() != 7)
            return false;
        return spell == "defined";
    }

    QByteArray tokenSpell() const
    {
        const QByteArray text = QByteArray::fromRawData(source.constData() + (*_lex)->offset,
                                                        (*_lex)->length);
        return text;
    }

    bool process_expression()
    { return process_constant_expression(); }

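    // Primary expressions: integer literals (with a decimal/octal/hex base
    // guess from their spelling), `defined NAME' and `defined(NAME)', plain
    // identifiers (which evaluate to 0), unary -, + and !, and parenthesized
    // sub-expressions.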
    bool process_primary()
    {
        if ((*_lex)->is(T_INT_LITERAL)) {
            int base = 10;
            const QByteArray spell = tokenSpell();
            if (spell.at(0) == '0') {
                if (spell.size() > 1 && (spell.at(1) == 'x' || spell.at(1) == 'X'))
                    base = 16;
                else
                    base = 8;
            }
            _value.set_long(tokenSpell().toLong(0, base));
            ++(*_lex);
            return true;
        } else if (isTokenDefined()) {
            ++(*_lex);
            if ((*_lex)->is(T_IDENTIFIER)) {
                _value.set_long(env->resolve(tokenSpell()) != 0);
                ++(*_lex);
                return true;
            } else if ((*_lex)->is(T_LPAREN)) {
                ++(*_lex);
                if ((*_lex)->is(T_IDENTIFIER)) {
                    _value.set_long(env->resolve(tokenSpell()) != 0);
                    ++(*_lex);
                    if ((*_lex)->is(T_RPAREN)) {
                        ++(*_lex);
                        return true;
                    }
                }
                return false;
            }
            return true;
        } else if ((*_lex)->is(T_IDENTIFIER)) {
            _value.set_long(0);
            ++(*_lex);
            return true;
        } else if ((*_lex)->is(T_MINUS)) {
            ++(*_lex);
            process_primary();
            _value.set_long(- _value.l);
            return true;
        } else if ((*_lex)->is(T_PLUS)) {
            ++(*_lex);
            process_primary();
            return true;
        } else if ((*_lex)->is(T_EXCLAIM)) {
            ++(*_lex);
            process_primary();
            _value.set_long(_value.is_zero());
            return true;
        } else if ((*_lex)->is(T_LPAREN)) {
            ++(*_lex);
            process_expression();
            if ((*_lex)->is(T_RPAREN))
                ++(*_lex);
            return true;
        }

        return false;
    }

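    // Each of the binary-operator levels below follows the same pattern:
    // parse the next-higher-precedence level, then fold further operands
    // left-associatively into `_value'.  Division and modulo by zero are
    // clamped to 0 rather than evaluated, so a malformed condition cannot
    // crash the preprocessor.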
    bool process_multiplicative()
    {
        process_primary();

        while ((*_lex)->is(T_STAR) || (*_lex)->is(T_SLASH) || (*_lex)->is(T_PERCENT)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_primary();

            if (op.is(T_STAR)) {
                _value = left * _value;
            } else if (op.is(T_SLASH)) {
                if (_value.is_zero())
                    _value.set_long(0);
                else
                    _value = left / _value;
            } else if (op.is(T_PERCENT)) {
                if (_value.is_zero())
                    _value.set_long(0);
                else
                    _value = left % _value;
            }
        }

        return true;
    }

    bool process_additive()
    {
        process_multiplicative();

        while ((*_lex)->is(T_PLUS) || (*_lex)->is(T_MINUS)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_multiplicative();

            if (op.is(T_PLUS))
                _value = left + _value;
            else if (op.is(T_MINUS))
                _value = left - _value;
        }

        return true;
    }

    bool process_shift()
    {
        process_additive();

        while ((*_lex)->is(T_LESS_LESS) || (*_lex)->is(T_GREATER_GREATER)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_additive();

            if (op.is(T_LESS_LESS))
                _value = left << _value;
            else if (op.is(T_GREATER_GREATER))
                _value = left >> _value;
        }

        return true;
    }

    bool process_relational()
    {
        process_shift();

        while ((*_lex)->is(T_LESS) || (*_lex)->is(T_LESS_EQUAL) ||
               (*_lex)->is(T_GREATER) || (*_lex)->is(T_GREATER_EQUAL)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_shift();

            if (op.is(T_LESS))
                _value = left < _value;
            else if (op.is(T_LESS_EQUAL))
                _value = left <= _value;
            else if (op.is(T_GREATER))
                _value = left > _value;
            else if (op.is(T_GREATER_EQUAL))
                _value = left >= _value;
        }

        return true;
    }

    bool process_equality()
    {
        process_relational();

        while ((*_lex)->is(T_EXCLAIM_EQUAL) || (*_lex)->is(T_EQUAL_EQUAL)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_relational();

            if (op.is(T_EXCLAIM_EQUAL))
                _value = left != _value;
            else if (op.is(T_EQUAL_EQUAL))
                _value = left == _value;
        }

        return true;
    }

    bool process_and()
    {
        process_equality();

        while ((*_lex)->is(T_AMPER)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_equality();

            _value = left & _value;
        }

        return true;
    }

    bool process_xor()
    {
        process_and();

        while ((*_lex)->is(T_CARET)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_and();

            _value = left ^ _value;
        }

        return true;
    }

    bool process_or()
    {
        process_xor();

        while ((*_lex)->is(T_PIPE)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_xor();

            _value = left | _value;
        }

        return true;
    }

    bool process_logical_and()
    {
        process_or();

        while ((*_lex)->is(T_AMPER_AMPER)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_or();

            _value = left && _value;
        }

        return true;
    }

    bool process_logical_or()
    {
        process_logical_and();

        while ((*_lex)->is(T_PIPE_PIPE)) {
            const Token op = *(*_lex);
            ++(*_lex);

            const Value left = _value;
            process_logical_and();

            _value = left || _value;
        }

        return true;
    }

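    // Top of the precedence chain: a logical-or expression optionally
    // followed by `? :'.  Note that both branches of the conditional are
    // evaluated here; only the selection between the two results depends
    // on the condition.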
    bool process_constant_expression()
    {
        process_logical_or();
        const Value cond = _value;
        if ((*_lex)->is(T_QUESTION)) {
            ++(*_lex);
            process_constant_expression();
            Value left = _value, right;
            if ((*_lex)->is(T_COLON)) {
                ++(*_lex);
                process_constant_expression();
                right = _value;
            }
            _value = ! cond.is_zero() ? left : right;
        }

        return true;
    }

private:
    Environment *env;
    QByteArray source;
    RangeLexer *_lex;
    Value _value;
};

} // end of anonymous namespace

Preprocessor::Preprocessor(Client *client, Environment *env)
    : client(client),
      env(env),
      _expand(env),
      _result(0),
      _markGeneratedTokens(false)
{
    resetIfLevel();
}

void Preprocessor::pushState(const State &s)
{
    _savedStates.append(state());
    _source = s.source;
    _tokens = s.tokens;
    _dot = s.dot;
}

Preprocessor::State Preprocessor::state() const
{
    State state;
    state.source = _source;
    state.tokens = _tokens;
    state.dot = _dot;
    return state;
}

void Preprocessor::popState()
{
    const State &state = _savedStates.last();
    _source = state.source;
    _tokens = state.tokens;
    _dot = state.dot;
    _savedStates.removeLast();
}

QByteArray Preprocessor::operator()(const QByteArray &filename,
                                    const QByteArray &source)
{
    QByteArray preprocessed;
    preprocess(filename, source, &preprocessed);
    return preprocessed;
}

QByteArray Preprocessor::expand(const QByteArray &source)
{
    QByteArray result;
    result.reserve(256);
    expand(source, &result);
    return result;
}

void Preprocessor::expand(const QByteArray &source, QByteArray *result)
{
    _expand(source, result);
}

void Preprocessor::expand(const char *first, const char *last, QByteArray *result)
{
    const QByteArray source = QByteArray::fromRawData(first, last - first);
    return expand(source, result);
}

Preprocessor::State Preprocessor::createStateFromSource(const QByteArray &source) const
{
    State state;
    state.source = source;
    Lexer lex(state.source.constBegin(), state.source.constEnd());
    lex.setScanKeywords(false);
    Token tok;
    do {
        lex(&tok);
        state.tokens.append(tok);
    } while (tok.isNot(T_EOF_SYMBOL));
    state.dot = state.tokens.constBegin();
    return state;
}

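// Emits enough context to keep the preprocessed output in sync with the
// original source: either a GNU-style line marker of the form
//
//     # <line> "<file>"
//
// when the position jumps backwards (or when forced), or plain newline
// padding when the token stream simply moved forward a few lines.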
void Preprocessor::processNewline(bool force)
{
    if (! force && env->currentLine == _dot->lineno)
        return;

    if (force || env->currentLine > _dot->lineno) {
        _result->append("\n# ");
        _result->append(QByteArray::number(_dot->lineno));
        _result->append(' ');
        _result->append('"');
        _result->append(env->currentFile);
        _result->append('"');
        _result->append('\n');
    } else {
        for (unsigned i = env->currentLine; i < _dot->lineno; ++i)
            _result->append('\n');
    }

    env->currentLine = _dot->lineno;
}

void Preprocessor::processSkippingBlocks(bool skippingBlocks,
                                         TokenIterator start, TokenIterator /*end*/)
{
    if (! client)
        return;

    if (skippingBlocks != _skipping[iflevel]) {
        unsigned offset = start->offset;

        if (_skipping[iflevel]) {
            if (_dot->newline)
                ++offset;

            client->startSkippingBlocks(offset);

        } else {
            if (offset)
                --offset;

            client->stopSkippingBlocks(offset);
        }
    }
}

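// Toggles the "generated tokens" mode.  The output is bracketed with
// `#gen true' / `#gen false' markers so later consumers can tell which
// text was produced by macro expansion, and the spelling of the original
// invocation is re-emitted with non-whitespace blanked out so that column
// positions on the current line are preserved.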
bool Preprocessor::markGeneratedTokens(bool markGeneratedTokens,
                                       TokenIterator dot)
{
    bool previous = _markGeneratedTokens;
    _markGeneratedTokens = markGeneratedTokens;

    if (previous != _markGeneratedTokens) {
        if (! dot)
            dot = _dot;

        if (_markGeneratedTokens)
            _result->append("\n#gen true");
        else
            _result->append("\n#gen false");

        processNewline(/*force = */ true);

        const char *begin = _source.constBegin();
        const char *end = begin;

        if (markGeneratedTokens)
            end += dot->begin();
        else
            end += (dot - 1)->end();

        const char *it = end - 1;
        for (; it != begin - 1; --it) {
            if (*it == '\n')
                break;
        }
        ++it;

        for (; it != end; ++it) {
            if (! std::isspace(*it))
                _result->append(' ');
            else
                _result->append(*it);
        }
    }

    return previous;
}

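// Main driver.  Walks the token stream of `source', emitting ordinary
// tokens verbatim, handling lines that start with `#' as directives,
// dropping lines inside false conditional blocks, and dispatching
// identifiers to the object-like / function-like macro expansion paths.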
void Preprocessor::preprocess(const QByteArray &fileName, const QByteArray &source,
                              QByteArray *result)
{
    QByteArray *previousResult = _result;
    _result = result;

    pushState(createStateFromSource(source));

    const QByteArray previousFileName = env->currentFile;
    env->currentFile = fileName;

    const unsigned previousCurrentLine = env->currentLine;
    env->currentLine = 0;

    while (true) {
        processNewline();

        if (_dot->is(T_EOF_SYMBOL)) {
            break;

        } else if (_dot->is(T_POUND) && (! _dot->joined && _dot->newline)) {
            // handle the preprocessor directive

            TokenIterator start = _dot;
            do {
                ++_dot;
            } while (_dot->isNot(T_EOF_SYMBOL) && (_dot->joined || ! _dot->newline));

            const bool skippingBlocks = _skipping[iflevel];

            processDirective(start, _dot);
            processSkippingBlocks(skippingBlocks, start, _dot);

        } else if (skipping()) {
            // skip the current line

            do {
                ++_dot;
            } while (_dot->isNot(T_EOF_SYMBOL) && (_dot->joined || ! _dot->newline));

        } else {

            if (_dot->joined)
                _result->append("\\\n");

            else if (_dot->whitespace) {
                const unsigned endOfPreviousToken = (_dot - 1)->end();
                const unsigned beginOfToken = _dot->begin();

                const char *start = _source.constBegin() + endOfPreviousToken;
                const char *end = _source.constBegin() + beginOfToken;

                const char *it = end - 1;
                for (; it != start - 1; --it) {
                    if (*it == '\n')
                        break;
                }
                ++it;

                for (; it != end; ++it) {
                    if (std::isspace(*it))
                        _result->append(*it);
                    else
                        _result->append(' ');
                }
            }

            if (_dot->isNot(T_IDENTIFIER)) {
                _result->append(tokenSpell(*_dot));
                ++_dot;

            } else {
                const TokenIterator identifierToken = _dot;
                ++_dot; // skip T_IDENTIFIER

                const QByteArray spell = tokenSpell(*identifierToken);

                if (env->isBuiltinMacro(spell))
                    expandBuiltinMacro(identifierToken, spell);

                else {
                    if (Macro *m = env->resolve(spell)) {
                        if (! m->isFunctionLike()) {
                            if (0 == (m = processObjectLikeMacro(identifierToken, spell, m)))
                                continue;

                            // the macro expansion generated something that looks like
                            // a function-like macro.
                        }

                        // `m' is a function-like macro.
                        if (_dot->is(T_LPAREN)) {
                            QVector<MacroArgumentReference> actuals;
                            collectActualArguments(&actuals);

                            if (_dot->is(T_RPAREN)) {
                                expandFunctionLikeMacro(identifierToken, m, actuals);
                                continue;
                            }
                        }
                    }

                    // it's not a function-like or object-like macro.
                    _result->append(spell);
                }
            }
        }
    }

    popState();

    env->currentFile = previousFileName;
    env->currentLine = previousCurrentLine;
    _result = previousResult;
}

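// Collects the actual arguments of a function-like macro invocation as
// (offset, length) references into the current source rather than copied
// text; nested parentheses inside an argument are balanced so commas
// inside them do not split the argument.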
void Preprocessor::collectActualArguments(QVector<MacroArgumentReference> *actuals)
{
    if (_dot->isNot(T_LPAREN))
        return;

    ++_dot;

    if (_dot->is(T_RPAREN))
        return;

    actuals->append(collectOneActualArgument());

    while (_dot->is(T_COMMA)) {
        ++_dot;

        actuals->append(collectOneActualArgument());
    }
}

MacroArgumentReference Preprocessor::collectOneActualArgument()
{
    const unsigned position = _dot->begin();

    while (_dot->isNot(T_EOF_SYMBOL)) {
        if (_dot->is(T_COMMA) || _dot->is(T_RPAREN))
            break;

        if (_dot->isNot(T_LPAREN))
            ++_dot;

        else {
            int count = 0;

            for (; _dot->isNot(T_EOF_SYMBOL); ++_dot) {
                if (_dot->is(T_LPAREN))
                    ++count;

                else if (_dot->is(T_RPAREN)) {
                    if (! --count) {
                        ++_dot;
                        break;
                    }
                }
            }
        }
    }

    const unsigned end = _dot->begin();

    return MacroArgumentReference(position, end - position);
}

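// Expands an object-like macro into a temporary buffer first.  If the
// expansion is followed by `(' in the input, the buffer is re-lexed to
// check whether its first token names a function-like macro; in that case
// that macro is returned so the function-like path in preprocess() can
// take over.  Otherwise the expansion is appended to the output, bracketed
// as generated tokens, and 0 is returned.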
Macro *Preprocessor::processObjectLikeMacro(TokenIterator identifierToken,
                                            const QByteArray &spell,
                                            Macro *m)
{
    QByteArray tmp;
    expandObjectLikeMacro(identifierToken, spell, m, &tmp);

    if (_dot->is(T_LPAREN)) {
        // check if the expansion generated a function-like macro.

        m = 0; // reset the active macro

        pushState(createStateFromSource(tmp));

        if (_dot->is(T_IDENTIFIER)) {
            const QByteArray id = tokenSpell(*_dot);

            if (Macro *macro = env->resolve(id)) {
                if (macro->isFunctionLike())
                    m = macro;
            }
        }

        popState();

        if (m != 0)
            return m;
    }

    const bool was = markGeneratedTokens(true, identifierToken);
    _result->append(tmp);
    (void) markGeneratedTokens(was);
    return 0;
}

void Preprocessor::expandBuiltinMacro(TokenIterator identifierToken,
                                      const QByteArray &spell)
{
    const Macro trivial;

    if (client)
        client->startExpandingMacro(identifierToken->offset,
                                    trivial, spell);

    const bool was = markGeneratedTokens(true, identifierToken);
    expand(spell, _result);
    (void) markGeneratedTokens(was);

    if (client)
        client->stopExpandingMacro(_dot->offset, trivial);
}

void Preprocessor::expandObjectLikeMacro(TokenIterator identifierToken,
                                         const QByteArray &spell,
                                         Macro *m,
                                         QByteArray *result)
{
    if (client)
        client->startExpandingMacro(identifierToken->offset,
                                    *m, spell);

    m->setHidden(true);
    expand(m->definition(), result);
    m->setHidden(false);

    if (client)
        client->stopExpandingMacro(_dot->offset, *m);
}

void Preprocessor::expandFunctionLikeMacro(TokenIterator identifierToken,
                                           Macro *m,
                                           const QVector<MacroArgumentReference> &actuals)
{
    const char *beginOfText = startOfToken(*identifierToken);
    const char *endOfText = endOfToken(*_dot);
    ++_dot; // skip T_RPAREN

    if (client) {
        const QByteArray text =
                QByteArray::fromRawData(beginOfText,
                                        endOfText - beginOfText);

        client->startExpandingMacro(identifierToken->offset,
                                    *m, text, actuals);
    }

    const bool was = markGeneratedTokens(true, identifierToken);
    expand(beginOfText, endOfText, _result);
    (void) markGeneratedTokens(was);

    if (client)
        client->stopExpandingMacro(_dot->offset, *m);
}

const char *Preprocessor::startOfToken(const Token &token) const
{ return _source.constBegin() + token.begin(); }

const char *Preprocessor::endOfToken(const Token &token) const
{ return _source.constBegin() + token.end(); }

QByteArray Preprocessor::tokenSpell(const Token &token) const
{
    const QByteArray text = QByteArray::fromRawData(_source.constBegin() + token.offset,
                                                    token.length);
    return text;
}

QByteArray Preprocessor::tokenText(const Token &token) const
{
    const QByteArray text(_source.constBegin() + token.offset,
                          token.length);
    return text;
}

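// Directive dispatch: the token after `#' is classified by
// classifyDirective() and routed to the matching process*() handler.
// #define, #include/#include_next/#import and #undef are ignored inside
// skipped conditional blocks, while the conditional directives themselves
// are always processed so the #if nesting stays balanced.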
void Preprocessor::processDirective(TokenIterator firstToken, TokenIterator lastToken)
{
    RangeLexer tk(firstToken, lastToken);
    ++tk; // skip T_POUND

    if (tk->is(T_IDENTIFIER)) {
        const QByteArray directive = tokenSpell(*tk);
        switch (PP_DIRECTIVE_TYPE d = classifyDirective(directive)) {
        case PP_DEFINE:
            if (! skipping())
                processDefine(firstToken, lastToken);
            break;

        case PP_INCLUDE:
        case PP_INCLUDE_NEXT:
        case PP_IMPORT:
            if (! skipping())
                processInclude(d == PP_INCLUDE_NEXT, firstToken, lastToken);
            break;

        case PP_UNDEF:
            if (! skipping())
                processUndef(firstToken, lastToken);
            break;

        case PP_ELIF:
            processElif(firstToken, lastToken);
            break;

        case PP_ELSE:
            processElse(firstToken, lastToken);
            break;

        case PP_ENDIF:
            processEndif(firstToken, lastToken);
            break;

        case PP_IF:
            processIf(firstToken, lastToken);
            break;

        case PP_IFDEF:
        case PP_IFNDEF:
            processIfdef(d == PP_IFNDEF, firstToken, lastToken);
            break;

        default:
            break;
        } // switch
    }
}

QVector<Token> Preprocessor::tokenize(const QByteArray &text) const
{
    QVector<Token> tokens;
    Lexer lex(text.constBegin(), text.constEnd());
    lex.setScanKeywords(false);
    Token tk;
    do {
        lex(&tk);
        tokens.append(tk);
    } while (tk.isNot(T_EOF_SYMBOL));
    return tokens;
}

void Preprocessor::processInclude(bool, TokenIterator firstToken,
                                  TokenIterator lastToken, bool acceptMacros)
{
    if (! client)
        return; // nothing to do.

    RangeLexer tk(firstToken, lastToken);
    ++tk; // skip T_POUND
    ++tk; // skip `include|include_next'

    if (acceptMacros && tk->is(T_IDENTIFIER)) {
        // ### TODO: implement me
#if 0
        QByteArray name;
        name.reserve(256);
        MacroExpander expandInclude(env);
        expandInclude(startOfToken(tokens.at(2)),
                      startOfToken(tokens.last()),
                      &name);
        const QByteArray previousSource = switchSource(name);
        //processInclude(skipCurentPath, tokenize(name), /*accept macros=*/ false);
        (void) switchSource(previousSource);
#endif

    } else if (tk->is(T_LESS)) {

        TokenIterator start = tk.dot();

        for (; tk->isNot(T_EOF_SYMBOL); ++tk) {
            if (tk->is(T_GREATER))
                break;
        }

        const char *beginOfPath = endOfToken(*start);
        const char *endOfPath = startOfToken(*tk);

        QString fn = QString::fromUtf8(beginOfPath, endOfPath - beginOfPath);
        client->sourceNeeded(fn, Client::IncludeGlobal, firstToken->lineno);

    } else if (tk->is(T_ANGLE_STRING_LITERAL) || tk->is(T_STRING_LITERAL)) {

        const QByteArray spell = tokenSpell(*tk);
        const char *beginOfPath = spell.constBegin();
        const char *endOfPath = spell.constEnd();
        const char quote = *beginOfPath;

        if (beginOfPath + 1 != endOfPath && ((quote == '"' && endOfPath[-1] == '"') ||
                                             (quote == '<' && endOfPath[-1] == '>'))) {

            QString fn = QString::fromUtf8(beginOfPath + 1, spell.length() - 2);
            client->sourceNeeded(fn, Client::IncludeLocal, firstToken->lineno);
        }
    }
}

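// Parses a #define directive into a Macro record: name, optional formal
// parameter list (making it function-like), optional `...' (variadic), and
// the definition text with line continuations folded away.  For example, a
// line such as
//
//     #define ADD(a, b) ((a) + (b))
//
// yields a function-like macro named `ADD' with formals `a' and `b' and
// the definition `((a) + (b))'.  Qt's signal/slot keywords get a
// special-cased definition instead (see isQtReservedWord below).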
void Preprocessor::processDefine(TokenIterator firstToken, TokenIterator lastToken)
{
    RangeLexer tk(firstToken, lastToken);

    if (tk.size() < 3)
        return; // nothing to do

    ++tk; // skip T_POUND
    ++tk; // skip T_DEFINE

    if (tk->isNot(T_IDENTIFIER)) {
        // ### warning expected an `identifier'
        return;
    }

    Macro macro;
    macro.setFileName(env->currentFile);
    macro.setLine(env->currentLine);
    macro.setName(tokenText(*tk));
    ++tk; // skip T_IDENTIFIER

    if (tk->is(T_LPAREN) && ! tk->whitespace) {
        // a function-like macro definition
        macro.setFunctionLike(true);

        ++tk; // skip T_LPAREN
        if (tk->is(T_IDENTIFIER)) {
            macro.addFormal(tokenText(*tk));
            ++tk; // skip T_IDENTIFIER
            while (tk->is(T_COMMA)) {
                ++tk; // skip T_COMMA
                if (tk->isNot(T_IDENTIFIER))
                    break;
                macro.addFormal(tokenText(*tk));
                ++tk; // skip T_IDENTIFIER
            }
        }

        if (tk->is(T_DOT_DOT_DOT)) {
            macro.setVariadic(true);
            ++tk; // skip T_DOT_DOT_DOT
        }

        if (tk->isNot(T_RPAREN)) {
            // ### warning expected `)'
            return;
        }

        ++tk; // skip T_RPAREN
    }

    if (isQtReservedWord(macro.name())) {
        QByteArray macroId = macro.name();

        if (macro.isFunctionLike()) {
            macroId += '(';
            bool fst = true;
            foreach (const QByteArray formal, macro.formals()) {
                if (! fst)
                    macroId += ", ";
                fst = false;
                macroId += formal;
            }
            macroId += ')';
        }

        macro.setDefinition(macroId);
    } else {
        // ### make me fast!
        const char *startOfDefinition = startOfToken(*tk);
        const char *endOfDefinition = startOfToken(*lastToken);
        QByteArray definition(startOfDefinition,
                              endOfDefinition - startOfDefinition);
        definition.replace("\\\n", " ");
        definition.replace('\n', ' ');
        macro.setDefinition(definition.trimmed());
    }

    env->bind(macro);

    if (client)
        client->macroAdded(macro);
}

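// Conditional compilation is tracked with two per-level arrays indexed by
// `iflevel': _skipping[] records whether the current block is being
// skipped, and _true_test[] records whether a branch of the current
// #if/#elif chain has already evaluated to true (so later #elif/#else
// branches can be skipped).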
void Preprocessor::processIf(TokenIterator firstToken, TokenIterator lastToken)
{
    RangeLexer tk(firstToken, lastToken);

    ++tk; // skip T_POUND
    ++tk; // skip `if'

    if (testIfLevel()) {
        const char *first = startOfToken(*tk);
        const char *last = startOfToken(*lastToken);

        MacroExpander expandCondition(env);
        QByteArray condition;
        condition.reserve(256);
        expandCondition(first, last, &condition);

        QVector<Token> tokens = tokenize(condition);

        const Value result = evalExpression(tokens.constBegin(),
                                            tokens.constEnd() - 1,
                                            condition);

        _true_test[iflevel] = ! result.is_zero();
        _skipping[iflevel] = result.is_zero();
    }
}

void Preprocessor::processElse(TokenIterator firstToken, TokenIterator lastToken)
{
    RangeLexer tk(firstToken, lastToken);

    if (iflevel == 0 && !skipping()) {
        // std::cerr << "*** WARNING #else without #if" << std::endl;
    } else if (iflevel > 0 && _skipping[iflevel - 1]) {
        _skipping[iflevel] = true;
    } else {
        _skipping[iflevel] = _true_test[iflevel];
    }
}

void Preprocessor::processElif(TokenIterator firstToken, TokenIterator lastToken)
{
    RangeLexer tk(firstToken, lastToken);
    ++tk; // skip T_POUND
    ++tk; // skip `elif'

    if (! (iflevel > 0)) {
        // std::cerr << "*** WARNING: " << __FILE__ << __LINE__ << std::endl;
    } else if (iflevel == 0 && !skipping()) {
        // std::cerr << "*** WARNING #else without #if" << std::endl;
    } else if (!_true_test[iflevel] && !_skipping[iflevel - 1]) {
        const char *first = startOfToken(*tk);
        const char *last = startOfToken(*lastToken);

        MacroExpander expandCondition(env);
        QByteArray condition;
        condition.reserve(256);
        expandCondition(first, last, &condition);

        QVector<Token> tokens = tokenize(condition);

        const Value result = evalExpression(tokens.constBegin(),
                                            tokens.constEnd() - 1,
                                            condition);

        _true_test[iflevel] = ! result.is_zero();
        _skipping[iflevel] = result.is_zero();
    } else {
        _skipping[iflevel] = true;
    }
}

void Preprocessor::processEndif(TokenIterator, TokenIterator)
{
    if (iflevel == 0 && !skipping()) {
        // std::cerr << "*** WARNING #endif without #if" << std::endl;
    } else {
        _skipping[iflevel] = false;
        _true_test[iflevel] = false;

        --iflevel;
    }
}

void Preprocessor::processIfdef(bool checkUndefined,
                                TokenIterator firstToken,
                                TokenIterator lastToken)
{
    RangeLexer tk(firstToken, lastToken);

    ++tk; // skip T_POUND
    ++tk; // skip `ifdef'
    if (testIfLevel()) {
        if (tk->is(T_IDENTIFIER)) {
            const QByteArray macroName = tokenSpell(*tk);
            bool value = env->resolve(macroName) != 0 || env->isBuiltinMacro(macroName);

            if (checkUndefined)
                value = ! value;

            _true_test[iflevel] = value;
            _skipping[iflevel] = ! value;
        }
    }
}

void Preprocessor::processUndef(TokenIterator firstToken, TokenIterator lastToken)
{
    RangeLexer tk(firstToken, lastToken);

    ++tk; // skip T_POUND
    ++tk; // skip `undef'

    if (tk->is(T_IDENTIFIER)) {
        const QByteArray macroName = tokenText(*tk);
        const Macro *macro = env->remove(macroName);

        if (client && macro)
            client->macroAdded(*macro);
    }
}

void Preprocessor::resetIfLevel()
{
    iflevel = 0;
    _skipping[iflevel] = false;
    _true_test[iflevel] = false;
}

Preprocessor::PP_DIRECTIVE_TYPE Preprocessor::classifyDirective(const QByteArray &directive) const
{
    switch (directive.size())
    {
    case 2:
        if (directive[0] == 'i' && directive[1] == 'f')
            return PP_IF;
        break;

    case 4:
        if (directive[0] == 'e' && directive == "elif")
            return PP_ELIF;
        else if (directive[0] == 'e' && directive == "else")
            return PP_ELSE;
        break;

    case 5:
        if (directive[0] == 'i' && directive == "ifdef")
            return PP_IFDEF;
        else if (directive[0] == 'u' && directive == "undef")
            return PP_UNDEF;
        else if (directive[0] == 'e' && directive == "endif")
            return PP_ENDIF;
        break;

    case 6:
        if (directive[0] == 'i' && directive == "ifndef")
            return PP_IFNDEF;
        else if (directive[0] == 'i' && directive == "import")
            return PP_IMPORT;
        else if (directive[0] == 'd' && directive == "define")
            return PP_DEFINE;
        break;

    case 7:
        if (directive[0] == 'i' && directive == "include")
            return PP_INCLUDE;
        break;

    case 12:
        if (directive[0] == 'i' && directive == "include_next")
            return PP_INCLUDE_NEXT;
        break;

    default:
        break;
    }

    return PP_UNKNOWN_DIRECTIVE;
}

bool Preprocessor::testIfLevel()
{
    const bool result = !_skipping[iflevel++];
    _skipping[iflevel] = _skipping[iflevel - 1];
    _true_test[iflevel] = false;
    return result;
}

int Preprocessor::skipping() const
{ return _skipping[iflevel]; }

Value Preprocessor::evalExpression(TokenIterator firstToken, TokenIterator lastToken,
                                   const QByteArray &source) const
{
    ExpressionEvaluator eval(env);
    const Value result = eval(firstToken, lastToken, source);
    return result;
}

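// Qt's signal/slot keywords (Q_SIGNALS, Q_SLOTS, SIGNAL, SLOT, signals,
// slots) are treated specially in processDefine(): instead of their real
// expansion they are given an identity-like definition, presumably so the
// code model still sees the original spelling in preprocessed output.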
bool Preprocessor::isQtReservedWord(const QByteArray &macroId) const
{
    const int size = macroId.size();
    if (size == 9 && macroId.at(0) == 'Q' && macroId == "Q_SIGNALS")
        return true;
    else if (size == 7 && macroId.at(0) == 'Q' && macroId == "Q_SLOTS")
        return true;
    else if (size == 6 && macroId.at(0) == 'S' && macroId == "SIGNAL")
        return true;
    else if (size == 4 && macroId.at(0) == 'S' && macroId == "SLOT")
        return true;
    else if (size == 7 && macroId.at(0) == 's' && macroId == "signals")
        return true;
    else if (size == 5 && macroId.at(0) == 's' && macroId == "slots")
        return true;
    return false;
}