author | Rémi Verschelde <rverschelde@gmail.com> | 2020-05-14 13:23:58 +0200
committer | Rémi Verschelde <rverschelde@gmail.com> | 2020-05-14 16:54:55 +0200
commit | 0be6d925dc3c6413bce7a3ccb49631b8e4a6e67a (patch)
tree | a27e497da7104dd0a64f98a04fa3067668735e91 /modules/gdscript/gdscript_tokenizer.cpp
parent | 710b34b70227becdc652b4ae027fe0ac47409642 (diff)
download | redot-engine-0be6d925dc3c6413bce7a3ccb49631b8e4a6e67a.tar.gz
Style: clang-format: Disable KeepEmptyLinesAtTheStartOfBlocks
This means that reduz's beloved style, which we all became used to,
will now be changed automatically to remove the first empty line.
This makes us lean closer to 1TBS (the one true brace style) instead
of hybridizing it with some Allman-inspired spacing.
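For illustration, here is a minimal before/after sketch of the effect of
setting KeepEmptyLinesAtTheStartOfBlocks to false in .clang-format; the
function below is a made-up example, not code taken from this diff:

    // Before: an empty line was kept right after the opening brace.
    void Example::do_something() {

        counter++;
    }

    // After reformatting with KeepEmptyLinesAtTheStartOfBlocks: false,
    // the leading empty line inside the block is removed automatically.
    void Example::do_something() {
        counter++;
    }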
There's still the case of braces around single-statement blocks that
needs to be addressed; clang-format can't help with that, but
clang-tidy may, if we agree on it.
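For illustration, a minimal sketch of the single-statement case mentioned
above (hypothetical snippet, not taken from this diff); a clang-tidy check
such as readability-braces-around-statements could add the braces
automatically if the project agrees to adopt it:

    // Currently allowed: no braces around a single-statement block.
    if (error_flag)
        return;

    // Full 1TBS would brace even single statements.
    if (error_flag) {
        return;
    }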
Part of #33027.
Diffstat (limited to 'modules/gdscript/gdscript_tokenizer.cpp')
-rw-r--r-- | modules/gdscript/gdscript_tokenizer.cpp | 73 |
1 file changed, 0 insertions, 73 deletions
diff --git a/modules/gdscript/gdscript_tokenizer.cpp b/modules/gdscript/gdscript_tokenizer.cpp
index 1c8282e13e..f532a6b797 100644
--- a/modules/gdscript/gdscript_tokenizer.cpp
+++ b/modules/gdscript/gdscript_tokenizer.cpp
@@ -240,7 +240,6 @@ static const _kws _keyword_list[] = {
};
const char *GDScriptTokenizer::get_token_name(Token p_token) {
-
    ERR_FAIL_INDEX_V(p_token, TK_MAX, "<error>");
    return token_names[p_token];
}
@@ -364,27 +363,22 @@ StringName GDScriptTokenizer::get_token_literal(int p_offset) const {
}
static bool _is_text_char(CharType c) {
-
    return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_';
}
static bool _is_number(CharType c) {
-
    return (c >= '0' && c <= '9');
}
static bool _is_hex(CharType c) {
-
    return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F');
}
static bool _is_bin(CharType c) {
-
    return (c == '0' || c == '1');
}
void GDScriptTokenizerText::_make_token(Token p_type) {
-
    TokenData &tk = tk_rb[tk_rb_pos];
    tk.type = p_type;
@@ -394,7 +388,6 @@ void GDScriptTokenizerText::_make_token(Token p_type) {
    tk_rb_pos = (tk_rb_pos + 1) % TK_RB_SIZE;
}
void GDScriptTokenizerText::_make_identifier(const StringName &p_identifier) {
-
    TokenData &tk = tk_rb[tk_rb_pos];
    tk.type = TK_IDENTIFIER;
@@ -406,7 +399,6 @@ void GDScriptTokenizerText::_make_identifier(const StringName &p_identifier) {
}
void GDScriptTokenizerText::_make_built_in_func(GDScriptFunctions::Function p_func) {
-
    TokenData &tk = tk_rb[tk_rb_pos];
    tk.type = TK_BUILT_IN_FUNC;
@@ -417,7 +409,6 @@ void GDScriptTokenizerText::_make_built_in_func(GDScriptFunctions::Function p_fu
    tk_rb_pos = (tk_rb_pos + 1) % TK_RB_SIZE;
}
void GDScriptTokenizerText::_make_constant(const Variant &p_constant) {
-
    TokenData &tk = tk_rb[tk_rb_pos];
    tk.type = TK_CONSTANT;
@@ -429,7 +420,6 @@ void GDScriptTokenizerText::_make_constant(const Variant &p_constant) {
}
void GDScriptTokenizerText::_make_type(const Variant::Type &p_type) {
-
    TokenData &tk = tk_rb[tk_rb_pos];
    tk.type = TK_BUILT_IN_TYPE;
@@ -441,7 +431,6 @@ void GDScriptTokenizerText::_make_type(const Variant::Type &p_type) {
}
void GDScriptTokenizerText::_make_error(const String &p_error) {
-
    error_flag = true;
    last_error = p_error;
@@ -454,7 +443,6 @@ void GDScriptTokenizerText::_make_error(const String &p_error) {
}
void GDScriptTokenizerText::_make_newline(int p_indentation, int p_tabs) {
-
    TokenData &tk = tk_rb[tk_rb_pos];
    tk.type = TK_NEWLINE;
    tk.constant = Vector2(p_indentation, p_tabs);
@@ -464,7 +452,6 @@ void GDScriptTokenizerText::_make_newline(int p_indentation, int p_tabs) {
}
void GDScriptTokenizerText::_advance() {
-
    if (error_flag) {
        //parser broke
        _make_error(last_error);
@@ -482,7 +469,6 @@ void GDScriptTokenizerText::_advance() {
        column += m_amount; \
    }
    while (true) {
-
        bool is_string_name = false;
        StringMode string_mode = STRING_DOUBLE_QUOTE;
@@ -570,7 +556,6 @@ void GDScriptTokenizerText::_advance() {
        return;
    }
    case '/': {
-
        switch (GETCHAR(1)) {
            case '=': { // diveq
@@ -593,7 +578,6 @@ void GDScriptTokenizerText::_advance() {
    } break;
    case '<': {
        if (GETCHAR(1) == '=') {
-
            _make_token(TK_OP_LESS_EQUAL);
            INCPOS(1);
        } else if (GETCHAR(1) == '<') {
@@ -684,7 +668,6 @@ void GDScriptTokenizerText::_advance() {
        break;
    case '&': {
        if (GETCHAR(1) == '&') {
-
            _make_token(TK_OP_AND);
            INCPOS(1);
        } else if (GETCHAR(1) == '=') {
@@ -696,7 +679,6 @@ void GDScriptTokenizerText::_advance() {
    } break;
    case '|': {
        if (GETCHAR(1) == '|') {
-
            _make_token(TK_OP_OR);
            INCPOS(1);
        } else if (GETCHAR(1) == '=') {
@@ -707,7 +689,6 @@ void GDScriptTokenizerText::_advance() {
        }
    } break;
    case '*': {
-
        if (GETCHAR(1) == '=') {
            _make_token(TK_OP_ASSIGN_MUL);
            INCPOS(1);
@@ -716,7 +697,6 @@ void GDScriptTokenizerText::_advance() {
        }
    } break;
    case '+': {
-
        if (GETCHAR(1) == '=') {
            _make_token(TK_OP_ASSIGN_ADD);
            INCPOS(1);
@@ -731,7 +711,6 @@ void GDScriptTokenizerText::_advance() {
    } break;
    case '-': {
-
        if (GETCHAR(1) == '=') {
            _make_token(TK_OP_ASSIGN_SUB);
            INCPOS(1);
@@ -743,7 +722,6 @@ void GDScriptTokenizerText::_advance() {
        }
    } break;
    case '%': {
-
        if (GETCHAR(1) == '=') {
            _make_token(TK_OP_ASSIGN_MOD);
            INCPOS(1);
@@ -761,7 +739,6 @@ void GDScriptTokenizerText::_advance() {
        [[fallthrough]];
    case '\'':
    case '"': {
-
        if (GETCHAR(0) == '\'')
            string_mode = STRING_SINGLE_QUOTE;
@@ -774,7 +751,6 @@ void GDScriptTokenizerText::_advance() {
        String str;
        while (true) {
            if (CharType(GETCHAR(i)) == 0) {
-
                _make_error("Unterminated String");
                return;
            } else if (string_mode == STRING_DOUBLE_QUOTE && CharType(GETCHAR(i)) == '"') {
@@ -802,7 +778,6 @@ void GDScriptTokenizerText::_advance() {
                CharType res = 0;
                switch (next) {
-
                    case 'a':
                        res = '\a';
                        break;
@@ -869,7 +844,6 @@ void GDScriptTokenizerText::_advance() {
                        column = 1;
                    } break;
                    default: {
-
                        _make_error("Invalid escape sequence");
                        return;
                    } break;
@@ -901,7 +875,6 @@ void GDScriptTokenizerText::_advance() {
        _make_token(TK_CURSOR);
    } break;
    default: {
-
        if (_is_number(GETCHAR(0)) || (GETCHAR(0) == '.' && _is_number(GETCHAR(1)))) {
            // parse number
            bool period_found = false;
@@ -933,7 +906,6 @@ void GDScriptTokenizerText::_advance() {
                }
                hexa_found = true;
            } else if (hexa_found && _is_hex(GETCHAR(i))) {
-
            } else if (!hexa_found && GETCHAR(i) == 'b') {
                if (bin_found || str.length() != 1 || !((i == 1 && str[0] == '0') || (i == 2 && str[1] == '0' && str[0] == '-'))) {
                    _make_error("Invalid numeric constant at 'b'");
@@ -950,7 +922,6 @@ void GDScriptTokenizerText::_advance() {
                //all ok
            } else if (bin_found && _is_bin(GETCHAR(i))) {
-
            } else if ((GETCHAR(i) == '-' || GETCHAR(i) == '+') && exponent_found) {
                if (sign_found) {
                    _make_error("Invalid numeric constant at '-'");
@@ -1018,15 +989,12 @@ void GDScriptTokenizerText::_advance() {
        } else if (str == "false") {
            _make_constant(false);
        } else {
-
            bool found = false;
            {
-
                int idx = 0;
                while (_type_list[idx].text) {
-
                    if (str == _type_list[idx].text) {
                        _make_type(_type_list[idx].type);
                        found = true;
@@ -1037,13 +1005,10 @@ void GDScriptTokenizerText::_advance() {
            }
            if (!found) {
-
                //built in func?
                for (int j = 0; j < GDScriptFunctions::FUNC_MAX; j++) {
-
                    if (str == GDScriptFunctions::get_func_name(GDScriptFunctions::Function(j))) {
-
                        _make_built_in_func(GDScriptFunctions::Function(j));
                        found = true;
                        break;
@@ -1058,7 +1023,6 @@ void GDScriptTokenizerText::_advance() {
                found = false;
                while (_keyword_list[idx].text) {
-
                    if (str == _keyword_list[idx].text) {
                        _make_token(_keyword_list[idx].token);
                        found = true;
@@ -1091,7 +1055,6 @@ void GDScriptTokenizerText::_advance() {
}
void GDScriptTokenizerText::set_code(const String &p_code) {
-
    code = p_code;
    len = p_code.length();
    if (len) {
@@ -1146,7 +1109,6 @@ const Variant &GDScriptTokenizerText::get_token_constant(int p_offset) const {
}
StringName GDScriptTokenizerText::get_token_identifier(int p_offset) const {
-
    ERR_FAIL_COND_V(p_offset <= -MAX_LOOKAHEAD, StringName());
    ERR_FAIL_COND_V(p_offset >= MAX_LOOKAHEAD, StringName());
@@ -1156,7 +1118,6 @@ StringName GDScriptTokenizerText::get_token_identifier(int p_offset) const {
}
GDScriptFunctions::Function GDScriptTokenizerText::get_token_built_in_func(int p_offset) const {
-
    ERR_FAIL_COND_V(p_offset <= -MAX_LOOKAHEAD, GDScriptFunctions::FUNC_MAX);
    ERR_FAIL_COND_V(p_offset >= MAX_LOOKAHEAD, GDScriptFunctions::FUNC_MAX);
@@ -1166,7 +1127,6 @@ GDScriptFunctions::Function GDScriptTokenizerText::get_token_built_in_func(int p
}
Variant::Type GDScriptTokenizerText::get_token_type(int p_offset) const {
-
    ERR_FAIL_COND_V(p_offset <= -MAX_LOOKAHEAD, Variant::NIL);
    ERR_FAIL_COND_V(p_offset >= MAX_LOOKAHEAD, Variant::NIL);
@@ -1176,7 +1136,6 @@ Variant::Type GDScriptTokenizerText::get_token_type(int p_offset) const {
}
int GDScriptTokenizerText::get_token_line_indent(int p_offset) const {
-
    ERR_FAIL_COND_V(p_offset <= -MAX_LOOKAHEAD, 0);
    ERR_FAIL_COND_V(p_offset >= MAX_LOOKAHEAD, 0);
@@ -1186,7 +1145,6 @@ int GDScriptTokenizerText::get_token_line_indent(int p_offset) const {
}
int GDScriptTokenizerText::get_token_line_tab_indent(int p_offset) const {
-
    ERR_FAIL_COND_V(p_offset <= -MAX_LOOKAHEAD, 0);
    ERR_FAIL_COND_V(p_offset >= MAX_LOOKAHEAD, 0);
@@ -1196,7 +1154,6 @@ int GDScriptTokenizerText::get_token_line_tab_indent(int p_offset) const {
}
String GDScriptTokenizerText::get_token_error(int p_offset) const {
-
    ERR_FAIL_COND_V(p_offset <= -MAX_LOOKAHEAD, String());
    ERR_FAIL_COND_V(p_offset >= MAX_LOOKAHEAD, String());
@@ -1206,7 +1163,6 @@ String GDScriptTokenizerText::get_token_error(int p_offset) const {
}
void GDScriptTokenizerText::advance(int p_amount) {
-
    ERR_FAIL_COND(p_amount <= 0);
    for (int i = 0; i < p_amount; i++)
        _advance();
@@ -1217,7 +1173,6 @@
#define BYTECODE_VERSION 13
Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer) {
-
    const uint8_t *buf = p_buffer.ptr();
    int total_len = p_buffer.size();
    ERR_FAIL_COND_V(p_buffer.size() < 24 || p_buffer[0] != 'G' || p_buffer[1] != 'D' || p_buffer[2] != 'S' || p_buffer[3] != 'C', ERR_INVALID_DATA);
@@ -1235,7 +1190,6 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
    identifiers.resize(identifier_count);
    for (int i = 0; i < identifier_count; i++) {
-
        int len = decode_uint32(b);
        ERR_FAIL_COND_V(len > total_len, ERR_INVALID_DATA);
        b += 4;
@@ -1255,7 +1209,6 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
    constants.resize(constant_count);
    for (int i = 0; i < constant_count; i++) {
-
        Variant v;
        int len;
        // An object cannot be constant, never decode objects
@@ -1270,7 +1223,6 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
    ERR_FAIL_COND_V(line_count * 8 > total_len, ERR_INVALID_DATA);
    for (int i = 0; i < line_count; i++) {
-
        uint32_t token = decode_uint32(b);
        b += 4;
        uint32_t linecol = decode_uint32(b);
@@ -1283,7 +1235,6 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
    tokens.resize(token_count);
    for (int i = 0; i < token_count; i++) {
-
        ERR_FAIL_COND_V(total_len < 1, ERR_INVALID_DATA);
        if ((*b) & TOKEN_BYTE_MASK) { //little endian always
@@ -1304,7 +1255,6 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
}
Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code) {
-
    Vector<uint8_t> buf;
    Map<StringName, int> identifier_map;
@@ -1317,16 +1267,13 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
    int line = -1;
    while (true) {
-
        if (tt.get_token_line() != line) {
-
            line = tt.get_token_line();
            line_map[line] = token_array.size();
        }
        uint32_t token = tt.get_token();
        switch (tt.get_token()) {
-
            case TK_IDENTIFIER: {
                StringName id = tt.get_token_identifier();
                if (!identifier_map.has(id)) {
@@ -1336,7 +1283,6 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
                token |= identifier_map[id] << TOKEN_BITS;
            } break;
            case TK_CONSTANT: {
-
                const Variant &c = tt.get_token_constant();
                if (!constant_map.has(c)) {
                    int idx = constant_map.size();
@@ -1345,20 +1291,16 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
                token |= constant_map[c] << TOKEN_BITS;
            } break;
            case TK_BUILT_IN_TYPE: {
-
                token |= tt.get_token_type() << TOKEN_BITS;
            } break;
            case TK_BUILT_IN_FUNC: {
-
                token |= tt.get_token_built_in_func() << TOKEN_BITS;
            } break;
            case TK_NEWLINE: {
-
                token |= tt.get_token_line_indent() << TOKEN_BITS;
            } break;
            case TK_ERROR: {
-
                ERR_FAIL_V(Vector<uint8_t>());
            } break;
            default: {
@@ -1405,7 +1347,6 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
    //save identifiers
    for (Map<int, StringName>::Element *E = rev_identifier_map.front(); E; E = E->next()) {
-
        CharString cs = String(E->get()).utf8();
        int len = cs.length() + 1;
        int extra = 4 - (len % 4);
@@ -1426,7 +1367,6 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
    }
    for (Map<int, Variant>::Element *E = rev_constant_map.front(); E; E = E->next()) {
-
        int len;
        // Objects cannot be constant, never encode objects
        Error err = encode_variant(E->get(), nullptr, len, false);
@@ -1437,7 +1377,6 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
    }
    for (Map<int, uint32_t>::Element *E = rev_line_map.front(); E; E = E->next()) {
-
        uint8_t ibuf[8];
        encode_uint32(E->key(), &ibuf[0]);
        encode_uint32(E->get(), &ibuf[4]);
@@ -1446,7 +1385,6 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
    }
    for (int i = 0; i < token_array.size(); i++) {
-
        uint32_t token = token_array[i];
        if (token & ~TOKEN_MASK) {
@@ -1464,7 +1402,6 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
}
GDScriptTokenizerBuffer::Token GDScriptTokenizerBuffer::get_token(int p_offset) const {
-
    int offset = token + p_offset;
    if (offset < 0 || offset >= tokens.size())
@@ -1474,7 +1411,6 @@ GDScriptTokenizerBuffer::Token GDScriptTokenizerBuffer::get_token(int p_offset)
}
StringName GDScriptTokenizerBuffer::get_token_identifier(int p_offset) const {
-
    int offset = token + p_offset;
    ERR_FAIL_INDEX_V(offset, tokens.size(), StringName());
@@ -1485,14 +1421,12 @@ StringName GDScriptTokenizerBuffer::get_token_identifier(int p_offset) const {
}
GDScriptFunctions::Function GDScriptTokenizerBuffer::get_token_built_in_func(int p_offset) const {
-
    int offset = token + p_offset;
    ERR_FAIL_INDEX_V(offset, tokens.size(), GDScriptFunctions::FUNC_MAX);
    return GDScriptFunctions::Function(tokens[offset] >> TOKEN_BITS);
}
Variant::Type GDScriptTokenizerBuffer::get_token_type(int p_offset) const {
-
    int offset = token + p_offset;
    ERR_FAIL_INDEX_V(offset, tokens.size(), Variant::NIL);
@@ -1500,7 +1434,6 @@ Variant::Type GDScriptTokenizerBuffer::get_token_type(int p_offset) const {
}
int GDScriptTokenizerBuffer::get_token_line(int p_offset) const {
-
    int offset = token + p_offset;
    int pos = lines.find_nearest(offset);
@@ -1513,7 +1446,6 @@ int GDScriptTokenizerBuffer::get_token_line(int p_offset) const {
    return l & TOKEN_LINE_MASK;
}
int GDScriptTokenizerBuffer::get_token_column(int p_offset) const {
-
    int offset = token + p_offset;
    int pos = lines.find_nearest(offset);
    if (pos < 0)
@@ -1525,13 +1457,11 @@ int GDScriptTokenizerBuffer::get_token_column(int p_offset) const {
    return l >> TOKEN_LINE_BITS;
}
int GDScriptTokenizerBuffer::get_token_line_indent(int p_offset) const {
-
    int offset = token + p_offset;
    ERR_FAIL_INDEX_V(offset, tokens.size(), 0);
    return tokens[offset] >> TOKEN_BITS;
}
const Variant &GDScriptTokenizerBuffer::get_token_constant(int p_offset) const {
-
    int offset = token + p_offset;
    ERR_FAIL_INDEX_V(offset, tokens.size(), nil);
    uint32_t constant = tokens[offset] >> TOKEN_BITS;
@@ -1539,16 +1469,13 @@ const Variant &GDScriptTokenizerBuffer::get_token_constant(int p_offset) const {
    return constants[constant];
}
String GDScriptTokenizerBuffer::get_token_error(int p_offset) const {
-
    ERR_FAIL_V(String());
}
void GDScriptTokenizerBuffer::advance(int p_amount) {
-
    ERR_FAIL_INDEX(p_amount + token, tokens.size());
    token += p_amount;
}
GDScriptTokenizerBuffer::GDScriptTokenizerBuffer() {
-
    token = 0;
}