Diffstat (limited to 'modules/gdscript/gd_parser.cpp')
 modules/gdscript/gd_parser.cpp | 102 ++++++++++++++++++++++++++++++++++------
 1 file changed, 92 insertions(+), 10 deletions(-)
diff --git a/modules/gdscript/gd_parser.cpp b/modules/gdscript/gd_parser.cpp
index f540660cd3..ef9e85a8c2 100644
--- a/modules/gdscript/gd_parser.cpp
+++ b/modules/gdscript/gd_parser.cpp
@@ -29,6 +29,7 @@
#include "gd_parser.h"
#include "print_string.h"
#include "io/resource_loader.h"
+#include "os/file_access.h"
/* TODO:
*Property reduce constant expressions
@@ -224,12 +225,23 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
String path = tokenizer->get_token_constant();
if (!path.is_abs_path() && base_path!="")
path=base_path+"/"+path;
- path = path.replace("///","//");
+ path = path.replace("///","//");
- Ref<Resource> res = ResourceLoader::load(path);
- if (!res.is_valid()) {
- _set_error("Can't preload resource at path: "+path);
- return NULL;
+ Ref<Resource> res;
+ if (!validating) {
+
+ //this can be too slow for just validating code
+ res = ResourceLoader::load(path);
+ if (!res.is_valid()) {
+ _set_error("Can't preload resource at path: "+path);
+ return NULL;
+ }
+ } else {
+
+ if (!FileAccess::exists(path)) {
+ _set_error("Can't preload resource at path: "+path);
+ return NULL;
+ }
}
tokenizer->advance();
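
The change above is what makes validation mode cheap for preload(): instead of actually loading the referenced resource through ResourceLoader, the parser only checks that the target file exists. A minimal sketch of the two paths, using the same engine calls as the diff; the standalone helper itself is illustrative and not part of the patch:

    #include "io/resource_loader.h"
    #include "os/file_access.h"

    // Illustrative helper, not engine code: mirrors the branch added above.
    static bool preload_target_ok(const String &p_path, bool p_validating) {

        if (p_validating)
            return FileAccess::exists(p_path); // cheap existence check, no load

        Ref<Resource> res = ResourceLoader::load(p_path); // full load, can be slow
        return res.is_valid();
    }
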
@@ -244,6 +256,55 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
tokenizer->advance();
expr=constant;
+ } else if (tokenizer->get_token()==GDTokenizer::TK_PR_YIELD) {
+
+ //yield expression: yield() or yield(object,signal)
+
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
+ _set_error("Expected '(' after 'yield'");
+ return NULL;
+ }
+
+ tokenizer->advance();
+
+ OperatorNode *yield = alloc_node<OperatorNode>();
+ yield->op=OperatorNode::OP_YIELD;
+
+ if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ expr=yield;
+ tokenizer->advance();
+ } else {
+
+ Node *object = _parse_and_reduce_expression(p_parent,p_static);
+ if (!object)
+ return NULL;
+ yield->arguments.push_back(object);
+
+ if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
+
+ _set_error("Expected ',' after first argument of 'yield'");
+ return NULL;
+ }
+
+ tokenizer->advance();
+
+ Node *signal = _parse_and_reduce_expression(p_parent,p_static);
+ if (!signal)
+ return NULL;
+ yield->arguments.push_back(signal);
+
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+
+ _set_error("Expected ')' after second argument of 'yield'");
+ return NULL;
+ }
+
+ tokenizer->advance();
+
+ expr=yield;
+ }
+
} else if (tokenizer->get_token()==GDTokenizer::TK_SELF) {
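
The new TK_PR_YIELD branch accepts two forms of the expression: a bare yield(), and yield(object, signal) with exactly two comma-separated arguments (for example something like yield(get_node("Button"), "pressed") in a script). Both forms end up as an OperatorNode with op set to OP_YIELD; a recap of the node shape built above, using the names from the diff (commentary only, not additional code):

    OperatorNode *yield = alloc_node<OperatorNode>();
    yield->op = OperatorNode::OP_YIELD;
    yield->arguments.push_back(object); // sub-expression: the object to wait on
    yield->arguments.push_back(signal); // sub-expression: the signal to wait for
    // A bare "yield()" produces the same node with an empty argument list.
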
@@ -1055,6 +1116,10 @@ GDParser::Node* GDParser::_reduce_expression(Node *p_node,bool p_to_const) {
}
return op; //don't reduce yet
+
+ } else if (op->op==OperatorNode::OP_YIELD) {
+ return op;
+
} else if (op->op==OperatorNode::OP_INDEX) {
//can reduce indices into constant arrays or dictionaries
@@ -1221,6 +1286,15 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
return; //go back a level
}
+ if (pending_newline!=-1) {
+
+ NewLineNode *nl = alloc_node<NewLineNode>();
+ nl->line=pending_newline;
+ p_block->statements.push_back(nl);
+ pending_newline=-1;
+
+ }
+
switch(token) {
@@ -1234,16 +1308,19 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
} break;
case GDTokenizer::TK_NEWLINE: {
- NewLineNode *nl = alloc_node<NewLineNode>();
- nl->line=tokenizer->get_token_line();
- p_block->statements.push_back(nl);
-
if (!_parse_newline()) {
if (!error_set) {
p_block->end_line=tokenizer->get_token_line();
+ pending_newline=p_block->end_line;
+
}
return;
}
+
+ NewLineNode *nl = alloc_node<NewLineNode>();
+ nl->line=tokenizer->get_token_line();
+ p_block->statements.push_back(nl);
+
} break;
case GDTokenizer::TK_CF_PASS: {
if (tokenizer->get_token(1)!=GDTokenizer::TK_SEMICOLON && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE ) {
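
Together with the pending_newline flush added near line 1286 above, this hunk defers the NewLineNode for a newline that also closes the current block: the line number is parked in pending_newline, and it is only turned into a statement once the parser knows which block is still open. A small, self-contained sketch of that defer-and-flush pattern (plain C++, illustrative only, not engine code):

    #include <vector>

    struct BlockSketch {
        std::vector<int> newline_lines; // stands in for the block's NewLineNode statements
        int pending_newline = -1;       // newline seen at a dedent, parked until the
                                        // owning block is known

        // Called for each newline token. If the indentation change also ended the
        // current block, park the line instead of recording it in that block.
        void on_newline(int line, bool block_ended) {
            if (block_ended) {
                pending_newline = line;
                return;
            }
            newline_lines.push_back(line);
        }

        // Called at the top of the enclosing block's statement loop: flush the
        // parked newline into the block that is actually still open.
        void flush_pending() {
            if (pending_newline != -1) {
                newline_lines.push_back(pending_newline);
                pending_newline = -1;
            }
        }
    };
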
@@ -1782,6 +1859,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
case GDTokenizer::TK_PR_FUNCTION: {
bool _static=false;
+ pending_newline=-1;
if (tokenizer->get_token(-1)==GDTokenizer::TK_PR_STATIC) {
@@ -2455,12 +2533,13 @@ Error GDParser::parse_bytecode(const Vector<uint8_t> &p_bytecode,const String& p
}
-Error GDParser::parse(const String& p_code,const String& p_base_path) {
+Error GDParser::parse(const String& p_code,const String& p_base_path,bool p_just_validate) {
GDTokenizerText *tt = memnew( GDTokenizerText );
tt->set_code(p_code);
+ validating=p_just_validate;
tokenizer=tt;
Error ret = _parse(p_base_path);
memdelete(tt);
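
With the extra parameter, a caller that only needs to check a script for errors can ask for validation mode; combined with the preload() change above, this avoids loading referenced resources from disk. A hedged sketch of the caller side, where source_code and base_path stand for the script text and its base directory, and the error-reporting accessors are assumed to match the existing GDParser API:

    GDParser parser;
    Error err = parser.parse(source_code, base_path, true); // p_just_validate = true
    if (err != OK) {
        // Inspect the parser's error string / line here (assumed accessors on GDParser).
    }
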
@@ -2485,11 +2564,13 @@ void GDParser::clear() {
head=NULL;
list=NULL;
+ validating=false;
error_set=false;
tab_level.clear();
tab_level.push_back(0);
error_line=0;
error_column=0;
+ pending_newline=-1;
parenthesis=0;
current_export.type=Variant::NIL;
error="";
@@ -2501,6 +2582,7 @@ GDParser::GDParser() {
head=NULL;
list=NULL;
tokenizer=NULL;
+ pending_newline=-1;
clear();
}