Diffstat (limited to 'methods.py')
-rwxr-xr-x   methods.py   303 changed lines
1 file changed, 168 insertions, 135 deletions
diff --git a/methods.py b/methods.py index e3cf51be84..730558a064 100755 --- a/methods.py +++ b/methods.py @@ -15,19 +15,19 @@ def add_source_files(self, sources, filetype, lib_env = None, shared = False): else: for f in filetype: sources.append(self.Object(f)) - -def build_shader_header( target, source, env ): + +def build_shader_header( target, source, env ): for x in source: print x - + name = str(x) name = name[ name.rfind("/")+1: ] name = name[ name.rfind("\\")+1: ] name = name.replace(".","_") - - + + fs = open(str(x),"r") fd = open(str(x)+".h","w") fd.write("/* this file has been generated by SCons, do not edit! */\n") @@ -40,17 +40,17 @@ def build_shader_header( target, source, env ): line=line.replace("\"","\\\"") fd.write("\""+line+"\\n\"\n") line=fs.readline() - + fd.write(";\n") - + return 0 - - -def build_glsl_header( filename ): - - fs = open(filename,"r") + + +def build_glsl_header( filename ): + + fs = open(filename,"r") line=fs.readline() - + vertex_lines=[] fragment_lines=[] uniforms=[] @@ -68,21 +68,21 @@ def build_glsl_header( filename ): fragment_offset=0 while(line): - + if (line.find("[vertex]")!=-1): reading="vertex" line=fs.readline() line_offset+=1 vertex_offset=line_offset continue - + if (line.find("[fragment]")!=-1): reading="fragment" line=fs.readline() line_offset+=1 fragment_offset=line_offset continue - + if (line.find("#ifdef ")!=-1): ifdefline = line.replace("#ifdef ","").strip() if (not ifdefline in conditionals): @@ -159,49 +159,49 @@ def build_glsl_header( filename ): uline = uline.replace("attribute ",""); uline = uline.replace(";",""); uline = uline[ uline.find(" "): ].strip() - - + + if (uline.find("//")!=-1): name,bind = uline.split("//") if (bind.find("attrib:")!=-1): name=name.strip() bind=bind.replace("attrib:","").strip() attributes+=[(name,bind)] - + if (line.strip().find("out ")==0): uline = line.replace("out","").strip(); uline = uline.replace(";",""); uline = uline[ uline.find(" "): ].strip() - - + + if (uline.find("//")!=-1): name,bind = uline.split("//") if (bind.find("drawbuffer:")!=-1): name=name.strip() bind=bind.replace("drawbuffer:","").strip() fbos+=[(name,bind)] - + line=line.replace("\r","") line=line.replace("\n","") line=line.replace("\\","\\\\") line=line.replace("\"","\\\"") #line=line+"\\n\\" no need to anymore - + if (reading=="vertex"): vertex_lines+=[line] if (reading=="fragment"): fragment_lines+=[line] - + line=fs.readline() line_offset+=1 fs.close(); - + out_file = filename+".h" fd = open(out_file,"w") - + fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n"); - + out_file_base = out_file out_file_base = out_file_base[ out_file_base.rfind("/")+1: ] out_file_base = out_file_base[ out_file_base.rfind("\\")+1: ] @@ -209,14 +209,14 @@ def build_glsl_header( filename ): out_file_ifdef = out_file_base.replace(".","_").upper() fd.write("#ifndef "+out_file_ifdef+"\n") fd.write("#define "+out_file_ifdef+"\n") - + out_file_class = out_file_base.replace(".glsl.h","").title().replace("_","").replace(".","")+"ShaderGL"; fd.write("\n\n"); fd.write("#include \"drivers/opengl/shader_gl.h\"\n\n\n"); fd.write("class "+out_file_class+" : public ShaderGL {\n\n"); fd.write("\t virtual String get_shader_name() const { return \""+out_file_class+"\"; }\n"); fd.write("public:\n\n"); - + if (len(conditionals)): fd.write("\tenum Conditionals {\n"); for x in conditionals: @@ -227,11 +227,11 @@ def build_glsl_header( filename ): for x in uniforms: fd.write("\t\t"+x.upper()+",\n"); fd.write("\t};\n\n"); - - fd.write("\t_FORCE_INLINE_ int 
get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n"); + + fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n"); if (len(conditionals)): - fd.write("\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable) { _set_conditional(p_conditional,p_enable); }\n\n"); + fd.write("\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable) { _set_conditional(p_conditional,p_enable); }\n\n"); fd.write("\t#define _FU if (get_uniform(p_uniform)<0) return; ERR_FAIL_COND( get_active()!=this );\n\n "); fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n"); fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, double p_value) { _FU glUniform1f(get_uniform(p_uniform),p_value); }\n\n"); @@ -251,11 +251,11 @@ def build_glsl_header( filename ): fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b) { _FU glUniform2f(get_uniform(p_uniform),p_a,p_b); }\n\n"); fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c) { _FU glUniform3f(get_uniform(p_uniform),p_a,p_b,p_c); }\n\n"); fd.write("\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, float p_a, float p_b, float p_c, float p_d) { _FU glUniform4f(get_uniform(p_uniform),p_a,p_b,p_c,p_d); }\n\n"); - + fd.write("""\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Transform& p_transform) { _FU const Transform &tr = p_transform; - + GLfloat matrix[16]={ /* build a 16x16 matrix */ tr.basis.elements[0][0], tr.basis.elements[1][0], @@ -272,15 +272,15 @@ def build_glsl_header( filename ): tr.origin.x, tr.origin.y, tr.origin.z, - 1 + 1 }; - - + + glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix); - - + + } - + """); fd.write("""\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const Matrix32& p_transform) { _FU @@ -315,25 +315,25 @@ def build_glsl_header( filename ): """); fd.write("""\t_FORCE_INLINE_ void set_uniform(Uniforms p_uniform, const CameraMatrix& p_matrix) { _FU - + GLfloat matrix[16]; - + for (int i=0;i<4;i++) { for (int j=0;j<4;j++) { - + matrix[i*4+j]=p_matrix.matrix[i][j]; - } + } } - + glUniformMatrix4fv(get_uniform(p_uniform),1,false,matrix); }; """); - + fd.write("\n\n#undef _FU\n\n\n"); - - + + fd.write("\tvirtual void init() {\n\n"); if (len(conditionals)): - + fd.write("\t\tstatic const char* _conditional_strings[]={\n") if (len(conditionals)): for x in conditionals: @@ -341,9 +341,9 @@ def build_glsl_header( filename ): fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic const char **_conditional_strings=NULL;\n") - + if (len(uniforms)): - + fd.write("\t\tstatic const char* _uniform_strings[]={\n") if (len(uniforms)): for x in uniforms: @@ -351,18 +351,18 @@ def build_glsl_header( filename ): fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic const char **_uniform_strings=NULL;\n") - + if (len(attributes)): - + fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n") for x in attributes: fd.write("\t\t\t{\""+x[0]+"\","+x[1]+"},\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n") - - + + if (len(fbos)): fd.write("\t\tstatic FBOPair _fbo_pairs[]={\n") for x in fbos: @@ -404,20 +404,20 @@ def build_glsl_header( filename ): 
fd.write("\t\tsetup(_conditional_strings,"+str(len(conditionals))+",_uniform_strings,"+str(len(uniforms))+",_attribute_pairs,"+str(len(attributes))+",_fbo_pairs,"+str(len(fbos))+",_ubo_pairs,"+str(len(ubos))+",_texunit_pairs,"+str(len(texunits))+",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") fd.write("\t};\n\n") - + fd.write("};\n\n"); fd.write("#endif\n\n"); fd.close(); - -def build_glsl_headers( target, source, env ): + +def build_glsl_headers( target, source, env ): for x in source: - + build_glsl_header(str(x)); - - return 0 + + return 0 @@ -664,44 +664,64 @@ def build_hlsl_dx9_headers( target, source, env ): return 0 -def build_legacygl_header( filename, include, class_suffix, output_attribs ): - +class LegacyGLHeaderStruct: + def __init__(self): + self.vertex_lines=[] + self.fragment_lines=[] + self.uniforms=[] + self.attributes=[] + self.fbos=[] + self.conditionals=[] + self.enums={} + self.texunits=[] + self.texunit_names=[] + self.ubos=[] + self.ubo_names=[] + + self.vertex_included_files=[] + self.fragment_included_files=[] + + self.reading="" + self.line_offset=0 + self.vertex_offset=0 + self.fragment_offset=0 + +def include_file_in_legacygl_header( filename, header_data, depth ): fs = open(filename,"r") line=fs.readline() - vertex_lines=[] - fragment_lines=[] - uniforms=[] - attributes=[] - fbos=[] - conditionals=[] - enums={} - enum_constants=[] - texunits=[] - texunit_names=[] - ubos=[] - ubo_names=[] - - reading="" - line_offset=0 - vertex_offset=0 - fragment_offset=0 - while(line): if (line.find("[vertex]")!=-1): - reading="vertex" + header_data.reading="vertex" line=fs.readline() - line_offset+=1 - vertex_offset=line_offset + header_data.line_offset+=1 + header_data.vertex_offset=header_data.line_offset continue if (line.find("[fragment]")!=-1): - reading="fragment" + header_data.reading="fragment" line=fs.readline() - line_offset+=1 - fragment_offset=line_offset + header_data.line_offset+=1 + header_data.fragment_offset=header_data.line_offset continue + + while(line.find("#include ")!=-1): + includeline = line.replace("#include ","").strip()[1:-1] + + import os.path + + included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline) + if (not included_file in header_data.vertex_included_files and header_data.reading=="vertex"): + header_data.vertex_included_files+=[included_file] + if(include_file_in_legacygl_header( included_file, header_data, depth + 1 ) == None): + print "Error in file '" + filename + "': #include " + includeline + "could not be found!" + elif (not included_file in header_data.fragment_included_files and header_data.reading=="fragment"): + header_data.fragment_included_files+=[included_file] + if(include_file_in_legacygl_header( included_file, header_data, depth + 1 ) == None): + print "Error in file '" + filename + "': #include " + includeline + "could not be found!" 
+ + line=fs.readline() if (line.find("#ifdef ")!=-1 or line.find("#elif defined(")!=-1): if (line.find("#ifdef ")!=-1): @@ -715,13 +735,13 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): ifdefline = ifdefline.replace("_EN_","_") line = line.replace("_EN_","_") # print(enumbase+":"+ifdefline); - if (enumbase not in enums): - enums[enumbase]=[] - if (ifdefline not in enums[enumbase]): - enums[enumbase].append(ifdefline); + if (enumbase not in header_data.enums): + header_data.enums[enumbase]=[] + if (ifdefline not in header_data.enums[enumbase]): + header_data.enums[enumbase].append(ifdefline); - elif (not ifdefline in conditionals): - conditionals+=[ifdefline] + elif (not ifdefline in header_data.conditionals): + header_data.conditionals+=[ifdefline] if (line.find("uniform")!=-1 and line.lower().find("texunit:")!=-1): #texture unit @@ -743,9 +763,9 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): #unfiorm array x = x[ :x.find("[") ] - if (not x in texunit_names): - texunits+=[(x,texunit)] - texunit_names+=[x] + if (not x in header_data.texunit_names): + header_data.texunits+=[(x,texunit)] + header_data.texunit_names+=[x] @@ -761,8 +781,8 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): #unfiorm array x = x[ :x.find("[") ] - if (not x in uniforms): - uniforms+=[x] + if (not x in header_data.uniforms): + header_data.uniforms+=[x] if ((line.strip().find("in ")==0 or line.strip().find("attribute ")==0) and line.find("attrib:")!=-1): @@ -778,7 +798,7 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): if (bind.find("attrib:")!=-1): name=name.strip() bind=bind.replace("attrib:","").strip() - attributes+=[(name,bind)] + header_data.attributes+=[(name,bind)] line=line.replace("\r","") @@ -787,18 +807,29 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): #line=line.replace("\"","\\\"") #line=line+"\\n\\" - if (reading=="vertex"): - vertex_lines+=[line] - if (reading=="fragment"): - fragment_lines+=[line] + if (header_data.reading=="vertex"): + header_data.vertex_lines+=[line] + if (header_data.reading=="fragment"): + header_data.fragment_lines+=[line] line=fs.readline() - line_offset+=1 - + header_data.line_offset+=1 + fs.close(); + + return header_data + + + +def build_legacygl_header( filename, include, class_suffix, output_attribs ): + + header_data = LegacyGLHeaderStruct() + include_file_in_legacygl_header( filename, header_data, 0 ) out_file = filename+".h" fd = open(out_file,"w") + + enum_constants=[] fd.write("/* WARNING, THIS FILE WAS GENERATED, DO NOT EDIT */\n"); @@ -819,21 +850,21 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): fd.write("public:\n\n"); - if (len(conditionals)): + if (len(header_data.conditionals)): fd.write("\tenum Conditionals {\n"); - for x in conditionals: + for x in header_data.conditionals: fd.write("\t\t"+x.upper()+",\n"); fd.write("\t};\n\n"); - if (len(uniforms)): + if (len(header_data.uniforms)): fd.write("\tenum Uniforms {\n"); - for x in uniforms: + for x in header_data.uniforms: fd.write("\t\t"+x.upper()+",\n"); fd.write("\t};\n\n"); fd.write("\t_FORCE_INLINE_ int get_uniform(Uniforms p_uniform) const { return _get_uniform(p_uniform); }\n\n"); - if (len(conditionals)): + if (len(header_data.conditionals)): fd.write("\t_FORCE_INLINE_ void set_conditional(Conditionals p_conditional,bool p_enable) { _set_conditional(p_conditional,p_enable); }\n\n"); fd.write("\t#define _FU if 
(get_uniform(p_uniform)<0) return; ERR_FAIL_COND( get_active()!=this );\n\n "); @@ -940,16 +971,16 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): enum_value_count=0; - if (len(enums)): + if (len(header_data.enums)): fd.write("\t\t//Written using math, given nonstandarity of 64 bits integer constants..\n"); fd.write("\t\tstatic const Enum _enums[]={\n") - bitofs=len(conditionals) + bitofs=len(header_data.conditionals) enum_vals=[] - for xv in enums: - x=enums[xv] + for xv in header_data.enums: + x=header_data.enums[xv] bits=1 amt = len(x); # print(x) @@ -985,70 +1016,70 @@ def build_legacygl_header( filename, include, class_suffix, output_attribs ): fd.write("\t\tstatic const Enum *_enums=NULL;\n") fd.write("\t\tstatic const EnumValue *_enum_values=NULL;\n") - if (len(conditionals)): + if (len(header_data.conditionals)): fd.write("\t\tstatic const char* _conditional_strings[]={\n") - if (len(conditionals)): - for x in conditionals: + if (len(header_data.conditionals)): + for x in header_data.conditionals: fd.write("\t\t\t\"#define "+x+"\\n\",\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic const char **_conditional_strings=NULL;\n") - if (len(uniforms)): + if (len(header_data.uniforms)): fd.write("\t\tstatic const char* _uniform_strings[]={\n") - if (len(uniforms)): - for x in uniforms: + if (len(header_data.uniforms)): + for x in header_data.uniforms: fd.write("\t\t\t\""+x+"\",\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic const char **_uniform_strings=NULL;\n") if output_attribs: - if (len(attributes)): + if (len(header_data.attributes)): fd.write("\t\tstatic AttributePair _attribute_pairs[]={\n") - for x in attributes: + for x in header_data.attributes: fd.write("\t\t\t{\""+x[0]+"\","+x[1]+"},\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic AttributePair *_attribute_pairs=NULL;\n") - if (len(texunits)): + if (len(header_data.texunits)): fd.write("\t\tstatic TexUnitPair _texunit_pairs[]={\n") - for x in texunits: + for x in header_data.texunits: fd.write("\t\t\t{\""+x[0]+"\","+x[1]+"},\n"); fd.write("\t\t};\n\n"); else: fd.write("\t\tstatic TexUnitPair *_texunit_pairs=NULL;\n") fd.write("\t\tstatic const char _vertex_code[]={\n") - for x in vertex_lines: + for x in header_data.vertex_lines: for i in range(len(x)): fd.write(str(ord(x[i]))+","); fd.write(str(ord('\n'))+","); fd.write("\t\t0};\n\n"); - fd.write("\t\tstatic const int _vertex_code_start="+str(vertex_offset)+";\n") + fd.write("\t\tstatic const int _vertex_code_start="+str(header_data.vertex_offset)+";\n") fd.write("\t\tstatic const char _fragment_code[]={\n") - for x in fragment_lines: + for x in header_data.fragment_lines: for i in range(len(x)): fd.write(str(ord(x[i]))+","); fd.write(str(ord('\n'))+","); fd.write("\t\t0};\n\n"); - fd.write("\t\tstatic const int _fragment_code_start="+str(fragment_offset)+";\n") + fd.write("\t\tstatic const int _fragment_code_start="+str(header_data.fragment_offset)+";\n") if output_attribs: - fd.write("\t\tsetup(_conditional_strings,"+str(len(conditionals))+",_uniform_strings,"+str(len(uniforms))+",_attribute_pairs,"+str(len(attributes))+", _texunit_pairs,"+str(len(texunits))+",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write("\t\tsetup(_conditional_strings,"+str(len(header_data.conditionals))+",_uniform_strings,"+str(len(header_data.uniforms))+",_attribute_pairs,"+str(len(header_data.attributes))+", 
_texunit_pairs,"+str(len(header_data.texunits))+",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") else: - fd.write("\t\tsetup(_conditional_strings,"+str(len(conditionals))+",_uniform_strings,"+str(len(uniforms))+",_texunit_pairs,"+str(len(texunits))+",_enums,"+str(len(enums))+",_enum_values,"+str(enum_value_count)+",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") + fd.write("\t\tsetup(_conditional_strings,"+str(len(header_data.conditionals))+",_uniform_strings,"+str(len(header_data.uniforms))+",_texunit_pairs,"+str(len(header_data.texunits))+",_enums,"+str(len(header_data.enums))+",_enum_values,"+str(enum_value_count)+",_vertex_code,_fragment_code,_vertex_code_start,_fragment_code_start);\n") fd.write("\t};\n\n") @@ -1084,18 +1115,20 @@ def build_gles2_headers( target, source, env ): def update_version(): rev = "custom_build" - + if (os.getenv("BUILD_REVISION")!=None): rev=os.getenv("BUILD_REVISION") print("Using custom revision: "+rev) import version - + f=open("core/version.h","wb") f.write("#define VERSION_SHORT_NAME "+str(version.short_name)+"\n") f.write("#define VERSION_NAME "+str(version.name)+"\n") f.write("#define VERSION_MAJOR "+str(version.major)+"\n") f.write("#define VERSION_MINOR "+str(version.minor)+"\n") + if (hasattr(version, 'patch')): + f.write("#define VERSION_PATCH "+str(version.patch)+"\n") f.write("#define VERSION_REVISION "+str(rev)+"\n") f.write("#define VERSION_STATUS "+str(version.status)+"\n") import datetime @@ -1132,7 +1165,7 @@ def parse_cg_file(fname, uniforms, sizes, conditionals): line = fs.readline(); - + def build_cg_shader(sname): vp_uniforms = [] @@ -1170,7 +1203,7 @@ def build_cg_shader(sname): fd.write("\t};\n"); - + import glob def detect_modules(): @@ -1369,7 +1402,7 @@ def save_active_platforms(apnames,ap): str+="," str+="};\n" - + wf = x+"/logo.h" logow = open(wf,"wb") logow.write(str) |
