mirror of https://github.com/godotengine/godot.git

-scripts are converted to bytecode on export

-fix bug in doc where touchscreen events were not documented
Author: Juan Linietsky
Date: 2014-02-25 09:31:47 -03:00
parent 06e358199f
commit b2ce682f6e
20 changed files with 957 additions and 336 deletions
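
The headline change: on export, GDScript source is tokenized once and serialized into a compact binary stream (magic "GDSC") that the engine can load in place of the .gd text. As an editor's sketch, not part of the commit, a plausible round trip through the two tokenizer classes introduced below; treating parse_code_string() as callable without an instance is an assumption, since the header is not part of this excerpt.

// Editor's sketch, not from the commit: round trip through the new API.
// parse_code_string() and set_code_buffer() are from this commit; the
// static call style is an assumption (gd_tokenizer.h is not shown here).
#include "gd_tokenizer.h"

Error compile_and_reload(const String &p_source) {

    //text -> bytecode: tokenize once and pack into a "GDSC" buffer
    Vector<uint8_t> bytecode = GDTokenizerBuffer::parse_code_string(p_source);
    if (bytecode.empty())
        return ERR_PARSE_ERROR; //parse_code_string() returns an empty vector on tokenizer error

    //bytecode -> token stream: the parser can consume this instead of re-tokenizing text
    GDTokenizerBuffer tb;
    return tb.set_code_buffer(bytecode);
}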

modules/gdscript/gd_tokenizer.cpp

@@ -29,6 +29,9 @@
#include "gd_tokenizer.h"
#include "print_string.h"
#include "gd_functions.h"
#include "io/marshalls.h"
#include "map.h"
const char* GDTokenizer::token_names[TK_MAX]={
"Empty",
"Identifier",
@@ -128,7 +131,7 @@ static bool _is_hex(CharType c) {
return (c>='0' && c<='9') || (c>='a' && c<='f') || (c>='A' && c<='F');
}
-void GDTokenizer::_make_token(Token p_type) {
+void GDTokenizerText::_make_token(Token p_type) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -138,7 +141,7 @@ void GDTokenizer::_make_token(Token p_type) {
tk_rb_pos=(tk_rb_pos+1)%TK_RB_SIZE;
}
-void GDTokenizer::_make_identifier(const StringName& p_identifier) {
+void GDTokenizerText::_make_identifier(const StringName& p_identifier) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -151,7 +154,7 @@ void GDTokenizer::_make_identifier(const StringName& p_identifier) {
}
-void GDTokenizer::_make_built_in_func(GDFunctions::Function p_func) {
+void GDTokenizerText::_make_built_in_func(GDFunctions::Function p_func) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -163,7 +166,7 @@ void GDTokenizer::_make_built_in_func(GDFunctions::Function p_func) {
tk_rb_pos=(tk_rb_pos+1)%TK_RB_SIZE;
}
-void GDTokenizer::_make_constant(const Variant& p_constant) {
+void GDTokenizerText::_make_constant(const Variant& p_constant) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -176,7 +179,7 @@ void GDTokenizer::_make_constant(const Variant& p_constant) {
}
-void GDTokenizer::_make_type(const Variant::Type& p_type) {
+void GDTokenizerText::_make_type(const Variant::Type& p_type) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -191,7 +194,7 @@ void GDTokenizer::_make_type(const Variant::Type& p_type) {
}
-void GDTokenizer::_make_error(const String& p_error) {
+void GDTokenizerText::_make_error(const String& p_error) {
error_flag=true;
last_error=p_error;
@@ -206,7 +209,7 @@ void GDTokenizer::_make_error(const String& p_error) {
}
-void GDTokenizer::_make_newline(int p_spaces) {
+void GDTokenizerText::_make_newline(int p_spaces) {
TokenData &tk=tk_rb[tk_rb_pos];
tk.type=TK_NEWLINE;
@@ -216,7 +219,7 @@ void GDTokenizer::_make_newline(int p_spaces) {
tk_rb_pos=(tk_rb_pos+1)%TK_RB_SIZE;
}
-void GDTokenizer::_advance() {
+void GDTokenizerText::_advance() {
if (error_flag) {
//parser broke
@@ -859,7 +862,7 @@ void GDTokenizer::_advance() {
}
-void GDTokenizer::set_code(const String& p_code) {
+void GDTokenizerText::set_code(const String& p_code) {
code=p_code;
len = p_code.length();
@@ -878,7 +881,7 @@ void GDTokenizer::set_code(const String& p_code) {
_advance();
}
-GDTokenizer::Token GDTokenizer::get_token(int p_offset) const {
+GDTokenizerText::Token GDTokenizerText::get_token(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, TK_ERROR);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, TK_ERROR);
@@ -886,7 +889,7 @@ GDTokenizer::Token GDTokenizer::get_token(int p_offset) const {
return tk_rb[ofs].type;
}
-int GDTokenizer::get_token_line(int p_offset) const {
+int GDTokenizerText::get_token_line(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, -1);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, -1);
@@ -894,7 +897,7 @@ int GDTokenizer::get_token_line(int p_offset) const {
return tk_rb[ofs].line;
}
-int GDTokenizer::get_token_column(int p_offset) const {
+int GDTokenizerText::get_token_column(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, -1);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, -1);
@@ -902,7 +905,7 @@ int GDTokenizer::get_token_column(int p_offset) const {
return tk_rb[ofs].col;
}
-const Variant& GDTokenizer::get_token_constant(int p_offset) const {
+const Variant& GDTokenizerText::get_token_constant(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, tk_rb[0].constant);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, tk_rb[0].constant);
@@ -910,7 +913,7 @@ const Variant& GDTokenizer::get_token_constant(int p_offset) const {
ERR_FAIL_COND_V(tk_rb[ofs].type!=TK_CONSTANT,tk_rb[0].constant);
return tk_rb[ofs].constant;
}
-StringName GDTokenizer::get_token_identifier(int p_offset) const {
+StringName GDTokenizerText::get_token_identifier(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, StringName());
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, StringName());
@@ -921,7 +924,7 @@ StringName GDTokenizer::get_token_identifier(int p_offset) const {
}
-GDFunctions::Function GDTokenizer::get_token_built_in_func(int p_offset) const {
+GDFunctions::Function GDTokenizerText::get_token_built_in_func(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, GDFunctions::FUNC_MAX);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, GDFunctions::FUNC_MAX);
@@ -932,7 +935,7 @@ GDFunctions::Function GDTokenizer::get_token_built_in_func(int p_offset) const {
}
-Variant::Type GDTokenizer::get_token_type(int p_offset) const {
+Variant::Type GDTokenizerText::get_token_type(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, Variant::NIL);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, Variant::NIL);
@@ -944,7 +947,7 @@ Variant::Type GDTokenizer::get_token_type(int p_offset) const {
}
-int GDTokenizer::get_token_line_indent(int p_offset) const {
+int GDTokenizerText::get_token_line_indent(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, 0);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, 0);
@@ -955,7 +958,7 @@ int GDTokenizer::get_token_line_indent(int p_offset) const {
}
-String GDTokenizer::get_token_error(int p_offset) const {
+String GDTokenizerText::get_token_error(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, String());
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, String());
@@ -965,9 +968,377 @@ String GDTokenizer::get_token_error(int p_offset) const {
return tk_rb[ofs].constant;
}
-void GDTokenizer::advance(int p_amount) {
+void GDTokenizerText::advance(int p_amount) {
ERR_FAIL_COND( p_amount <=0 );
for(int i=0;i<p_amount;i++)
_advance();
}
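
Every hunk above is the same mechanical rename: the text tokenizer's methods move from GDTokenizer to GDTokenizerText, freeing the old name for a common base class that the new buffer-backed tokenizer below can also implement. The base class lives in gd_tokenizer.h, which is not part of this excerpt; the following sketch of its shape is an editor's assumption inferred from the methods renamed above.

// Editor's sketch (assumed, not from this diff): the interface implied by
// the renames, with GDTokenizerText and GDTokenizerBuffer as the two
// implementations. Exact signatures and the Token enum live in gd_tokenizer.h.
class GDTokenizer {
public:
    enum Token { /* Empty, Identifier, ... as in token_names above */ };

    virtual Token get_token(int p_offset=0) const=0;
    virtual StringName get_token_identifier(int p_offset=0) const=0;
    virtual GDFunctions::Function get_token_built_in_func(int p_offset=0) const=0;
    virtual Variant::Type get_token_type(int p_offset=0) const=0;
    virtual int get_token_line(int p_offset=0) const=0;
    virtual int get_token_column(int p_offset=0) const=0;
    virtual int get_token_line_indent(int p_offset=0) const=0;
    virtual const Variant& get_token_constant(int p_offset=0) const=0;
    virtual String get_token_error(int p_offset=0) const=0;
    virtual void advance(int p_amount=1)=0;
    virtual ~GDTokenizer() {}
};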
//////////////////////////////////////////////////////////////////////////////////////////////////////
#define BYTECODE_VERSION 1
Error GDTokenizerBuffer::set_code_buffer(const Vector<uint8_t> & p_buffer) {

    const uint8_t *buf=p_buffer.ptr();
    int total_len=p_buffer.size();
    ERR_FAIL_COND_V( p_buffer.size()<24 || p_buffer[0]!='G' || p_buffer[1]!='D' || p_buffer[2]!='S' || p_buffer[3]!='C',ERR_INVALID_DATA);

    int version = decode_uint32(&buf[4]);
    if (version>1) {
        ERR_EXPLAIN("Bytecode is too new!");
        ERR_FAIL_COND_V(version>BYTECODE_VERSION,ERR_INVALID_DATA);
    }

    int identifier_count = decode_uint32(&buf[8]);
    int constant_count = decode_uint32(&buf[12]);
    int line_count = decode_uint32(&buf[16]);
    int token_count = decode_uint32(&buf[20]);

    const uint8_t *b=buf;

    b=&buf[24];
    total_len-=24;

    identifiers.resize(identifier_count);
    for(int i=0;i<identifier_count;i++) {

        int len = decode_uint32(b);
        ERR_FAIL_COND_V(len>total_len,ERR_INVALID_DATA);
        b+=4;

        Vector<uint8_t> cs;
        cs.resize(len);
        for(int j=0;j<len;j++) {
            cs[j]=b[j]^0xb6; //identifiers are stored XORed with 0xb6 (light obfuscation)
        }
        cs[cs.size()-1]=0; //last byte is padding; force null termination

        String s;
        s.parse_utf8((const char*)cs.ptr());
        b+=len;
        total_len-=len+4;
        identifiers[i]=s;
    }

    constants.resize(constant_count);
    for(int i=0;i<constant_count;i++) {

        Variant v;
        int len;
        Error err = decode_variant(v,b,total_len,&len);
        if (err)
            return err;
        b+=len;
        total_len-=len;
        constants[i]=v;
    }

    ERR_FAIL_COND_V(line_count*8>total_len,ERR_INVALID_DATA);

    for(int i=0;i<line_count;i++) {

        uint32_t token=decode_uint32(b);
        b+=4;
        uint32_t linecol=decode_uint32(b);
        b+=4;

        lines.insert(token,linecol);
        total_len-=8;
    }

    tokens.resize(token_count);

    for(int i=0;i<token_count;i++) {

        ERR_FAIL_COND_V( total_len < 1, ERR_INVALID_DATA);

        if ((*b)&TOKEN_BYTE_MASK) { //little endian always
            ERR_FAIL_COND_V( total_len < 4, ERR_INVALID_DATA);
            tokens[i]=decode_uint32(b)&~TOKEN_BYTE_MASK;
            b+=4;
            total_len-=4; //keep the length bookkeeping consistent with the one-byte branch
        } else {
            tokens[i]=*b;
            b+=1;
            total_len--;
        }
    }

    token=0;

    return OK;
}
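
Read back-to-front, set_code_buffer() also documents the container format. The following layout summary is an editor's annotation reconstructed from the reads above; all multi-byte integers are little-endian uint32.

// Editor's annotation of the GDSC layout, as decoded by set_code_buffer():
//
//   0..3    magic "GDSC"
//   4..7    version (rejected when > BYTECODE_VERSION)
//   8..11   identifier_count
//   12..15  constant_count
//   16..19  line_count
//   20..23  token_count
//   24..    identifiers: per entry, a uint32 padded length, then that many
//           UTF-8 bytes XORed with 0xb6 (obfuscation, not encryption)
//           constants:   encode_variant() blobs, back to back
//           lines:       line_count pairs of uint32 (token index, line info)
//           tokens:      1 byte each, or 4 bytes when the first byte has
//                        TOKEN_BYTE_MASK set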
Vector<uint8_t> GDTokenizerBuffer::parse_code_string(const String& p_code) {

    Vector<uint8_t> buf;

    Map<StringName,int> identifier_map;
    HashMap<Variant,int,VariantHasher> constant_map;
    Map<uint32_t,int> line_map;
    Vector<uint32_t> token_array;

    GDTokenizerText tt;
    tt.set_code(p_code);
    int line=-1;
    int col=0;

    while(true) {

        if (tt.get_token_line()!=line) {
            line=tt.get_token_line();
            line_map[line]=token_array.size(); //remember the first token of each line
        }

        uint32_t token=tt.get_token();
        switch(tt.get_token()) {

            case TK_IDENTIFIER: {
                StringName id = tt.get_token_identifier();
                if (!identifier_map.has(id)) {
                    int idx = identifier_map.size();
                    identifier_map[id]=idx;
                }
                token|=identifier_map[id]<<TOKEN_BITS;
            } break;
            case TK_CONSTANT: {
                Variant c = tt.get_token_constant();
                if (!constant_map.has(c)) {
                    int idx = constant_map.size();
                    constant_map[c]=idx;
                }
                token|=constant_map[c]<<TOKEN_BITS;
            } break;
            case TK_BUILT_IN_TYPE: {
                token|=tt.get_token_type()<<TOKEN_BITS;
            } break;
            case TK_BUILT_IN_FUNC: {
                token|=tt.get_token_built_in_func()<<TOKEN_BITS;
            } break;
            case TK_NEWLINE: {
                token|=tt.get_token_line_indent()<<TOKEN_BITS;
            } break;
            case TK_ERROR: {
                ERR_FAIL_V(Vector<uint8_t>());
            } break;
            default: {}
        };

        token_array.push_back(token);

        if (tt.get_token()==TK_EOF)
            break;

        tt.advance();
    }

    //reverse maps

    Map<int,StringName> rev_identifier_map;
    for(Map<StringName,int>::Element *E=identifier_map.front();E;E=E->next()) {
        rev_identifier_map[E->get()]=E->key();
    }

    Map<int,Variant> rev_constant_map;
    const Variant *K=NULL;
    while((K=constant_map.next(K))) {
        rev_constant_map[constant_map[*K]]=*K;
    }

    Map<int,uint32_t> rev_line_map;
    for(Map<uint32_t,int>::Element *E=line_map.front();E;E=E->next()) {
        rev_line_map[E->get()]=E->key();
    }

    //save header
    buf.resize(24);
    buf[0]='G';
    buf[1]='D';
    buf[2]='S';
    buf[3]='C';
    encode_uint32(BYTECODE_VERSION,&buf[4]);
    encode_uint32(identifier_map.size(),&buf[8]);
    encode_uint32(constant_map.size(),&buf[12]);
    encode_uint32(line_map.size(),&buf[16]);
    encode_uint32(token_array.size(),&buf[20]);

    //save identifiers

    for(Map<int,StringName>::Element *E=rev_identifier_map.front();E;E=E->next()) {

        CharString cs = String(E->get()).utf8();
        int len = cs.length()+1;
        int extra = 4-(len%4); //pad to a multiple of 4 bytes
        if (extra==4)
            extra=0;

        uint8_t ibuf[4];
        encode_uint32(len+extra,ibuf);
        for(int i=0;i<4;i++) {
            buf.push_back(ibuf[i]);
        }
        for(int i=0;i<len;i++) {
            buf.push_back(cs[i]^0xb6);
        }
        for(int i=0;i<extra;i++) {
            buf.push_back(0^0xb6); //pad bytes get the same obfuscation
        }
    }

    for(Map<int,Variant>::Element *E=rev_constant_map.front();E;E=E->next()) {

        int len;
        Error err = encode_variant(E->get(),NULL,len); //first pass computes the size
        ERR_FAIL_COND_V(err!=OK,Vector<uint8_t>());
        int pos=buf.size();
        buf.resize(pos+len);
        encode_variant(E->get(),&buf[pos],len);
    }

    for(Map<int,uint32_t>::Element *E=rev_line_map.front();E;E=E->next()) {

        uint8_t ibuf[8];
        encode_uint32(E->key(),&ibuf[0]);
        encode_uint32(E->get(),&ibuf[4]);
        for(int i=0;i<8;i++)
            buf.push_back(ibuf[i]);
    }

    for(int i=0;i<token_array.size();i++) {

        uint32_t token = token_array[i];
        if (token&~TOKEN_MASK) { //token carries an argument in the high bits: store 4 bytes
            uint8_t buf4[4];
            encode_uint32(token_array[i]|TOKEN_BYTE_MASK,&buf4[0]);
            for(int j=0;j<4;j++) {
                buf.push_back(buf4[j]);
            }
        } else {
            buf.push_back(token);
        }
    }

    return buf;
}
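
Writer and reader agree on one packing trick: the low bits of each 32-bit token hold the Token enum value, and the high bits hold a per-token argument (identifier index, constant index, Variant::Type, built-in function, or indent depth). Tokens without an argument serialize as a single byte. The constants below are editor's assumptions consistent with the masking above; the real values are defined in gd_tokenizer.h.

// Editor's sketch of the packing scheme; the concrete constants are
// assumptions (defined for real in gd_tokenizer.h).
enum {
    TOKEN_BITS=8,                  //assumed width of the token-type field
    TOKEN_MASK=(1<<TOKEN_BITS)-1,  //low bits: the Token enum value
    TOKEN_BYTE_MASK=0x80           //first-byte flag: "4-byte token follows"
};

static inline uint32_t pack_token(uint32_t p_type,uint32_t p_arg) {
    return p_type|(p_arg<<TOKEN_BITS); //arg: identifier/constant index, type, indent...
}
static inline uint32_t unpack_type(uint32_t p_token) { return p_token&TOKEN_MASK; }
static inline uint32_t unpack_arg(uint32_t p_token) { return p_token>>TOKEN_BITS; }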
GDTokenizerBuffer::Token GDTokenizerBuffer::get_token(int p_offset) const {

    int offset = token+p_offset;

    if (offset<0 || offset>=tokens.size())
        return TK_EOF;

    return GDTokenizerBuffer::Token(tokens[offset]&TOKEN_MASK);
}

StringName GDTokenizerBuffer::get_token_identifier(int p_offset) const {

    int offset = token+p_offset;
    ERR_FAIL_INDEX_V(offset,tokens.size(),StringName());

    uint32_t identifier = tokens[offset]>>TOKEN_BITS;
    ERR_FAIL_INDEX_V(identifier,identifiers.size(),StringName());

    return identifiers[identifier];
}

GDFunctions::Function GDTokenizerBuffer::get_token_built_in_func(int p_offset) const {

    int offset = token+p_offset;
    ERR_FAIL_INDEX_V(offset,tokens.size(),GDFunctions::FUNC_MAX);
    return GDFunctions::Function(tokens[offset]>>TOKEN_BITS);
}

Variant::Type GDTokenizerBuffer::get_token_type(int p_offset) const {

    int offset = token+p_offset;
    ERR_FAIL_INDEX_V(offset,tokens.size(),Variant::NIL);

    return Variant::Type(tokens[offset]>>TOKEN_BITS);
}

int GDTokenizerBuffer::get_token_line(int p_offset) const {

    int offset = token+p_offset;
    int pos = lines.find_nearest(offset);

    if (pos<0)
        return -1;
    if (pos>=lines.size())
        pos=lines.size()-1;

    uint32_t l = lines.getv(pos);
    return l&TOKEN_LINE_MASK;
}

int GDTokenizerBuffer::get_token_column(int p_offset) const {

    int offset = token+p_offset;
    int pos = lines.find_nearest(offset);

    if (pos<0)
        return -1;
    if (pos>=lines.size())
        pos=lines.size()-1;

    uint32_t l = lines.getv(pos);
    return l>>TOKEN_LINE_BITS;
}
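
Line lookups do not store one entry per token: parse_code_string() records the first token index of each source line, and find_nearest() picks the closest entry at or below the requested token. The stored value is treated as a packed line/column pair, with the low TOKEN_LINE_BITS holding the line; note that the writer above only ever stores the line number, so columns decode as 0 in bytecode version 1. A sketch of the packing, with TOKEN_LINE_BITS=24 as an editor's assumption consistent with the mask/shift pair used here:

// Editor's sketch; TOKEN_LINE_BITS=24 is an assumption (defined for real
// in gd_tokenizer.h), chosen to match the masking above.
enum { TOKEN_LINE_BITS=24, TOKEN_LINE_MASK=(1<<TOKEN_LINE_BITS)-1 };

static inline uint32_t pack_line_col(uint32_t p_line,uint32_t p_col) {
    return (p_line&TOKEN_LINE_MASK)|(p_col<<TOKEN_LINE_BITS);
}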
int GDTokenizerBuffer::get_token_line_indent(int p_offset) const {

    int offset = token+p_offset;
    ERR_FAIL_INDEX_V(offset,tokens.size(),0);
    return tokens[offset]>>TOKEN_BITS;
}

const Variant& GDTokenizerBuffer::get_token_constant(int p_offset) const {

    int offset = token+p_offset;
    ERR_FAIL_INDEX_V(offset,tokens.size(),nil);

    uint32_t constant = tokens[offset]>>TOKEN_BITS;
    ERR_FAIL_INDEX_V(constant,constants.size(),nil);
    return constants[constant];
}

String GDTokenizerBuffer::get_token_error(int p_offset) const {

    ERR_FAIL_V(String()); //a bytecode stream never contains error tokens
}

void GDTokenizerBuffer::advance(int p_amount) {

    ERR_FAIL_INDEX(p_amount+token,tokens.size());
    token+=p_amount;
}

GDTokenizerBuffer::GDTokenizerBuffer() {

    token=0;
}
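
Taken together with the export hook elsewhere in this commit's 20 changed files, the intended call site looks roughly like the following: read the .gd source, convert, and write a binary companion file. Only parse_code_string() comes from this file; the FileAccess calls are the engine's usual file API, and this exact helper is an editor's assumption, not code from the diff.

// Editor's sketch of the export-side conversion (assumed plumbing).
#include "os/file_access.h"

Error export_gdscript_as_bytecode(const String &p_src, const String &p_dst) {

    //load the script source as UTF-8 text
    Vector<uint8_t> source = FileAccess::get_file_as_array(p_src);
    String code;
    code.parse_utf8((const char*)source.ptr(),source.size());

    //convert to the GDSC token stream
    Vector<uint8_t> bytecode = GDTokenizerBuffer::parse_code_string(code);
    ERR_FAIL_COND_V(bytecode.empty(),ERR_PARSE_ERROR);

    //write the binary companion file
    FileAccess *f = FileAccess::open(p_dst,FileAccess::WRITE);
    ERR_FAIL_COND_V(!f,ERR_CANT_CREATE);
    f->store_buffer(bytecode.ptr(),bytecode.size());
    f->close();
    memdelete(f);
    return OK;
}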