diff options
-rw-r--r-- | eo_tokenizer.rl | 54 |
1 file changed, 52 insertions(+), 2 deletions(-)
diff --git a/eo_tokenizer.rl b/eo_tokenizer.rl index f340e89..5fbd131 100644 --- a/eo_tokenizer.rl +++ b/eo_tokenizer.rl @@ -172,6 +172,56 @@ _eo_tokenizer_accessor_get(Eo_Tokenizer *toknz, Eo_Accessor_Type type) write data; +###### TOKENIZE ACCESSOR + + action end_accessor_comment { + if (toknz->tmp.accessor->comment != NULL) + ABORT(toknz, "accessor has already a comment"); + toknz->tmp.accessor->comment = _eo_tokenizer_token_get(toknz, fpc-1); + } + + action end_rettype { + if (toknz->tmp.accessor->ret.type != NULL) + ABORT(toknz, "accessor has already a rettype"); + toknz->tmp.accessor->ret.type = _eo_tokenizer_token_get(toknz, fpc); + } + + action end_rettype_comment { + if (toknz->tmp.accessor->ret.comment != NULL) + ABORT(toknz, "accessor rettype has already a comment"); + toknz->tmp.accessor->ret.comment = _eo_tokenizer_token_get(toknz, fpc-2); + } + + action end_legacy_name { + const char *legacy = _eo_tokenizer_token_get(toknz, fpc); + toknz->tmp.accessor->legacies = eina_list_append(toknz->tmp.accessor->legacies, legacy); + } + + action end_accessor { + INF(" }"); + toknz->tmp.prop->accessors = eina_list_append(toknz->tmp.prop->accessors, toknz->tmp.accessor); + toknz->tmp.accessor = NULL; + toknz->current_nesting--; + fgoto tokenize_property; + } + + rettype_comment = ws* eo_comment %end_rettype_comment; + rettype = 'rettype' ws+ '=' ws+ alpha+ >save_fpc (alnum_u | '*' | ws )+ %end_rettype end_statement rettype_comment?; + + legacy_item = ident %end_legacy_name ignore*; + legacy_item_next = list_separator ignore* legacy_item; + legacies = 'legacy' ignore* begin_def ignore* (legacy_item legacy_item_next*)? 
end_def; + + tokenize_accessor := |* + ignore+; #=> show_ignore; + eo_comment => end_accessor_comment; + comment => show_comment; + rettype; + legacies; + end_def => end_accessor; + any => show_error; + *|; + ###### TOKENIZE PARAMS action end_param_comment { @@ -214,14 +264,14 @@ _eo_tokenizer_accessor_get(Eo_Tokenizer *toknz, Eo_Accessor_Type type) INF(" get {"); toknz->tmp.accessor = _eo_tokenizer_accessor_get(toknz, GETTER); toknz->current_nesting++; - #fgoto tokenize_accessor; + fgoto tokenize_accessor; } action begin_property_set { INF(" set {"); toknz->tmp.accessor = _eo_tokenizer_accessor_get(toknz, SETTER); toknz->current_nesting++; - #fgoto tokenize_accessor; + fgoto tokenize_accessor; } action begin_property_params { |