author		Max Horn	2009-03-09 22:25:33 +0000
committer	Max Horn	2009-03-09 22:25:33 +0000
commit		6c932497151f54f31d7b6fbf34a2ee24fd362d63 (patch)
tree		30c9d7b4c10f02f64089afff4524bd292f0c3264 /engines/sci/engine/scriptdebug.cpp
parent		f3e50fba7c7e7715c2e54f8d7de22434f87beae5 (diff)
SCI: result_word_t -> ResultWord; switched to Common::List for storing lists of ResultWords
svn-id: r39278
Diffstat (limited to 'engines/sci/engine/scriptdebug.cpp')
-rw-r--r--	engines/sci/engine/scriptdebug.cpp	| 28
1 file changed, 11 insertions(+), 17 deletions(-)
diff --git a/engines/sci/engine/scriptdebug.cpp b/engines/sci/engine/scriptdebug.cpp
index 7a579179c8..0aefa2b331 100644
--- a/engines/sci/engine/scriptdebug.cpp
+++ b/engines/sci/engine/scriptdebug.cpp
@@ -786,17 +786,16 @@ int c_sim_parse(EngineState *s) {
 
 		if (!flag) {
 			char *openb = strchr(token, '['); // look for opening braces
-			result_word_t *result;
+			ResultWord result;
 
 			if (openb)
 				*openb = 0; // remove them and the rest
 
 			result = vocab_lookup_word(token, strlen(token), s->parser_words, s->parser_words_nr, s->_parserSuffixes);
 
-			if (result) {
+			if (result.w_class != -1) {
 				s->parser_nodes[i].type = 0;
-				s->parser_nodes[i].content.value = result->group;
-				free(result);
+				s->parser_nodes[i].content.value = result.group;
 			} else { // group name was specified directly?
 				int val = strtol(token, NULL, 0);
 				if (val) {
@@ -1008,13 +1007,10 @@ int c_set_parse_nodes(EngineState *s) {
 	return 0;
 }
 
-int vocab_gnf_parse(parse_tree_node_t *nodes, result_word_t *words, int words_nr,
-	parse_tree_branch_t *branch0, parse_rule_list_t *tlist, int verbose);
 // parses with a GNF rule set
 
 int c_parse(EngineState *s) {
-	result_word_t *words;
-	int words_nr;
+	ResultWordList words;
 	char *error;
 	char *string;
 
@@ -1025,23 +1021,21 @@ int c_parse(EngineState *s) {
 
 	string = cmd_params[0].str;
 	sciprintf("Parsing '%s'\n", string);
-	words = vocab_tokenize_string(string, &words_nr, s->parser_words, s->parser_words_nr,
+	words = vocab_tokenize_string(string, s->parser_words, s->parser_words_nr,
 	                              s->_parserSuffixes, &error);
-	if (words) {
-		int i, syntax_fail = 0;
+	if (!words.empty()) {
+		int syntax_fail = 0;
 
-		vocab_synonymize_tokens(words, words_nr, s->synonyms, s->synonyms_nr);
+		vocab_synonymize_tokens(words, s->synonyms, s->synonyms_nr);
 
 		sciprintf("Parsed to the following blocks:\n");
 
-		for (i = 0; i < words_nr; i++)
-			sciprintf(" Type[%04x] Group[%04x]\n", words[i].w_class, words[i].group);
+		for (ResultWordList::const_iterator i = words.begin(); i != words.end(); ++i)
+			sciprintf(" Type[%04x] Group[%04x]\n", i->w_class, i->group);
 
-		if (vocab_gnf_parse(&(s->parser_nodes[0]), words, words_nr, s->parser_branches, s->parser_rules, 1))
+		if (vocab_gnf_parse(&(s->parser_nodes[0]), words, s->parser_branches, s->parser_rules, 1))
 			syntax_fail = 1; // Building a tree failed
 
-		free(words);
-
 		if (syntax_fail)
 			sciprintf("Building a tree failed.\n");
 		else
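
For context, the sketch below is not part of the commit; it only illustrates the ownership and iteration pattern the change adopts. It is a minimal approximation: std::list stands in for ScummVM's Common::List, and the ResultWord struct is stripped down to the two fields the debugger prints, so the names and values here are placeholders rather than the real engine types.

// Minimal, self-contained sketch (illustration only) of the pattern this
// commit moves to: ResultWord values stored in a list instead of a malloc'd
// result_word_t array with a separate element count. std::list stands in
// for ScummVM's Common::List, and ResultWord is reduced to the two fields
// the debugger code prints.
#include <cstdio>
#include <list>

struct ResultWord {
	int w_class; // word class; -1 can act as a "lookup failed" sentinel
	int group;   // word group id
};

typedef std::list<ResultWord> ResultWordList;

static void printBlocks(const ResultWordList &words) {
	// Iteration via const_iterator replaces indexing words[i] against
	// a separate words_nr counter.
	for (ResultWordList::const_iterator i = words.begin(); i != words.end(); ++i)
		printf("   Type[%04x] Group[%04x]\n", i->w_class, i->group);
}

int main() {
	ResultWordList words;
	ResultWord verb = { 0x004, 0x123 };
	ResultWord noun = { 0x080, 0x456 };
	words.push_back(verb);
	words.push_back(noun);

	if (!words.empty())     // replaces the old NULL-pointer check on words
		printBlocks(words); // no free(words): the list releases its storage

	return 0;
}

The visible effect in the diff above is the same: the explicit words_nr counter, the free(words) call, and the NULL check all disappear in favour of empty(), iterators, and automatic cleanup.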