Diffstat (limited to 'engines/sci/engine')
-rw-r--r--  engines/sci/engine/grammar.cpp     | 24
-rw-r--r--  engines/sci/engine/kstring.cpp     | 21
-rw-r--r--  engines/sci/engine/script.cpp      |  2
-rw-r--r--  engines/sci/engine/scriptdebug.cpp | 28
4 files changed, 33 insertions(+), 42 deletions(-)
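The hunks below replace raw result_word_t arrays (passed around with an explicit words_nr count and released with free()) with a ResultWord value type and a ResultWordList container. Neither type is declared inside engines/sci/engine, so the following is only a sketch of what the changed call sites assume: the member names w_class and group and the -1 "no match" convention are taken from the hunks themselves, while the Common::List typedef is an assumption.

	// Sketch only: assumed shape of the types used by the call sites in this diff.
	// The real declarations live in the SCI vocabulary headers, outside this change.
	struct ResultWord {
		int w_class;	// word class bitmask; c_sim_parse() treats -1 as "no match"
		int group;	// word group id, stuffed into parse tree nodes
	};

	typedef Common::List<ResultWord> ResultWordList;	// assumed container typedef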
diff --git a/engines/sci/engine/grammar.cpp b/engines/sci/engine/grammar.cpp
index 0fde92ba46..19ceded6ce 100644
--- a/engines/sci/engine/grammar.cpp
+++ b/engines/sci/engine/grammar.cpp
@@ -195,7 +195,7 @@ static parse_rule_t *_vbuild_rule(parse_tree_branch_t *branch) {
return rule;
}
-static parse_rule_t *_vsatisfy_rule(parse_rule_t *rule, result_word_t *input) {
+static parse_rule_t *_vsatisfy_rule(parse_rule_t *rule, const ResultWord &input) {
int dep;
if (!rule->specials_nr)
@@ -203,15 +203,15 @@ static parse_rule_t *_vsatisfy_rule(parse_rule_t *rule, result_word_t *input) {
dep = rule->data[rule->first_special];
- if (((dep & TOKEN_TERMINAL_CLASS) && ((dep & 0xffff) & input->w_class)) ||
- ((dep & TOKEN_TERMINAL_GROUP) && ((dep & 0xffff) & input->group))) {
+ if (((dep & TOKEN_TERMINAL_CLASS) && ((dep & 0xffff) & input.w_class)) ||
+ ((dep & TOKEN_TERMINAL_GROUP) && ((dep & 0xffff) & input.group))) {
parse_rule_t *retval = (parse_rule_t*)sci_malloc(sizeof(int) * (4 + rule->length));
++_allocd_rules;
retval->id = rule->id;
retval->specials_nr = rule->specials_nr - 1;
retval->length = rule->length;
memcpy(retval->data, rule->data, sizeof(int) * retval->length);
- retval->data[rule->first_special] = TOKEN_STUFFING_WORD | input->group;
+ retval->data[rule->first_special] = TOKEN_STUFFING_WORD | input.group;
retval->first_special = 0;
if (retval->specials_nr) { // find first special, if it exists
@@ -417,9 +417,9 @@ void vocab_gnf_dump(parse_tree_branch_t *branches, int branches_nr) {
vocab_free_rule_list(tlist);
}
-int vocab_build_parse_tree(parse_tree_node_t *nodes, result_word_t *words, int words_nr,
+int vocab_build_parse_tree(parse_tree_node_t *nodes, const ResultWordList &words,
parse_tree_branch_t *branch0, parse_rule_list_t *rules) {
- return vocab_gnf_parse(nodes, words, words_nr, branch0, rules, 0);
+ return vocab_gnf_parse(nodes, words, branch0, rules, 0);
}
static int
@@ -478,7 +478,7 @@ static int _vbpt_write_subexpression(parse_tree_node_t *nodes, int *pos, parse_r
else
writepos = _vbpt_append(nodes, pos, writepos, token & 0xffff);
} else {
- sciprintf("\nError in parser (grammar.c, _vbpt_write_subexpression()): Rule data broken in rule ");
+ sciprintf("\nError in parser (grammar.cpp, _vbpt_write_subexpression()): Rule data broken in rule ");
vocab_print_rule(rule);
sciprintf(", at token position %d\n", *pos);
return rulepos;
@@ -488,14 +488,16 @@ static int _vbpt_write_subexpression(parse_tree_node_t *nodes, int *pos, parse_r
return rulepos;
}
-int vocab_gnf_parse(parse_tree_node_t *nodes, result_word_t *words, int words_nr,
+int vocab_gnf_parse(parse_tree_node_t *nodes, const ResultWordList &words,
parse_tree_branch_t *branch0, parse_rule_list_t *tlist, int verbose) {
// Get the start rules:
parse_rule_list_t *work = _vocab_clone_rule_list_by_id(tlist, branch0->data[1]);
parse_rule_list_t *results = NULL;
- int word;
+ int word = 0;
+ const int words_nr = words.size();
+ ResultWordList::const_iterator word_iter = words.begin();
- for (word = 0; word < words_nr; word++) {
+ for (word_iter = words.begin(); word_iter != words.end(); ++word_iter, ++word) {
parse_rule_list_t *new_work = NULL;
parse_rule_list_t *reduced_rules = NULL;
parse_rule_list_t *seeker, *subseeker;
@@ -506,7 +508,7 @@ int vocab_gnf_parse(parse_tree_node_t *nodes, result_word_t *words, int words_nr
seeker = work;
while (seeker) {
if (seeker->rule->specials_nr <= (words_nr - word))
- reduced_rules = _vocab_add_rule(reduced_rules, _vsatisfy_rule(seeker->rule, words + word));
+ reduced_rules = _vocab_add_rule(reduced_rules, _vsatisfy_rule(seeker->rule, *word_iter));
seeker = seeker->next;
}
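In the reworked vocab_gnf_parse() above, the loop keeps both a list iterator and an integer index: the iterator yields the current ResultWord for _vsatisfy_rule(), while the index is still needed so that (words_nr - word) gives the number of words left for the specials_nr check. A minimal standalone sketch of that dual-tracking pattern, under the same assumed ResultWordList type:

	int word = 0;
	const int words_nr = words.size();
	for (ResultWordList::const_iterator it = words.begin(); it != words.end(); ++it, ++word) {
		const int words_left = words_nr - word;	// counts the current word as well
		// rules whose specials_nr <= words_left are reduced against *it
	}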
diff --git a/engines/sci/engine/kstring.cpp b/engines/sci/engine/kstring.cpp
index fa61813044..03378b7ce8 100644
--- a/engines/sci/engine/kstring.cpp
+++ b/engines/sci/engine/kstring.cpp
@@ -254,9 +254,8 @@ reg_t kSetSynonyms(EngineState *s, int funct_nr, int argc, reg_t *argv) {
reg_t kParse(EngineState *s, int funct_nr, int argc, reg_t *argv) {
reg_t stringpos = argv[0];
char *string = kernel_dereference_char_pointer(s, stringpos, 0);
- int words_nr;
char *error;
- result_word_t *words;
+ ResultWordList words;
reg_t event = argv[1];
s->parser_event = event;
@@ -268,39 +267,35 @@ reg_t kParse(EngineState *s, int funct_nr, int argc, reg_t *argv) {
return s->r_acc;
}
- words = vocab_tokenize_string(string, &words_nr,
+ words = vocab_tokenize_string(string,
s->parser_words, s->parser_words_nr,
s->_parserSuffixes,
&error);
s->parser_valid = 0; /* not valid */
- if (words) {
+ if (!words.empty()) {
int syntax_fail = 0;
- vocab_synonymize_tokens(words, words_nr, s->synonyms, s->synonyms_nr);
+ vocab_synonymize_tokens(words, s->synonyms, s->synonyms_nr);
s->r_acc = make_reg(0, 1);
if (s->debug_mode & (1 << SCIkPARSER_NR)) {
- int i;
-
SCIkdebug(SCIkPARSER, "Parsed to the following blocks:\n", 0);
- for (i = 0; i < words_nr; i++)
- SCIkdebug(SCIkPARSER, " Type[%04x] Group[%04x]\n", words[i].w_class, words[i].group);
+ for (ResultWordList::const_iterator i = words.begin(); i != words.end(); ++i)
+ SCIkdebug(SCIkPARSER, " Type[%04x] Group[%04x]\n", i->w_class, i->group);
}
- if (vocab_build_parse_tree(&(s->parser_nodes[0]), words, words_nr, s->parser_branches,
+ if (vocab_build_parse_tree(&(s->parser_nodes[0]), words, s->parser_branches,
s->parser_rules))
syntax_fail = 1; /* Building a tree failed */
#ifdef SCI_SIMPLE_SAID_CODE
- vocab_build_simple_parse_tree(&(s->parser_nodes[0]), words, words_nr);
+ vocab_build_simple_parse_tree(&(s->parser_nodes[0]), words);
#endif /* SCI_SIMPLE_SAID_CODE */
- free(words);
-
if (syntax_fail) {
s->r_acc = make_reg(0, 1);
diff --git a/engines/sci/engine/script.cpp b/engines/sci/engine/script.cpp
index 78eadb9830..aabb89122d 100644
--- a/engines/sci/engine/script.cpp
+++ b/engines/sci/engine/script.cpp
@@ -113,7 +113,7 @@ int script_find_selector(EngineState *s, const char *selectorname) {
return pos;
}
- warning("Could not map '%s' to any selector!", selectorname);
+ warning("Could not map '%s' to any selector", selectorname);
return -1;
}
diff --git a/engines/sci/engine/scriptdebug.cpp b/engines/sci/engine/scriptdebug.cpp
index 7a579179c8..0aefa2b331 100644
--- a/engines/sci/engine/scriptdebug.cpp
+++ b/engines/sci/engine/scriptdebug.cpp
@@ -786,17 +786,16 @@ int c_sim_parse(EngineState *s) {
if (!flag) {
char *openb = strchr(token, '['); // look for opening braces
- result_word_t *result;
+ ResultWord result;
if (openb)
*openb = 0; // remove them and the rest
result = vocab_lookup_word(token, strlen(token), s->parser_words, s->parser_words_nr, s->_parserSuffixes);
- if (result) {
+ if (result.w_class != -1) {
s->parser_nodes[i].type = 0;
- s->parser_nodes[i].content.value = result->group;
- free(result);
+ s->parser_nodes[i].content.value = result.group;
} else { // group name was specified directly?
int val = strtol(token, NULL, 0);
if (val) {
@@ -1008,13 +1007,10 @@ int c_set_parse_nodes(EngineState *s) {
return 0;
}
-int vocab_gnf_parse(parse_tree_node_t *nodes, result_word_t *words, int words_nr,
- parse_tree_branch_t *branch0, parse_rule_list_t *tlist, int verbose);
// parses with a GNF rule set
int c_parse(EngineState *s) {
- result_word_t *words;
- int words_nr;
+ ResultWordList words;
char *error;
char *string;
@@ -1025,23 +1021,21 @@ int c_parse(EngineState *s) {
string = cmd_params[0].str;
sciprintf("Parsing '%s'\n", string);
- words = vocab_tokenize_string(string, &words_nr, s->parser_words, s->parser_words_nr,
+ words = vocab_tokenize_string(string, s->parser_words, s->parser_words_nr,
s->_parserSuffixes, &error);
- if (words) {
- int i, syntax_fail = 0;
+ if (!words.empty()) {
+ int syntax_fail = 0;
- vocab_synonymize_tokens(words, words_nr, s->synonyms, s->synonyms_nr);
+ vocab_synonymize_tokens(words, s->synonyms, s->synonyms_nr);
sciprintf("Parsed to the following blocks:\n");
- for (i = 0; i < words_nr; i++)
- sciprintf(" Type[%04x] Group[%04x]\n", words[i].w_class, words[i].group);
+ for (ResultWordList::const_iterator i = words.begin(); i != words.end(); ++i)
+ sciprintf(" Type[%04x] Group[%04x]\n", i->w_class, i->group);
- if (vocab_gnf_parse(&(s->parser_nodes[0]), words, words_nr, s->parser_branches, s->parser_rules, 1))
+ if (vocab_gnf_parse(&(s->parser_nodes[0]), words, s->parser_branches, s->parser_rules, 1))
syntax_fail = 1; // Building a tree failed
- free(words);
-
if (syntax_fail)
sciprintf("Building a tree failed.\n");
else
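Taken together, the new signatures let callers drop the separate words_nr argument and the trailing free(words). A condensed sketch of the c_parse() call sequence after this change, with parameter lists as implied by the hunks above and error handling omitted:

	char *error = NULL;
	ResultWordList words = vocab_tokenize_string(string,
			s->parser_words, s->parser_words_nr, s->_parserSuffixes, &error);

	if (!words.empty()) {
		vocab_synonymize_tokens(words, s->synonyms, s->synonyms_nr);

		for (ResultWordList::const_iterator i = words.begin(); i != words.end(); ++i)
			sciprintf(" Type[%04x] Group[%04x]\n", i->w_class, i->group);

		if (vocab_gnf_parse(&(s->parser_nodes[0]), words, s->parser_branches, s->parser_rules, 1))
			sciprintf("Building a tree failed.\n");
		// no free(words): the list releases its storage automatically
	}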