@incollection{cogprints4027,
  author    = {Peter D. Turney and Michael L. Littman and Jeffrey Bigham and Victor Shnayder},
  title     = {Combining Independent Modules in Lexical Multiple-Choice Problems},
  editor    = {Nicolas Nicolov and Kalina Bontcheva and Galia Angelova and Ruslan Mitkov},
  booktitle = {Recent Advances in Natural Language Processing III: Selected Papers from RANLP 2003},
  publisher = {John Benjamins},
  year      = {2004},
  pages     = {101--110},
  url       = {http://cogprints.org/4027/},
  abstract  = {Existing statistical approaches to natural language problems are very coarse approximations to the true complexity of language processing. As such, no single technique will be best for all problem instances. Many researchers are examining ensemble methods that combine the output of multiple modules to create more accurate solutions. This paper examines three merging rules for combining probability distributions: the familiar mixture rule, the logarithmic rule, and a novel product rule. These rules were applied with state-of-the-art results to two problems used to assess human mastery of lexical semantics -- synonym questions and analogy questions. All three merging rules result in ensembles that are more accurate than any of their component modules. The differences among the three rules are not statistically significant, but it is suggestive that the popular mixture rule is not the best rule for either of the two problems.}
}