@conference{8,
  title     = {Conference v2.0: An uncertain version of the {OAEI} Conference benchmark},
  booktitle = {13th International Semantic Web Conference (ISWC 2014)},
  series    = {Lecture Notes in Computer Science},
  volume    = {8797},
  year      = {2014},
  month     = {October},
  pages     = {148-163},
  publisher = {Springer},
  address   = {Riva del Garda, Italy},
  abstract  = {The Ontology Alignment Evaluation Initiative is a set of benchmarks for evaluating the performance of ontology alignment systems. In this paper we re-examine the Conference track of the OAEI, with a focus on the degree of agreement between the reference alignments within this track and the opinion of experts. We propose a new version of this benchmark that more closely corresponds to expert opinion and confidence on the matches. The performance of top alignment systems is compared on both versions of the benchmark. Additionally, a general method for crowdsourcing the development of more benchmarks of this type using Amazon's Mechanical Turk is introduced and shown to be scalable, cost-effective, and to agree well with expert opinion.},
  keywords  = {benchmark, OAEI, Ontology Alignment},
  author    = {Michelle Cheatham and Pascal Hitzler},
  editor    = {Peter Mika and Tania Tudorache and Abraham Bernstein and Chris Welty and Craig A. Knoblock and Denny Vrandecic and Paul T. Groth and Natasha F. Noy and Krzysztof Janowicz and Carole A. Goble}
}