@article{16060,
  title    = {Visual-Analytics Evaluation},
  journal  = {IEEE Computer Graphics and Applications},
  volume   = {29},
  year     = {2009},
  month    = {May/June},
  pages    = {16--17},
  abstract = {Visual analytics (VA) is the science of analytical reasoning facilitated by interactive visual interfaces. Assessing VA technology's effectiveness is challenging because VA tools combine several disparate components, both low and high level, integrated in complex interactive systems used by analysts, emergency responders, and others. These components include analytical reasoning, visual representations, computer-human interaction techniques, data representations and transformations, collaboration tools, and especially tools for communicating the results of their use. VA tool users' activities can be exploratory and can take place over days, weeks, or months. Users might not follow a predefined or even linear workflow. They might work alone or in groups. To understand these complex behaviors, an evaluation can target the component level, the system level, or the work-environment level, and requires realistic data and tasks. Traditional evaluation metrics such as task completion time, number of errors, or recall and precision are insufficient to quantify the utility of VA tools, and new research is needed to improve our VA evaluation methodology.},
  keywords = {visual analytics, evaluation, analytic reasoning, cognition, data visualisation, interactive systems, user interfaces, synthetic-data-set generation, interactive visual tools},
  issn     = {0272-1716},
  doi      = {10.1109/MCG.2009.56},
  author   = {Plaisant, Catherine and Grinstein, G. and Scholtz, J.}
}