@unpublished{6855,
  keywords = {Learning, Citizen Science, trace data},
  author   = {Jackson, Corey and Oesterlund, Carsten and Crowston, Kevin and Harandi, Mahboobeh and {The Team}},
  title    = {Supporting Crowd Workers: Assembling Resources in Online Citizen Science Projects},
  abstract = {When individuals join online production communities, they may engage with project resources---e.g., FAQs, tutorials, and comment forums---to learn the practices and norms for contribution. These resources can be large and unorganized, making it difficult for users to know which are relevant. Furthermore, some resources might be more suitable for newcomers while others might work only for experienced ones. To identify which resources are most relevant for learning, we analyzed the interaction of users with an online citizen science project. Volunteers in this project are occasionally given items with known answers to classify, which allows an estimation of their accuracy on the task. We used this data to determine if resources are used differently by accurate and less accurate users. Methodologically, we applied a Random Forest model to system trace data in order to identify which resources are most predictive of volunteer accuracy. We augmented this analysis with findings from interviews with advanced users. The resources most predictive of accuracy during early participation seem to center on the social spaces where users gain access to organizational and social practice. In subsequent sessions, predictive activities center on work-related resources that support independent work. This research suggests specific resources might be highlighted to support user development during distinctive stages of a user's history.},
  year     = {2017},
  note     = {Unpublished manuscript, Syracuse University School of Information Studies},
  language = {eng},
}