@misc{FORCE11FAIRDATAPRINCIPLES,
  title     = {THE {FAIR} DATA PRINCIPLES},
  author    = {{FORCE11}},
  url       = {https://www.force11.org/group/fairgroup/fairprinciples},
  urldate   = {2017-05-26},
  timestamp = {2017-05-26T18:19:14Z}
}



@INPROCEEDINGS{Coffman2017-si,
  title           = "Replications: A Proposal to Increase their Visibility and
                     Promote them",
  author          = "Coffman, Lucas and Niederle, Muriel and Wilson, Alistair J",
  booktitle       = "American Economic Association Meetings",
  year            =  2017
}

@article{Clemens2017-zj,
  title     = {The meaning of failed replications: A review and proposal},
  author    = {Clemens, M A},
  abstract  = {Abstract The welcome rise of replication tests in economics has
               not been accompanied by a consensus standard for determining
               what constitutes a replication. A discrepant replication, in
               current usage of the term, can signal anything from an
               unremarkable disagreement over},
  journal   = {Journal of Economic Surveys},
  publisher = {Wiley Online Library},
  volume    = {31},
  number    = {1},
  year      = {2017},
  keywords  = {Ethics; Open data; Replication; Robustness; Transparency}
}

@MISC{American_Economic_Association2008-az,
  title        = "Data Availability Policy",
  author       = "{American Economic Association}",
  year         =  2008,
  howpublished = "\url{https://www.aeaweb.org/journals/policies/data-availability-policy}",
  note         = "Accessed: 2017-04-06"
}

@MISC{Hoeffler2017-aa,
  title      = "Replication and Economics Journal Policies",
  author     = "Hoeffler, Jan H",
  abstract   = "We investigate the impact of the introduction of replication
                policies for leading journals in economics on citations. As has
                previously been shown for other social sciences, there is an
                indication that the introduction of a replication policy
                increases the number of citations for a journal, presumably
                because readers use the data for their own investigation,
                possibly also because of a reliability effect. We see our
                results as an incentive for journals to introduce and enforce
                replication policies. Lamentably, only a minority of journals
                so far enforce their policies in a way that ensures
                replicability of most of the empirical work. With several
                examples we show how replication becomes difficult if policies
                are not enforced, and we suggest a pool of replicability
                editors as a solution: Since it would be too much to expect
                from journals to have experts for every single topic and
                software package, a joint effort of journals for such a pool of
                experts could help to ensure each empirical study is published
                with data, code, and instructions how to use them together such
                that all published results can easily be replicated. Reviewers
                can join the effort for replicability by following the
                principles of the Agenda for Open Research and refuse to
                comprehensively review empirical work that does not guarantee
                fully replicable empirical results. Further study is needed to
                investigate the citation impact on single articles, and we
                suggest a design for such research.",
  month      =  jan,
  year       =  2017,
  note       = "Presented at the ASSA Annual Meeting"
}

% Non-ASCII ellipsis in the abstract converted to \ldots{} for
% classic (8-bit) BibTeX compatibility.
@ARTICLE{Lagoze2017-qv,
  title    = "Making confidential data part of reproducible research",
  author   = "Lagoze, C and Vilhuber, L",
  abstract = "The rise of data-centric research practices has uncovered
              shortcomings in the traditional scholarly communication system.
              The foundation of that system, the peer-reviewed
              publication,``[the] selective distribution of ink on paper,
              or\ldots{} electronic facsimiles of the same''(Bourne, et al.,
              2011), does not adequately support what has become an essential
              element of scholarship; the reproducibility of research results.
              That is, duplicating a ...",
  journal  = "Chance",
  year     =  2017
}

@article{Camerer2016-kl,
  title    = {Evaluating replicability of laboratory experiments in economics},
  author   = {Camerer, Colin F and Dreber, Anna and Forsell, Eskil and Ho,
              Teck-Hua and Huber, J{\"u}rgen and Johannesson, Magnus and
              Kirchler, Michael and Almenberg, Johan and Altmejd, Adam and
              Chan, Taizan and Heikensten, Emma and Holzmeister, Felix and
              Imai, Taisuke and Isaksson, Siri and Nave, Gideon and Pfeiffer,
              Thomas and Razen, Michael and Wu, Hang},
  abstract = {The replicability of some scientific findings has recently been
              called into question. To contribute data about replicability in
              economics, we replicated 18 studies published in the American
              Economic Review and the Quarterly Journal of Economics between
              2011 and 2014. All of these replications followed predefined
              analysis plans that were made publicly available beforehand, and
              they all have a statistical power of at least 90\% to detect the
              original effect size at the 5\% significance level. We found a
              significant effect in the same direction as in the original study
              for 11 replications (61\%); on average, the replicated effect
              size is 66\% of the original. The replicability rate varies
              between 67\% and 78\% for four additional replicability
              indicators, including a prediction market measure of peer
              beliefs.},
  journal  = {Science},
  volume   = {351},
  number   = {6280},
  pages    = {1433--1436},
  month    = mar,
  year     = {2016},
  language = {en}
}

@MISC{Foote2017-uc,
  title  = "{MobZ} - Replication archive for a re-examination of Local Labor
            Market Definitions",
  author = "Foote, Andrew and Kutzbach, Mark and Vilhuber, Lars",
  month  =  apr,
  year   =  2017
}

@techreport{Joskow2015-hd,
  title       = {President's Letter, Alfred P. Sloan Foundation Annual Report
                 2014},
  author      = {Joskow, Paul L},
  institution = {Alfred P. Sloan Foundation},
  month       = sep,
  year        = {2015}
}

@MISC{Duvendack2017-js,
  title      = "What is Meant by `Replication' and Why Does It Encounter
                Resistance in Economics?",
  author     = "Duvendack, Maren and Palmer-Jones, Richard and Robert Reed, W",
  abstract   = "This paper discusses recent trends in the use of replications
                in economics. We identify a number of sources of progress,
                including the results of recent replication studies that have
                attempted to identify replication rates within the discipline.
                These studies generally find that replication rates are
                relatively low, though they may be higher for laboratory
                experiments in economics. We also identify two web-based
                resources for replications, the Replication in Economics wiki
                and The Replication Network. We then consider obstacles to
                undertaking replication studies in economics. Two obstacles are
                the lack of publishing outlets and difficulties in obtaining
                data and code for published studies. We identify journals that
                publish replication studies and that ``regularly'' include data
                and code as supplementary files for their published research.
                Finally, we highlight replication initiatives in psychology and
                political science, behind which economics appears to lag.
                Whether this is because the problems that beset those
                disciplines are less severe in economics, or because economics
                is more resistant to replications, is arguable.",
  month      =  jan,
  year       =  2017,
  keywords   = "Replication; data sharing; publication bias",
  note       = "Presented at the ASSA Annual Meeting"
}

@MISC{Nature_Scientific_Data2016-hl,
  title        = "Nature Scientific Data recommended repositories",
  author       = "{Nature Scientific Data}",
  abstract     = "Spreadsheet listing data repositories that are recommended by
                  Scientific Data (Springer Nature) as being suitable for hosting
                  data associated with peer-reviewed articles. Please see the
                  repository list on Scientific Data's website for the most up to
                  date list.",
  howpublished = "figshare",
  month        =  dec,
  year         =  2016
}

@MISC{Hamermesh2017-kq,
  title      = "What is Replication? The Possibly Exemplary Example of Labor
                Economics",
  author     = "Hamermesh, Daniel",
  month      =  jan,
  year       =  2017,
  note       = "Presented at the ASSA Annual Meeting"
}

@article{Moffitt2016-wl,
  title    = {Report: American Economic Association Committee on Statistics
              ({AEAStat})},
  author   = {Moffitt, Robert},
  journal  = {American Economic Review},
  volume   = {106},
  number   = {5},
  pages    = {788--793},
  month    = may,
  year     = {2016}
}

@ARTICLE{Gentzkow2014-zd,
  title    = "Competition and Ideological Diversity: Historical Evidence from
              {US} Newspapers",
  author   = "Gentzkow, Matthew and Shapiro, Jesse M and Sinkinson, Michael",
  journal  = "American Economic Review",
  volume   =  104,
  number   =  10,
  pages    = "3073--3114",
  month    =  oct,
  year     =  2014
}

@MISC{Wilson2016-bt,
  title         = "Good Enough Practices in Scientific Computing",
  author        = "Wilson, Greg and Bryan, Jennifer and Cranston, Karen and
                   Kitzes, Justin and Nederbragt, Lex and Teal, Tracy K",
  abstract      = "We present a set of computing tools and techniques that
                   every researcher can and should adopt. These recommendations
                   synthesize inspiration from our own work, from the
                   experiences of the thousands of people who have taken part
                   in Software Carpentry and Data Carpentry workshops over the
                   past six years, and from a variety of other guides. Unlike
                   some other guides, our recommendations are aimed
                   specifically at people who are new to research computing.",
  month         =  aug,
  year          =  2016,
  archivePrefix = "arXiv",
  primaryClass  = "cs.SE",
  eprint        = "1609.00037"
}

@MISC{Elsevier2014-gl,
  title        = "The case for Data in Brief",
  author       = "{Elsevier}",
  abstract     = "Data in Brief articles describe research data that you've
                  made publicly available either through a repository or
                  directly in your Data in...",
  month        =  jun,
  year         =  2014,
  howpublished = "\url{https://www.journals.elsevier.com/data-in-brief/submit-your-data-description-paper/case-for-the-data-in-brief}",
  note         = "Accessed: 2017-04-06"
}

@misc{Open_Science_Framework2017-zc,
  title        = {Badges to Acknowledge Open Practices Wiki},
  author       = {{Open Science Framework}},
  abstract     = {The aim is to specify a standard by which we can say that a
                  scientific study has been conducted in accordance with
                  open-science principles and provide visual icons to allow
                  advertising of such good behaviours. | Hosted on the Open
                  Science Framework},
  year         = {2017},
  howpublished = {\url{https://osf.io/tvyxz/wiki/home/}},
  note         = {Accessed: 2017-10-18}
}

@misc{Simms2017-dx,
  title  = {{NSF} {EAGER} {DMPRoadmap}: Making Data Management Plans Actionable},
  author = {Simms, Stephanie},
  month  = sep,
  year   = {2017}
}

@MISC{Australian_National_Data_Service_ANDS2017-re,
  title        = "Data citation",
  author       = "{Australian National Data Service (ANDS)}",
  month        =  jan,
  year         =  2017,
  howpublished = "\url{http://www.ands.org.au/__data/assets/pdf_file/0005/724334/Data-citation.pdf}",
  note         = "Accessed: 2017-04-18"
}

@article{Stodden2016-uc,
  title    = {Enhancing reproducibility for computational methods},
  author   = {Stodden, Victoria and McNutt, Marcia and Bailey, David H and
              Deelman, Ewa and Gil, Yolanda and Hanson, Brooks and Heroux,
              Michael A and Ioannidis, John P A and Taufer, Michela},
  journal  = {Science},
  volume   = {354},
  number   = {6317},
  pages    = {1240--1241},
  month    = dec,
  year     = {2016},
  language = {en}
}

@article{Altman2013-fl,
  title   = {The Evolution of Data Citation: From Principles to Implementation},
  author  = {Altman, Micah and Crosas, Merc{\`e}},
  journal = {IASSIST Quarterly},
  pages   = {62--70},
  year    = {2013}
}

@TECHREPORT{Bollen2015-vb,
  title       = "Social, Behavioral, and Economic Sciences Perspectives on
                 Robust and Reliable Science",
  author      = "Bollen, Kenneth and Cacioppo, John T and Kaplan, Robert M and
                 Krosnick, Jon A and Olds, James L",
  institution = "Subcommittee on Replicability in Science, National Science
                 Foundation Directorate for Social, Behavioral, and Economic
                 Sciences.",
  month       =  may,
  year        =  2015
}

@TECHREPORT{Gentzkow2014-va,
  title       = "Code and data for the social sciences: A practitioner's guide",
  author      = "Gentzkow, Matthew and Shapiro, Jesse M",
  abstract    = "What does it mean to do empirical social science? Asking good
                 questions. Digging up novel data. Designing statistical
                 analysis. Writing up results. For many of us, most of the
                 time, what it means is writing and debugging code. We write
                 code to clean data, to transform data, to",
  series      = "mimeo",
  url         = "https://web.stanford.edu/~gentzkow/research/CodeAndData.pdf",
  institution = "University of Chicago",
  year        =  2014
}

@MISC{Fuentes2016-wz,
  title        = "Reproducible Research in {JASA}",
  author       = "Fuentes, Montse",
  abstract     = "JASA is leading the effort to establish publication standards
                  that improve research quality and reproducibility.",
  month        =  jul,
  year         =  2016,
  howpublished = "\url{http://magazine.amstat.org/blog/2016/07/01/jasa-reproducible16/}",
  note         = "Accessed: 2017-04-04"
}

@MISC{Gentzkow2016-gn,
  title        = "Circulation of {US} Daily Newspapers, 1924, Audit Bureau of
                  Circulations. (Version V4)",
  author       = "Gentzkow, Matthew and Shapiro, Jesse M and Sinkinson, Michael",
  howpublished = "ICPSR - Interuniversity Consortium for Political and Social
                  Research.",
  year         =  2016
}
