@book{agrestiFoundationsLinearGeneralized2015,
title = {Foundations of Linear and Generalized Linear Models},
author = {Agresti, Alan},
date = {2015-01-15},
eprint = {dgIzBgAAQBAJ},
eprinttype = {googlebooks},
publisher = {{John Wiley \& Sons}},
url = {https://www.wiley.com/en-us/Foundations+of+Linear+and+Generalized+Linear+Models-p-9781118730034},
abstract = {A valuable overview of the most important ideas and results in statistical modeling. Written by a highly-experienced author, Foundations of Linear and Generalized Linear Models is a clear and comprehensive guide to the key concepts and results of linear statistical models. The book presents a broad, in-depth overview of the most commonly used statistical models by discussing the theory underlying the models, R software applications, and examples with crafted models to elucidate key ideas and promote practical model building. The book begins by illustrating the fundamentals of linear models, such as how the model-fitting projects the data onto a model vector subspace and how orthogonal decompositions of the data yield information about the effects of explanatory variables. Subsequently, the book covers the most popular generalized linear models, which include binomial and multinomial logistic regression for categorical data, and Poisson and negative binomial loglinear models for count data. Focusing on the theoretical underpinnings of these models, Foundations of Linear and Generalized Linear Models also features: An introduction to quasi-likelihood methods that require weaker distributional assumptions, such as generalized estimating equation methods An overview of linear mixed models and generalized linear mixed models with random effects for clustered correlated data, Bayesian modeling, and extensions to handle problematic cases such as high dimensional problems Numerous examples that use R software for all text data analyses More than 400 exercises for readers to practice and extend the theory, methods, and data analysis A supplementary website with datasets for the examples and exercises An invaluable textbook for upper-undergraduate and graduate-level students in statistics and biostatistics courses, Foundations of Linear and Generalized Linear Models is also an excellent reference for practicing statisticians and biostatisticians, as well as anyone who is interested in learning about the most important statistical models for analyzing data.},
isbn = {978-1-118-73005-8},
langid = {english},
pagetotal = {469},
keywords = {Mathematics / Probability \& Statistics / General,Mathematics / Probability \& Statistics / Stochastic Processes}
}
@incollection{akaike1998information,
title = {Information Theory and an Extension of the Maximum Likelihood Principle},
booktitle = {Selected Papers of {{Hirotugu Akaike}}},
author = {Akaike, Hirotugu},
date = {1998},
pages = {199--213},
publisher = {{Springer}},
url = {https://www.springer.com/gp/book/9780387983554}
}
@article{allen2020outside,
title = {Outside the Wire: {{US}} Military Deployments and Public Opinion in Host States},
author = {Allen, Michael A and Flynn, Michael E and Machain, Carla Martinez and Stravers, Andrew},
date = {2020},
journaltitle = {American Political Science Review},
volume = {114},
number = {2},
pages = {326--341},
publisher = {{Cambridge University Press}},
doi = {10.1017/S0003055419000868},
url = {https://scholarworks.boisestate.edu/cgi/viewcontent.cgi?article=1197&context=polsci_facpubs}
}
@article{amlie2020risk,
title = {Risk of Intracranial Hemorrhage Following Intravenous {{tPA}} ({{Tissue-Type Plasminogen Activator}}) for Acute Stroke Is Low in Children},
author = {Amlie-Lefond, Catherine and Shaw, Dennis WW and Cooper, Andrew and Wainwright, Mark S and Kirton, Adam and Felling, Ryan J and Abraham, Michael G and Mackay, Mark T and Dowling, Michael M and Torres, Marcela and others},
date = {2020},
journaltitle = {Stroke},
volume = {51},
number = {2},
pages = {542--548},
publisher = {{Am Heart Assoc}},
doi = {10.1161/STROKEAHA.119.027225},
url = {https://www.ahajournals.org/doi/epub/10.1161/STROKEAHA.119.027225}
}
@article{amrheinScientistsRiseStatistical2019,
title = {Scientists Rise up against Statistical Significance},
author = {Amrhein, Valentin and Greenland, Sander and McShane, Blake},
date = {2019-03},
journaltitle = {Nature},
volume = {567},
number = {7748},
pages = {305--307},
publisher = {{Nature Publishing Group}},
doi = {10.1038/d41586-019-00857-9},
url = {https://www.nature.com/articles/d41586-019-00857-9},
urldate = {2020-05-21},
abstract = {Valentin Amrhein, Sander Greenland, Blake McShane and more than 800 signatories call for an end to hyped claims and the dismissal of possibly crucial effects.},
issue = {7748},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/5JQ3EHZV/Amrhein et al. - 2019 - Scientists rise up against statistical significanc.pdf;/Users/solomonkurz/Zotero/storage/IWDNAU3J/d41586-019-00857-9.html}
}
@article{angristDoesCompulsorySchool1991,
title = {Does Compulsory School Attendance Affect Schooling and Earnings?},
author = {Angrist, Joshua D. and Krueger, Alan B.},
date = {1991-11-01},
journaltitle = {The Quarterly Journal of Economics},
shortjournal = {Q J Econ},
volume = {106},
number = {4},
pages = {979--1014},
publisher = {{Oxford Academic}},
issn = {0033-5533},
doi = {10.2307/2937954},
url = {https://academic.oup.com/qje/article/106/4/979/1873496},
urldate = {2020-08-01},
abstract = {Abstract. We establish that season of birth is related to educational attainment because of school start age policy and compulsory school attendance laws. Indi},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/WG5T6RJ7/Angrist and Keueger - 1991 - Does Compulsory School Attendance Affect Schooling.pdf;/Users/solomonkurz/Zotero/storage/WGBTDCNB/1873496.html}
}
@article{aonoClarifyingSpringtimeTemperature2010,
title = {Clarifying Springtime Temperature Reconstructions of the Medieval Period by Gap-Filling the Cherry Blossom Phenological Data Series at {{Kyoto}}, {{Japan}}},
author = {Aono, Yasuyuki and Saito, Shizuka},
date = {2010-03-01},
journaltitle = {International Journal of Biometeorology},
shortjournal = {Int J Biometeorol},
volume = {54},
number = {2},
pages = {211--219},
issn = {1432-1254},
doi = {10.1007/s00484-009-0272-x},
url = {https://doi.org/10.1007/s00484-009-0272-x},
urldate = {2020-10-13},
abstract = {We investigated documents and diaries from the ninth to the fourteenth centuries to supplement the phenological data series of the flowering of Japanese cherry (Prunus jamasakura) in Kyoto, Japan, to improve and fill gaps in temperature estimates based on previously reported phenological data. We then reconstructed a nearly continuous series of March mean temperatures based on 224~years of cherry flowering data, including 51~years of previously unused data, to clarify springtime climate changes. We also attempted to estimate cherry full-flowering dates from phenological records of other deciduous species, adding further data for 6~years in the tenth and eleventh centuries by using the flowering phenology of Japanese wisteria (Wisteria floribunda). The reconstructed tenth century March mean temperatures were around 7\textdegree C, indicating warmer conditions than at present. Temperatures then fell until the 1180s, recovered gradually until the 1310s, and then declined again in the mid-fourteenth century.},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/D8QEWBZS/Aono and Saito - 2010 - Clarifying springtime temperature reconstructions .pdf}
}
@article{aonoLongTermChange2012,
title = {Long-term change in climate and floral phenophase},
author = {Aono, Yasuyuki},
date = {2012},
journaltitle = {Chikyu Kankyo (Global Environment)},
number = {17},
url = {http://atmenv.envi.osakafu-u.ac.jp/aono/kyophenotemp4/},
langid = {japanese},
pages = {21--29}
}
@article{aonoPhenologicalDataSeries2008,
title = {Phenological Data Series of Cherry Tree Flowering in {{Kyoto}}, {{Japan}}, and Its Application to Reconstruction of Springtime Temperatures since the 9th Century},
author = {Aono, Yasuyuki and Kazui, Keiko},
date = {2008},
journaltitle = {International Journal of Climatology},
volume = {28},
number = {7},
pages = {905--914},
issn = {1097-0088},
doi = {10.1002/joc.1594},
url = {https://rmets.onlinelibrary.wiley.com/doi/abs/10.1002/joc.1594},
urldate = {2020-10-13},
abstract = {Changes in springtime temperature in Kyoto, Japan, since the 9th century were reconstructed, using the phenological data series for cherry tree (Prunus jamasakura), deduced from old diaries and chronicles. Phenological data for 732 years was made available by combining data from previous studies. The full-flowering date of cherry trees fluctuates in accordance with temperature conditions during February and March. Full-flowering dates were closely related to the March mean temperature by means of a temperature accumulation index, in which plant growth is considered to be an exponential function of temperature. Calibration enabled accurate estimation of temperatures in the instrumental period, after 1880; the root mean square error (RMSE) of temperature estimates was determined to be within 0.1 \textdegree C, after smoothing by local linear regression over time spans of 31 years. The results suggested the existence of four cold periods, 1330\textendash 1350, 1520\textendash 1550, 1670\textendash 1700, and 1825\textendash 1830, during which periods the estimated March mean temperature was 4\textendash 5 \textdegree C, about 3\textendash 4 \textdegree C lower than the present normal temperature. These cold periods coincided with the less extreme periods, known as the Wolf, Spoerer, Maunder, and Dalton minima, in the long-term solar variation cycle, which has a periodicity of 150\textendash 250 years. The sunspot cycle length, a short-term solar variation cycle, was also compared with the temperature estimates, with the result that a time lag of about 15 years was detected in the climatic temperature response to short-term solar variation. Copyright \textcopyright{} 2007 Royal Meteorological Society},
langid = {english},
keywords = {cherry blossom,climatic reconstruction,Kyoto,old documents,phenology,solar variation,springtime temperature},
annotation = {\_eprint: https://rmets.onlinelibrary.wiley.com/doi/pdf/10.1002/joc.1594},
file = {/Users/solomonkurz/Zotero/storage/8UCZE8G3/Aono and Kazui - 2008 - Phenological data series of cherry tree flowering .pdf;/Users/solomonkurz/Zotero/storage/SBJKPWD2/joc.html}
}
@article{ape2019,
title = {{{ape}} 5.0: An Environment for Modern Phylogenetics and Evolutionary Analyses in {{R}}},
author = {Paradis, E. and Schliep, K.},
date = {2019},
journaltitle = {Bioinformatics},
volume = {35},
pages = {526--528},
doi = {10.1093/bioinformatics/bty633},
url = {https://academic.oup.com/bioinformatics/article/35/3/526/5055127}
}
@article{atkinsTutorialCountRegression2013,
title = {A Tutorial on Count Regression and Zero-Altered Count Models for Longitudinal Substance Use Data},
author = {Atkins, David C. and Baldwin, Scott A. and Zheng, Cheng and Gallop, Robert J. and Neighbors, Clayton},
date = {2013-03},
journaltitle = {Psychology of Addictive Behaviors: Journal of the Society of Psychologists in Addictive Behaviors},
shortjournal = {Psychol Addict Behav},
volume = {27},
number = {1},
eprint = {22905895},
eprinttype = {pmid},
pages = {166--177},
issn = {0893-164X},
doi = {10.1037/a0029508},
url = {https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3513584/},
urldate = {2020-06-30},
abstract = {Critical research questions in the study of addictive behaviors concern how these behaviors change over time - either as the result of intervention or in naturalistic settings. The combination of count outcomes that are often strongly skewed with many zeroes (e.g., days using, number of total drinks, number of drinking consequences) with repeated assessments (e.g., longitudinal follow-up after intervention or daily diary data) present challenges for data analyses. The current article provides a tutorial on methods for analyzing longitudinal substance use data, focusing on Poisson, zero-inflated, and hurdle mixed models, which are types of hierarchical or multilevel models. Two example datasets are used throughout, focusing on drinking-related consequences following an intervention and daily drinking over the past 30 days, respectively. Both datasets as well as R, SAS, Mplus, Stata, and SPSS code showing how to fit the models are available on a .},
pmcid = {PMC3513584},
file = {/Users/solomonkurz/Zotero/storage/3EXIKU7I/Atkins et al. - 2013 - A tutorial on count regression and zero-altered co.pdf}
}
@article{baraldiIntroductionModernMissing2010,
title = {An Introduction to Modern Missing Data Analyses},
author = {Baraldi, Amanda N. and Enders, Craig K.},
date = {2010-02-01},
journaltitle = {Journal of School Psychology},
shortjournal = {Journal of School Psychology},
volume = {48},
number = {1},
pages = {5--37},
issn = {0022-4405},
doi = {10.1016/j.jsp.2009.10.001},
url = {http://www.sciencedirect.com/science/article/pii/S0022440509000661},
urldate = {2020-09-27},
abstract = {A great deal of recent methodological research has focused on two modern missing data analysis methods: maximum likelihood and multiple imputation. These approaches are advantageous to traditional techniques (e.g. deletion and mean imputation techniques) because they require less stringent assumptions and mitigate the pitfalls of traditional techniques. This article explains the theoretical underpinnings of missing data analyses, gives an overview of traditional missing data techniques, and provides accessible descriptions of maximum likelihood and multiple imputation. In particular, this article focuses on maximum likelihood estimation and presents two analysis examples from the Longitudinal Study of American Youth data. One of these examples includes a description of the use of auxiliary variables. Finally, the paper illustrates ways that researchers can use intentional, or planned, missing data to enhance their research designs.},
langid = {english},
keywords = {Maximum likelihood,Missing data,Multiple imputation,Planned missingness},
file = {/Users/solomonkurz/Zotero/storage/LSJ3RCKR/S0022440509000661.html}
}
@article{baraldiIntroductionToModernMissingData2010,
title = {An Introduction to Modern Missing Data Analyses},
author = {Baraldi, Amanda N and Enders, Craig K},
date = {2010},
journaltitle = {Journal of School Psychology},
volume = {48},
number = {1},
pages = {5--37},
publisher = {{Elsevier}},
doi = {10.1016/j.jsp.2009.10.001}
}
@article{barrettAnIntroduction2020,
title = {An Introduction to {{ggdag}}},
author = {Barrett, Malcolm},
date = {2021-01-11},
url = {https://CRAN.R-project.org/package=ggdag/vignettes/intro-to-ggdag.html},
urldate = {2020-05-31},
langid = {english}
}
@article{barrettAnIntroduction2022,
title = {An Introduction to {{ggdag}}},
author = {Barrett, Malcolm},
date = {2022-08-26},
url = {https://CRAN.R-project.org/package=ggdag/vignettes/intro-to-ggdag.html},
urldate = {2022-09-25},
langid = {english}
}
@article{barrRandomEffectsStructure2013,
title = {Random Effects Structure for Confirmatory Hypothesis Testing: {{Keep}} It Maximal},
shorttitle = {Random Effects Structure for Confirmatory Hypothesis Testing},
author = {Barr, Dale J. and Levy, Roger and Scheepers, Christoph and Tily, Harry J.},
date = {2013-04-01},
journaltitle = {Journal of Memory and Language},
shortjournal = {Journal of Memory and Language},
volume = {68},
number = {3},
pages = {255--278},
issn = {0749-596X},
doi = {10.1016/j.jml.2012.11.001},
url = {http://www.sciencedirect.com/science/article/pii/S0749596X12001180},
urldate = {2020-07-27},
abstract = {Linear mixed-effects models (LMEMs) have become increasingly prominent in psycholinguistics and related areas. However, many researchers do not seem to appreciate how random effects structures affect the generalizability of an analysis. Here, we argue that researchers using LMEMs for confirmatory hypothesis testing should minimally adhere to the standards that have been in place for many decades. Through theoretical arguments and Monte Carlo simulation, we show that LMEMs generalize best when they include the maximal random effects structure justified by the design. The generalization performance of LMEMs including data-driven random effects structures strongly depends upon modeling criteria and sample size, yielding reasonable results on moderately-sized samples when conservative criteria are used, but with little or no power advantage over maximal models. Finally, random-intercepts-only LMEMs used on within-subjects and/or within-items data from populations where subjects and/or items vary in their sensitivity to experimental manipulations always generalize worse than separate F1 and F2 tests, and in many cases, even worse than F1 alone. Maximal LMEMs should be the `gold standard' for confirmatory hypothesis testing in psycholinguistics and beyond.},
langid = {english},
keywords = {Generalization,Linear mixed-effects models,Monte Carlo simulation,Statistics},
file = {/Users/solomonkurz/Zotero/storage/FHRVR92C/Barr et al. - 2013 - Random effects structure for confirmatory hypothes.pdf;/Users/solomonkurz/Zotero/storage/7SG2QQCA/S0749596X12001180.html}
}
@book{baumerMocernDataScienceR2021,
title = {Modern Data Science with {{R}}},
author = {Baumer, Benjamin S. and Kaplan, Daniel T. and Horton, Nicholas J.},
date = {2021},
edition = {Second Edition},
publisher = {{Taylor \& Francis Group, LLC.}},
url = {https://mdsr-book.github.io/mdsr2e/}
}
@article{beheim2021TreatmentOfMissingData,
title = {Treatment of Missing Data Determined Conclusions Regarding Moralizing Gods},
author = {Beheim, Bret and Atkinson, Quentin D and Bulbulia, Joseph and Gervais, Will and Gray, Russell D and Henrich, Joseph and Lang, Martin and Monroe, M Willis and Muthukrishna, Michael and Norenzayan, Ara and others},
date = {2021},
journaltitle = {Nature},
volume = {595},
number = {7866},
pages = {E29--E34},
publisher = {{Nature Publishing Group}},
doi = {10.1038/s41586-019-1043-4}
}
@article{betancourtBayesSparse2018,
title = {Bayes Sparse Regression},
author = {Betancourt, Michael},
date = {2018-03},
url = {https://betanalpha.github.io/assets/case_studies/bayes_sparse_regression.html},
langid = {english}
}
@article{betancourtRobustGaussianProcesses2017,
title = {Robust {{Gaussian}} Processes in {{Stan}}},
author = {Betancourt, Michael},
date = {2017-10},
url = {https://betanalpha.github.io/assets/case_studies/gp_part3/part3.html},
urldate = {2020-08-18},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/YI47KGAQ/part3.html}
}
@article{bickelSexBiasGraduate1975,
title = {Sex Bias in Graduate Admissions: {{Data}} from {{Berkeley}}},
shorttitle = {Sex {{Bias}} in {{Graduate Admissions}}},
author = {Bickel, P. J. and Hammel, E. A. and O'Connell, J. W.},
date = {1975-02-07},
journaltitle = {Science},
volume = {187},
number = {4175},
eprint = {17835295},
eprinttype = {pmid},
pages = {398--404},
publisher = {{American Association for the Advancement of Science}},
issn = {0036-8075, 1095-9203},
doi = {10.1126/science.187.4175.398},
url = {https://pdfs.semanticscholar.org/b704/3d57d399bd28b2d3e84fb9d342a307472458.pdf},
urldate = {2020-06-17},
abstract = {Examination of aggregate data on graduate admissions to the University of California, Berkeley, for fall 1973 shows a clear but misleading pattern of bias against female applicants. Examination of the disaggregated data reveals few decision-making units that show statistically significant departures from expected frequencies of female admissions, and about as many units appear to favor women as to favor men. If the data are properly pooled, taking into account the autonomy of departmental decision making, thus correcting for the tendency of women to apply to graduate departments that are more difficult for applicants of either sex to enter, there is a small but statistically significant bias in favor of women. The graduate departments that are easier to enter tend to be those that require more mathematics in the undergraduate preparatory curriculum. The bias in the aggregated data stems not from any pattern of discrimination on the part of admissions committees, which seem quite fair on the whole, but apparently from prior screening at earlier levels of the educational system. Women are shunted by their socialization and education toward fields of graduate study that are generally more crowded, less productive of completed degrees, and less well funded, and that frequently offer poorer professional employment prospects.},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/XW4GACMB/398.html}
}
@article{blavaan2021,
title = {Efficient {{Bayesian}} Structural Equation Modeling in {{Stan}}},
author = {Merkle, Edgar C. and Fitzsimmons, Ellen and Uanhoro, James and Goodrich, Ben},
date = {2021},
journaltitle = {Journal of Statistical Software},
volume = {100},
number = {6},
pages = {1--22},
doi = {10.18637/jss.v100.i06}
}
@article{boeschLearningCurvesTeaching2019,
title = {Learning Curves and Teaching When Acquiring Nut-Cracking in Humans and Chimpanzees},
author = {Boesch, Christophe and Bombjakov\'a, Da\v{s}a and Meier, Amelia and Mundry, Roger},
date = {2019-02-06},
journaltitle = {Scientific Reports},
volume = {9},
number = {1},
pages = {1515},
publisher = {{Nature Publishing Group}},
issn = {2045-2322},
doi = {10.1038/s41598-018-38392-8},
url = {https://www.nature.com/articles/s41598-018-38392-8},
urldate = {2020-11-07},
abstract = {Humans are considered superior to other species in their tool using skills. However, most of our knowledge about animals comes from observations in artificial conditions with individuals removed from their natural environment. We present a first comparison of humans and chimpanzees spontaneously acquiring the same technique as they forage in their natural environment. We compared the acquisition of the Panda nut-cracking technique between Mbendjele foragers from the Republic of Congo and the Ta\"i chimpanzees from C\^ote d'Ivoire. Both species initially acquire the technique slowly with similar kinds of mistakes, with years of practice required for the apprentice to become expert. Chimpanzees more rapidly acquired the technique when an apprentice, and reached adult efficiency earlier than humans. Adult efficiencies in both species did not differ significantly. Expert-apprentice interactions showed many similar instances of teaching in both species, with more variability in humans due, in part to their more complex technique. While in humans, teaching occurred both vertically and obliquely, only the former existed in chimpanzees. This comparison of the acquisition of a natural technique clarifies how the two species differed in their technical intelligence. Furthermore, our observations support the idea of teaching in both species being more frequent for difficult skills.},
issue = {1},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/Z8HHDPKU/Boesch et al. - 2019 - Learning curves and teaching when acquiring nut-cr.pdf;/Users/solomonkurz/Zotero/storage/TC2DVPMS/s41598-018-38392-8.html}
}
@incollection{borgesjlJardinSenderosQue1941,
title = {El Jard\'in de Senderos Que Se Bifurcan. {{Buenos Aires}}: {{Sur}}. {{Translated}} by {{D}}. {{A}}. {{Yates}} (1964)},
booktitle = {Labyrinths: {{Selected Stories}} \& {{Other Writings}}},
author = {{Borges, JL}},
date = {1941},
pages = {19--29},
publisher = {{New Directions}},
location = {{New York}}
}
@book{brms2021RM,
title = {{{brms}} Reference Manual, {{Version}} 2.15.0},
author = {B\"urkner, Paul-Christian},
date = {2021},
url = {https://CRAN.R-project.org/package=brms/brms.pdf}
}
@book{brms2022RM,
title = {{{brms}} Reference Manual, {{Version}} 2.18.0},
author = {B\"urkner, Paul-Christian},
date = {2022},
url = {https://CRAN.R-project.org/package=brms/brms.pdf}
}
@book{bryanHappyGitGitHub2020,
title = {Happy {{Git}} and {{GitHub}} for the {{useR}}},
author = {Bryan, Jenny and {the STAT 545 TAs} and Hester, Jim},
date = {2020},
url = {https://happygitwithr.com}
}
@book{bugs2003UM,
title = {{{WinBUGS}} User Manual},
author = {Spiegelhalter, David and Thomas, Andrew and Best, Nicky and Lunn, Dave},
date = {2003-01},
url = {https://www.mrc-bsu.cam.ac.uk/wp-content/uploads/manual14.pdf}
}
@article{Bürkner2021Define,
title = {Define Custom Response Distributions with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2021-03},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_customfamilies.html}
}
@article{Bürkner2021Distributional,
title = {Estimating Distributional Models with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2021-03},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_distreg.html}
}
@article{Bürkner2021HandleMissingValues,
title = {Handle Missing Values with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2021-03},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_missings.html}
}
@article{Bürkner2021Monotonic,
title = {Estimating Monotonic Effects with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2021-03},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_monotonic.html}
}
@article{Bürkner2021Multivariate,
title = {Estimating Multivariate Models with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2021-03},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_multivariate.html}
}
@article{Bürkner2021Non_linear,
title = {Estimating Non-Linear Models with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2021-03},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_nonlinear.html}
}
@article{Bürkner2021Parameterization,
title = {Parameterization of Response Distributions in {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2021-03},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_families.html}
}
@article{Bürkner2021Phylogenetic,
title = {Estimating Phylogenetic Multilevel Models with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2021-03},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_phylogenetics.html}
}
@article{Bürkner2022Define,
title = {Define Custom Response Distributions with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2022-09-19},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_customfamilies.html}
}
@article{Bürkner2022Distributional,
title = {Estimating Distributional Models with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2022-04},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_distreg.html}
}
@article{Bürkner2022HandleMissingValues,
title = {Handle Missing Values with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2022-09-19},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_missings.html},
urldate = {2022-09-26}
}
@article{Bürkner2022Monotonic,
title = {Estimating Monotonic Effects with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2022-09-19},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_monotonic.html},
urldate = {2022-09-26}
}
@article{Bürkner2022Multivariate,
title = {Estimating Multivariate Models with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2022-09-19},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_multivariate.html},
urldate = {2022-09-25}
}
@article{Bürkner2022Non_linear,
title = {Estimating Non-Linear Models with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2022-09-19},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_nonlinear.html}
}
@article{Bürkner2022Parameterization,
title = {Parameterization of Response Distributions in {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2022-09-19},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_families.html},
urldate = {2022-09-26}
}
@article{Bürkner2022Phylogenetic,
title = {Estimating Phylogenetic Multilevel Models with {{brms}}},
author = {B\"urkner, Paul-Christian},
date = {2022-09-19},
url = {https://CRAN.R-project.org/package=brms/vignettes/brms_phylogenetics.html},
urldate = {2022-09-26}
}
@article{burknerAdvancedBayesianMultilevel2018,
title = {Advanced {{Bayesian}} Multilevel Modeling with the {{R}} Package Brms},
author = {B\"urkner, Paul-Christian},
date = {2018},
journaltitle = {The R Journal},
volume = {10},
number = {1},
pages = {395--411},
doi = {10.32614/RJ-2018-017}
}
@article{burknerBrmsPackageBayesian2017,
title = {{{brms}}: {{An R}} Package for {{Bayesian}} Multilevel Models Using {{Stan}}},
author = {B\"urkner, Paul-Christian},
date = {2017},
journaltitle = {Journal of Statistical Software},
volume = {80},
number = {1},
pages = {1--28},
doi = {10.18637/jss.v080.i01}
}
@article{burknerModellingMonotonicEffects2020,
title = {Modelling Monotonic Effects of Ordinal Predictors in {{Bayesian}} Regression Models},
author = {B\"urkner, Paul-Christian and Charpentier, Emmanuel},
date = {2020},
journaltitle = {British Journal of Mathematical and Statistical Psychology},
issn = {2044-8317},
doi = {10.1111/bmsp.12195},
url = {https://onlinelibrary.wiley.com/doi/abs/10.1111/bmsp.12195},
urldate = {2020-06-28},
abstract = {Ordinal predictors are commonly used in regression models. They are often incorrectly treated as either nominal or metric, thus under- or overestimating the information contained. Such practices may lead to worse inference and predictions compared to methods which are specifically designed for this purpose. We propose a new method for modelling ordinal predictors that applies in situations in which it is reasonable to assume their effects to be monotonic. The parameterization of such monotonic effects is realized in terms of a scale parameter b representing the direction and size of the effect and a simplex parameter modelling the normalized differences between categories. This ensures that predictions increase or decrease monotonically, while changes between adjacent categories may vary across categories. This formulation generalizes to interaction terms as well as multilevel structures. Monotonic effects may be applied not only to ordinal predictors, but also to other discrete variables for which a monotonic relationship is plausible. In simulation studies we show that the model is well calibrated and, if there is monotonicity present, exhibits predictive performance similar to or even better than other approaches designed to handle ordinal predictors. Using Stan, we developed a Bayesian estimation method for monotonic effects which allows us to incorporate prior information and to check the assumption of monotonicity. We have implemented this method in the R package brms, so that fitting monotonic effects in a fully Bayesian framework is now straightforward.},
langid = {english},
keywords = {Bayesian statistics,brms,isotonic regression,ordinal variables,R,Stan},
annotation = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1111/bmsp.12195},
file = {/Users/solomonkurz/Zotero/storage/32MU9XU6/bmsp.html}
}
@article{burknerOrdinalRegressionModels2019,
title = {Ordinal Regression Models in Psychology: {{A}} Tutorial},
shorttitle = {Ordinal {{Regression Models}} in {{Psychology}}},
author = {B\"urkner, Paul-Christian and Vuorre, Matti},
date = {2019-03-01},
journaltitle = {Advances in Methods and Practices in Psychological Science},
shortjournal = {Advances in Methods and Practices in Psychological Science},
volume = {2},
number = {1},
pages = {77--101},
publisher = {{SAGE Publications Inc}},
issn = {2515-2459},
doi = {10.1177/2515245918823199},
url = {https://doi.org/10.1177/2515245918823199},
urldate = {2020-05-18},
abstract = {Ordinal variables, although extremely common in psychology, are almost exclusively analyzed with statistical models that falsely assume them to be metric. This practice can lead to distorted effect-size estimates, inflated error rates, and other problems. We argue for the application of ordinal models that make appropriate assumptions about the variables under study. In this Tutorial, we first explain the three major classes of ordinal models: the cumulative, sequential, and adjacent-category models. We then show how to fit ordinal models in a fully Bayesian framework with the R package brms, using data sets on opinions about stem-cell research and time courses of marriage. The appendices provide detailed mathematical derivations of the models and a discussion of censored ordinal models. Compared with metric models, ordinal models provide better theoretical interpretation and numerical inference from ordinal data, and we recommend their widespread adoption in psychology.},
langid = {english}
}
@article{carpenterStanProbabilisticProgramming2017,
title = {Stan: {{A}} Probabilistic Programming Language},
author = {Carpenter, Bob and Gelman, Andrew and Hoffman, Matthew D and Lee, Daniel and Goodrich, Ben and Betancourt, Michael and Brubaker, Marcus and Guo, Jiqiang and Li, Peter and Riddell, Allen},
date = {2017},
journaltitle = {Journal of statistical software},
volume = {76},
number = {1},
publisher = {{Columbia Univ., New York, NY (United States); Harvard Univ., Cambridge, MA \ldots}},
doi = {10.18637/jss.v076.i01},
url = {https://www.osti.gov/servlets/purl/1430202}
}
@inproceedings{carvalho2009handling,
title = {Handling Sparsity via the Horseshoe},
booktitle = {Artificial Intelligence and Statistics},
author = {Carvalho, Carlos M and Polson, Nicholas G and Scott, James G},
date = {2009},
pages = {73--80},
url = {http://proceedings.mlr.press/v5/carvalho09a/carvalho09a.pdf}
}
@article{casellaExplainingGibbsSampler1992,
title = {Explaining the {{Gibbs}} Sampler},
author = {Casella, George and George, Edward I.},
date = {1992-08-01},
journaltitle = {The American Statistician},
volume = {46},
number = {3},
pages = {167--174},
publisher = {{Taylor \& Francis}},
issn = {0003-1305},
doi = {10.1080/00031305.1992.10475878},
url = {https://ecommons.cornell.edu/bitstream/handle/1813/31670/BU-1098-MA.Revised.pdf?sequence=1},
urldate = {2020-06-11},
abstract = {Computer-intensive algorithms, such as the Gibbs sampler, have become increasingly popular statistical tools, both in applied and theoretical work. The properties of such algorithms, however, may sometimes not be obvious. Here we give a simple explanation of how and why the Gibbs sampler works. We analytically establish its properties in a simple case and provide insight for more complicated cases. There are also a number of examples.},
keywords = {Data augmentation,Markov chains,Monte Carlo methods,Resampling techniques},
annotation = {\_eprint: https://www.tandfonline.com/doi/pdf/10.1080/00031305.1992.10475878},
file = {/Users/solomonkurz/Zotero/storage/7G3SEDKK/Casella and George - 1992 - Explaining the Gibbs Sampler.pdf;/Users/solomonkurz/Zotero/storage/SFZUD4XZ/00031305.1992.html}
}
@article{casillas2021interlingual,
title = {Interlingual Interactions Elicit Performance Mismatches Not ``Compromise'' Categories in Early Bilinguals: {{Evidence}} from Meta-Analysis and Coronal Stops},
author = {Casillas, Joseph V},
date = {2021},
journaltitle = {Languages},
volume = {6},
number = {1},
pages = {9},
publisher = {{Multidisciplinary Digital Publishing Institute}},
doi = {10.3390/languages6010009},
url = {https://www.mdpi.com/2226-471X/6/1/9/pdf}
}
@book{cohenAppliedMultipleRegression2013,
title = {Applied Multiple Regression/Correlation Analysis for the Behavioral Sciences},
author = {Cohen, Jacob and Cohen, Patricia and West, Stephen G. and Aiken, Leona S.},
date = {2013-06-17},
edition = {Third Edition},
publisher = {{Routledge}},
doi = {10.4324/9780203774441},
url = {https://www.taylorfrancis.com/books/9780203774441},
urldate = {2020-10-14},
abstract = {This classic text on multiple regression is noted for its nonmathematical, applied, and data-analytic approach. Readers profit from its verbal-conceptual},
isbn = {978-0-203-77444-1},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/NT4YB49X/Cohen et al. - 2013 - Applied Multiple RegressionCorrelation Analysis f.pdf;/Users/solomonkurz/Zotero/storage/B7CQQRQ9/9780203774441.html;/Users/solomonkurz/Zotero/storage/YXR5TUJX/9780203774441.html}
}
@book{cover2006elements,
title = {Elements of Information Theory},
author = {Cover, Thomas M and Thomas, Joy A},
date = {2006},
edition = {Second Edition},
publisher = {{John Wiley \& Sons}},
url = {https://www.wiley.com/en-us/Elements+of+Information+Theory%2C+2nd+Edition-p-9780471241959},
isbn = {978-0-471-24195-9}
}
@article{cummingNewStatisticsWhy2014,
title = {The New Statistics: {{Why}} and How},
shorttitle = {The {{New Statistics}}},
author = {Cumming, Geoff},
date = {2014-01-01},
journaltitle = {Psychological Science},
shortjournal = {Psychol Sci},
volume = {25},
number = {1},
pages = {7--29},
publisher = {{SAGE Publications Inc}},
issn = {0956-7976},
doi = {10.1177/0956797613504966},
url = {https://journals.sagepub.com/doi/pdf/10.1177/0956797613504966},
urldate = {2020-05-21},
abstract = {We need to make substantial changes to how we conduct research. First, in response to heightened concern that our published research literature is incomplete and untrustworthy, we need new requirements to ensure research integrity. These include prespecification of studies whenever possible, avoidance of selection and other inappropriate data-analytic practices, complete reporting, and encouragement of replication. Second, in response to renewed recognition of the severe flaws of null-hypothesis significance testing (NHST), we need to shift from reliance on NHST to estimation and other preferred techniques. The new statistics refers to recommended practices, including estimation based on effect sizes, confidence intervals, and meta-analysis. The techniques are not new, but adopting them widely would be new for many researchers, as well as highly beneficial. This article explains why the new statistics are important and offers guidance for their use. It describes an eight-step new-statistics strategy for research with integrity, which starts with formulation of research questions in estimation terms, has no place for NHST, and is aimed at building a cumulative quantitative discipline.},
file = {/Users/solomonkurz/Zotero/storage/UJMRBZGC/Cumming - 2014 - The New Statistics Why and How.pdf}
}
@article{cushmanRoleConsciousReasoning2006,
title = {The Role of Conscious Reasoning and Intuition in Moral Judgment: {{Testing}} Three Principles of Harm},
shorttitle = {The {{Role}} of {{Conscious Reasoning}} and {{Intuition}} in {{Moral Judgment}}},
author = {Cushman, Fiery and Young, Liane and Hauser, Marc},
date = {2006-12-01},
journaltitle = {Psychological Science},
shortjournal = {Psychol Sci},
volume = {17},
number = {12},
pages = {1082--1089},
publisher = {{SAGE Publications Inc}},
issn = {0956-7976},
doi = {10.1111/j.1467-9280.2006.01834.x},
url = {https://doi.org/10.1111/j.1467-9280.2006.01834.x},
urldate = {2020-06-27},
abstract = {Is moral judgment accomplished by intuition or conscious reasoning? An answer demands a detailed account of the moral principles in question. We investigated three principles that guide moral judgments: (a) Harm caused by action is worse than harm caused by omission, (b) harm intended as the means to a goal is worse than harm foreseen as the side effect of a goal, and (c) harm involving physical contact with the victim is worse than harm involving no physical contact. Asking whether these principles are invoked to explain moral judgments, we found that subjects generally appealed to the first and third principles in their justifications, but not to the second. This finding has significance for methods and theories of moral psychology: The moral principles used in judgment must be directly compared with those articulated in justification, and doing so shows that some moral principles are available to conscious reasoning whereas others are not.},
langid = {english}
}
@article{dagitty2016,
title = {Robust Causal Inference Using Directed Acyclic Graphs: The {{R}} Package 'dagitty'},
author = {Textor, Johannes and van der Zander, Benito and Gilthorpe, Mark S and Li\'skiewicz, Maciej and Ellison, George TH},
options = {useprefix=true},
date = {2016},
journaltitle = {International Journal of Epidemiology},
volume = {45},
number = {6},
pages = {1887--1894},
doi = {10.1093/ije/dyw341}
}
@article{davis2020genetic,
title = {A Genetic, Genomic, and Computational Resource for Exploring Neural Circuit Function},
author = {Davis, Fred P and Nern, Aljoscha and Picard, Serge and Reiser, Michael B and Rubin, Gerald M and Eddy, Sean R and Henry, Gilbert L},
date = {2020},
journaltitle = {eLife},
volume = {9},
pages = {e50901},
publisher = {{eLife Sciences Publications Limited}},
doi = {10.7554/eLife.50901},
url = {https://elifesciences.org/articles/50901.pdf}
}
@article{derooijCrossvalidationMethodEvery2020,
title = {Cross-Validation: {{A}} Method Every Psychologist Should Know},
shorttitle = {Cross-{{Validation}}},
author = {de Rooij, Mark and Weeda, Wouter},
options = {useprefix=true},
date = {2020-05-27},
journaltitle = {Advances in Methods and Practices in Psychological Science},
shortjournal = {Advances in Methods and Practices in Psychological Science},
volume = {3},
number = {2},
pages = {248--263},
publisher = {{SAGE Publications Inc}},
issn = {2515-2459},
doi = {10.1177/2515245919898466},
url = {https://doi.org/10.1177/2515245919898466},
urldate = {2020-06-03},
abstract = {Cross-validation is a statistical procedure that every psychologist should know. Most are possibly familiar with the procedure in a global way but have not used it for the analysis of their own data. We introduce cross-validation for the purpose of model selection in a general sense, as well as an R package we have developed for this kind of analysis, and we present examples illustrating the use of this package for types of research problems that are often encountered in the social sciences. Cross-validation can be an easy-to-use alternative to null-hypothesis testing, and it has the benefit that it does not make as many assumptions.},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/S7SBFDUC/de Rooij and Weeda - 2020 - Cross-Validation A Method Every Psychologist Shou.pdf}
}
@book{dunn2018generalized,
title = {Generalized Linear Models with Examples in {{R}}},
author = {Dunn, Peter K and Smyth, Gordon K},
date = {2018},
publisher = {{Springer}},
url = {https://link.springer.com/book/10.1007/978-1-4419-0118-7}
}
@article{efronSteinParadoxStatistics1977,
title = {Stein's Paradox in Statistics},
author = {Efron, Bradley and Morris, Carl},
date = {1977},
journaltitle = {Scientific American},
volume = {236},
number = {5},
eprint = {24954030},
eprinttype = {jstor},
pages = {119--127},
publisher = {{Scientific American, a division of Nature America, Inc.}},
issn = {0036-8733},
doi = {10.1038/scientificamerican0577-119}
}
@book{enders2010applied,
title = {Applied Missing Data Analysis},
author = {Enders, Craig K},
date = {2010},
publisher = {{Guilford Press}},
url = {http://www.appliedmissingdata.com/},
isbn = {978-1-60623-639-0}
}
@book{enders2022applied,
title = {Applied Missing Data Analysis},
author = {Enders, Craig K},
date = {2022},
edition = {Second Edition},
publisher = {{Guilford Press}},
url = {http://www.appliedmissingdata.com/},
isbn = {978-1-4625-4986-3}
}
@article{fernandezGGMCMCAnalysisofMCMC2016,
title = {{{ggmcmc}}: {{Analysis}} of {{MCMC}} Samples and {{Bayesian}} Inference},
author = {Fern\'andez i Mar\'in, Xavier},
date = {2016},
journaltitle = {Journal of Statistical Software},
volume = {70},
number = {9},
pages = {1--20},
doi = {10.18637/jss.v070.i09}
}
@article{freckleton2002misuse,
title = {On the Misuse of Residuals in Ecology: Regression of Residuals vs. Multiple Regression},
author = {Freckleton, Robert P},
date = {2002},
journaltitle = {Journal of Animal Ecology},
volume = {71},
number = {3},
pages = {542--545},
publisher = {{JSTOR}},
doi = {10.1046/j.1365-2656.2002.00618.x}
}
@article{gabry2019visualization,
title = {Visualization in {{Bayesian}} Workflow},
author = {Gabry, Jonah and Simpson, Daniel and Vehtari, Aki and Betancourt, Michael and Gelman, Andrew},
date = {2019},
journaltitle = {Journal of the Royal Statistical Society: Series A (Statistics in Society)},
volume = {182},
number = {2},
pages = {389--402},
publisher = {{Wiley Online Library}},
doi = {10.1111/rssa.12378},
url = {https://arxiv.org/abs/1709.01449}
}
@article{gabryPlottingMCMCDraws2021,
title = {Plotting {{MCMC}} Draws Using the {{bayesplot}} Package},
author = {Gabry, Jonah},
date = {2021-01-07},
url = {https://CRAN.R-project.org/package=bayesplot/vignettes/plotting-mcmc-draws.html},
langid = {english}
}
@article{gabryPlottingMCMCDraws2022,
title = {Plotting {{MCMC}} Draws Using the {{bayesplot}} Package},
author = {Gabry, Jonah},
date = {2022-11-16},
url = {https://CRAN.R-project.org/package=bayesplot/vignettes/plotting-mcmc-draws.html},
urldate = {2022-09-25},
langid = {english}
}
@article{gabryVisualMCMCDiagnostics2021,
title = {Visual {{MCMC}} Diagnostics Using the {{bayesplot}} Package},
author = {Gabry, Jonah and Modr\'ak, Martin},
date = {2021-01-07},
url = {https://CRAN.R-project.org/package=bayesplot/vignettes/visual-mcmc-diagnostics.html},
langid = {english}
}
@article{gabryVisualMCMCDiagnostics2022,
title = {Visual {{MCMC}} Diagnostics Using the {{bayesplot}} Package},
author = {Gabry, Jonah and Modr\'ak, Martin},
date = {2022-03-09},
url = {https://CRAN.R-project.org/package=bayesplot/vignettes/visual-mcmc-diagnostics.html},
urldate = {2022-09-26},
langid = {english}
}
@article{gelman2006difference,
title = {The Difference between ``Significant'' and ``Not Significant'' Is Not Itself Statistically Significant},
author = {Gelman, Andrew and Stern, Hal},
date = {2006},
journaltitle = {The American Statistician},
volume = {60},
number = {4},
pages = {328--331},
publisher = {{Taylor \& Francis}},
doi = {10.1198/000313006X152649},
url = {https://www.tandfonline.com/doi/pdf/10.1198/000313006X152649?needAccess=true}
}
@book{gelman2013bayesian,
title = {Bayesian Data Analysis},
author = {Gelman, Andrew and Carlin, John B and Stern, Hal S and Dunson, David B and Vehtari, Aki and Rubin, Donald B},
date = {2013},
edition = {Third Edition},
publisher = {{CRC press}},
url = {https://stat.columbia.edu/~gelman/book/}
}
@article{gelmanAnalysisVarianceWhy2005,
title = {Analysis of Variance--{{Why}} It Is More Important than Ever},
author = {Gelman, Andrew},
date = {2005-02},
journaltitle = {Annals of Statistics},
shortjournal = {Ann. Statist.},
volume = {33},
number = {1},
pages = {1--53},
publisher = {{Institute of Mathematical Statistics}},
issn = {0090-5364, 2168-8966},
doi = {10.1214/009053604000001048},
url = {https://projecteuclid.org/download/pdfview_1/euclid.aos/1112967698},
urldate = {2020-05-18},
abstract = {Analysis of variance (ANOVA) is an extremely important method in exploratory and confirmatory data analysis. Unfortunately, in complex problems (e.g., split-plot designs), it is not always easy to set up an appropriate ANOVA. We propose a hierarchical analysis that automatically gives the correct ANOVA comparisons even in complex scenarios. The inferences for all means and variances are performed under a model with a separate batch of effects for each row of the ANOVA table. We connect to classical ANOVA by working with finite-sample variance components: fixed and random effects models are characterized by inferences about existing levels of a factor and new levels, respectively. We also introduce a new graphical display showing inferences about the standard deviations of each batch of effects. We illustrate with two examples from our applied data analysis, first illustrating the usefulness of our hierarchical computations and displays, and second showing how the ideas of ANOVA are helpful in understanding a previously fit hierarchical model.},
langid = {english},
mrnumber = {MR2157795},
zmnumber = {1064.62082},
keywords = {ANOVA,Bayesian inference,fixed effects,hierarchical model,linear regression,multilevel model,random effects,variance components},
file = {/Users/solomonkurz/Zotero/storage/2U3XQY5J/Gelman - 2005 - Analysis of variance—why it is more important than.pdf;/Users/solomonkurz/Zotero/storage/SQ6DDNZI/1112967698.html}
}
@article{gelmanAreConfidenceIntervals2019,
title = {Are Confidence Intervals Better Termed ``Uncertainty Intervals''?},
author = {Gelman, Andrew and Greenland, Sander},
date = {2019-09-10},
journaltitle = {BMJ},
shortjournal = {BMJ},
pages = {l5381},
issn = {0959-8138, 1756-1833},
doi = {10.1136/bmj.l5381},
url = {https://stat.columbia.edu/~gelman/research/published/uncertainty_intervals.pdf},
urldate = {2020-05-21},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/TVDUC9Z3/Gelman and Greenland - 2019 - Are confidence intervals better termed “uncertaint.pdf}
}
@article{gelmanGardenForkingPaths2013,
title = {The Garden of Forking Paths: {{Why}} Multiple Comparisons Can Be a Problem, Even When There Is No ``Fishing Expedition'' or ``p-Hacking'' and the Research Hypothesis Was Posited Ahead of Time},
author = {Gelman, Andrew and Loken, Eric},
date = {2013-11-14},
pages = {17},
url = {https://stat.columbia.edu/~gelman/research/unpublished/p_hacking.pdf},
abstract = {Researcher degrees of freedom can lead to a multiple comparisons problem, even in settings where researchers perform only a single analysis on their data. The problem is there can be a large number of potential comparisons when the details of data analysis are highly contingent on data, without the researcher having to perform any conscious procedure of fishing or examining multiple p-values. We discuss in the context of several examples of published papers where data-analysis decisions were theoretically-motivated based on previous literature, but where the details of data selection and analysis were not pre-specified and, as a result, were contingent on data.},
langid = {english},
file = {/Users/solomonkurz/Zotero/storage/EA32DKC7/Gelman and Loken - The garden of forking paths Why multiple comparis.pdf}
}
@article{gelmanPostratificationManyCategories1997,
title = {Poststratification into Many Categories Using Hierarchical Logistic Regression},
author = {Gelman, Andrew and Little, Thomas C.},
date = {1997-09},
journaltitle = {Survey Methodology},
volume = {23},
pages = {127--135},
url = {https://stat.columbia.edu/~gelman/research/published/poststrat3.pdf},
langid = {english}
}
@article{gelmanPriorCanOften2017,
title = {The Prior Can Often Only Be Understood in the Context of the Likelihood},
author = {Gelman, Andrew and Simpson, Daniel and Betancourt, Michael},
date = {2017-10},
journaltitle = {Entropy},
volume = {19},
number = {10},
pages = {555},
publisher = {{Multidisciplinary Digital Publishing Institute}},
doi = {10.3390/e19100555},
url = {https://www.mdpi.com/1099-4300/19/10/555},
urldate = {2020-06-12},
abstract = {A key sticking point of Bayesian analysis is the choice of prior distribution, and there is a vast literature on potential defaults including uniform priors, Jeffreys' priors, reference priors, maximum entropy priors, and weakly informative priors. These methods, however, often manifest a key conceptual tension in prior modeling: a model encoding true prior information should be chosen without reference to the model of the measurement process, but almost all common prior modeling techniques are implicitly motivated by a reference likelihood. In this paper we resolve this apparent paradox by placing the choice of prior into the context of the entire Bayesian analysis, from inference to prediction to model evaluation.},
issue = {10},
langid = {english},
keywords = {Bayesian inference,default priors,prior distribution},
file = {/Users/solomonkurz/Zotero/storage/GITEJRKC/Gelman et al. - 2017 - The Prior Can Often Only Be Understood in the Cont.pdf;/Users/solomonkurz/Zotero/storage/FD2UD59C/555.html}
}
@article{gelmanPriorDistributionsVariance2006,
title = {Prior Distributions for Variance Parameters in Hierarchical Models (Comment on Article by {{Browne}} and {{Draper}})},
author = {Gelman, Andrew},
date = {2006-09},
journaltitle = {Bayesian Analysis},
shortjournal = {Bayesian Anal.},
volume = {1},
number = {3},
pages = {515--534},
publisher = {{International Society for Bayesian Analysis}},
issn = {1936-0975, 1931-6690},
doi = {10.1214/06-BA117A},
url = {https://projecteuclid.org/euclid.ba/1340371048},
urldate = {2020-05-17},
abstract = {Various noninformative prior distributions have been suggested for scale parameters in hierarchical models. We construct a new folded-noncentral-$t$ family of conditionally conjugate priors for hierarchical standard deviation parameters, and then consider noninformative and weakly informative priors in this family. We use an example to illustrate serious problems with the inverse-gamma family of ``noninformative'' prior distributions. We suggest instead to use a uniform prior on the hierarchical standard deviation, using the half-$t$ family when the number of groups is small and in other settings where a weakly informative prior is desired. We also illustrate the use of the half-$t$ family for hierarchical modeling of multiple variance parameters such as arise in the analysis of variance.},
langid = {english},
mrnumber = {MR2221284},
zmnumber = {1331.62139},
keywords = {Bayesian inference,conditional conjugacy,folded-noncentral-$t$ distribution,half-$t$ distribution,hierarchical model,multilevel model,noninformative prior distribution,weakly informative prior distribution},
file = {/Users/solomonkurz/Zotero/storage/LNB63KFA/Gelman - 2006 - Prior distributions for variance parameters in hie.pdf;/Users/solomonkurz/Zotero/storage/AJT3SYSS/1340371048.html}
}
@book{gelmanRegressionOtherStories2020,
title = {Regression and Other Stories},
author = {Gelman, Andrew and Hill, Jennifer and Vehtari, Aki},
date = {2020},
series = {Analytical {{Methods}} for {{Social Research}}},
publisher = {{Cambridge University Press}},
location = {{Cambridge}},
doi = {10.1017/9781139161879},
url = {https://www.cambridge.org/core/books/regression-and-other-stories/DD20DD6C9057118581076E54E40C372C},
urldate = {2020-12-09},
abstract = {Most textbooks on regression focus on theory and the simplest of examples. Real statistical problems, however, are complex and subtle. This is not a book about the theory of regression. It is about using regression to solve real problems of comparison, estimation, prediction, and causal inference. Unlike other books, it focuses on practical issues such as sample size and missing data and a wide range of goals and techniques. It jumps right in to methods and computer code you can use immediately. Real examples, real stories from the authors' experience demonstrate what regression can do and its limitations, with practical advice for understanding assumptions and implementing methods for experiments and observational studies. They make a smooth transition to logistic regression and GLM. The emphasis is on computation in R and Stan rather than derivations, with code available online. Graphics and presentation aid understanding of the models and model fitting.},
isbn = {978-1-107-02398-7},
file = {/Users/solomonkurz/Zotero/storage/GQITHSNF/DD20DD6C9057118581076E54E40C372C.html}
}
@article{gelmanRsquaredBayesianRegression2019,
title = {R-Squared for {{Bayesian}} Regression Models},
author = {Gelman, Andrew and Goodrich, Ben and Gabry, Jonah and Vehtari, Aki},
date = {2019-07-03},
journaltitle = {The American Statistician},
shortjournal = {The American Statistician},
volume = {73},
number = {3},
pages = {307--309},
issn = {0003-1305, 1537-2731},
doi = {10.1080/00031305.2018.1549100},
url = {https://www.tandfonline.com/doi/full/10.1080/00031305.2018.1549100},
urldate = {2020-05-16},
langid = {english}
}
@article{gelmanWhyHighorderPolynomials2019,
title = {Why High-Order Polynomials Should Not Be Used in Regression Discontinuity Designs},
author = {Gelman, Andrew and Imbens, Guido},
date = {2019-07-03},
journaltitle = {Journal of Business \& Economic Statistics},
shortjournal = {Journal of Business \& Economic Statistics},
volume = {37},
number = {3},
pages = {447--456},
publisher = {{Taylor \& Francis}},
issn = {0735-0015},
doi = {10.1080/07350015.2017.1366909},
url = {https://amstat.tandfonline.com/doi/full/10.1080/07350015.2017.1366909},
urldate = {2020-07-30},
abstract = {It is common in regression discontinuity analysis to control for third, fourth, or higher-degree polynomials of the forcing variable. There appears to be a perception that such methods are theoretically justified, even though they can lead to evidently nonsensical results. We argue that controlling for global high-order polynomials in regression discontinuity analysis is a flawed approach with three major problems: it leads to noisy estimates, sensitivity to the degree of the polynomial, and poor coverage of confidence intervals. We recommend researchers instead use estimators based on local linear or quadratic polynomials or other smooth functions.},
file = {/Users/solomonkurz/Zotero/storage/IZ6XLLYA/Gelman and Imbens - 2019 - Why High-Order Polynomials Should Not Be Used in R.pdf;/Users/solomonkurz/Zotero/storage/PK67RHKK/07350015.2017.html}
}
@article{gemanStochasticRelaxationGibbs1984,
title = {Stochastic Relaxation, {{Gibbs}} Distributions, and the {{Bayesian}} Restoration of Images},
author = {Geman, Stuart and Geman, Donald},
date = {1984-11},
journaltitle = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
volume = {PAMI-6},
number = {6},
pages = {721--741},
issn = {1939-3539},
doi = {10.1109/TPAMI.1984.4767596},
url = {https://www.dam.brown.edu/people/documents/stochasticrelaxation.pdf},
abstract = {We make an analogy between images and statistical mechanics systems. Pixel gray levels and the presence and orientation of edges are viewed as states of atoms or molecules in a lattice-like physical system. The assignment of an energy function in the physical system determines its Gibbs distribution. Because of the Gibbs distribution, Markov random field (MRF) equivalence, this assignment also determines an MRF image model. The energy function is a more convenient and natural mechanism for embodying picture attributes than are the local characteristics of the MRF. For a range of degradation mechanisms, including blurring, nonlinear deformations, and multiplicative or additive noise, the posterior distribution is an MRF with a structure akin to the image model. By the analogy, the posterior distribution defines another (imaginary) physical system. Gradual temperature reduction in the physical system isolates low energy states (``annealing''), or what is the same thing, the most probable states under the Gibbs distribution. The analogous operation under the posterior distribution yields the maximum a posteriori (MAP) estimate of the image given the degraded observations. The result is a highly parallel ``relaxation'' algorithm for MAP estimation. We establish convergence properties of the algorithm and we experiment with some simple pictures, for which good restorations are obtained at low signal-to-noise ratios.},
eventtitle = {{{IEEE Transactions}} on {{Pattern Analysis}} and {{Machine Intelligence}}},
keywords = {Additive noise,Annealing,Bayesian methods,Deformable models,Degradation,Energy states,Gibbs distribution,image restoration,Image restoration,line process,MAP estimate,Markov random field,Markov random fields,relaxation,scene modeling,spatial degradation,Stochastic processes,Temperature distribution},
file = {/Users/solomonkurz/Zotero/storage/M4USX4TH/4767596.html}
}
@article{girard2021reconsidering,