@comment{dim_reduce.bib — BibTeX bibliography for the dim_reduce project (forked from johnros/dim_reduce).}
@article{hotelling1933analysis,
  author    = {Hotelling, Harold},
  title     = {Analysis of a Complex of Statistical Variables into Principal Components},
  journal   = {Journal of Educational Psychology},
  volume    = {24},
  number    = {6},
  pages     = {417--441},
  year      = {1933},
  publisher = {Warwick \& York},
}
@article{pearson1901liii,
  author    = {Pearson, Karl},
  title     = {LIII. On lines and planes of closest fit to systems of points in space},
  journal   = {The London, Edinburgh, and Dublin Philosophical Magazine and Journal of Science},
  volume    = {2},
  number    = {11},
  pages     = {559--572},
  year      = {1901},
  publisher = {Taylor \& Francis},
}
@book{shalev2014understanding,
  author    = {Shalev-Shwartz, Shai and Ben-David, Shai},
  title     = {Understanding machine learning: From theory to algorithms},
  year      = {2014},
  publisher = {Cambridge University Press},
}
@article{nadler2008finite,
  author    = {Nadler, Boaz},
  title     = {Finite sample approximation results for principal component analysis: A matrix perturbation approach},
  journal   = {The Annals of Statistics},
  volume    = {36},
  number    = {6},
  pages     = {2791--2817},
  year      = {2008},
  publisher = {Institute of Mathematical Statistics},
}
@book{friedman2001elements,
  author    = {Friedman, Jerome and Hastie, Trevor and Tibshirani, Robert},
  title     = {The Elements of Statistical Learning},
  series    = {Springer Series in Statistics},
  year      = {2001},
  publisher = {Springer},
  address   = {New York},
}
@book{jolliffe2002principal,
  author    = {Jolliffe, Ian T.},
  title     = {Principal Component Analysis},
  edition   = {Second},
  series    = {Springer Series in Statistics},
  year      = {2002},
  publisher = {Springer},
  address   = {New York},
}
@article{hyvarinen2000independent,
  author    = {Hyv{\"a}rinen, Aapo and Oja, Erkki},
  title     = {Independent Component Analysis: Algorithms and Applications},
  journal   = {Neural Networks},
  volume    = {13},
  number    = {4},
  pages     = {411--430},
  year      = {2000},
  publisher = {Elsevier},
}
@article{graham1988isometric,
  author    = {Graham, Ronald L.},
  title     = {Isometric embeddings of graphs},
  journal   = {Selected Topics in Graph Theory},
  volume    = {3},
  pages     = {133--150},
  year      = {1988},
  publisher = {Academic Press},
  address   = {San Diego, CA},
}
@article{tipping2001sparse,
  author    = {Tipping, Michael E.},
  title     = {Sparse Kernel Principal Component Analysis},
  journal   = {Advances in Neural Information Processing Systems},
  volume    = {13},
  pages     = {633--639},
  year      = {2001},
  publisher = {MIT Press},
}
@book{mohri2012foundations,
  author    = {Mohri, Mehryar and Rostamizadeh, Afshin and Talwalkar, Ameet},
  title     = {Foundations of Machine Learning},
  year      = {2012},
  publisher = {MIT Press},
  address   = {Cambridge, MA},
}
@book{borg2005modern,
  author    = {Borg, Ingwer and Groenen, Patrick JF},
  title     = {Modern multidimensional scaling: Theory and applications},
  year      = {2005},
  publisher = {Springer Science \& Business Media},
}
@misc{gavish_optimal_2013,
  author        = {Gavish, Matan and Donoho, David L.},
  title         = {The Optimal Hard Threshold for Singular Values is {$4/\sqrt{3}$}},
  year          = {2013},
  month         = may,
  eprint        = {1305.5870},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ME},
  keywords      = {Statistics - Methodology},
}
@article{maaten2008visualizing,
  author  = {van der Maaten, Laurens and Hinton, Geoffrey},
  title   = {Visualizing Data Using {t-SNE}},
  journal = {Journal of Machine Learning Research},
  volume  = {9},
  number  = {Nov},
  pages   = {2579--2605},
  year    = {2008},
}
@article{JSSv076i02,
  author   = {Miettinen, Jari and Nordhausen, Klaus and Taskinen, Sara},
  title    = {Blind Source Separation Based on Joint Diagonalization in {R}: The Packages {JADE} and {BSSasymp}},
  journal  = {Journal of Statistical Software},
  volume   = {76},
  number   = {2},
  pages    = {1--31},
  year     = {2017},
  keywords = {independent component analysis; multivariate time series; nonstationary source separation; performance indices; second order source separation},
  abstract = {Blind source separation (BSS) is a well-known signal processing tool which is used to solve practical data analysis problems in various fields of science. In BSS, we assume that the observed data consists of linear mixtures of latent variables. The mixing system and the distributions of the latent variables are unknown. The aim is to find an estimate of an unmixing matrix which then transforms the observed data back to latent sources. In this paper we present the R packages JADE and BSSasymp. The package JADE offers several BSS methods which are based on joint diagonalization. Package BSSasymp contains functions for computing the asymptotic covariance matrices as well as their data-based estimates for most of the BSS estimators included in package JADE. Several simulated and real datasets are used to illustrate the functions in these two packages.},
  issn     = {1548-7660},
  doi      = {10.18637/jss.v076.i02},
  url      = {https://www.jstatsoft.org/index.php/jss/article/view/v076i02},
}
@article{peters2014causal,
  author  = {Peters, Jonas and Mooij, Joris M. and Janzing, Dominik and Sch{\"o}lkopf, Bernhard and others},
  title   = {Causal Discovery with Continuous Additive Noise Models},
  journal = {Journal of Machine Learning Research},
  volume  = {15},
  number  = {1},
  pages   = {2009--2053},
  year    = {2014},
}
@article{kalisch2014causal,
  author    = {Kalisch, Markus and B{\"u}hlmann, Peter},
  title     = {Causal structure learning and inference: a selective review},
  journal   = {Quality Technology \& Quantitative Management},
  volume    = {11},
  number    = {1},
  pages     = {3--21},
  year      = {2014},
  publisher = {Taylor \& Francis},
}