Attachment 'Olaf IntroGLM2 4Feb15.m'
%% basics of linear equations

% examples of linear equations

% invertible
M1 = [1 0; -1 1]
y = [1 2]'
% solution x of M1*x = y
inv(M1)*y

% non-invertible (singular: the columns are linearly dependent)
M2 = [1 -1; -1 1]
inv(M2)*y % doesn't work: MATLAB warns and returns Inf entries

% invertible but possibly unstable (nearly singular -> ill-conditioned,
% so the solution is very sensitive to small changes in y)
M3 = [1 -1; -1 1.1]
inv(M3)*y
%% rank
% rank = number of linearly independent rows/columns
rank(M1)
rank(M2)
rank(M3)

% What is the result of rank( zeros(1000) )?
% Does the rank change when multiplying the matrix with a scalar?
%% SVD for square matrix
M4 = [-0.5 0.5; 1 1];
[U,S] = svd( M4 )
% condition number = ratio of largest to smallest singular value
S(1,1)/S(2,2)

% change the similarity of columns and see what happens
% (more similar columns -> smaller second singular value -> larger condition number)
M4 = [-0.1 0.1; 1 1];
[U,S] = svd( M4 )
% condition number
S(1,1)/S(2,2)

% orthogonal columns give the best-conditioned case
M4 = [-1 1; 1 1];
[U,S] = svd( M4 )
% condition number
S(1,1)/S(2,2)

% the vectors in U are orthonormal (orthogonal and unit length)
U'*U
%% SVD for rectangular matrix
M = [ [2 2 0]', [0 0 1]' ]
[U,S,V] = svd(M)

% check whether decomposition works out (should reproduce M)
U*S*V'
%% Why "eigen"vectors?
% when you stick the singular vectors into a covariance-type matrix...
(M*M')*U

% ...you get the vectors back, scaled by the squared singular values
% (note: S is rectangular here, so only the columns belonging to
% nonzero singular values match up)
U*S.^2

% same for V
(M'*M)*V
% NOTE(review): S.^2*V scales the ROWS of V; this matches (M'*M)*V here
% only because M'*M happens to be diagonal. In general the correct form
% is V*(S'*S), which scales the columns.
S.^2*V
%% Why is this useful - SVD as a filter
% make some data: one sine and one cosine column
x = [-2*pi:0.01:2*pi];
data = [sin(x)' cos(x)'];

plot(data)

% replicate this matrix (20 columns, but still only 2 underlying signals)
data = repmat(data, 1, 10);
whos

% add noise
data = data + randn(size(data));
plot(data)

% 'economy size' svd (S truncated to the number of columns)
[U,S,V] = svd( data, 0 );

% plot singular values
sdiag = diag(S)
figure
plot( sdiag, 'x' )
% if you want to plot variances (percent of total variance per component)
svar = 100 * sdiag.^2 / sum( sdiag.^2 )
plot(svar, 'x')

% leave only first two SVD components (zero out the rest)
sdiag2 = sdiag;
sdiag2(3:end) = 0
S2 = diag(sdiag2);

% reassemble filtered (denoised) data
data2 = U*S2*V';
figure
plot(data2)
%% Why is this useful - SVD to compute pseudoinverse

% get M4 from above
M4 = [-0.1 0.1; 1 1];
[U,S,V] = svd( M4 )

% invert the singular values
S(1,1) = 1/S(1,1)
S(2,2) = 1/S(2,2)

% compute the inverse: inv(M4) = V * inv(S) * U'
myinv = V*S*U'

% check whether it's correct
inv(M4)

% "dampen" the effect of small singular values
% (replace the large inverted value 1/s2 by a smaller one)
S(2,2) = 5

% see how this affects the inverse
myinv2 = V*S*U'

% apply to some "data" vector y and note the difference
y = [1 1]'

myinv*y
myinv2*y

% the data that these solutions would produce
M4*myinv*y % exact
M4*myinv2*y % approximate
%% Relationship PCA and SVD

% The Matlab command princomp removes means from columns,
% which differs from svd()
% Therefore, let's start with a column-centred matrix
% NOTE(review): princomp is deprecated in newer MATLAB releases;
% use pca() instead (same idea, slightly different outputs)

X = [1 1 -2; 1 -2 1; -2 1 1]
mean(X)
% what's the rank of this matrix?

% PCA
[A,B,C] = princomp(X)

% SVD
[U,S,V] = svd(X)

% note: A and V are the same, because for princomp rows are observations

% S and C should contain the same values, but C is half of S-squared
% reason: S-squared is the "raw" variance, and princomp divides by the
% degrees of freedom (here: 2)
Attached Files
To refer to attachments on a page, use attachment:filename, as shown below in the list of files. Do NOT use the URL of the [get] link, since this is subject to change and can break easily. You are not allowed to attach a file to this page.