sweep_Lambda_samples_withoutPre.m
% % % % % % % % % % % % % % % % % % % % % % % % % % %
% Sweep the lasso penalty lambda vs. number of training samples,
% without pre-processing: record the dictionary-learning objective,
% reconstruction error, and coefficient sparsity for each setting.
% % % % % % % % % % % % % % % % % % % % % % % % % % %
start_spams     % set up the SPAMS toolbox (mexTrainDL / mexLasso)
clear
clc
mdivision = 20; % not used below
% % % % % % % % % % % % % % % % % % % % % % % % % % %
% Prepare raw data
% % % % % % % % % % % % % % % % % % % % % % % % % % %
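% Load a 1-hour ECG record (record 15814, presumably from the MIT-BIH
% Long-Term ECG Database, going by the file name) and keep only a whole
% number of non-overlapping windows of n_dl = 128 samples; each window
% becomes one column (epoch) after the reshape below.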
RawInpLoad = load('15814m_ltdbECG_1h.mat');
RawInpLoad = RawInpLoad.val;
n_dl = 128;
epochs = floor(length(RawInpLoad) / n_dl); % 4517
RawInpLoad = RawInpLoad(1:n_dl * epochs);
% % % % % % % % % % % % % % % % % % % % % % % % % % %
% Prepare training and testing data
% % % % % % % % % % % % % % % % % % % % % % % % % % %
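% The first `atoms` (512) epochs, taken in random order, form the initial
% dictionary initD; the remaining epochs are split 70/30 into training and
% testing sets. Every column is mean-removed and scaled to unit l2 norm.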
batchsize = 50;
atoms = 512;
RawInp = RawInpLoad(1:n_dl*epochs);
RawInp = reshape(RawInp , n_dl, epochs);
crossValidFactor = 0.7;
indexD = randperm(atoms);
initD = RawInp(:, indexD);
initD = initD - repmat(mean(initD),[size(initD,1),1]);
initD = initD ./ repmat(sqrt(sum(initD.^2)),[size(initD,1),1]);
RawInp = RawInp(:,atoms+1:end);
epochs = epochs - atoms;
TrainInp = RawInp(:, 1 : floor(epochs*crossValidFactor));
TrainInp = TrainInp - repmat(mean(TrainInp),[size(TrainInp,1),1]);
TrainInp = TrainInp ./ repmat(sqrt(sum(TrainInp.^2)),[size(TrainInp,1),1]);
TestInp = RawInp(:, (size(TrainInp,2)+1):epochs);
TestInp = TestInp - repmat(mean(TestInp),[size(TestInp,1),1]);
TestInp = TestInp ./ repmat(sqrt(sum(TestInp.^2)),[size(TestInp,1),1]);
% % % % % % % % % % % % % % % % % % % % % % % % % % %
% Compressive sensing
% % % % % % % % % % % % % % % % % % % % % % % % % % %
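% Sweep the l1 penalty lambda over sweepParam. For each lambda, run online
% dictionary learning (mexTrainDL) on a growing prefix of the training set
% (k batches of `batchsize` epochs at iteration k) and record, per
% (lambda, k), the lasso objective, the reconstruction error, and the
% fraction of zero coefficients.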
samplesTrain = size(TrainInp,2);
sweepParam = [1e-4, 1e-3, 1e-2, 1e-1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9];
numBatches = floor(samplesTrain / batchsize);
objFun = zeros(length(sweepParam), numBatches);
normErr = zeros(length(sweepParam), numBatches);
sparCoef = zeros(length(sweepParam), numBatches);
%%
% poolobj = gcp('nocreate'); % If no pool, do not create new one.
% if isempty(poolobj)
% poolsize = 0;
% parpool('local',12);
% else
% poolsize = poolobj.NumWorkers;
% end
%%
for i = 1 : length(sweepParam)
parfor k = 1 : numBatches % param.iter grows with k
param = struct;
param.iter = k;
param.batchsize = batchsize;
param.K = atoms;
param.lambda = sweepParam(i);
param.numThreads = -1;
param.verbose = false;
param.iter_updateD = 1;
param.D = initD;
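% At iteration k, train on the first k * batchsize training epochs.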
epochesD = floor(k * param.batchsize);
X = TrainInp(:,1:epochesD);
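% Learn the dictionary with online dictionary learning, then compute the
% sparse codes for the same data with the lasso.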
[D,~,~] = mexTrainDL(X,param);
coef = mexLasso(X,D,param);
% Average lasso objective per sample: 0.5*||x - D*a||_2^2 + lambda*||a||_1
objFun(i,k) = mean(0.5*sum((X-D*coef).^2) + param.lambda*sum(abs(coef)));
% Average reconstruction error per sample
normErr(i,k) = mean(0.5*sum((X-D*coef).^2));
% Fraction of zero entries in the coefficient matrix
sparCoef(i,k) = 1 - nnz(coef) / numel(coef);
fprintf('Iteration (%d, %d) without pre: objective function is %f\n', i, k, objFun(i,k));
fprintf('Iteration (%d, %d) without pre: L-2 norm of error is %f\n\n', i, k, normErr(i,k));
% fprintf('Iteration (%d, %d) without pre: sparsity of coeff is %f\n\n', i, k, sparCoef(i,k));
end
end
maxObjFunc = max(objFun');
maxNormErr = max(normErr');
% delete(poolobj)
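% Create the output directory if it does not already exist, so the save
% below does not fail (assumes the script is run from the repository root).
if ~exist('./Results', 'dir')
    mkdir('./Results');
end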
filename = sprintf('./Results/sweeplambda_WithoutPre_batchsize%d.mat', batchsize);
save(filename,'-v7.3')
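% Plot reconstruction error and coefficient sparsity vs. iteration, with
% one curve per lambda value.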
figure
num = length(sweepParam);
str = cell(1,num);
subplot(211)
for i = 1 : num
plot(normErr(i,:));
hold on
str{i} = ['lambda = ', num2str(sweepParam(i))];
end
xlabel('Iteration');
ylabel('l2norm of error');
legend(str);
subplot(212)
for i = 1 : num
plot(sparCoef(i,:)*100);
hold on
str{i} = ['lambda = ', num2str(sweepParam(i))];
end
xlabel('Iteration');
ylabel('sparsity %');
legend(str);