-
Notifications
You must be signed in to change notification settings - Fork 1
/
PlotMaxIntEnt.m
128 lines (100 loc) · 2.92 KB
/
PlotMaxIntEnt.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
% Maximum input entropy: Plot curves as a function of Poisson mu
%
% mikael.mieskolainen@cern.ch, 2018
clear; close all;
addpath src

STEPS = 200; % Not too high, otherwise pdf turned to scalar picture
mu_values = logspace(-1, log10(50), STEPS);

% Row N holds the entropy curve for dimension N (rows 2..8 used, row 1 unused)
entropies = zeros(8, STEPS);

for N = 2:8

% Input distribution over the 2^N-1 non-null combinations.
% Exactly one of the alternatives A/B/C below is active.

% A.) Maximum Entropy input
%input = ones(2^N-1,1) + rand(2^N-1,1)*0.0; input = input/sum(input);

% B.) Comb [0...1]
%input = linspace(1/(2^N-1),1-1/(2^N-1),2^N-1)'; input = input/sum(input);
%input = flipud(input);

% C.) Create basis vector input: uniform weight on the combinations with
% exactly one active component (row sum of the combination matrix == 1)
C = createCBM(N);
C = C(2:end,:);               % drop the null (all-zero) combination
CSUM = sum(C,2);
ind = find(CSUM == 1);
input = zeros(2^N-1,1);
input(ind) = 1;
input = input / sum(input);

% System matrix
LAMBDA = amat(N);

y_values = zeros(length(input), STEPS);
p_values = zeros(length(input), STEPS);

for i = 1:length(mu_values)
    mu = mu_values(i);

    % Forward map p -> y. Solve LAMBDA*y = rhs with mldivide (\) instead
    % of pre-computing inv(LAMBDA): more accurate and typically faster.
    %
    y = LAMBDA \ ((exp(-mu*LAMBDA*input) - 1) / (exp(-mu) - 1));
    y_values(:,i) = y;
    %}

    % Inverse map y -> p (disabled)
    %{
    p = LAMBDA \ (log((exp(-mu) - 1)*LAMBDA*input + 1)) / (-mu);
    p_values(:,i) = p;
    %}

    % Shannon entropy (bits) of the forward-mapped vector
    entropies(N, i) = shannonentropy(y);
end

%% Plot all y-components as a function of mu
f1 = figure;
h1 = plot(mu_values, zeros(length(mu_values),1), 'k-.'); hold on; % Horizontal axis
h2 = plot(mu_values, y_values);

xlabel('$\mu$','interpreter','latex');
ylabel('$\mathbf{y}$','interpreter','latex');
set(gca,'xscale','log');
set(gca,'yscale','log');
axis tight; axis square;
axis([-inf inf 1e-3 1.0]);

% One legend label per y-component (length(input) == number of components;
% do not rely on the loop-leftover variable y here)
legends = cell(1, length(input));
for c = 1:length(input)
    legends{c} = sprintf('$y_{%d}$', c);
end

title(sprintf('$N = %d$', N),'interpreter','latex');

% Component legends become unreadable for large N; show them only for N <= 4
if (N <= 4)
    l = legend(h2, legends);
    set(l,'interpreter','latex', 'location','northeast');
    legend('boxoff');
end

% Create smaller axes in top right, and plot on it
%{
if (N > 4)
    % Restart color indexing
    axes('Position',[0.28 0.60 .3 .3]); % x y
    range = 0.9;
    plot(mu_values(1:round(length(mu_values)*range)), p_values(:,(1:round(length(mu_values)*range))) ); hold on;
    plot(mu_values, zeros(length(mu_values),1), 'k-.'); % Horizontal axis
    axis([0,7, -0.015,0.04]); set (gca,'xscale','log');
    set(gca,'ytick',[]);
    set(gca,'xtick',[]);
    ax = gca;
    ax.ColorOrderIndex = 1;
end
%}
%axis tight; box on;

% Export figure and crop pdf margins with the external pdfcrop tool
filename = sprintf('../figs/maxinentN%d.pdf', N);
print(f1, filename, '-dpdf');
system(sprintf('pdfcrop --margins 10 %s %s', filename, filename));
%close all;

end
%% Shannon entropy scaling (bits)
% Summary figure: entropy curves S(y) vs. mu for every dimension N
f2 = figure;

Nvals = 2:8; % dimensions plotted (rows of the entropies matrix)
plot(mu_values, entropies(Nvals,:)');
set(gca,'xscale','log');

% One legend entry per dimension; preallocate instead of growing the cell
legs = cell(1, length(Nvals));
for i = 1:length(Nvals)
    legs{i} = sprintf('$N=%d$', Nvals(i));
end
l = legend(legs);
set(l,'interpreter','latex','location','northwest');
legend('boxoff');

xlabel('$\mu$','interpreter','latex');
ylabel('$S(\mathbf{y})$','interpreter','latex');
axis square;
axis([mu_values(1) mu_values(end) 0 9]);

% Constant path: plain assignment (no sprintf format substitution needed)
filename = '../figs/entropyscaling.pdf';
print(f2, filename, '-dpdf');
system(sprintf('pdfcrop --margins 10 %s %s', filename, filename));