-
Notifications
You must be signed in to change notification settings - Fork 0
/
concentration_library.py
59 lines (44 loc) · 1.72 KB
/
concentration_library.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# (c) 2015 - 2023 Open Risk (https://www.openriskmanagement.com)
#
# This code is licensed under the Apache 2.0 license a copy of which is included
# in the source distribution of the course. This is notwithstanding any licenses of
# third-party software included in this distribution. You may not use this file except in
# compliance with the License.
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
# calculate total exposure
def exposure(portfolio):
    """Return the total exposure of the portfolio (sum of all positions)."""
    total = np.asarray(portfolio).sum()
    return total
# calculate portfolio exposure weights
def weights(portfolio):
    """Return each position's share of the total portfolio exposure."""
    total = np.sum(portfolio)
    return np.true_divide(portfolio, total)
# calculate the concentration ratio
def cr(weights, n):
    """Concentration ratio: combined weight of the first n positions.

    NOTE(review): assumes *weights* is sorted in descending order so the
    first n entries are the n largest exposures — confirm with callers.
    """
    top = weights[:n]
    return np.sum(top)
# calculate the normalized hhi concentration index
def hhi(weights):
    """Normalized Herfindahl-Hirschman concentration index.

    Computes H = sum(w_i^2) and rescales it to [0, 1] via
    (H - 1/n) / (1 - 1/n): 0 for a fully diversified (equal-weight)
    portfolio, 1 for a single concentrated position.

    :param weights: array of portfolio weights (expected to sum to 1)
    :return: normalized HHI in [0, 1]
    """
    n = weights.size
    if n <= 1:
        # A single-position (or empty) portfolio is fully concentrated;
        # the normalization below would otherwise evaluate to 0/0.
        return 1.0
    h = np.square(weights).sum()
    return (h - 1.0 / n) / (1.0 - 1.0 / n)
# calculate the inverted Hannah Kay index
def hk(weights, a):
    """Normalized inverse Hannah-Kay concentration index.

    Computes (sum(w_i^a))^(1/(a-1)) and rescales to [0, 1]:
    0 for an equal-weight portfolio, 1 for a single position.

    :param weights: array of portfolio weights (expected to sum to 1)
    :param a: elasticity parameter; must not equal 1 (the exponent
        1/(a-1) is undefined there — the a->1 limit is the entropy index)
    :return: normalized index in [0, 1]
    """
    n = weights.size
    if n <= 1:
        # Single-position portfolio: maximal concentration. The
        # normalization below would otherwise evaluate to 0/0.
        return 1.0
    h1 = np.power(weights, a).sum()
    h2 = np.power(h1, 1.0 / (a - 1.0))
    return (h2 - 1.0 / n) / (1.0 - 1.0 / n)
# calculate the Gini index
def gini(weights):
    """Gini concentration index of the weight vector.

    Uses the closed form (n + 1 - 2 * sum(i * w_i)) / n over ranks
    i = 1..n.

    NOTE(review): this form corresponds to weights sorted in descending
    order — confirm callers sort before calling.
    """
    n = weights.size
    ranks = np.arange(1, n + 1)
    weighted_rank_sum = (ranks * weights).sum()
    return (1.0 - 2.0 * weighted_rank_sum) / n + 1.0
# calculate the Shannon entropy index
def shannon(weights):
    """Normalized Shannon entropy concentration index.

    Computes H = -sum(w_i * log(w_i)) over the nonzero weights and
    returns 1 - H / log(n): 0 for an equal-weight portfolio, 1 for a
    single concentrated position.

    :param weights: array of portfolio weights (expected to sum to 1)
    :return: normalized index in [0, 1]
    """
    weights_nz = weights[weights != 0]
    n = weights_nz.size
    if n <= 1:
        # At most one position carries exposure: maximal concentration.
        # log(1) == 0 would otherwise make the normalization divide by zero.
        return 1.0
    log_weights = np.log(weights_nz)
    h = -np.multiply(weights_nz, log_weights).sum()
    return 1.0 - h / np.log(n)