Commit

v__0.188
alfredgalichon committed Dec 8, 2024
1 parent d816cd4 commit e1d3957
Showing 3 changed files with 81 additions and 34 deletions.
36 changes: 25 additions & 11 deletions mec/blp.py
@@ -58,7 +58,7 @@ def pi_invs(pi_t_y,epsilon_t_i_y_m, tau_m, maxit = 100000, reltol=1E-8, require_
U_t_y = Up_t_y

res = [U_t_y]
if require_der:
if require_der>0:
pi_t_i_y = np.concatenate( [ np.exp(U_t_y[:,None,:]+ varepsilon_t_i_y - u_t_i[:,:,None] ),
np.exp( - u_t_i)[:,:,None]],axis=2)

@@ -71,16 +71,15 @@ def pi_invs(pi_t_y,epsilon_t_i_y_m, tau_m, maxit = 100000, reltol=1E-8, require_
dUdtau_t_y_m = - sp.linalg.spsolve(A,B).toarray().reshape((T,I+Y,M))[:,-Y:,:]
res.append(dUdtau_t_y_m)



return res



def pi_inv(pi_y,epsilon_i_y_m, tau_m, maxit = 100000, reltol=1E-8, require_der = 0 ):
res = pi_invs(pi_y[None,:],epsilon_i_y_m[None,:,:,:] ,tau_m, maxit , reltol, require_der )
if require_der>0:
return [res[0].squeeze(axis=0), res[1].squeeze(axis=0)]
else:
return [res[0].squeeze(axis=0) ]
return [res[i].squeeze(axis=0) for i in range(require_der+1)]
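
# As a quick illustration of the simplified return convention of pi_inv above, here is a
# minimal usage sketch with toy shapes (the sizes, random draws, and share vector are
# illustrative assumptions, not part of the commit; it assumes the mec package is importable):
#
#     import numpy as np
#     from mec.blp import pi_inv
#
#     I, Y, M = 200, 3, 2                       # simulated agents, products, interacted characteristics
#     rng = np.random.default_rng(0)
#     epsilon_i_y_m = rng.normal(size=(I, Y, M))
#     tau_m = np.array([1.0, -0.5])
#     pi_y = np.array([0.2, 0.3, 0.4])          # inside-good shares, summing to less than one
#
#     U_y, = pi_inv(pi_y, epsilon_i_y_m, tau_m)                            # require_der = 0: utilities only
#     U_y, dUdtau_y_m = pi_inv(pi_y, epsilon_i_y_m, tau_m, require_der=1)  # also returns the (Y, M) derivative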



@@ -126,21 +125,36 @@ def compute_utilities(pis_y,epsilons_i_y_m, tau_m):
return Us_y


def compute_omegas(Us_y,epsilons_i_y_m,depsilonsdp_i_y_m, tau_m,firms_y ):
def compute_omegas(Us_y,epsilons_i_y_m,depsilonsdp_i_y_m, tau_m,firms_y, require_der = 0):
omegas_y_y = []
for (U_y,epsilon_i_y_m,depsilondp_i_y_m,firm_y) in zip(Us_y,epsilons_i_y_m, depsilonsdp_i_y_m, firms_y):
Y = len(U_y)
varepsilon_i_y = epsilon_i_y_m @ tau_m
dvarepsilondp_i_y = depsilondp_i_y_m @ tau_m
pi_i_y = (np.exp(U_y[None,:] + varepsilon_i_y ) / (1+ np.exp( U_y[None,:] + varepsilon_i_y ).sum(axis= 1) )[:,None] )
jacobian_i_y_y = - dvarepsilondp_i_y[:,:,None] * (pi_i_y[:,:,None] * np.eye(Y)[None,:,:] - pi_i_y[:,:,None] * pi_i_y[:,None,:] )
deriv_shares_y_y =jacobian_i_y_y.mean(axis= 0)
I_y_y = np.eye(Y)
dpidu_i_y_y = pi_i_y[:,:,None] *( I_y_y[None,:,:] - pi_i_y[:,None,:] )
dpidp_y_y = ( dvarepsilondp_i_y[:,None,:] * dpidu_i_y_y ).mean(axis= 0)
# if require_der>0:
# term0 = I_y_y[None,:,:,None] * I_y_y[None,:,None,:]
# term1 = - I_y_y[None,:,:,None] * pi_i_y[:,None,None,:]
# term2 = - I_y_y[None,:,None,:] * pi_i_y[:,None,:,None]
# term3 = - I_y_y[None,None,:,:] * pi_i_y[:,None,:,None]
# term4 = 2 * pi_i_y[:,None,None,:]* pi_i_y[:,None,:,None]
# d2pidu_i_y_y_y = pi_i_y[:,:,None,None] *(term0 + term1 + term2 + term3 + term4 )
# d2pidp_y_y_y = ( dvarepsilondp_i_y[:,None,:,None] * dvarepsilondp_i_y[:,None,None,:] * d2pidu_i_y_y_y).mean(axis= 0)

for y in range(Y):
for yprime in range(y+1):
if (firm_y[y]!=firm_y[yprime]):
deriv_shares_y_y[y,yprime] = 0
deriv_shares_y_y[yprime,y] = 0
omegas_y_y.append(deriv_shares_y_y)
dpidp_y_y[y,yprime] = 0
dpidp_y_y[yprime,y] = 0
# if require_der>0:
# d2pidp_y_y_y[y,yprime,:] = 0
# d2pidp_y_y_y[yprime,y,:] = 0
omegas_y_y.append(- dpidp_y_y)
# if require_der>0:
# omegas_y_y.append(d
return omegas_y_y
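
# The matrices returned by compute_omegas (one per market, each equal to minus the share
# Jacobian with cross-firm entries zeroed out) are the standard input to the multiproduct
# Bertrand first-order condition. A hedged sketch of how marginal costs could be backed out
# from them; the helper name and the transpose convention are illustrative assumptions, not
# asserted for this code:
#
#     import numpy as np
#
#     def recover_marginal_costs(omegas_y_y, ps_y, pis_y):
#         # Per market, the first-order condition is s + A (p - mc) = 0, where
#         # A[j, k] = d s_k / d p_j restricted to products of the same firm.
#         # With omega[y, y'] = - d s_y / d p_{y'} (masked), A = -omega.T, hence
#         # mc = p - solve(omega.T, s); the transpose depends on the indexing convention.
#         return [p_y - np.linalg.solve(omega_y_y.T, pi_y)
#                 for (omega_y_y, p_y, pi_y) in zip(omegas_y_y, ps_y, pis_y)]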

def compute_omega(Us_y,epsilons_i_y_m,depsilonsdp_i_y_m, tau_m,firms_y ):
77 changes: 55 additions & 22 deletions mec/replication/blp99.py
@@ -5,30 +5,63 @@
# * Rainie Lin, 2021. Python code for BLP estimation (https://github.com/ranielin/Berry-Levinsohn-and-Pakes-1995-Replication)

import numpy as np, pandas as pd
import mec.blp
from mec.data import load_blp_car_data
from mec.blp import create_blp_instruments, pi_inv
from mec.blp import create_blp_instruments, organize_markets, collapse_markets,build_epsilons,build_depsilonsdp

def construct_car_variables_from_blp():
prod,agents = load_blp_car_data()

def eta_from_lin(T=20, K=5, nu_mean=0 , nu_var=1, D_mean = None, D_var=None, I=500, seed=123 ): # I = number of simulations per market
# borrows code from Rainie Lin for consistency
if D_mean is None:
D_mean = np.array([2.01156, 2.06526, 2.07843, 2.05775, 2.02915, 2.05346, 2.06745,
2.09805, 2.10404, 2.07208, 2.06019, 2.06561, 2.07672, 2.10437,
2.12608, 2.16426, 2.18071, 2.18856, 2.2125 , 2.18377])
if D_var is None:
D_var = 2.9584*np.ones(20)
np.random.seed(seed)
nu_alpha = np.zeros((1, I, T))
nu_beta = np.random.normal(nu_mean, pow(nu_var, 0.5), (K, I, T))
nu = np.concatenate((nu_alpha, nu_beta), axis = 0)
log_y = np.transpose(np.random.normal(D_mean, pow(D_var, 0.5), (I, 1, T)), (1, 0, 2))
D = 1/np.exp(log_y)
eta_t_i_ind = np.concatenate([nu,-D],axis = 0).transpose((2,1,0))
return eta_t_i_ind

def lambda_from_lin(K=5,theta_2 = np.array([43.501, 3.612, 4.628, 1.818, 1.05, 2.056]) ):
# for consistency with Rainie Lin's code
gamma = np.zeros((K + 1, 1))
gamma[0] = theta_2[0]
sigma = np.zeros((K + 1, K + 1))
np.fill_diagonal(sigma, np.append([0], theta_2[1:(K + 1)]))
thelambda_ind_ind = np.block([[sigma.T],[gamma.flatten()]])
return thelambda_ind_ind
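
# A quick shape check on the two helpers above, under their default arguments (a minimal
# sketch; the assertions restate the dimensions implied by the construction and assume the
# module path mec.replication.blp99 is importable):
#
#     from mec.replication.blp99 import eta_from_lin, lambda_from_lin
#
#     eta_t_i_ind = eta_from_lin()            # (T, I, K+2) = (20, 500, 7): per-market draws (nu's and -1/income)
#     thelambda_ind_ind = lambda_from_lin()   # (K+2, K+1) = (7, 6): sigma.T stacked over the flattened gamma
#     assert eta_t_i_ind.shape == (20, 500, 7)
#     assert thelambda_ind_ind.shape == (7, 6)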


def construct_car_blp_model():
prod,_ = load_blp_car_data()
prod['ones'] = 1.
mkt_o = prod['market_ids'].to_numpy()
O = len(mkt_o)
#
Xs_y_ind = mec.blp.organize_markets(mkt_o, np.block([np.ones( (O,1) ), prod[ ['hpwt','air','mpd','space' ]] ]))
#
firms_y = mec.blp.organize_markets(mkt_o,prod['firm_ids'].to_numpy())
ps_y = mec.blp.organize_markets(mkt_o,prod['prices'].to_numpy())
pis_y = mec.blp.organize_markets(mkt_o,prod['shares'].to_numpy())
cars_y = mec.blp.organize_markets(mkt_o,prod['car_ids'].to_numpy())
#
Zs_y_ind = mec.blp.organize_markets(mkt_o, create_blp_instruments(mec.blp.collapse_markets(mkt_o,Xs_y_ind), prod[['market_ids','firm_ids','car_ids']] ))
#
theW = np.block([np.ones( (O,1) ), prod[ ['hpwt','air','mpg','space','trend' ]] ])
theW[:,[1,3,4] ]= np.log(theW[:,[1,3,4] ])
theZS = create_blp_instruments(theW , prod[['market_ids','firm_ids','car_ids']] )
firms_y = organize_markets(mkt_o,prod['firm_ids'].to_numpy()) # firms of each product
ps_y = organize_markets(mkt_o,prod['prices'].to_numpy()) # prices of each product
pis_y = organize_markets(mkt_o,prod['shares'].to_numpy()) # shares of each product
O = len(mkt_o) # number of observations
# phis_y_k are the regression matrices for the demand side (not interacting with individual characteristics)
phis_y_k = organize_markets(mkt_o, prod[['ones', 'hpwt','air','mpd','space' ]].to_numpy() )
# xi are the regression matrices for the demand side (interacting with individual characteristics)
xis_y_ind = organize_markets(mkt_o, prod[['prices','ones', 'hpwt','air','mpd','space' ]].to_numpy() )
# Z are the instruments for the demand side
Zs_y_ind = organize_markets(mkt_o, create_blp_instruments(collapse_markets(mkt_o,phis_y_k), prod[['market_ids','firm_ids','car_ids']] ))
# gammas_y_l are the regression matrices for the supply side
thegamma = prod[['ones','hpwt','air','mpg','space','trend' ]].to_numpy()
thegamma[:,[1,3,4] ]= np.log(thegamma[:,[1,3,4] ])
gammas_y_l = organize_markets(mkt_o,thegamma)
# ZS are the instruments for the supply side
theZS = create_blp_instruments(thegamma , prod[['market_ids','firm_ids','car_ids']] )
theZS[:,-1] = prod['mpd'].to_numpy()
theZS[:,5] += 71.
Ws_y_ind = mec.blp.organize_markets(mkt_o,theW)
ZSs_y_ind = mec.blp.organize_markets(mkt_o,theZS)
xis_y_ind = [ np.concatenate([p_y.reshape((-1,1)),X_y_ind],axis=1) for (p_y,X_y_ind) in zip(ps_y,Xs_y_ind) ]
return mkt_o,Xs_y_ind,Ws_y_ind, firms_y,ps_y,pis_y,Zs_y_ind,ZSs_y_ind ,xis_y_ind
ZSs_y_ind = organize_markets(mkt_o,theZS)
#
eta_t_i_ind = eta_from_lin( )

epsilons_i_y_m = build_epsilons(eta_t_i_ind, xis_y_ind)
depsilonsdp_i_y_m = build_depsilonsdp(eta_t_i_ind, xis_y_ind)
return mkt_o,phis_y_k,gammas_y_l, firms_y,ps_y,pis_y,Zs_y_ind,ZSs_y_ind ,epsilons_i_y_m,depsilonsdp_i_y_m
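
# A minimal sketch of how the pieces returned by construct_car_blp_model could feed the
# market-level routines in mec/blp.py (the zero tau_m below is a placeholder assumption used
# only to make the call shapes explicit, not a suggested parameter value):
#
#     import numpy as np
#     from mec.blp import compute_utilities, compute_omegas
#     from mec.replication.blp99 import construct_car_blp_model
#
#     (mkt_o, phis_y_k, gammas_y_l, firms_y, ps_y, pis_y,
#      Zs_y_ind, ZSs_y_ind, epsilons_i_y_m, depsilonsdp_i_y_m) = construct_car_blp_model()
#
#     tau_m = np.zeros(epsilons_i_y_m[0].shape[-1])   # placeholder nonlinear parameters; length set by build_epsilons
#
#     Us_y = compute_utilities(pis_y, epsilons_i_y_m, tau_m)                                # invert shares market by market
#     omegas_y_y = compute_omegas(Us_y, epsilons_i_y_m, depsilonsdp_i_y_m, tau_m, firms_y)  # ownership-masked Jacobians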
2 changes: 1 addition & 1 deletion setup.py
@@ -2,7 +2,7 @@

setup(
name="mec",
version="0.187",
version="0.188",
authors=["Alfred Galichon"],
author_email="ag133@nyu.edu",
licence="",
