
Commit a20057f

CLN: Fix small code issues
Remove unnecessary lambdas and related
1 parent 994a141 commit a20057f

File tree

10 files changed: +22 -24 lines changed


linearmodels/asset_pricing/model.py

Lines changed: 1 addition & 1 deletion
@@ -553,7 +553,7 @@ def _moments(self, eps, betas, lam, alphas, pricing_errors):
         sigma_inv = self._sigma_inv

         f = self.factors.ndarray
-        nobs, nf, nport, nrf, s1, s2, s3 = self._boundaries()
+        nobs, nf, nport, _, s1, s2, s3 = self._boundaries()
         fc = np.c_[np.ones((nobs, 1)), f]
         f_rep = np.tile(fc, (1, nport))
         eps_rep = np.tile(eps, (nf + 1, 1))
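
The `_moments` change binds the unused `nrf` slot from `_boundaries()` to the conventional throwaway name `_`, so linters stop flagging an assigned-but-never-used variable. A minimal sketch of the pattern, with a made-up tuple standing in for `_boundaries()`:

    def boundaries():
        # Hypothetical stand-in for self._boundaries(); returns more
        # values than this particular caller needs.
        return 1000, 3, 25, 1

    # Slots that are not needed are bound to _ to mark them as
    # intentionally ignored.
    nobs, nf, nport, _ = boundaries()
    print(nobs, nf, nport)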

linearmodels/iv/covariance.py

Lines changed: 4 additions & 4 deletions
@@ -2,7 +2,7 @@
 Covariance estimation for 2SLS and LIML IV estimators
 """
 from numpy import (arange, argsort, asarray, ceil, cos, empty, int64, ones, pi,
-                   r_, sin, sum, unique, where, zeros)
+                   r_, sin, sum as npsum, unique, where, zeros)
 from numpy.linalg import inv, pinv

 CLUSTER_ERR = """
@@ -221,7 +221,7 @@ def kernel_optimal_bandwidth(x, kernel='bartlett'):
     for i in range(1, m_star + 1):
         sigma[i] = x[i:].T @ x[:-i] / t
     s0 = sigma[0] + 2 * sigma[1:].sum()
-    sq = 2 * sum(sigma[1:] * arange(1, m_star + 1) ** q)
+    sq = 2 * npsum(sigma[1:] * arange(1, m_star + 1) ** q)
     rate = 1 / (2 * q + 1)
     gamma = c * ((sq / s0) ** 2) ** rate
     m = gamma * t ** rate
@@ -314,7 +314,7 @@ def __repr__(self):
     def s(self):
         """Score covariance estimate"""
         x, z, eps = self.x, self.z, self.eps
-        nobs, nvar = x.shape
+        nobs = x.shape[0]
         s2 = eps.T @ eps / nobs
         pinvz = self._pinvz
         v = (x.T @ z) @ (pinvz @ x) / nobs
@@ -348,7 +348,7 @@ def s2(self):
         """
         Estimated variance of residuals. Small-sample adjusted if debiased.
         """
-        nobs, nvar = self.x.shape
+        nobs = self.x.shape[0]
         eps = self.eps

         return self._scale * eps.T @ eps / nobs
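
Aliasing numpy's `sum` as `npsum` keeps the builtin `sum` visible in the module and makes the call in `kernel_optimal_bandwidth` unambiguous. A rough illustration of why the distinction matters, using throwaway data:

    import numpy as np
    from numpy import sum as npsum

    x = np.arange(6).reshape(3, 2)

    # The builtin sum adds the rows of x one by one, yielding an array.
    print(sum(x))            # [6 9]

    # numpy's sum reduces over every element by default and supports axis.
    print(npsum(x))          # 15
    print(npsum(x, axis=0))  # [6 9]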

linearmodels/iv/data.py

Lines changed: 1 addition & 3 deletions
@@ -85,9 +85,7 @@ def __init__(self, x, var_name='x', nobs=None, convert_dummies=True, drop_first=
             all_numeric = True
             for col in x:
                 c = x[col]
-                if is_string_dtype(c.dtype) and \
-                        c.map(lambda v: is_string_like(v)).all():
-
+                if is_string_dtype(c.dtype) and c.map(is_string_like).all():
                     c = c.astype('category')
                     if not copied:
                         x = x.copy()
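
`Series.map` takes any callable, so the lambda around `is_string_like` was pure indirection; passing the function itself is equivalent and slightly shorter. The same reasoning covers the later `map(lambda s: len(s), ...)` to `map(len, ...)` changes in this commit. A small sketch with a stand-in predicate (pandas assumed):

    import pandas as pd

    def looks_like_text(v):
        # Stand-in for is_string_like
        return isinstance(v, str)

    s = pd.Series(['a', 'b', 'c'])

    # Both forms produce the same result; the second drops the wrapper.
    assert s.map(lambda v: looks_like_text(v)).all() == s.map(looks_like_text).all()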

linearmodels/iv/gmm.py

Lines changed: 1 addition & 1 deletion
@@ -410,7 +410,7 @@ def __str__(self):
     @property
     def cov(self):
         x, z, eps, w = self.x, self.z, self.eps, self.w
-        nobs, nvar = x.shape
+        nobs = x.shape[0]
         xpz = x.T @ z / nobs
         xpzw = xpz @ w
         xpzwzpx_inv = inv(xpzw @ xpz.T)

linearmodels/iv/model.py

Lines changed: 4 additions & 4 deletions
@@ -492,7 +492,7 @@ def _f_statistic(self, params, cov, debiased):
         return wald

     def _post_estimation(self, params, cov_estimator, cov_type):
-        vars = self._columns
+        columns = self._columns
         index = self._index
         eps = self.resids(params)
         y = self.dependent.pandas
@@ -512,17 +512,17 @@ def _post_estimation(self, params, cov_estimator, cov_type):
         r2 = 1 - residual_ss / total_ss

         fstat = self._f_statistic(params, cov, debiased)
-        out = {'params': Series(params.squeeze(), vars, name='parameter'),
+        out = {'params': Series(params.squeeze(), columns, name='parameter'),
                'eps': Series(eps.squeeze(), index=index, name='residual'),
                'weps': Series(weps.squeeze(), index=index, name='weighted residual'),
-               'cov': DataFrame(cov, columns=vars, index=vars),
+               'cov': DataFrame(cov, columns=columns, index=columns),
                's2': float(cov_estimator.s2),
                'debiased': debiased,
                'residual_ss': float(residual_ss),
                'total_ss': float(total_ss),
                'r2': float(r2),
                'fstat': fstat,
-               'vars': vars,
+               'vars': columns,
                'instruments': self._instr_columns,
                'cov_config': cov_estimator.config,
                'cov_type': cov_type,
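
Renaming the local `vars` to `columns` stops it from shadowing the `vars()` builtin inside `_post_estimation` and describes the value better, since it holds the regressor column labels. A quick sketch of the shadowing hazard (class and attribute names are illustrative):

    class Example:
        def __init__(self):
            self.scale = 2.0

        def shadowed(self):
            vars = ['a', 'b']           # hides the builtin in this scope
            return vars(self), vars     # would raise: 'list' object is not callable

        def renamed(self):
            columns = ['a', 'b']
            return vars(self), columns  # builtin vars() still available

    print(Example().renamed())          # ({'scale': 2.0}, ['a', 'b'])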

linearmodels/iv/results.py

Lines changed: 4 additions & 4 deletions
@@ -18,7 +18,7 @@


 def stub_concat(lists, sep='='):
-    col_size = max([max(map(lambda s: len(s), l)) for l in lists])
+    col_size = max([max(map(len, l)) for l in lists])
     out = []
     for l in lists:
         out.extend(l)
@@ -29,7 +29,7 @@ def stub_concat(lists, sep='='):
 def table_concat(lists, sep='='):
     col_sizes = []
     for l in lists:
-        size = list(map(lambda r: list(map(lambda v: len(v), r)), l))
+        size = list(map(lambda r: list(map(len, r)), l))
         col_sizes.append(list(array(size).max(0)))
     col_size = array(col_sizes).max(axis=0)
     sep_cols = [sep * (cs + 2) for cs in col_size]
@@ -644,7 +644,7 @@ def durbin(self, variables=None):
         if variables is not None:
             null = 'Variables {0} are exogenous'.format(', '.join(variables))

-        e0, e1, e2, nobs, nexog, nendog, ntested = self._endogeneity_setup(variables)
+        e0, e1, e2, nobs, _, _, ntested = self._endogeneity_setup(variables)
         stat = e1.T @ e1 - e2.T @ e2
         stat /= (e0.T @ e0) / nobs

@@ -1322,7 +1322,7 @@ def summary(self):
         for key in self._results:
             res = self._results[key]
             all_instr.append(res.model.instruments.cols)
-        ninstr = max(map(lambda l: len(l), all_instr))
+        ninstr = max(map(len, all_instr))
         instruments = []
         instrument_stub = ['Instruments']
         for i in range(ninstr):

linearmodels/panel/data.py

Lines changed: 1 addition & 1 deletion
@@ -79,7 +79,7 @@ def to_frame(self):


 def convert_columns(s, drop_first):
-    if is_string_dtype(s.dtype) and s.map(lambda v: is_string_like(v)).all():
+    if is_string_dtype(s.dtype) and s.map(is_string_like).all():
         s = s.astype('category')

     if is_categorical(s):

linearmodels/panel/results.py

Lines changed: 1 addition & 1 deletion
@@ -779,7 +779,7 @@ def summary(self):
             effects = getattr(res, 'included_effects', [])
             all_effects.append(effects)

-        neffect = max(map(lambda l: len(l), all_effects))
+        neffect = max(map(len, all_effects))
        effects = []
        effects_stub = ['Effects']
        for i in range(neffect):

linearmodels/system/covariance.py

Lines changed: 3 additions & 3 deletions
@@ -227,11 +227,11 @@ def _gls_cov(self):
     def _adjustment(self):
         if not self._debiased:
             return 1.0
-        k = list(map(lambda s: s.shape[1], self._x))
+        ks = list(map(lambda s: s.shape[1], self._x))
         nobs = self._x[0].shape[0]
         adj = []
-        for i in range(len(k)):
-            adj.append(nobs / (nobs - k[i]) * ones((k[i], 1)))
+        for k in ks:
+            adj.append(nobs / (nobs - k) * ones((k, 1)))
         adj = vstack(adj)
         adj = sqrt(adj)
         return adj @ adj.T
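
In `_adjustment`, renaming the list of per-equation regressor counts to `ks` frees `k` for the loop variable, and iterating the list directly removes the `range(len(...))` indexing. A minimal sketch of the equivalence with made-up dimensions:

    import numpy as np

    nobs = 50
    ks = [3, 5, 2]   # regressors per equation (illustrative)

    # Index-based version
    adj_indexed = [nobs / (nobs - ks[i]) * np.ones((ks[i], 1)) for i in range(len(ks))]

    # Direct iteration: same values, no bookkeeping index
    adj_direct = [nobs / (nobs - k) * np.ones((k, 1)) for k in ks]

    assert all((a == b).all() for a, b in zip(adj_indexed, adj_direct))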

linearmodels/system/model.py

Lines changed: 2 additions & 2 deletions
@@ -167,7 +167,7 @@ def _parse(self):
         else:
             formula = formula.replace('\n', ' ').strip()
             parts = formula.split('}')
-            for i, part in enumerate(parts):
+            for part in parts:
                 base_key = None
                 part = part.strip()
                 if part == '':
@@ -506,7 +506,7 @@ def _validate_data(self):
                 raise ValueError('Equation {eq} instrument array is full '
                                  'rank'.format(eq=label))

-        for lhs, rhs, label in zip(self._y, self._x, self._eq_labels):
+        for rhs in self._x:
             const, const_loc = has_constant(rhs)
             constant.append(const)
             constant_loc.append(const_loc)
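
Both hunks drop loop variables that were never read: the `enumerate` index in `_parse` and the `lhs`/`label` components of the `zip` in `_validate_data`. Iterating only the sequence that is actually consumed keeps the loop header honest. A short illustration with placeholder formula fragments:

    parts = ['y1 ~ x1', 'y2 ~ x2', '']

    # Before: enumerate supplies an index that is never used.
    cleaned = []
    for i, part in enumerate(parts):
        if part.strip():
            cleaned.append(part.strip())

    # After: iterate the list directly; the result is identical.
    assert cleaned == [part.strip() for part in parts if part.strip()]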
