Merge pull request #27 from cdt15/develop

Develop
ikeuchi-screen authored May 14, 2021
2 parents 880561f + f4cdfde commit 112f868
Showing 8 changed files with 36 additions and 20 deletions.
lingam/__init__.py (2 changes: 1 addition & 1 deletion)

@@ -18,4 +18,4 @@
     'CausalEffect', 'VARLiNGAM', 'VARMALiNGAM', 'LongitudinalLiNGAM', 'LongitudinalBootstrapResult',
     'BottomUpParceLiNGAM', 'RCD']
 
-__version__ = '1.5.2'
+__version__ = '1.5.3'
lingam/base.py (16 changes: 9 additions & 7 deletions)

@@ -133,20 +133,22 @@ def _estimate_adjacency_matrix(self, X, prior_knowledge=None):
         self : object
             Returns the instance itself.
         """
-        sink_vars = get_sink_variables(prior_knowledge)
-        exo_vars = get_exo_variables(prior_knowledge)
+        if prior_knowledge is not None:
+            pk = prior_knowledge.copy()
+            np.fill_diagonal(pk, 0)
 
         B = np.zeros([X.shape[1], X.shape[1]], dtype='float64')
         for i in range(1, len(self._causal_order)):
             target = self._causal_order[i]
             predictors = self._causal_order[:i]
 
-            # target is not used for prediction if it is included in exogenous variables
-            if target in exo_vars:
-                continue
+            # Exclude variables specified in no_path with prior knowledge
+            if prior_knowledge is not None:
+                predictors = [p for p in predictors if pk[target, p] != 0]
 
-            # sink variables are not used as predictors
-            predictors = [v for v in predictors if v not in sink_vars]
+            # target is exogenous variables if predictors are empty
+            if len(predictors) == 0:
+                continue
 
             B[target, predictors] = predict_adaptive_lasso(
                 X, predictors, target)
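Note: this change drops the sink/exogenous-variable filtering in favor of a direct check against the prior-knowledge matrix. For each target in the causal order, any earlier variable p with pk[target, p] == 0 (prior knowledge says there is no directed path from p to the target) is excluded from the adaptive-lasso regression, and a target left with no predictors is treated as exogenous. The same rework appears in lingam/bottom_up_parce_lingam.py below. A minimal sketch of the filtering step, using lingam's prior-knowledge convention (0 = no path, 1 = path, -1 = unknown); the matrix and causal order are made-up illustration values:

import numpy as np

# Hypothetical prior-knowledge matrix: pk[i, j] describes the path from j to i
# (0 = no directed path, 1 = directed path, -1 = unknown).
pk = np.array([[ 0,  0, -1],
               [-1,  0, -1],
               [-1, -1,  0]])
np.fill_diagonal(pk, 0)  # a variable is never its own predictor

causal_order = [1, 2, 0]                                # illustrative order; 0 comes last
target = 0
candidates = causal_order[:causal_order.index(target)]  # [1, 2]

# Drop candidates that prior knowledge says have no path into the target.
predictors = [p for p in candidates if pk[target, p] != 0]
print(predictors)  # [2]: variable 1 is excluded because pk[0, 1] == 0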
lingam/bottom_up_parce_lingam.py (24 changes: 16 additions & 8 deletions)

@@ -143,6 +143,12 @@ def _extract_partial_orders(self, pk):
         no_path_pairs = pairs[counts < 2]
 
         check_pairs = np.concatenate([path_pairs, no_path_pairs[:, [1, 0]]])
+        if len(check_pairs) == 0:
+            # If no pairs are extracted from the specified prior knowledge,
+            # discard the prior knowledge.
+            self._Aknw = None
+            return None
+
         pairs = np.unique(check_pairs, axis=0)
         return pairs[:, [1, 0]]  # [to, from] -> [from, to]

@@ -286,22 +292,24 @@ def _estimate_adjacency_matrix(self, X, prior_knowledge=None):
         self : object
             Returns the instance itself.
         """
-        sink_vars = get_sink_variables(prior_knowledge)
-        exo_vars = get_exo_variables(prior_knowledge)
+        if prior_knowledge is not None:
+            pk = prior_knowledge.copy()
+            np.fill_diagonal(pk, 0)
 
         B = np.zeros([X.shape[1], X.shape[1]], dtype='float64')
        for i in range(1, len(self._causal_order)):
             target = self._causal_order[i]
 
-            # target is not used for prediction if it is included in exogenous variables
-            if target in exo_vars:
-                continue
-
             # Flatten the array into one dimension
             predictors = self._flatten(self._causal_order[:i])
 
-            # sink variables are not used as predictors
-            predictors = [v for v in predictors if v not in sink_vars]
+            # Exclude variables specified in no_path with prior knowledge
+            if prior_knowledge is not None:
+                predictors = [p for p in predictors if pk[target, p] != 0]
+
+            # target is exogenous variables if predictors are empty
+            if len(predictors) == 0:
+                continue
 
             B[target, predictors] = predict_adaptive_lasso(
                 X, predictors, target)
lingam/direct_lingam.py (6 changes: 6 additions & 0 deletions)

@@ -123,6 +123,12 @@ def _extract_partial_orders(self, pk):
         no_path_pairs = pairs[counts < 2]
 
         check_pairs = np.concatenate([path_pairs, no_path_pairs[:, [1, 0]]])
+        if len(check_pairs) == 0:
+            # If no pairs are extracted from the specified prior knowledge,
+            # discard the prior knowledge.
+            self._Aknw = None
+            return None
+
         pairs = np.unique(check_pairs, axis=0)
         return pairs[:, [1, 0]]  # [to, from] -> [from, to]
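Note: this guard (added identically in lingam/bottom_up_parce_lingam.py above) handles prior knowledge that contains no definite path or no-path pairs: instead of passing an empty array on to np.unique, the knowledge is discarded via self._Aknw = None. A hedged usage sketch of the resulting behavior, with random, purely illustrative data:

import numpy as np
from lingam import DirectLiNGAM

# An all -1 ("unknown") prior-knowledge matrix yields no extractable pairs;
# with this change it is quietly discarded rather than tripping up the
# pair bookkeeping downstream.
pk = -np.ones((3, 3), dtype=int)

X = np.random.uniform(size=(1000, 3))
model = DirectLiNGAM(prior_knowledge=pk)
model.fit(X)  # proceeds as if no prior knowledge had been given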
lingam/var_lingam.py (2 changes: 1 addition & 1 deletion)

@@ -76,7 +76,7 @@ def fit(self, X):
         lingam_model = self._lingam_model
         if lingam_model is None:
             lingam_model = DirectLiNGAM()
-        elif not issubclass(lingam_model, _BaseLiNGAM):
+        elif not isinstance(lingam_model, _BaseLiNGAM):
             raise ValueError('lingam_model must be a subclass of _BaseLiNGAM')
 
         M_taus = self._ar_coefs
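Note: self._lingam_model holds a model instance, not a class, and issubclass() raises TypeError when given an instance, so the old check could never accept a valid model; isinstance() is the correct test. The identical one-line fix is applied to lingam/varma_lingam.py below. A short sketch of the intended usage, assuming lingam_model is the constructor parameter that populates self._lingam_model:

from lingam import DirectLiNGAM, VARLiNGAM

# Pass a configured LiNGAM *instance*; isinstance() accepts it, whereas
# issubclass(instance, _BaseLiNGAM) would raise TypeError.
inner = DirectLiNGAM(measure='pwling')
model = VARLiNGAM(lags=1, lingam_model=inner)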
lingam/varma_lingam.py (2 changes: 1 addition & 1 deletion)

@@ -84,7 +84,7 @@ def fit(self, X):
         lingam_model = self._lingam_model
         if lingam_model is None:
             lingam_model = DirectLiNGAM()
-        elif not issubclass(lingam_model, _BaseLiNGAM):
+        elif not isinstance(lingam_model, _BaseLiNGAM):
             raise ValueError('lingam_model must be a subclass of _BaseLiNGAM')
 
         phis = self._ar_coefs
tests/test_direct_lingam.py (2 changes: 1 addition & 1 deletion)

@@ -182,7 +182,7 @@ def test_prior_knowledge_invalid():
 
     # prior knowledge: invalid
     pk = np.array([
-        [0, -1, -1],
+        [0, -1, 1],
         [-1, 0, -1],
         [-1, -1, 0],
         [-1, -1, -1],
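Note: this test tweak appears tied to the new guard in _extract_partial_orders: a matrix whose only definite entries sit on the diagonal would now be discarded outright, so fit() might never reach the shape validation this test exercises; flipping one -1 to 1 keeps at least one extractable pair. The same change appears in tests/test_multi_group_direct_lingam.py below. That reading is an inference from the diff, not documented in the commit:

import numpy as np

# 4x3 (non-square) prior knowledge: still invalid, so fit() must raise.
# The single 1 presumably guarantees _extract_partial_orders finds a pair
# instead of discarding the matrix before validation (assumed rationale).
pk = np.array([
    [0, -1, 1],
    [-1, 0, -1],
    [-1, -1, 0],
    [-1, -1, -1],
])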
tests/test_multi_group_direct_lingam.py (2 changes: 1 addition & 1 deletion)

@@ -275,7 +275,7 @@ def test_prior_knowledge_invalid():
 
     # prior knowledge: invalid
    pk = np.array([
-        [0, -1, -1],
+        [0, -1, 1],
         [-1, 0, -1],
         [-1, -1, 0],
         [-1, -1, -1],
