Skip to content

Commit

Permalink
Skip pyjags tests (#1536)
Browse files Browse the repository at this point in the history
* Skip pyjags

* fix, maybe

* Remove import
  • Loading branch information
ahartikainen authored Feb 6, 2021
1 parent 1f3b8fd commit 525e311
Showing 1 changed file with 1 addition and 110 deletions.
111 changes: 1 addition & 110 deletions arviz/tests/external_tests/test_data_pyjags.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
import typing as tp

import numpy as np
import pyjags
import pytest

from arviz import InferenceData, from_pyjags, waic
Expand Down Expand Up @@ -41,6 +40,7 @@ def verify_equality_of_numpy_values_dictionaries(
return True


@pytest.mark.skip("Crashes Python")
class TestDataPyJAGSWithoutEstimation:
def test_convert_pyjags_samples_dictionary_to_arviz_samples_dictionary(self):
arviz_samples_dict_from_pyjags_samples_dict = _convert_pyjags_dict_to_arviz_dict(
Expand Down Expand Up @@ -115,112 +115,3 @@ def test_inference_data_attrs(self, posterior, prior, save_warmup, warmup_iterat

fails = check_multiple_attrs(test_dict, arviz_inference_data_from_pyjags_samples_dict)
assert not fails


# class TestDataPyJAGSWithEstimation:
# @pytest.fixture(scope="class")
# def jags_prior_model(self) -> pyjags.Model:
# EIGHT_SCHOOL_PRIOR_MODEL_CODE = """
# model {
# mu ~ dnorm(0.0, 1.0/25)
# tau ~ dt(0.0, 1.0/25, 1.0) T(0, )
# for (j in 1:J) {
# theta_tilde[j] ~ dnorm(0.0, 1.0)
# }
# }
# """
#
# prior_model = pyjags.Model(
# code=EIGHT_SCHOOL_PRIOR_MODEL_CODE,
# data={"J": 8},
# chains=4,
# threads=4,
# chains_per_thread=1,
# )
#
# return prior_model
#
# @pytest.fixture(scope="class")
# def jags_posterior_model(
# self, eight_schools_params: tp.Dict[str, tp.Union[int, np.ndarray]]
# ) -> pyjags.Model:
# EIGHT_SCHOOL_POSTERIOR_MODEL_CODE = """
# model {
# mu ~ dnorm(0.0, 1.0/25)
# tau ~ dt(0.0, 1.0/25, 1.0) T(0, )
# for (j in 1:J) {
# theta_tilde[j] ~ dnorm(0.0, 1.0)
# y[j] ~ dnorm(mu + tau * theta_tilde[j], 1.0/(sigma[j]^2))
# log_like[j] = logdensity.norm(y[j], mu + tau * theta_tilde[j], 1.0/(sigma[j]^2))
# }
# }
# """
#
# posterior_model = pyjags.Model(
# code=EIGHT_SCHOOL_POSTERIOR_MODEL_CODE,
# data=eight_schools_params,
# chains=4,
# threads=4,
# chains_per_thread=1,
# )
#
# return posterior_model
#
# @pytest.fixture(scope="class")
# def jags_prior_samples(self, jags_prior_model: pyjags.Model) -> tp.Dict[str, np.ndarray]:
# return jags_prior_model.sample(
# NUMBER_OF_WARMUP_SAMPLES + NUMBER_OF_POST_WARMUP_SAMPLES, vars=PARAMETERS
# )
#
# @pytest.fixture(scope="class")
# def jags_posterior_samples(
# self, jags_posterior_model: pyjags.Model
# ) -> tp.Dict[str, np.ndarray]:
# return jags_posterior_model.sample(
# NUMBER_OF_WARMUP_SAMPLES + NUMBER_OF_POST_WARMUP_SAMPLES, vars=VARIABLES
# )
#
# @pytest.fixture()
# def pyjags_data(
# self,
# jags_prior_samples: tp.Dict[str, np.ndarray],
# jags_posterior_samples: tp.Dict[str, np.ndarray],
# ) -> InferenceData:
# return from_pyjags(
# posterior=jags_posterior_samples,
# prior=jags_prior_samples,
# log_likelihood={"y": "log_like"},
# save_warmup=True,
# warmup_iterations=NUMBER_OF_WARMUP_SAMPLES,
# )
#
# def test_waic(self, pyjags_data):
# waic_result = waic(pyjags_data)
#
# assert -31.0 < waic_result.waic < -30.0
# assert 0.75 < waic_result.p_waic < 0.90
#
# # @pytest.fixture(scope="class")
# # def data(self, jags_posterior_model, jags_posterior_samples, jags_prior_samples):
# # class Data:
# # model = self.jags_posterior_model
# # posterior = self.jags_posterior_samples
# # prior = self.jags_prior_samples
# #
# # return Data
# #
# # def get_inference_data(self, data) -> InferenceData:
# # return from_pyjags(
# # posterior=data.posterior,
# # prior=data.prior,
# # log_likelihood={"y": "log_like"},
# # save_warmup=True,
# # warmup_iterations=NUMBER_OF_WARMUP_SAMPLES,
# # )
# #
# # def test_waic(self, data):
# # pyjags_data = self.get_inference_data(data)
# # waic_result = waic(pyjags_data)
# #
# # assert -31.0 < waic_result.waic < -30.0
# # assert 0.75 < waic_result.p_waic < 0.90

0 comments on commit 525e311

Please sign in to comment.