Param #17

Open · wants to merge 3 commits into master
CHANGELOG.md: 8 additions & 0 deletions
@@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

## [1.1.5] - 2018-07-21
### Added
- setting prior parameters for the models

## [1.1.4] - 2018-07-21
### Added
- sample method for LinearRegression model

## [1.1.3] - 2018-05-25
### Fixed
- HLR fit method sets shared vars if no minibatch_size given
docs/api/modules.rst: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
api
pymc3_models
============

.. toctree::
:maxdepth: 4

pymc3_models.models
pymc3_models
docs/api/pymc3_models.models.rst: 9 additions & 6 deletions
@@ -1,16 +1,19 @@
models
=============================
pymc3\_models.models package
============================

pymc3\_models\.models\.HierarchicalLogisticRegression module
------------------------------------------------------------
Submodules
----------

pymc3\_models.models.HierarchicalLogisticRegression module
----------------------------------------------------------

.. automodule:: pymc3_models.models.HierarchicalLogisticRegression
:members:
:undoc-members:
:show-inheritance:

pymc3\_models\.models\.LinearRegression module
----------------------------------------------
pymc3\_models.models.LinearRegression module
--------------------------------------------

.. automodule:: pymc3_models.models.LinearRegression
:members:
docs/api/pymc3_models.rst: 29 additions & 0 deletions
@@ -0,0 +1,29 @@
pymc3\_models package
=====================

Subpackages
-----------

.. toctree::

pymc3_models.models

Submodules
----------

pymc3\_models.exc module
------------------------

.. automodule:: pymc3_models.exc
:members:
:undoc-members:
:show-inheritance:


Module contents
---------------

.. automodule:: pymc3_models
:members:
:undoc-members:
:show-inheritance:
pymc3_models/models/HierarchicalLogisticRegression.py: 18 additions & 5 deletions
@@ -13,10 +13,23 @@ class HierarchicalLogisticRegression(BayesianModel):
Custom Hierarchical Logistic Regression built using PyMC3.
"""

def __init__(self):
def __init__(self,
mu_alpha_mu = 0.0,
mu_alpha_sd = 100.0,
sigma_alpha_sd = 100.0,
mu_beta_mu = 0.0,
mu_beta_sd = 100.0,
sigma_beta_sd = 100.0):
super(HierarchicalLogisticRegression, self).__init__()
self.num_cats = None

self.mu_alpha_mu = mu_alpha_mu
self.mu_alpha_sd = mu_alpha_sd
self.sigma_alpha_sd = sigma_alpha_sd
self.mu_beta_mu = mu_beta_mu
self.mu_beta_sd = mu_beta_sd
self.sigma_beta_sd = sigma_beta_sd

def create_model(self):
"""
Creates and returns the PyMC3 model.
@@ -42,11 +55,11 @@ def create_model(self):
model = pm.Model()

with model:
mu_alpha = pm.Normal('mu_alpha', mu=0, sd=100)
sigma_alpha = pm.HalfNormal('sigma_alpha', sd=100)
mu_alpha = pm.Normal('mu_alpha', mu=self.mu_alpha_mu, sd=self.mu_alpha_sd)
sigma_alpha = pm.HalfNormal('sigma_alpha', sd=self.sigma_alpha_sd)

mu_beta = pm.Normal('mu_beta', mu=0, sd=100)
sigma_beta = pm.HalfNormal('sigma_beta', sd=100)
mu_beta = pm.Normal('mu_beta', mu=self.mu_beta_mu, sd=self.mu_beta_sd)
sigma_beta = pm.HalfNormal('sigma_beta', sd=self.sigma_beta_sd)

alpha = pm.Normal('alpha', mu=mu_alpha, sd=sigma_alpha, shape=(self.num_cats,))
betas = pm.Normal('beta', mu=mu_beta, sd=sigma_beta, shape=(self.num_cats, self.num_pred))
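The new keyword arguments expose the hyperprior means and standard deviations that were previously hard-coded to 0 and 100. A minimal usage sketch follows; the package-root import and the tighter values are assumptions for illustration, not defaults from this PR:

from pymc3_models import HierarchicalLogisticRegression  # assumes the class is re-exported at the package root

# Tighter hyperpriors than the library defaults of mu=0.0, sd=100.0;
# the values here are purely illustrative.
hlr = HierarchicalLogisticRegression(
    mu_alpha_mu=0.0,
    mu_alpha_sd=10.0,
    sigma_alpha_sd=10.0,
    mu_beta_mu=0.0,
    mu_beta_sd=10.0,
    sigma_beta_sd=10.0,
)
# fit and predict are unchanged by this PR and are called as before.

The create_model changes above simply thread these attributes through to the pm.Normal and pm.HalfNormal hyperpriors, so constructing the model with no arguments reproduces the previous behaviour.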
pymc3_models/models/LinearRegression.py: 36 additions & 8 deletions
@@ -13,9 +13,20 @@ class LinearRegression(BayesianModel):
Linear Regression built using PyMC3.
"""

def __init__(self):
def __init__(self,
alpha_mu = 0,
alpha_sd = 100,
beta_mu = 0,
beta_sd = 100,
tau = 1):
super(LinearRegression, self).__init__()

self.alpha_mu = alpha_mu
self.alpha_sd = alpha_sd
self.beta_mu = beta_mu
self.beta_sd = beta_sd
self.tau = tau

def create_model(self):
"""
Creates and returns the PyMC3 model.
@@ -38,10 +49,10 @@ def create_model(self):
model = pm.Model()

with model:
alpha = pm.Normal('alpha', mu=0, sd=100, shape=(1))
betas = pm.Normal('betas', mu=0, sd=100, shape=(1, self.num_pred))
alpha = pm.Normal('alpha', mu=self.alpha_mu, sd=self.alpha_sd, shape=(1))
betas = pm.Normal('betas', mu=self.beta_mu, sd=self.beta_sd, shape=(1, self.num_pred))

s = pm.HalfNormal('s', tau=1)
s = pm.HalfNormal('s', tau=self.tau)

mean = alpha + T.sum(betas * model_input, 1)

@@ -93,17 +104,17 @@ def fit(self, X, y, inference_type='advi', minibatch_size=None, inference_args=N

return self

def predict(self, X, return_std=False):

def sample(self, X, samples=2000):
"""
Predicts values of new data with a trained Linear Regression model
Samples from the conditional posterior predictive distribution for new data.

Parameters
----------
X : numpy array, shape [n_samples, n_features]

return_std : Boolean flag of whether to return standard deviations with mean values. Defaults to False.
samples : number of draws to make for each point
"""

if self.trace is None:
raise PyMC3ModelsError('Run fit on the model before sampling.')

@@ -116,6 +127,23 @@ def predict(self, X, return_std=False):

ppc = pm.sample_ppc(self.trace, model=self.cached_model, samples=samples)

return ppc

def predict(self, X, return_std=False, samples=2000):
"""
Predicts values of new data with a trained Linear Regression model

Parameters
----------
X : numpy array, shape [n_samples, n_features]

return_std : Boolean flag of whether to return standard deviations with mean values. Defaults to False.

samples : number of draws to make for each input
"""

ppc = self.sample(X, samples)

if return_std:
return ppc['y'].mean(axis=0), ppc['y'].std(axis=0)
else:
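Taken together, the configurable priors and the new sample/predict split can be used roughly as follows. This is a sketch: the package-root import and the toy data are assumptions, while the constructor, fit, sample and predict signatures match the diff above.

import numpy as np
from pymc3_models import LinearRegression  # assumes the class is re-exported at the package root

# Toy data: 100 rows, 3 predictors (illustrative only).
X = np.random.randn(100, 3)
y = X.dot(np.array([1.0, -2.0, 0.5])) + 0.3 * np.random.randn(100)

# Narrower priors than the defaults (alpha/beta: mu=0, sd=100; tau=1).
lr = LinearRegression(alpha_mu=0.0, alpha_sd=10.0, beta_mu=0.0, beta_sd=10.0, tau=1.0)
lr.fit(X, y)  # ADVI by default, per the fit signature shown above

ppc = lr.sample(X, samples=500)                               # raw posterior predictive draws in ppc['y']
y_mean, y_std = lr.predict(X, return_std=True, samples=500)   # summarised draws

Note that predict now just summarises the output of sample, so sample is the method to call when the full set of posterior predictive draws is needed.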