tested conservatism
FelixBenning committed May 20, 2024
1 parent 94e590f commit 3a55033
Showing 4 changed files with 19 additions and 14 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/push.yml
@@ -39,3 +39,6 @@ jobs:
           poetry run pylint pyrfd
           poetry run flake8 pyrfd
           poetry run black --check pyrfd
+      - name: Run tests
+        run: poetry run pytest tests
22 changes: 10 additions & 12 deletions pyrfd/covariance.py
@@ -10,7 +10,6 @@
 import pandas as pd
 import numpy as np
 import matplotlib.pyplot as plt
-import torch
 from scipy import stats
 from tqdm import tqdm

@@ -46,22 +45,21 @@ def __init__(
         gradient_var: Tuple[float, float] | None = None,
         dims=None,
     ) -> None:
+
         self.mean = mean
+        self.dims = dims
 
-        if variance is None:
-            self.var_reg = None
-        else:
+        self.var_reg = None
+        self.g_var_reg = None
+
+        if variance is not None:
             self.var_reg = ScalarRegression(*variance)
             assert self.var_reg.is_plausible_variance_regression
 
-        if gradient_var is None:
-            self.g_var_reg = None
-        else:
+        if gradient_var is not None:
             self.g_var_reg = ScalarRegression(*gradient_var)
             assert self.g_var_reg.is_plausible_variance_regression
 
-        self.dims = dims
-
         self._fitted = False
         if self._is_fitted():
             self._fitted = True
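
Note: this refactor is behavior-preserving — both regressions now default to None and are only overwritten when a fit tuple is supplied. A minimal standalone sketch of the branch logic (the _Cov stand-in and its tuple payloads are hypothetical; the real code wraps the tuples in ScalarRegression):

from typing import Tuple

class _Cov:  # stand-in for the class above, default handling only
    def __init__(
        self,
        variance: Tuple[float, float] | None = None,
        gradient_var: Tuple[float, float] | None = None,
    ):
        self.var_reg = None
        self.g_var_reg = None
        if variance is not None:
            self.var_reg = variance  # real code: ScalarRegression(*variance)
        if gradient_var is not None:
            self.g_var_reg = gradient_var

assert _Cov().var_reg is None and _Cov().g_var_reg is None  # defaults survive
assert _Cov(variance=(1.0, 0.5)).var_reg == (1.0, 0.5)      # override works
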
@@ -173,7 +171,7 @@ def learning_rate(self, loss, grad_norm, *, b_size_inv=0, conservatism=0):
         def diff_cost(stepsize):
             cond_var = self.cond_variance(stepsize, b_size_inv=b_size_inv)
             d_cond_var = self.diff_cond_variance(stepsize, b_size_inv=b_size_inv)
-            reg = regularization * 0.5 * d_cond_var / torch.sqrt(cond_var)
+            reg = regularization * 0.5 * d_cond_var / np.sqrt(cond_var)
 
             dce = self.diff_cond_expectation(stepsize, loss, grad_norm, b_size_inv)
             return dce + reg
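
Note: the reg term is the stepsize derivative of regularization times the conditional standard deviation, since for positive differentiable v(s) we have d/ds sqrt(v(s)) = v'(s) / (2 * sqrt(v(s))); the torch-to-numpy swap leaves that calculus unchanged. A quick standalone check of the identity, with a hypothetical stand-in v(s) = 1 + s**2:

import numpy as np

v = lambda s: 1 + s**2   # hypothetical stand-in for cond_var(stepsize)
dv = lambda s: 2 * s     # its exact derivative

s, h = 0.7, 1e-6
fd = (np.sqrt(v(s + h)) - np.sqrt(v(s - h))) / (2 * h)  # central difference
assert abs(fd - 0.5 * dv(s) / np.sqrt(v(s))) < 1e-8
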
@@ -407,7 +405,7 @@ def scale(self):
         )
 
     def kernel(self, neg_sq_half):
-        return self.variance * torch.exp(neg_sq_half / (self.scale**2))
+        return self.variance * np.exp(neg_sq_half / (self.scale**2))
 
     def diff_kernel(self, neg_sq_half):
         return self.kernel(neg_sq_half) / (self.scale**2)
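
Note: reading neg_sq_half as the negative half squared distance (an assumption suggested by the name), kernel computes the squared-exponential covariance variance * exp(-dist**2 / (2 * scale**2)), and diff_kernel is its derivative with respect to neg_sq_half, which is kernel / scale**2. A standalone finite-difference check of that derivative (the constants are hypothetical):

import numpy as np

variance, scale = 3.0, 0.5  # hypothetical constants

def kernel(neg_sq_half):
    return variance * np.exp(neg_sq_half / (scale**2))

def diff_kernel(neg_sq_half):
    # d/d(neg_sq_half) of kernel is kernel itself divided by scale**2
    return kernel(neg_sq_half) / (scale**2)

x, h = -0.3, 1e-6
fd = (kernel(x + h) - kernel(x - h)) / (2 * h)  # central difference
assert abs(fd - diff_kernel(x)) < 1e-5
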
@@ -434,7 +432,7 @@ def learning_rate(self, loss, grad_norm, *, b_size_inv=0, conservatism=0):
         return (
             var_g_adjust
             * (self.scale**2)
-            / (np.sqrt(tmp**2 + (self.scale * grad_norm * var_g_adjust) ** 2) + tmp)
+            / (np.sqrt(tmp**2 + (self.scale * grad_norm * var_g_adjust) ** 2) + tmp)
         )
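
Note: the returned expression appears to be the rationalized form of (sqrt(tmp**2 + (scale * grad_norm * var_g_adjust)**2) - tmp) / (grad_norm**2 * var_g_adjust), presumably written this way to avoid cancellation when tmp dominates the square root. The two forms agree algebraically, as a quick check with hypothetical numbers shows:

import numpy as np

scale, grad_norm, var_g_adjust, tmp = 2.0, 0.7, 1.3, 0.4  # hypothetical values

root = np.sqrt(tmp**2 + (scale * grad_norm * var_g_adjust) ** 2)
stable = var_g_adjust * scale**2 / (root + tmp)       # form used in the diff
naive = (root - tmp) / (grad_norm**2 * var_g_adjust)  # cancellation-prone form

assert np.isclose(stable, naive)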


2 changes: 1 addition & 1 deletion pyrfd/regression.py
@@ -26,7 +26,7 @@ def __call__(self, x):
     @property
     def is_plausible_variance_regression(self):
         """ variances are positive, bool if slope and intercept are positive """
-        return (self.slope > 0) and (self.intercept > 0)
+        return (self.slope >= 0) and (self.intercept > 0)
 
     @property
     def slope(self):
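Note: relaxing slope > 0 to slope >= 0 presumably admits fitted variances that do not grow with the regressor (a flat line), while still rejecting a non-positive intercept. A standalone mirror of the relaxed check (the free function is hypothetical; the real code is the property above):

def is_plausible_variance_regression(slope: float, intercept: float) -> bool:
    # slope may now be exactly zero (variance independent of the regressor);
    # the intercept must remain strictly positive
    return slope >= 0 and intercept > 0

assert is_plausible_variance_regression(0.0, 1e-3)       # accepted after this commit
assert not is_plausible_variance_regression(-0.1, 1e-3)  # still rejected
assert not is_plausible_variance_regression(1.0, 0.0)    # still rejected
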
6 changes: 5 additions & 1 deletion tests/covariance_test.py
@@ -1,7 +1,11 @@
 import pytest
 from pyrfd.covariance import SquaredExponential
 
-@pytest.mark.parametrize("mean,var,scale", [(1,1,2)])
+@pytest.mark.parametrize("mean,var,scale", [
+    (1,1,2),
+    (0.1,3,10),
+    (10,3,0.5),
+])
 def test_squared_exponential(mean, var, scale):
     cov = SquaredExponential(
         mean=mean,
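Note: pytest expands each parameter tuple into its own test invocation, so the suite now covers three (mean, var, scale) combinations instead of one. Illustratively, these are the equivalent direct calls that pytest generates:

# pytest runs one test per tuple; equivalent direct calls (illustrative only):
test_squared_exponential(mean=1, var=1, scale=2)
test_squared_exponential(mean=0.1, var=3, scale=10)
test_squared_exponential(mean=10, var=3, scale=0.5)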
