Bugfixes
anttttti committed Apr 21, 2020
1 parent 6f0965c commit e2be83b
Showing 6 changed files with 20 additions and 26 deletions.
2 changes: 1 addition & 1 deletion README.rst
@@ -1,5 +1,5 @@
===============
-Wordbatch 1.4.5
+Wordbatch 1.4.6
===============

Overview
2 changes: 1 addition & 1 deletion setup.py
@@ -13,7 +13,7 @@

setup(
name='Wordbatch',
-version='1.4.5',
+version='1.4.6',
description='Python library for distributed AI processing pipelines, using swappable scheduler backends',
url='https://github.com/anttttti/Wordbatch',
author='Antti Puurula',
2 changes: 1 addition & 1 deletion wordbatch/__init__.py
@@ -1,4 +1,4 @@
import os
PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
-__version__ = '1.4.5'
+__version__ = '1.4.6'

18 changes: 8 additions & 10 deletions wordbatch/models/ftrl.pyx
@@ -1,14 +1,12 @@
# cython: boundscheck=False, wraparound=False, cdivision=True
import numpy as np
import gzip
cimport cython
from cpython cimport array
import scipy.sparse as ssp
cimport numpy as np
from cython.parallel import prange
from libc.math cimport exp, log, fmax, fmin, sqrt, fabs
import multiprocessing
import sys
import randomgen

np.import_array()
@@ -213,20 +211,20 @@ cdef class FTRL:

def finalize_model(self):
D= self.D
-indices = np.arange(start=0, stop=D+1, step=1, dtype=np.int32)
-indptr= np.array([0, D+1], dtype=np.int32)
-data = np.zeros((D+1,), dtype=np.float64)
+indices = np.arange(start=0, stop=D, step=1, dtype=np.int32)
+indptr= np.array([0, D], dtype=np.int32)
+data = np.zeros(D, dtype=np.float64)
self.predict_f(data, indices, indptr, threads= self.threads)
del(indices, indptr, data)
-self.z = None
-self.n = None
+self.z = np.zeros(0, dtype=np.float64)
+self.n = np.zeros(0, dtype=np.float64)
self.model_finalized= True

def __getstate__(self):
return (self.alpha, self.beta, self.L1, self.L2, self.e_clip, self.D, self.init, self.seed, self.iters,
np.asarray(self.w), np.asarray(self.z), np.asarray(self.n), self.inv_link, self.threads, self.bias_term,
-self.verbose)
+self.model_finalized, self.verbose)

def __setstate__(self, params):
-(self.alpha, self.beta, self.L1, self.L2, self.e_clip, self.D, self.init, self.seed,
-self.iters, self.w, self.z, self.n, self.inv_link, self.threads, self.bias_term, self.verbose)= params
+(self.alpha, self.beta, self.L1, self.L2, self.e_clip, self.D, self.init, self.seed, self.iters, self.w,
+self.z, self.n, self.inv_link, self.threads, self.bias_term, self.model_finalized, self.verbose)= params
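In finalize_model, the dummy call to predict_f now builds its CSR-style (data, indices, indptr) triple for exactly D columns instead of D+1, and the discarded z and n accumulators become empty arrays rather than None, presumably so that np.asarray(self.z) in __getstate__ still yields a well-formed array for a finalized model. Below is a minimal shape-bookkeeping sketch using scipy directly rather than Wordbatch's internal predict_f; the value of D is made up for illustration:

```python
import numpy as np
import scipy.sparse as ssp

D = 5  # hypothetical feature count; Wordbatch's FTRL uses self.D

# One dense CSR row of width D, mirroring the arrays finalize_model feeds to predict_f:
indices = np.arange(start=0, stop=D, step=1, dtype=np.int32)  # column ids 0..D-1
indptr = np.array([0, D], dtype=np.int32)                     # a single row holding D entries
data = np.zeros(D, dtype=np.float64)                          # zero-valued features

row = ssp.csr_matrix((data, indices, indptr), shape=(1, D))
assert row.shape == (1, D) and len(row.data) == D
# The earlier code sized all three arrays for D+1 entries (column ids 0..D);
# the bugfix trims the dummy row to the D feature columns.
```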
18 changes: 8 additions & 10 deletions wordbatch/models/ftrl32.pyx
@@ -1,14 +1,12 @@
# cython: boundscheck=False, wraparound=False, cdivision=True
import numpy as np
import gzip
cimport cython
from cpython cimport array
import scipy.sparse as ssp
cimport numpy as np
from cython.parallel import prange
from libc.math cimport exp, log, fmax, fmin, sqrt, fabs
import multiprocessing
import sys
import randomgen

np.import_array()
@@ -214,20 +212,20 @@ cdef class FTRL32:

def finalize_model(self):
D= self.D
-indices = np.arange(start=0, stop=D+1, step=1, dtype=np.int32)
-indptr= np.array([0, D+1], dtype=np.int32)
-data = np.zeros((D+1,), dtype=np.float64)
+indices = np.arange(start=0, stop=D, step=1, dtype=np.int32)
+indptr= np.array([0, D], dtype=np.int32)
+data = np.zeros(D, dtype=np.float64)
self.predict_f(data, indices, indptr, threads= self.threads)
del(indices, indptr, data)
-self.z = None
-self.n = None
+self.z = np.zeros(0, dtype=np.float32)
+self.n = np.zeros(0, dtype=np.float32)
self.model_finalized= True

def __getstate__(self):
return (self.alpha, self.beta, self.L1, self.L2, self.e_clip, self.D, self.init, self.seed, self.iters,
np.asarray(self.w), np.asarray(self.z), np.asarray(self.n), self.inv_link, self.threads, self.bias_term,
-self.verbose)
+self.model_finalized, self.verbose)

def __setstate__(self, params):
-(self.alpha, self.beta, self.L1, self.L2, self.e_clip, self.D, self.init, self.seed,
-self.iters, self.w, self.z, self.n, self.inv_link, self.threads, self.bias_term, self.verbose)= params
+(self.alpha, self.beta, self.L1, self.L2, self.e_clip, self.D, self.init, self.seed, self.iters, self.w,
+self.z, self.n, self.inv_link, self.threads, self.bias_term, self.model_finalized, self.verbose)= params
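The same pickling fix appears in both FTRL and FTRL32: model_finalized is now part of the tuple returned by __getstate__ and unpacked by __setstate__, so a finalized model stays finalized after a pickle round trip. A plain-Python analogue of the pattern follows; TinyModel is a stand-in with an abbreviated state tuple, not Wordbatch's cdef class:

```python
import pickle

class TinyModel:
    """Stand-in for the __getstate__/__setstate__ pattern in ftrl.pyx / ftrl32.pyx."""
    def __init__(self):
        self.alpha = 0.1
        self.model_finalized = False

    def __getstate__(self):
        # The fix: carry model_finalized in the state tuple ...
        return (self.alpha, self.model_finalized)

    def __setstate__(self, params):
        # ... and unpack it in the same position when loading.
        (self.alpha, self.model_finalized) = params

m = TinyModel()
m.model_finalized = True
restored = pickle.loads(pickle.dumps(m))
assert restored.model_finalized  # without the flag in the tuple, this information was lost on reload
```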
4 changes: 1 addition & 3 deletions wordbatch/pipelines/apply.py
@@ -8,9 +8,7 @@

def decorator_apply(func, batcher=None, cache=None, vectorize=None):
def wrapper_func(*args, **kwargs):
-if len(args)>1: argss= args[1]
-else: argss= []
-return Apply(func, args=argss, kwargs= kwargs, batcher= batcher, cache= cache,
+return Apply(func, args=args[1:], kwargs= kwargs, batcher= batcher, cache= cache,
vectorize= vectorize).transform(args[0])
return wrapper_func

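The decorator_apply wrapper now forwards every positional argument after the data (args[1:]) instead of only args[1], so decorated functions can receive more than one extra positional argument. A rough stand-alone illustration of the wrapper pattern; the list comprehension stands in for Apply(...).transform(args[0]), which actually dispatches the work to a batcher:

```python
def decorator_apply_sketch(func):
    def wrapper_func(*args, **kwargs):
        data, extra = args[0], args[1:]  # fixed behaviour: keep all remaining positionals
        # Old behaviour was roughly `argss = args[1] if len(args) > 1 else []`,
        # which forwarded only the second positional and dropped anything after it.
        return [func(x, *extra, **kwargs) for x in data]
    return wrapper_func

@decorator_apply_sketch
def scale(x, factor, offset):
    return x * factor + offset

print(scale([1, 2, 3], 10, 1))  # [11, 21, 31] -- both extra positionals reach scale
```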
