Skip to content

Commit

Permalink
add secondary setup for extensions
Browse files Browse the repository at this point in the history
  • Loading branch information
thierrymoudiki committed Apr 27, 2024
1 parent 5af0675 commit 4da5619
Show file tree
Hide file tree
Showing 23 changed files with 1,025 additions and 690 deletions.
10 changes: 5 additions & 5 deletions .github/workflows/python-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@ jobs:
- name: Run examples
run: pip install .&&find examples -maxdepth 2 -name "*.py" -exec python3 {} \;

- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_GLOBAL_MLSAUCE }}
repository-url: https://upload.pypi.org/legacy/
# - name: Publish to PyPI
# uses: pypa/gh-action-pypi-publish@release/v1
# with:
# password: ${{ secrets.PYPI_GLOBAL_MLSAUCE }}
# repository-url: https://upload.pypi.org/legacy/
5 changes: 2 additions & 3 deletions mlsauce/adaopt/_adaopt.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,10 @@
from tqdm import tqdm
from ..utils import subsample
from ..utils import cluster

# Import the compiled Cython extension. Inside the installed package the
# module lives at mlsauce.adaopt._adaoptc (relative import); when this file
# is executed from the source directory (e.g. after building in place with
# setup2.py), fall back to a plain top-level import of the .so/.pyd.
try:
    from . import _adaoptc as adaoptc
except ImportError:
    import _adaoptc as adaoptc


class AdaOpt(BaseEstimator, ClassifierMixin):
Expand Down
Binary file added mlsauce/adaopt/_adaoptc.cpython-311-darwin.so
Binary file not shown.
8 changes: 8 additions & 0 deletions mlsauce/adaopt/setup2.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
"""Secondary build script: compile the _adaoptc Cython extension in place.

Run as ``python setup2.py build_ext --inplace`` from this directory to
produce the platform-specific shared object (e.g.
``_adaoptc.cpython-311-darwin.so``) next to the sources.
"""

import os

# distutils is deprecated (PEP 632) and removed in Python 3.12;
# setuptools provides a drop-in `setup` and is required by Cython anyway.
from setuptools import setup
from Cython.Build import cythonize

# Resolve the .pyx path relative to this file so the script works
# regardless of the current working directory.
dir_path = os.path.dirname(os.path.realpath(__file__))

setup(
    ext_modules=cythonize(
        os.path.join(dir_path, "_adaoptc.pyx"),
        # Compile the .pyx under Python 3 semantics.
        compiler_directives={"language_level": "3"},
    )
)
20 changes: 17 additions & 3 deletions mlsauce/booster/_booster_classifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,10 @@
from sklearn.base import BaseEstimator
from sklearn.base import ClassifierMixin
from sklearn.preprocessing import PolynomialFeatures
from . import _boosterc as boosterc
# Import the compiled Cython extension. Inside the installed package the
# module lives at mlsauce.booster._boosterc (relative import); when running
# from the source tree with an in-place build, fall back to a plain
# top-level import of the shared object.
try:
    from . import _boosterc as boosterc
except ImportError:
    import _boosterc as boosterc
from ..utils import cluster


Expand All @@ -26,6 +29,10 @@ class LSBoostClassifier(BaseEstimator, ClassifierMixin):
reg_lambda: float
L2 regularization parameter for successive errors in the optimizer
(at training time).
alpha: float
compromise between L1 and L2 regularization (must be in [0, 1]),
for `solver` == 'enet'.
row_sample: float
percentage of rows chosen from the training set.
Expand Down Expand Up @@ -53,7 +60,8 @@ class LSBoostClassifier(BaseEstimator, ClassifierMixin):
type of backend; must be in ('cpu', 'gpu', 'tpu')
solver: str
type of 'weak' learner; currently in ('ridge', 'lasso')
type of 'weak' learner; currently in ('ridge', 'lasso', 'enet').
'enet' is a combination of 'ridge' and 'lasso' called Elastic Net.
activation: str
activation function: currently 'relu', 'relu6', 'sigmoid', 'tanh'
Expand All @@ -78,6 +86,7 @@ def __init__(
learning_rate=0.1,
n_hidden_features=5,
reg_lambda=0.1,
alpha=0.5,
row_sample=1,
col_sample=1,
dropout=0,
Expand Down Expand Up @@ -113,7 +122,8 @@ def __init__(
assert solver in (
"ridge",
"lasso",
), "`solver` must be in ('ridge', 'lasso')"
"enet",
), "`solver` must be in ('ridge', 'lasso', 'enet')"

sys_platform = platform.system()

Expand All @@ -127,6 +137,8 @@ def __init__(
self.learning_rate = learning_rate
self.n_hidden_features = n_hidden_features
self.reg_lambda = reg_lambda
assert (alpha >= 0 and alpha <= 1), "`alpha` must be in [0, 1]"
self.alpha = alpha
self.row_sample = row_sample
self.col_sample = col_sample
self.dropout = dropout
Expand Down Expand Up @@ -194,6 +206,7 @@ def fit(self, X, y, **kwargs):
learning_rate=self.learning_rate,
n_hidden_features=self.n_hidden_features,
reg_lambda=self.reg_lambda,
alpha=self.alpha,
row_sample=self.row_sample,
col_sample=self.col_sample,
dropout=self.dropout,
Expand All @@ -213,6 +226,7 @@ def fit(self, X, y, **kwargs):
learning_rate=self.learning_rate,
n_hidden_features=self.n_hidden_features,
reg_lambda=self.reg_lambda,
alpha=self.alpha,
row_sample=self.row_sample,
col_sample=self.col_sample,
dropout=self.dropout,
Expand Down
9 changes: 9 additions & 0 deletions mlsauce/booster/_booster_regressor.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,10 @@ class LSBoostRegressor(BaseEstimator, RegressorMixin):
reg_lambda: float
L2 regularization parameter for successive errors in the optimizer
(at training time).
alpha: float
compromise between L1 and L2 regularization (must be in [0, 1]),
for `solver` == 'enet'
row_sample: float
percentage of rows chosen from the training set.
Expand Down Expand Up @@ -88,6 +92,7 @@ def __init__(
learning_rate=0.1,
n_hidden_features=5,
reg_lambda=0.1,
alpha=0.5,
row_sample=1,
col_sample=1,
dropout=0,
Expand Down Expand Up @@ -140,6 +145,8 @@ def __init__(
self.learning_rate = learning_rate
self.n_hidden_features = n_hidden_features
self.reg_lambda = reg_lambda
assert (alpha >= 0 and alpha <= 1), "`alpha` must be in [0, 1]"
self.alpha = alpha
self.row_sample = row_sample
self.col_sample = col_sample
self.dropout = dropout
Expand Down Expand Up @@ -210,6 +217,7 @@ def fit(self, X, y, **kwargs):
learning_rate=self.learning_rate,
n_hidden_features=self.n_hidden_features,
reg_lambda=self.reg_lambda,
alpha=self.alpha,
row_sample=self.row_sample,
col_sample=self.col_sample,
dropout=self.dropout,
Expand All @@ -229,6 +237,7 @@ def fit(self, X, y, **kwargs):
learning_rate=self.learning_rate,
n_hidden_features=self.n_hidden_features,
reg_lambda=self.reg_lambda,
alpha=self.alpha,
row_sample=self.row_sample,
col_sample=self.col_sample,
dropout=self.dropout,
Expand Down
Loading

0 comments on commit 4da5619

Please sign in to comment.