From b5092db4db22c8773b194efb0b64047cbe991857 Mon Sep 17 00:00:00 2001
From: Lorenzo Mec-iS
Date: Sun, 1 Jun 2025 12:27:20 +0100
Subject: [PATCH 01/14] Add tests for argsort_mut

---
 src/linalg/basic/arrays.rs | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)

diff --git a/src/linalg/basic/arrays.rs b/src/linalg/basic/arrays.rs
index 3c889722..cc2ce570 100644
--- a/src/linalg/basic/arrays.rs
+++ b/src/linalg/basic/arrays.rs
@@ -2190,4 +2190,32 @@ mod tests {
         assert_eq!(result, [65, 581, 30])
     }
+
+    #[test]
+    fn test_argsort_mut_exact_boundary() {
+        // Test index == length - 1 case
+        let boundary = DenseMatrix::from_2d_array(
+            &[&[1.0, 2.0, 3.0, f64::MAX], &[3.0, f64::MAX, 0.0, 2.0]]).unwrap();
+        let mut view0: Vec = boundary.get_col(0).iterator(0).copied().collect();
+        let indices = view0.argsort_mut();
+        assert_eq!(indices.last(), Some(&1));
+        assert_eq!(indices.first(), Some(&0));
+
+        let mut view1: Vec = boundary.get_col(3).iterator(0).copied().collect();
+        let indices = view1.argsort_mut();
+        assert_eq!(indices.last(), Some(&0));
+        assert_eq!(indices.first(), Some(&1));
+    }
+
+
+    #[test]
+    fn test_argsort_mut_filled_array() {
+        let matrix = DenseMatrix::::rand(1000, 1000);
+        let mut view: Vec = matrix.get_col(0).iterator(0).copied().collect();
+        let sorted = view.argsort_mut();
+        assert_eq!(
+            sorted.len(),
+            1000);
+    }
+
 }

From ab6e709ac57094b076bf1f8beada99727654eb9c Mon Sep 17 00:00:00 2001
From: Lorenzo Mec-iS
Date: Sun, 1 Jun 2025 12:33:15 +0100
Subject: [PATCH 02/14] Add formatting and cleaning up .github directory

---
 .github/CODEOWNERS         |  1 -
 .github/CONTRIBUTING.md    |  2 +-
 src/linalg/basic/arrays.rs | 11 ++++-------
 3 files changed, 5 insertions(+), 9 deletions(-)

diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index faeb28c5..7a3a35a1 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -2,6 +2,5 @@
 # the repo. Unless a later match takes precedence,
 # Developers in this list will be requested for
 # review when someone opens a pull request.
-* @VolodymyrOrlov
 * @morenol
 * @Mec-iS

diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 895db0f5..5665723c 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -50,9 +50,9 @@ $ rust-code-analysis-cli -p src/algorithm/neighbour/fastpair.rs --ls 22 --le 213
 1. After a PR is opened maintainers are notified
 2. Probably changes will be required to comply with the workflow, these commands are run automatically and all tests shall pass:
-    * **Coverage** (optional): `tarpaulin` is used with command `cargo tarpaulin --out Lcov --all-features -- --test-threads 1`
     * **Formatting**: run `rustfmt src/*.rs` to apply automatic formatting
     * **Linting**: `clippy` is used with command `cargo clippy --all-features -- -Drust-2018-idioms -Dwarnings`
+    * **Coverage** (optional): `tarpaulin` is used with command `cargo tarpaulin --out Lcov --all-features -- --test-threads 1`
     * **Testing**: multiple test pipelines are run for different targets
 3. When everything is OK, code is merged.
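For context, the new tests in PATCH 01/14 rely on the contract of `argsort_mut`: sort the vector in place (ascending) and return the original index of each element in sorted position. The real implementation is the iterative, stack-based quicksort touched by PATCH 04/14 below; the following is only a minimal stand-alone model of that contract, with hypothetical names:

```rust
/// Hypothetical model of the `argsort_mut` contract exercised by the tests
/// above: sort `v` in place (ascending) and return the original indices.
fn argsort_mut_model(v: &mut [f64]) -> Vec<usize> {
    let mut idx: Vec<usize> = (0..v.len()).collect();
    // Order the indices by the values they point at (input assumed NaN-free).
    idx.sort_by(|&a, &b| v[a].partial_cmp(&v[b]).unwrap());
    let sorted: Vec<f64> = idx.iter().map(|&i| v[i]).collect();
    v.copy_from_slice(&sorted);
    idx
}

fn main() {
    // Column 3 of the boundary matrix is [f64::MAX, 2.0]: the largest value
    // sits at row 0, so it sorts last, matching the test's assertions
    // (first == Some(&1), last == Some(&0)).
    let mut col3 = vec![f64::MAX, 2.0];
    let indices = argsort_mut_model(&mut col3);
    assert_eq!(indices.first(), Some(&1));
    assert_eq!(indices.last(), Some(&0));
}
```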
diff --git a/src/linalg/basic/arrays.rs b/src/linalg/basic/arrays.rs index cc2ce570..e8f6ae67 100644 --- a/src/linalg/basic/arrays.rs +++ b/src/linalg/basic/arrays.rs @@ -2194,8 +2194,9 @@ mod tests { #[test] fn test_argsort_mut_exact_boundary() { // Test index == length - 1 case - let boundary = DenseMatrix::from_2d_array( - &[&[1.0, 2.0, 3.0, f64::MAX], &[3.0, f64::MAX, 0.0, 2.0]]).unwrap(); + let boundary = + DenseMatrix::from_2d_array(&[&[1.0, 2.0, 3.0, f64::MAX], &[3.0, f64::MAX, 0.0, 2.0]]) + .unwrap(); let mut view0: Vec = boundary.get_col(0).iterator(0).copied().collect(); let indices = view0.argsort_mut(); assert_eq!(indices.last(), Some(&1)); @@ -2206,16 +2207,12 @@ mod tests { assert_eq!(indices.last(), Some(&0)); assert_eq!(indices.first(), Some(&1)); } - #[test] fn test_argsort_mut_filled_array() { let matrix = DenseMatrix::::rand(1000, 1000); let mut view: Vec = matrix.get_col(0).iterator(0).copied().collect(); let sorted = view.argsort_mut(); - assert_eq!( - sorted.len(), - 1000); + assert_eq!(sorted.len(), 1000); } - } From 17b4519d5f92f690aceb680fa97572b3b9e92ac5 Mon Sep 17 00:00:00 2001 From: Lorenzo Mec-iS Date: Sun, 1 Jun 2025 12:44:49 +0100 Subject: [PATCH 03/14] fix clippy error. suggestion to use .contains() --- src/neighbors/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/neighbors/mod.rs b/src/neighbors/mod.rs index 0abe9bdc..001fb1e9 100644 --- a/src/neighbors/mod.rs +++ b/src/neighbors/mod.rs @@ -64,7 +64,7 @@ impl KNNWeightFunction { KNNWeightFunction::Distance => { // if there are any points that has zero distance from one or more training points, // those training points are weighted as 1.0 and the other points as 0.0 - if distances.iter().any(|&e| e == 0f64) { + if distances.contains(&0f64) { distances .iter() .map(|e| if *e == 0f64 { 1f64 } else { 0f64 }) From 611a831d1888765c398243159a5f851a3ef2947d Mon Sep 17 00:00:00 2001 From: Lorenzo Date: Sun, 1 Jun 2025 14:13:42 +0000 Subject: [PATCH 04/14] define type explicitly for variable jstack --- src/linalg/basic/arrays.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/linalg/basic/arrays.rs b/src/linalg/basic/arrays.rs index e8f6ae67..a5abe634 100644 --- a/src/linalg/basic/arrays.rs +++ b/src/linalg/basic/arrays.rs @@ -619,7 +619,7 @@ pub trait MutArrayView1: T: Number + PartialOrd, { let stack_size = 64; - let mut jstack = -1; + let mut jstack: i32 = -1; let mut l = 0; let mut istack = vec![0; stack_size]; let mut ir = self.shape() - 1; From 2efb13799cd1065d9a6c0451eecd249a0dc59a46 Mon Sep 17 00:00:00 2001 From: Lorenzo Mec-iS Date: Sun, 1 Jun 2025 15:31:43 +0100 Subject: [PATCH 05/14] wip --- src/svm/mod.rs | 2 +- src/svm/search/svr_params.rs | 191 ++++++++++++++++++----------------- src/svm/svr.rs | 48 ++++----- 3 files changed, 124 insertions(+), 117 deletions(-) diff --git a/src/svm/mod.rs b/src/svm/mod.rs index f6baf8bb..b5a88149 100644 --- a/src/svm/mod.rs +++ b/src/svm/mod.rs @@ -26,7 +26,7 @@ pub mod svc; pub mod svr; // /// search parameters space -// pub mod search; +pub mod search; use core::fmt::Debug; diff --git a/src/svm/search/svr_params.rs b/src/svm/search/svr_params.rs index 03d0ecef..d3be20f3 100644 --- a/src/svm/search/svr_params.rs +++ b/src/svm/search/svr_params.rs @@ -1,105 +1,112 @@ -// /// SVR grid search parameters -// #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -// #[derive(Debug, Clone)] -// pub struct SVRSearchParameters, K: Kernel> { -// /// Epsilon in the epsilon-SVR model. 
-// pub eps: Vec, -// /// Regularization parameter. -// pub c: Vec, -// /// Tolerance for stopping eps. -// pub tol: Vec, -// /// The kernel function. -// pub kernel: Vec, -// /// Unused parameter. -// m: PhantomData, -// } +use crate::numbers::basenum::Number; +use crate::numbers::floatnum::FloatNumber; +use crate::numbers::realnum::RealNumber; +use crate::linalg::basic::arrays::Array2; +use crate::svm::{svr, Kernel, LinearKernel, Kernels}; +use std::marker::PhantomData; + + +/// SVR grid search parameters +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Debug, Clone)] +pub struct SVRSearchParameters> { + /// Epsilon in the epsilon-SVR model. + pub eps: Vec, + /// Regularization parameter. + pub c: Vec, + /// Tolerance for stopping eps. + pub tol: Vec, + /// The kernel function. + pub kernel: Option, + /// Unused parameter. + m: PhantomData, +} -// /// SVR grid search iterator -// pub struct SVRSearchParametersIterator, K: Kernel> { -// svr_search_parameters: SVRSearchParameters, -// current_eps: usize, -// current_c: usize, -// current_tol: usize, -// current_kernel: usize, -// } +/// SVR grid search iterator +pub struct SVRSearchParametersIterator> { + svr_search_parameters: SVRSearchParameters, + current_eps: usize, + current_c: usize, + current_tol: usize, + current_kernel: usize, +} -// impl, K: Kernel> IntoIterator -// for SVRSearchParameters -// { -// type Item = SVRParameters; -// type IntoIter = SVRSearchParametersIterator; +impl> IntoIterator + for SVRSearchParameters +{ + type Item = svr::SVRParameters; + type IntoIter = SVRSearchParametersIterator; -// fn into_iter(self) -> Self::IntoIter { -// SVRSearchParametersIterator { -// svr_search_parameters: self, -// current_eps: 0, -// current_c: 0, -// current_tol: 0, -// current_kernel: 0, -// } -// } -// } + fn into_iter(self) -> Self::IntoIter { + SVRSearchParametersIterator { + svr_search_parameters: self, + current_eps: 0, + current_c: 0, + current_tol: 0, + current_kernel: 0, + } + } +} -// impl, K: Kernel> Iterator -// for SVRSearchParametersIterator -// { -// type Item = SVRParameters; +impl> Iterator + for SVRSearchParametersIterator +{ + type Item = svr::SVRParameters; -// fn next(&mut self) -> Option { -// if self.current_eps == self.svr_search_parameters.eps.len() -// && self.current_c == self.svr_search_parameters.c.len() -// && self.current_tol == self.svr_search_parameters.tol.len() -// && self.current_kernel == self.svr_search_parameters.kernel.len() -// { -// return None; -// } + fn next(&mut self) -> Option { + if self.current_eps == self.svr_search_parameters.eps.len() + && self.current_c == self.svr_search_parameters.c.len() + && self.current_tol == self.svr_search_parameters.tol.len() + && self.current_kernel == self.svr_search_parameters.kernel.len() + { + return None; + } -// let next = SVRParameters:: { -// eps: self.svr_search_parameters.eps[self.current_eps], -// c: self.svr_search_parameters.c[self.current_c], -// tol: self.svr_search_parameters.tol[self.current_tol], -// kernel: self.svr_search_parameters.kernel[self.current_kernel].clone(), -// m: PhantomData, -// }; + let next = svr::SVRParameters:: { + eps: self.svr_search_parameters.eps[self.current_eps], + c: self.svr_search_parameters.c[self.current_c], + tol: self.svr_search_parameters.tol[self.current_tol], + kernel: self.svr_search_parameters.kernel[self.current_kernel].clone() + }; -// if self.current_eps + 1 < self.svr_search_parameters.eps.len() { -// self.current_eps += 1; -// } else if self.current_c + 1 < 
self.svr_search_parameters.c.len() { -// self.current_eps = 0; -// self.current_c += 1; -// } else if self.current_tol + 1 < self.svr_search_parameters.tol.len() { -// self.current_eps = 0; -// self.current_c = 0; -// self.current_tol += 1; -// } else if self.current_kernel + 1 < self.svr_search_parameters.kernel.len() { -// self.current_eps = 0; -// self.current_c = 0; -// self.current_tol = 0; -// self.current_kernel += 1; -// } else { -// self.current_eps += 1; -// self.current_c += 1; -// self.current_tol += 1; -// self.current_kernel += 1; -// } + if self.current_eps + 1 < self.svr_search_parameters.eps.len() { + self.current_eps += 1; + } else if self.current_c + 1 < self.svr_search_parameters.c.len() { + self.current_eps = 0; + self.current_c += 1; + } else if self.current_tol + 1 < self.svr_search_parameters.tol.len() { + self.current_eps = 0; + self.current_c = 0; + self.current_tol += 1; + } else if self.current_kernel + 1 < self.svr_search_parameters.kernel.len() { + self.current_eps = 0; + self.current_c = 0; + self.current_tol = 0; + self.current_kernel += 1; + } else { + self.current_eps += 1; + self.current_c += 1; + self.current_tol += 1; + self.current_kernel += 1; + } -// Some(next) -// } -// } + Some(next) + } +} -// impl> Default for SVRSearchParameters { -// fn default() -> Self { -// let default_params: SVRParameters = SVRParameters::default(); +impl> Default for SVRSearchParameters { + fn default() -> Self { + let default_params: svr::SVRParameters = svr::SVRParameters::default(); -// SVRSearchParameters { -// eps: vec![default_params.eps], -// c: vec![default_params.c], -// tol: vec![default_params.tol], -// kernel: vec![default_params.kernel], -// m: PhantomData, -// } -// } -// } + SVRSearchParameters { + eps: vec![default_params.eps], + c: vec![default_params.c], + tol: vec![default_params.tol], + kernel: vec![default_params.kernel], + m: PhantomData, + } + } +} // #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] // #[derive(Debug)] diff --git a/src/svm/svr.rs b/src/svm/svr.rs index 4ce0aa28..e3a01326 100644 --- a/src/svm/svr.rs +++ b/src/svm/svr.rs @@ -80,7 +80,7 @@ use crate::error::{Failed, FailedError}; use crate::linalg::basic::arrays::{Array1, Array2, MutArray}; use crate::numbers::basenum::Number; use crate::numbers::floatnum::FloatNumber; -use crate::svm::Kernel; +use crate::svm::{Kernel, Kernels}; #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug)] @@ -97,7 +97,7 @@ pub struct SVRParameters { all(feature = "serde", target_arch = "wasm32"), serde(skip_serializing, skip_deserializing) )] - pub kernel: Option>, + pub kernel: Option, } #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -160,8 +160,8 @@ impl SVRParameters { self } /// The kernel function. 
- pub fn with_kernel(mut self, kernel: K) -> Self { - self.kernel = Some(Box::new(kernel)); + pub fn with_kernel(mut self, kernel: Kernels) -> Self { + self.kernel = Some(kernel); self } } @@ -269,7 +269,6 @@ impl<'a, T: Number + FloatNumber + PartialOrd, X: Array2, Y: Array1> SVR<' .as_ref() .unwrap() .kernel - .as_ref() .unwrap() .apply(&xi, &self.instances.as_ref().unwrap()[i]) .unwrap(), @@ -597,25 +596,26 @@ mod tests { use super::*; use crate::linalg::basic::matrix::DenseMatrix; use crate::metrics::mean_squared_error; - use crate::svm::Kernels; - - // #[test] - // fn search_parameters() { - // let parameters: SVRSearchParameters, LinearKernel> = - // SVRSearchParameters { - // eps: vec![0., 1.], - // kernel: vec![LinearKernel {}], - // ..Default::default() - // }; - // let mut iter = parameters.into_iter(); - // let next = iter.next().unwrap(); - // assert_eq!(next.eps, 0.); - // assert_eq!(next.kernel, LinearKernel {}); - // let next = iter.next().unwrap(); - // assert_eq!(next.eps, 1.); - // assert_eq!(next.kernel, LinearKernel {}); - // assert!(iter.next().is_none()); - // } + use crate::svm::{Kernels, LinearKernel}; + use crate::svm::search::svr_params::SVRSearchParameters; + + #[test] + fn search_parameters() { + let parameters: SVRSearchParameters, LinearKernel> = + SVRSearchParameters { + eps: vec![0., 1.], + kernel: vec![LinearKernel {}], + ..Default::default() + }; + let mut iter = parameters.into_iter(); + let next = iter.next().unwrap(); + assert_eq!(next.eps, 0.); + // assert_eq!(next.kernel, LinearKernel {}); + // let next = iter.next().unwrap(); + // assert_eq!(next.eps, 1.); + // assert_eq!(next.kernel, LinearKernel {}); + // assert!(iter.next().is_none()); + } #[cfg_attr( all(target_arch = "wasm32", not(target_os = "wasi")), From 93f3fcc9a77b3fe76b824f612c33761ae429afcf Mon Sep 17 00:00:00 2001 From: Lorenzo Mec-iS Date: Sun, 1 Jun 2025 15:52:14 +0100 Subject: [PATCH 06/14] Implement kernel as enumerator --- src/svm/mod.rs | 243 +++++++++++++------------------------------------ 1 file changed, 64 insertions(+), 179 deletions(-) diff --git a/src/svm/mod.rs b/src/svm/mod.rs index b5a88149..dcbd9afe 100644 --- a/src/svm/mod.rs +++ b/src/svm/mod.rs @@ -48,205 +48,85 @@ pub trait Kernel: Debug { fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result; } -/// Pre-defined kernel functions -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Clone)] -pub struct Kernels; - -impl Kernels { - /// Return a default linear - pub fn linear() -> LinearKernel { - LinearKernel - } - /// Return a default RBF - pub fn rbf() -> RBFKernel { - RBFKernel::default() - } - /// Return a default polynomial - pub fn polynomial() -> PolynomialKernel { - PolynomialKernel::default() - } - /// Return a default sigmoid - pub fn sigmoid() -> SigmoidKernel { - SigmoidKernel::default() - } -} - -/// Linear Kernel -#[allow(clippy::derive_partial_eq_without_eq)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Clone, PartialEq, Eq, Default)] -pub struct LinearKernel; - -/// Radial basis function (Gaussian) kernel -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Default, Clone, PartialEq)] -pub struct RBFKernel { - /// kernel coefficient - pub gamma: Option, -} - -#[allow(dead_code)] -impl RBFKernel { - /// assign gamma parameter to kernel (required) - /// ```rust - /// use smartcore::svm::RBFKernel; - /// let knl = RBFKernel::default().with_gamma(0.7); - /// ``` - pub fn with_gamma(mut self, gamma: f64) -> Self { - 
self.gamma = Some(gamma); - self - } -} - -/// Polynomial kernel #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq)] -pub struct PolynomialKernel { - /// degree of the polynomial - pub degree: Option, - /// kernel coefficient - pub gamma: Option, - /// independent term in kernel function - pub coef0: Option, +pub enum Kernels { + Linear, + RBF { gamma: Option }, + Polynomial { degree: Option, gamma: Option, coef0: Option }, + Sigmoid { gamma: Option, coef0: Option }, } -impl Default for PolynomialKernel { - fn default() -> Self { - Self { - gamma: Option::None, - degree: Option::None, - coef0: Some(1f64), +impl Kernels { + pub fn linear() -> Self { Kernels::Linear } + pub fn rbf() -> Self { Kernels::RBF { gamma: None } } + pub fn polynomial() -> Self { Kernels::Polynomial { degree: None, gamma: None, coef0: Some(1.0) } } + pub fn sigmoid() -> Self { Kernels::Sigmoid { gamma: None, coef0: Some(1.0) } } + + // Builder-style methods for ergonomic parameter setting + pub fn with_gamma(self, gamma: f64) -> Self { + match self { + Kernels::RBF { .. } => Kernels::RBF { gamma: Some(gamma) }, + Kernels::Polynomial { degree, coef0, .. } => Kernels::Polynomial { degree, gamma: Some(gamma), coef0 }, + Kernels::Sigmoid { coef0, .. } => Kernels::Sigmoid { gamma: Some(gamma), coef0 }, + other => other, } } -} - -impl PolynomialKernel { - /// set parameters for kernel - /// ```rust - /// use smartcore::svm::PolynomialKernel; - /// let knl = PolynomialKernel::default().with_params(3.0, 0.7, 1.0); - /// ``` - pub fn with_params(mut self, degree: f64, gamma: f64, coef0: f64) -> Self { - self.degree = Some(degree); - self.gamma = Some(gamma); - self.coef0 = Some(coef0); - self - } - /// set gamma parameter for kernel - /// ```rust - /// use smartcore::svm::PolynomialKernel; - /// let knl = PolynomialKernel::default().with_gamma(0.7); - /// ``` - pub fn with_gamma(mut self, gamma: f64) -> Self { - self.gamma = Some(gamma); - self - } - /// set degree parameter for kernel - /// ```rust - /// use smartcore::svm::PolynomialKernel; - /// let knl = PolynomialKernel::default().with_degree(3.0, 100); - /// ``` - pub fn with_degree(self, degree: f64, n_features: usize) -> Self { - self.with_params(degree, 1f64, 1f64 / n_features as f64) - } -} - -/// Sigmoid (hyperbolic tangent) kernel -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Clone, PartialEq)] -pub struct SigmoidKernel { - /// kernel coefficient - pub gamma: Option, - /// independent term in kernel function - pub coef0: Option, -} - -impl Default for SigmoidKernel { - fn default() -> Self { - Self { - gamma: Option::None, - coef0: Some(1f64), + pub fn with_degree(self, degree: f64) -> Self { + match self { + Kernels::Polynomial { gamma, coef0, .. 
} => Kernels::Polynomial { degree: Some(degree), gamma, coef0 }, + other => other, } } -} - -impl SigmoidKernel { - /// set parameters for kernel - /// ```rust - /// use smartcore::svm::SigmoidKernel; - /// let knl = SigmoidKernel::default().with_params(0.7, 1.0); - /// ``` - pub fn with_params(mut self, gamma: f64, coef0: f64) -> Self { - self.gamma = Some(gamma); - self.coef0 = Some(coef0); - self - } - /// set gamma parameter for kernel - /// ```rust - /// use smartcore::svm::SigmoidKernel; - /// let knl = SigmoidKernel::default().with_gamma(0.7); - /// ``` - pub fn with_gamma(mut self, gamma: f64) -> Self { - self.gamma = Some(gamma); - self - } -} - -#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde)] -impl Kernel for LinearKernel { - fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result { - Ok(x_i.dot(x_j)) - } -} - -#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde)] -impl Kernel for RBFKernel { - fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result { - if self.gamma.is_none() { - return Err(Failed::because( - FailedError::ParametersError, - "gamma should be set, use {Kernel}::default().with_gamma(..)", - )); + pub fn with_coef0(self, coef0: f64) -> Self { + match self { + Kernels::Polynomial { degree, gamma, .. } => Kernels::Polynomial { degree, gamma, coef0: Some(coef0) }, + Kernels::Sigmoid { gamma, .. } => Kernels::Sigmoid { gamma, coef0: Some(coef0) }, + other => other, } - let v_diff = x_i.sub(x_j); - Ok((-self.gamma.unwrap() * v_diff.mul(&v_diff).sum()).exp()) } } -#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde)] -impl Kernel for PolynomialKernel { +impl Kernel for Kernels { fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result { - if self.gamma.is_none() || self.coef0.is_none() || self.degree.is_none() { - return Err(Failed::because( - FailedError::ParametersError, "gamma, coef0, degree should be set, - use {Kernel}::default().with_{parameter}(..)") - ); + match self { + Kernels::Linear => Ok(x_i.dot(x_j)), + Kernels::RBF { gamma } => { + let gamma = gamma.ok_or_else(|| Failed::because(FailedError::ParametersError, "gamma not set"))?; + let v_diff = x_i.sub(x_j); + Ok((-gamma * v_diff.mul(&v_diff).sum()).exp()) + } + Kernels::Polynomial { degree, gamma, coef0 } => { + let degree = degree.ok_or_else(|| Failed::because(FailedError::ParametersError, "degree not set"))?; + let gamma = gamma.ok_or_else(|| Failed::because(FailedError::ParametersError, "gamma not set"))?; + let coef0 = coef0.ok_or_else(|| Failed::because(FailedError::ParametersError, "coef0 not set"))?; + let dot = x_i.dot(x_j); + Ok((gamma * dot + coef0).powf(degree)) + } + Kernels::Sigmoid { gamma, coef0 } => { + let gamma = gamma.ok_or_else(|| Failed::because(FailedError::ParametersError, "gamma not set"))?; + let coef0 = coef0.ok_or_else(|| Failed::because(FailedError::ParametersError, "coef0 not set"))?; + let dot = x_i.dot(x_j); + Ok((gamma * dot + coef0).tanh()) + } } - let dot = x_i.dot(x_j); - Ok((self.gamma.unwrap() * dot + self.coef0.unwrap()).powf(self.degree.unwrap())) } } -#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde)] -impl Kernel for SigmoidKernel { - fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result { - if self.gamma.is_none() || self.coef0.is_none() { - return Err(Failed::because( - FailedError::ParametersError, "gamma, coef0, degree should be set, - use {Kernel}::default().with_{parameter}(..)") - ); - } - let dot = x_i.dot(x_j); - Ok(self.gamma.unwrap() * dot + self.coef0.unwrap().tanh()) - } 
-} #[cfg(test)] mod tests { use super::*; use crate::svm::Kernels; + #[test] + fn rbf_kernel() { + let v1 = vec![1., 2., 3.]; + let v2 = vec![4., 5., 6.]; + let result = Kernels::rbf().with_gamma(0.055).apply(&v1, &v2).unwrap().abs(); + assert!((0.2265f64 - result) < 1e-4); + } + #[cfg_attr( all(target_arch = "wasm32", not(target_os = "wasi")), wasm_bindgen_test::wasm_bindgen_test @@ -264,7 +144,7 @@ mod tests { wasm_bindgen_test::wasm_bindgen_test )] #[test] - fn rbf_kernel() { + fn test_rbf_kernel() { let v1 = vec![1., 2., 3.]; let v2 = vec![4., 5., 6.]; @@ -287,7 +167,10 @@ mod tests { let v2 = vec![4., 5., 6.]; let result = Kernels::polynomial() - .with_params(3.0, 0.5, 1.0) + .with_gamma(3.0) + .with_degree(0.5) + .with_coef0(1.0) + //.with_params(3.0, 0.5, 1.0) .apply(&v1, &v2) .unwrap() .abs(); @@ -305,7 +188,9 @@ mod tests { let v2 = vec![4., 5., 6.]; let result = Kernels::sigmoid() - .with_params(0.01, 0.1) + .with_gamma(0.01) + .with_coef0(0.1) + //.with_params(0.01, 0.1) .apply(&v1, &v2) .unwrap() .abs(); From efbe148aa56a22508afa9a405acced2830bac448 Mon Sep 17 00:00:00 2001 From: Lorenzo Mec-iS Date: Sun, 1 Jun 2025 16:32:15 +0100 Subject: [PATCH 07/14] basic svr and svr_params implementation --- src/svm/search/svr_params.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/svm/search/svr_params.rs b/src/svm/search/svr_params.rs index d3be20f3..1f08d809 100644 --- a/src/svm/search/svr_params.rs +++ b/src/svm/search/svr_params.rs @@ -2,7 +2,7 @@ use crate::numbers::basenum::Number; use crate::numbers::floatnum::FloatNumber; use crate::numbers::realnum::RealNumber; use crate::linalg::basic::arrays::Array2; -use crate::svm::{svr, Kernel, LinearKernel, Kernels}; +use crate::svm::{svr, Kernels}; use std::marker::PhantomData; @@ -17,9 +17,9 @@ pub struct SVRSearchParameters> { /// Tolerance for stopping eps. pub tol: Vec, /// The kernel function. - pub kernel: Option, + pub kernel: Vec, /// Unused parameter. 
- m: PhantomData, + pub m: PhantomData, } /// SVR grid search iterator @@ -66,7 +66,7 @@ impl> Iterator eps: self.svr_search_parameters.eps[self.current_eps], c: self.svr_search_parameters.c[self.current_c], tol: self.svr_search_parameters.tol[self.current_tol], - kernel: self.svr_search_parameters.kernel[self.current_kernel].clone() + kernel: Some(self.svr_search_parameters.kernel[self.current_kernel].clone()) }; if self.current_eps + 1 < self.svr_search_parameters.eps.len() { @@ -102,7 +102,7 @@ impl> Default for SVRSearchPa eps: vec![default_params.eps], c: vec![default_params.c], tol: vec![default_params.tol], - kernel: vec![default_params.kernel], + kernel: vec![default_params.kernel.unwrap()], m: PhantomData, } } From 5f8dd73a41ffa21b064c752e810df3898dcc1e27 Mon Sep 17 00:00:00 2001 From: Lorenzo Mec-iS Date: Sun, 1 Jun 2025 16:43:32 +0100 Subject: [PATCH 08/14] fix tests --- src/svm/mod.rs | 8 ++++---- src/svm/search/svr_params.rs | 2 +- src/svm/svr.rs | 13 +++++++------ 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/src/svm/mod.rs b/src/svm/mod.rs index dcbd9afe..4b9ee4de 100644 --- a/src/svm/mod.rs +++ b/src/svm/mod.rs @@ -60,14 +60,14 @@ pub enum Kernels { impl Kernels { pub fn linear() -> Self { Kernels::Linear } pub fn rbf() -> Self { Kernels::RBF { gamma: None } } - pub fn polynomial() -> Self { Kernels::Polynomial { degree: None, gamma: None, coef0: Some(1.0) } } + pub fn polynomial() -> Self { Kernels::Polynomial { gamma: None, degree: None, coef0: Some(1.0) } } pub fn sigmoid() -> Self { Kernels::Sigmoid { gamma: None, coef0: Some(1.0) } } // Builder-style methods for ergonomic parameter setting pub fn with_gamma(self, gamma: f64) -> Self { match self { Kernels::RBF { .. } => Kernels::RBF { gamma: Some(gamma) }, - Kernels::Polynomial { degree, coef0, .. } => Kernels::Polynomial { degree, gamma: Some(gamma), coef0 }, + Kernels::Polynomial { degree, coef0, .. } => Kernels::Polynomial { gamma: Some(gamma), degree, coef0 }, Kernels::Sigmoid { coef0, .. } => Kernels::Sigmoid { gamma: Some(gamma), coef0 }, other => other, } @@ -167,8 +167,8 @@ mod tests { let v2 = vec![4., 5., 6.]; let result = Kernels::polynomial() - .with_gamma(3.0) - .with_degree(0.5) + .with_gamma(0.5) + .with_degree(3.0) .with_coef0(1.0) //.with_params(3.0, 0.5, 1.0) .apply(&v1, &v2) diff --git a/src/svm/search/svr_params.rs b/src/svm/search/svr_params.rs index 1f08d809..acc7292f 100644 --- a/src/svm/search/svr_params.rs +++ b/src/svm/search/svr_params.rs @@ -102,7 +102,7 @@ impl> Default for SVRSearchPa eps: vec![default_params.eps], c: vec![default_params.c], tol: vec![default_params.tol], - kernel: vec![default_params.kernel.unwrap()], + kernel: vec![default_params.kernel.unwrap_or_else(Kernels::linear)], m: PhantomData, } } diff --git a/src/svm/svr.rs b/src/svm/svr.rs index e3a01326..7275696b 100644 --- a/src/svm/svr.rs +++ b/src/svm/svr.rs @@ -51,9 +51,9 @@ //! //! let knl = Kernels::linear(); //! let params = &SVRParameters::default().with_eps(2.0).with_c(10.0).with_kernel(knl); -//! // let svr = SVR::fit(&x, &y, params).unwrap(); +//! let svr = SVR::fit(&x, &y, params).unwrap(); //! -//! // let y_hat = svr.predict(&x).unwrap(); +//! let y_hat = svr.predict(&x).unwrap(); //! ``` //! //! 
## References: @@ -269,6 +269,7 @@ impl<'a, T: Number + FloatNumber + PartialOrd, X: Array2, Y: Array1> SVR<' .as_ref() .unwrap() .kernel + .as_ref() .unwrap() .apply(&xi, &self.instances.as_ref().unwrap()[i]) .unwrap(), @@ -596,15 +597,15 @@ mod tests { use super::*; use crate::linalg::basic::matrix::DenseMatrix; use crate::metrics::mean_squared_error; - use crate::svm::{Kernels, LinearKernel}; + use crate::svm::Kernels; use crate::svm::search::svr_params::SVRSearchParameters; #[test] fn search_parameters() { - let parameters: SVRSearchParameters, LinearKernel> = + let parameters: SVRSearchParameters> = SVRSearchParameters { eps: vec![0., 1.], - kernel: vec![LinearKernel {}], + kernel: vec![Kernels::linear()], ..Default::default() }; let mut iter = parameters.into_iter(); @@ -648,7 +649,7 @@ mod tests { 114.2, 115.7, 116.9, ]; - let knl = Kernels::linear(); + let knl: Kernels = Kernels::linear(); let y_hat = SVR::fit( &x, &y, From e371f10a3a13bb3dece33fb8cacd6dc5d16402ba Mon Sep 17 00:00:00 2001 From: Lorenzo Mec-iS Date: Sun, 1 Jun 2025 17:19:11 +0100 Subject: [PATCH 09/14] Complete enum implementation for Kernels. Implement search grid for SVR. Add documentation. --- src/svm/mod.rs | 267 +++++++++++++++++++++++++++++++---- src/svm/search/mod.rs | 2 + src/svm/search/svr_params.rs | 189 +++++++++++++++++++++++-- src/svm/svr.rs | 13 +- 4 files changed, 424 insertions(+), 47 deletions(-) diff --git a/src/svm/mod.rs b/src/svm/mod.rs index 4b9ee4de..772498aa 100644 --- a/src/svm/mod.rs +++ b/src/svm/mod.rs @@ -25,7 +25,7 @@ /// search parameters pub mod svc; pub mod svr; -// /// search parameters space +// search parameters space pub mod search; use core::fmt::Debug; @@ -48,64 +48,275 @@ pub trait Kernel: Debug { fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result; } -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +/// A enumerator for all the kernels type to support. +/// This allows kernel selection and parameterization ergonomic, type-safe, and ready for use in parameter structs like SVRParameters. +/// You can construct kernels using the provided variants and builder-style methods. +/// +/// # Examples +/// +/// ``` +/// use smartcore::svm::Kernels; +/// +/// let linear = Kernels::linear(); +/// let rbf = Kernels::rbf().with_gamma(0.5); +/// let poly = Kernels::polynomial().with_degree(3.0).with_gamma(0.5).with_coef0(1.0); +/// let sigmoid = Kernels::sigmoid().with_gamma(0.2).with_coef0(0.0); +/// ``` #[derive(Debug, Clone, PartialEq)] pub enum Kernels { + /// Linear kernel (default). + /// + /// Computes the standard dot product between vectors. Linear, - RBF { gamma: Option }, - Polynomial { degree: Option, gamma: Option, coef0: Option }, - Sigmoid { gamma: Option, coef0: Option }, + + /// Radial Basis Function (RBF) kernel. + /// + /// Formula: K(x, y) = exp(-gamma * ||x-y||²) + RBF { + /// Controls the width of the Gaussian RBF kernel. + /// + /// Larger values of gamma lead to higher bias and lower variance. + /// This parameter is inversely proportional to the radius of influence + /// of samples selected by the model as support vectors. + gamma: Option, + }, + + /// Polynomial kernel. + /// + /// Formula: K(x, y) = (gamma * + coef0)^degree + Polynomial { + /// The degree of the polynomial kernel. + /// + /// Integer values are typical (2 = quadratic, 3 = cubic), but any positive real value is valid. + /// Higher degree values create decision boundaries with higher complexity. + degree: Option, + + /// Kernel coefficient for the dot product. 
+ /// + /// Controls the influence of higher-degree versus lower-degree terms in the polynomial. + /// If None, a default value will be used. + gamma: Option, + + /// Independent term in the polynomial kernel. + /// + /// Controls the influence of higher-degree versus lower-degree terms. + /// If None, a default value of 1.0 will be used. + coef0: Option, + }, + + /// Sigmoid kernel. + /// + /// Formula: K(x, y) = tanh(gamma * + coef0) + Sigmoid { + /// Kernel coefficient for the dot product. + /// + /// Controls the scaling of the dot product in the sigmoid function. + /// If None, a default value will be used. + gamma: Option, + + /// Independent term in the sigmoid kernel. + /// + /// Acts as a threshold/bias term in the sigmoid function. + /// If None, a default value of 1.0 will be used. + coef0: Option, + }, } impl Kernels { - pub fn linear() -> Self { Kernels::Linear } - pub fn rbf() -> Self { Kernels::RBF { gamma: None } } - pub fn polynomial() -> Self { Kernels::Polynomial { gamma: None, degree: None, coef0: Some(1.0) } } - pub fn sigmoid() -> Self { Kernels::Sigmoid { gamma: None, coef0: Some(1.0) } } + /// Create a linear kernel. + /// + /// The linear kernel computes the dot product between two vectors: + /// K(x, y) = + pub fn linear() -> Self { + Kernels::Linear + } + + /// Create an RBF kernel with unspecified gamma. + /// + /// The RBF kernel is defined as: + /// K(x, y) = exp(-gamma * ||x-y||²) + /// + /// You should specify gamma using `with_gamma()` before using this kernel. + pub fn rbf() -> Self { + Kernels::RBF { gamma: None } + } + + /// Create a polynomial kernel with default parameters. + /// + /// The polynomial kernel is defined as: + /// K(x, y) = (gamma * + coef0)^degree + /// + /// Default values: + /// - gamma: None (must be specified) + /// - degree: None (must be specified) + /// - coef0: 1.0 + pub fn polynomial() -> Self { + Kernels::Polynomial { + gamma: None, + degree: None, + coef0: Some(1.0), + } + } - // Builder-style methods for ergonomic parameter setting + /// Create a sigmoid kernel with default parameters. + /// + /// The sigmoid kernel is defined as: + /// K(x, y) = tanh(gamma * + coef0) + /// + /// Default values: + /// - gamma: None (must be specified) + /// - coef0: 1.0 + /// + pub fn sigmoid() -> Self { + Kernels::Sigmoid { + gamma: None, + coef0: Some(1.0), + } + } + + /// Set the `gamma` parameter for RBF, polynomial, or sigmoid kernels. + /// + /// The gamma parameter has different interpretations depending on the kernel: + /// - For RBF: Controls the width of the Gaussian. Larger values mean tighter fit. + /// - For Polynomial: Scaling factor for the dot product. + /// - For Sigmoid: Scaling factor for the dot product. + /// pub fn with_gamma(self, gamma: f64) -> Self { match self { Kernels::RBF { .. } => Kernels::RBF { gamma: Some(gamma) }, - Kernels::Polynomial { degree, coef0, .. } => Kernels::Polynomial { gamma: Some(gamma), degree, coef0 }, - Kernels::Sigmoid { coef0, .. } => Kernels::Sigmoid { gamma: Some(gamma), coef0 }, + Kernels::Polynomial { degree, coef0, .. } => Kernels::Polynomial { + gamma: Some(gamma), + degree, + coef0, + }, + Kernels::Sigmoid { coef0, .. } => Kernels::Sigmoid { + gamma: Some(gamma), + coef0, + }, other => other, } } + + /// Set the `degree` parameter for the polynomial kernel. + /// + /// The degree parameter controls the flexibility of the decision boundary. + /// Higher degrees create more complex boundaries but may lead to overfitting. 
+ /// pub fn with_degree(self, degree: f64) -> Self { match self { - Kernels::Polynomial { gamma, coef0, .. } => Kernels::Polynomial { degree: Some(degree), gamma, coef0 }, + Kernels::Polynomial { gamma, coef0, .. } => Kernels::Polynomial { + degree: Some(degree), + gamma, + coef0, + }, other => other, } } + + /// Set the `coef0` parameter for polynomial or sigmoid kernels. + /// + /// The coef0 parameter is the independent term in the kernel function: + /// - For Polynomial: Controls the influence of higher-degree vs. lower-degree terms. + /// - For Sigmoid: Acts as a threshold/bias term. + /// pub fn with_coef0(self, coef0: f64) -> Self { match self { - Kernels::Polynomial { degree, gamma, .. } => Kernels::Polynomial { degree, gamma, coef0: Some(coef0) }, - Kernels::Sigmoid { gamma, .. } => Kernels::Sigmoid { gamma, coef0: Some(coef0) }, + Kernels::Polynomial { degree, gamma, .. } => Kernels::Polynomial { + degree, + gamma, + coef0: Some(coef0), + }, + Kernels::Sigmoid { gamma, .. } => Kernels::Sigmoid { + gamma, + coef0: Some(coef0), + }, other => other, } } } +/// Implementation of the [`Kernel`] trait for the [`Kernels`] enum in smartcore. +/// +/// This method computes the value of the kernel function between two feature vectors `x_i` and `x_j`, +/// according to the variant and parameters of the [`Kernels`] enum. This enables flexible and type-safe +/// selection of kernel functions for SVM and SVR models in smartcore. +/// +/// # Supported Kernels +/// +/// - [`Kernels::Linear`]: Computes the standard dot product between `x_i` and `x_j`. +/// - [`Kernels::RBF`]: Computes the Radial Basis Function (Gaussian) kernel. Requires `gamma`. +/// - [`Kernels::Polynomial`]: Computes the polynomial kernel. Requires `degree`, `gamma`, and `coef0`. +/// - [`Kernels::Sigmoid`]: Computes the sigmoid kernel. Requires `gamma` and `coef0`. +/// +/// # Parameters +/// +/// - `x_i`: First input vector (feature vector). +/// - `x_j`: Second input vector (feature vector). +/// +/// # Returns +/// +/// - `Ok(f64)`: The computed kernel value. +/// - `Err(Failed)`: If any required kernel parameter is missing. +/// +/// # Errors +/// +/// Returns `Err(Failed)` if a required parameter (such as `gamma`, `degree`, or `coef0`) +/// is `None` for the selected kernel variant. +/// +/// # Example +/// +/// ``` +/// use smartcore::svm::Kernels; +/// use smartcore::svm::Kernel; +/// +/// let x = vec![1.0, 2.0, 3.0]; +/// let y = vec![4.0, 5.0, 6.0]; +/// let kernel = Kernels::rbf().with_gamma(0.5); +/// let value = kernel.apply(&x, &y).unwrap(); +/// ``` +/// +/// # Notes +/// +/// - This implementation follows smartcore's philosophy: pure Rust, no macros, no unsafe code, +/// and an accessible, pythonic API surface for both ML practitioners and Rust beginners. +/// - All kernel parameters must be set before calling `apply`; missing parameters will result in an error. +/// +/// See the [`Kernels`] enum documentation for more details on each kernel type and its parameters. 
impl Kernel for Kernels { fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result { match self { Kernels::Linear => Ok(x_i.dot(x_j)), Kernels::RBF { gamma } => { - let gamma = gamma.ok_or_else(|| Failed::because(FailedError::ParametersError, "gamma not set"))?; + let gamma = gamma.ok_or_else(|| { + Failed::because(FailedError::ParametersError, "gamma not set") + })?; let v_diff = x_i.sub(x_j); Ok((-gamma * v_diff.mul(&v_diff).sum()).exp()) } - Kernels::Polynomial { degree, gamma, coef0 } => { - let degree = degree.ok_or_else(|| Failed::because(FailedError::ParametersError, "degree not set"))?; - let gamma = gamma.ok_or_else(|| Failed::because(FailedError::ParametersError, "gamma not set"))?; - let coef0 = coef0.ok_or_else(|| Failed::because(FailedError::ParametersError, "coef0 not set"))?; + Kernels::Polynomial { + degree, + gamma, + coef0, + } => { + let degree = degree.ok_or_else(|| { + Failed::because(FailedError::ParametersError, "degree not set") + })?; + let gamma = gamma.ok_or_else(|| { + Failed::because(FailedError::ParametersError, "gamma not set") + })?; + let coef0 = coef0.ok_or_else(|| { + Failed::because(FailedError::ParametersError, "coef0 not set") + })?; let dot = x_i.dot(x_j); Ok((gamma * dot + coef0).powf(degree)) } Kernels::Sigmoid { gamma, coef0 } => { - let gamma = gamma.ok_or_else(|| Failed::because(FailedError::ParametersError, "gamma not set"))?; - let coef0 = coef0.ok_or_else(|| Failed::because(FailedError::ParametersError, "coef0 not set"))?; + let gamma = gamma.ok_or_else(|| { + Failed::because(FailedError::ParametersError, "gamma not set") + })?; + let coef0 = coef0.ok_or_else(|| { + Failed::because(FailedError::ParametersError, "coef0 not set") + })?; let dot = x_i.dot(x_j); Ok((gamma * dot + coef0).tanh()) } @@ -113,7 +324,6 @@ impl Kernel for Kernels { } } - #[cfg(test)] mod tests { use super::*; @@ -123,7 +333,11 @@ mod tests { fn rbf_kernel() { let v1 = vec![1., 2., 3.]; let v2 = vec![4., 5., 6.]; - let result = Kernels::rbf().with_gamma(0.055).apply(&v1, &v2).unwrap().abs(); + let result = Kernels::rbf() + .with_gamma(0.055) + .apply(&v1, &v2) + .unwrap() + .abs(); assert!((0.2265f64 - result) < 1e-4); } @@ -188,9 +402,8 @@ mod tests { let v2 = vec![4., 5., 6.]; let result = Kernels::sigmoid() - .with_gamma(0.01) - .with_coef0(0.1) - //.with_params(0.01, 0.1) + .with_gamma(0.01) + .with_coef0(0.1) .apply(&v1, &v2) .unwrap() .abs(); diff --git a/src/svm/search/mod.rs b/src/svm/search/mod.rs index 6d86feb5..d61b8326 100644 --- a/src/svm/search/mod.rs +++ b/src/svm/search/mod.rs @@ -1,3 +1,5 @@ +//! SVC and Grid Search + /// SVC search parameters pub mod svc_params; /// SVC search parameters diff --git a/src/svm/search/svr_params.rs b/src/svm/search/svr_params.rs index acc7292f..454cc67f 100644 --- a/src/svm/search/svr_params.rs +++ b/src/svm/search/svr_params.rs @@ -1,12 +1,75 @@ +//! # SVR Grid Search Parameters +//! +//! This module provides utilities for defining and iterating over grid search parameter spaces +//! for Support Vector Regression (SVR) models in [smartcore](https://github.com/smartcorelib/smartcore). +//! +//! The main struct, [`SVRSearchParameters`], allows users to specify multiple values for each +//! SVR hyperparameter (epsilon, regularization parameter C, tolerance, and kernel function). +//! The provided iterator yields all possible combinations (the Cartesian product) of these parameters, +//! enabling exhaustive grid search for hyperparameter tuning. +//! +//! +//! ## Example +//! ``` +//! use smartcore::svm::Kernels; +//! 
use smartcore::svm::search::svr_params::SVRSearchParameters; +//! use smartcore::linalg::basic::matrix::DenseMatrix; +//! +//! let params = SVRSearchParameters::> { +//! eps: vec![0.1, 0.2], +//! c: vec![1.0, 10.0], +//! tol: vec![1e-3], +//! kernel: vec![Kernels::linear(), Kernels::rbf().with_gamma(0.5)], +//! m: std::marker::PhantomData, +//! }; +//! +//! // for param_set in params.into_iter() { +//! // Use param_set (of type svr::SVRParameters) to fit and evaluate your SVR model. +//! // } +//! ``` +//! +//! +//! ## Note +//! This module is intended for use with smartcore version 0.4 or later. The API is not compatible with older versions[1]. +use crate::linalg::basic::arrays::Array2; use crate::numbers::basenum::Number; use crate::numbers::floatnum::FloatNumber; use crate::numbers::realnum::RealNumber; -use crate::linalg::basic::arrays::Array2; use crate::svm::{svr, Kernels}; use std::marker::PhantomData; - -/// SVR grid search parameters +/// ## SVR grid search parameters +/// A struct representing a grid of hyperparameters for SVR grid search in smartcore. +/// +/// Each field is a vector of possible values for the corresponding SVR hyperparameter. +/// The [`IntoIterator`] implementation yields every possible combination of these parameters +/// as an `svr::SVRParameters` struct, suitable for use in model selection routines. +/// +/// # Type Parameters +/// - `T`: Numeric type for parameters (e.g., `f64`) +/// - `M`: Matrix type implementing [`Array2`] +/// +/// # Fields +/// - `eps`: Vector of epsilon values for the epsilon-insensitive loss in SVR. +/// - `c`: Vector of regularization parameters (C) for SVR. +/// - `tol`: Vector of tolerance values for the stopping criterion. +/// - `kernel`: Vector of kernel function variants (see [`Kernels`]). +/// - `m`: Phantom data for the matrix type parameter. 
+/// +/// # Example +/// ``` +/// use smartcore::svm::Kernels; +/// use smartcore::svm::search::svr_params::SVRSearchParameters; +/// use smartcore::linalg::basic::matrix::DenseMatrix; +/// +/// let params = SVRSearchParameters::> { +/// eps: vec![0.1, 0.2], +/// c: vec![1.0, 10.0], +/// tol: vec![1e-3], +/// kernel: vec![Kernels::linear(), Kernels::rbf().with_gamma(0.5)], +/// m: std::marker::PhantomData, +/// }; +/// ``` #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone)] pub struct SVRSearchParameters> { @@ -66,7 +129,7 @@ impl> Iterator eps: self.svr_search_parameters.eps[self.current_eps], c: self.svr_search_parameters.c[self.current_c], tol: self.svr_search_parameters.tol[self.current_tol], - kernel: Some(self.svr_search_parameters.kernel[self.current_kernel].clone()) + kernel: Some(self.svr_search_parameters.kernel[self.current_kernel].clone()), }; if self.current_eps + 1 < self.svr_search_parameters.eps.len() { @@ -108,12 +171,112 @@ impl> Default for SVRSearchPa } } -// #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -// #[derive(Debug)] -// #[cfg_attr( -// feature = "serde", -// serde(bound( -// serialize = "M::RowVector: Serialize, K: Serialize, T: Serialize", -// deserialize = "M::RowVector: Deserialize<'de>, K: Deserialize<'de>, T: Deserialize<'de>", -// )) -// )] +#[cfg(test)] +mod tests { + use super::*; + use crate::svm::Kernels; + use crate::linalg::basic::matrix::DenseMatrix; + + type T = f64; + type M = DenseMatrix; + + #[test] + fn test_default_parameters() { + let params = SVRSearchParameters::::default(); + assert_eq!(params.eps.len(), 1); + assert_eq!(params.c.len(), 1); + assert_eq!(params.tol.len(), 1); + assert_eq!(params.kernel.len(), 1); + // Check that the default kernel is linear + assert_eq!(params.kernel[0], Kernels::linear()); + } + + #[test] + fn test_single_grid_iteration() { + let params = SVRSearchParameters:: { + eps: vec![0.1], + c: vec![1.0], + tol: vec![1e-3], + kernel: vec![Kernels::rbf().with_gamma(0.5)], + m: PhantomData, + }; + let mut iter = params.into_iter(); + let param = iter.next().unwrap(); + assert_eq!(param.eps, 0.1); + assert_eq!(param.c, 1.0); + assert_eq!(param.tol, 1e-3); + assert_eq!(param.kernel, Some(Kernels::rbf().with_gamma(0.5))); + assert!(iter.next().is_none()); + } + + #[test] + fn test_cartesian_grid_iteration() { + let params = SVRSearchParameters:: { + eps: vec![0.1, 0.2], + c: vec![1.0, 2.0], + tol: vec![1e-3], + kernel: vec![Kernels::linear(), Kernels::rbf().with_gamma(0.5)], + m: PhantomData, + }; + let expected_count = params.eps.len() * params.c.len() * params.tol.len() * params.kernel.len(); + let results: Vec<_> = params.into_iter().collect(); + assert_eq!(results.len(), expected_count); + + // Check that all parameter combinations are present + let mut seen = vec![]; + for p in &results { + seen.push((p.eps, p.c, p.tol, p.kernel.clone().unwrap())); + } + for &eps in &[0.1, 0.2] { + for &c in &[1.0, 2.0] { + for &tol in &[1e-3] { + for kernel in &[Kernels::linear(), Kernels::rbf().with_gamma(0.5)] { + assert!(seen.contains(&(eps, c, tol, kernel.clone()))); + } + } + } + } + } + + #[test] + fn test_empty_grid() { + let params = SVRSearchParameters:: { + eps: vec![], + c: vec![], + tol: vec![], + kernel: vec![], + m: PhantomData, + }; + let mut iter = params.into_iter(); + assert!(iter.next().is_none()); + } + + #[test] + fn test_kernel_enum_variants() { + let lin = Kernels::linear(); + let rbf = Kernels::rbf().with_gamma(0.2); + let poly = 
Kernels::polynomial().with_degree(2.0).with_gamma(1.0).with_coef0(0.5); + let sig = Kernels::sigmoid().with_gamma(0.3).with_coef0(0.1); + + assert_eq!(lin, Kernels::Linear); + match rbf { + Kernels::RBF { gamma } => assert_eq!(gamma, Some(0.2)), + _ => panic!("Not RBF"), + } + match poly { + Kernels::Polynomial { degree, gamma, coef0 } => { + assert_eq!(degree, Some(2.0)); + assert_eq!(gamma, Some(1.0)); + assert_eq!(coef0, Some(0.5)); + } + _ => panic!("Not Polynomial"), + } + match sig { + Kernels::Sigmoid { gamma, coef0 } => { + assert_eq!(gamma, Some(0.3)); + assert_eq!(coef0, Some(0.1)); + } + _ => panic!("Not Sigmoid"), + } + } +} diff --git a/src/svm/svr.rs b/src/svm/svr.rs index 7275696b..66369b4c 100644 --- a/src/svm/svr.rs +++ b/src/svm/svr.rs @@ -597,17 +597,16 @@ mod tests { use super::*; use crate::linalg::basic::matrix::DenseMatrix; use crate::metrics::mean_squared_error; - use crate::svm::Kernels; use crate::svm::search::svr_params::SVRSearchParameters; + use crate::svm::Kernels; #[test] fn search_parameters() { - let parameters: SVRSearchParameters> = - SVRSearchParameters { - eps: vec![0., 1.], - kernel: vec![Kernels::linear()], - ..Default::default() - }; + let parameters: SVRSearchParameters> = SVRSearchParameters { + eps: vec![0., 1.], + kernel: vec![Kernels::linear()], + ..Default::default() + }; let mut iter = parameters.into_iter(); let next = iter.next().unwrap(); assert_eq!(next.eps, 0.); From 4c653f843c9077f360dd247befa6fad83c22033e Mon Sep 17 00:00:00 2001 From: Lorenzo Mec-iS Date: Sun, 1 Jun 2025 17:47:29 +0100 Subject: [PATCH 10/14] Fix serde configuration in cargo clippy --- src/svm/mod.rs | 2 ++ src/svm/search/svr_params.rs | 19 +++++++++++++++---- src/svm/svr.rs | 3 ++- 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/src/svm/mod.rs b/src/svm/mod.rs index 772498aa..72b9626d 100644 --- a/src/svm/mod.rs +++ b/src/svm/mod.rs @@ -62,6 +62,7 @@ pub trait Kernel: Debug { /// let poly = Kernels::polynomial().with_degree(3.0).with_gamma(0.5).with_coef0(1.0); /// let sigmoid = Kernels::sigmoid().with_gamma(0.2).with_coef0(0.0); /// ``` +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq)] pub enum Kernels { /// Linear kernel (default). @@ -282,6 +283,7 @@ impl Kernels { /// - All kernel parameters must be set before calling `apply`; missing parameters will result in an error. /// /// See the [`Kernels`] enum documentation for more details on each kernel type and its parameters. +#[cfg_attr(feature = "serde", typetag::serde)] impl Kernel for Kernels { fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result { match self { diff --git a/src/svm/search/svr_params.rs b/src/svm/search/svr_params.rs index 454cc67f..8a819354 100644 --- a/src/svm/search/svr_params.rs +++ b/src/svm/search/svr_params.rs @@ -31,6 +31,9 @@ //! //! ## Note //! This module is intended for use with smartcore version 0.4 or later. The API is not compatible with older versions[1]. 
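As the module docs above describe, the SVR grid iterator yields the Cartesian product of the parameter vectors. A simplified, self-contained sketch of the same counting logic over plain vectors (hypothetical names, not the smartcore API); the `test_cartesian_grid_iteration` case further below asserts exactly this product count:

```rust
/// Enumerate every (eps, c) combination, the same Cartesian product that
/// SVRSearchParametersIterator walks across eps, c, tol, and kernel.
fn cartesian_grid(eps: &[f64], c: &[f64]) -> Vec<(f64, f64)> {
    let mut combos = Vec::with_capacity(eps.len() * c.len());
    for &e in eps {
        for &reg in c {
            combos.push((e, reg));
        }
    }
    combos
}

fn main() {
    // A 2 x 2 grid yields 4 candidate parameter sets, mirroring the
    // expected_count assertion in test_cartesian_grid_iteration.
    let combos = cartesian_grid(&[0.1, 0.2], &[1.0, 2.0]);
    assert_eq!(combos.len(), 4);
}
```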
+#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; + use crate::linalg::basic::arrays::Array2; use crate::numbers::basenum::Number; use crate::numbers::floatnum::FloatNumber; @@ -174,8 +177,8 @@ impl> Default for SVRSearchPa #[cfg(test)] mod tests { use super::*; - use crate::svm::Kernels; use crate::linalg::basic::matrix::DenseMatrix; + use crate::svm::Kernels; type T = f64; type M = DenseMatrix; @@ -218,7 +221,8 @@ mod tests { kernel: vec![Kernels::linear(), Kernels::rbf().with_gamma(0.5)], m: PhantomData, }; - let expected_count = params.eps.len() * params.c.len() * params.tol.len() * params.kernel.len(); + let expected_count = + params.eps.len() * params.c.len() * params.tol.len() * params.kernel.len(); let results: Vec<_> = params.into_iter().collect(); assert_eq!(results.len(), expected_count); @@ -255,7 +259,10 @@ mod tests { fn test_kernel_enum_variants() { let lin = Kernels::linear(); let rbf = Kernels::rbf().with_gamma(0.2); - let poly = Kernels::polynomial().with_degree(2.0).with_gamma(1.0).with_coef0(0.5); + let poly = Kernels::polynomial() + .with_degree(2.0) + .with_gamma(1.0) + .with_coef0(0.5); let sig = Kernels::sigmoid().with_gamma(0.3).with_coef0(0.1); assert_eq!(lin, Kernels::Linear); @@ -264,7 +271,11 @@ mod tests { _ => panic!("Not RBF"), } match poly { - Kernels::Polynomial { degree, gamma, coef0 } => { + Kernels::Polynomial { + degree, + gamma, + coef0, + } => { assert_eq!(degree, Some(2.0)); assert_eq!(gamma, Some(1.0)); assert_eq!(coef0, Some(0.5)); diff --git a/src/svm/svr.rs b/src/svm/svr.rs index 66369b4c..e912743b 100644 --- a/src/svm/svr.rs +++ b/src/svm/svr.rs @@ -80,11 +80,12 @@ use crate::error::{Failed, FailedError}; use crate::linalg::basic::arrays::{Array1, Array2, MutArray}; use crate::numbers::basenum::Number; use crate::numbers::floatnum::FloatNumber; + use crate::svm::{Kernel, Kernels}; +/// SVR Parameters #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug)] -/// SVR Parameters pub struct SVRParameters { /// Epsilon in the epsilon-SVR model. pub eps: T, From 29049af8a6f75c063f0c18d320bb4ced5a668d9e Mon Sep 17 00:00:00 2001 From: Lorenzo Date: Mon, 2 Jun 2025 01:55:29 +0900 Subject: [PATCH 11/14] Implement search parameters (#304) * Implement SVR kernels as enumerator * basic svr and svr_params implementation * Implement search grid for SVR. Add documentation. * Fix serde configuration in cargo clippy --- src/svm/mod.rs | 436 +++++++++++++++++++++-------------- src/svm/search/mod.rs | 2 + src/svm/search/svr_params.rs | 405 +++++++++++++++++++++++--------- src/svm/svr.rs | 51 ++-- 4 files changed, 589 insertions(+), 305 deletions(-) diff --git a/src/svm/mod.rs b/src/svm/mod.rs index f6baf8bb..72b9626d 100644 --- a/src/svm/mod.rs +++ b/src/svm/mod.rs @@ -25,8 +25,8 @@ /// search parameters pub mod svc; pub mod svr; -// /// search parameters space -// pub mod search; +// search parameters space +pub mod search; use core::fmt::Debug; @@ -48,197 +48,281 @@ pub trait Kernel: Debug { fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result; } -/// Pre-defined kernel functions +/// A enumerator for all the kernels type to support. +/// This allows kernel selection and parameterization ergonomic, type-safe, and ready for use in parameter structs like SVRParameters. +/// You can construct kernels using the provided variants and builder-style methods. 
+/// +/// # Examples +/// +/// ``` +/// use smartcore::svm::Kernels; +/// +/// let linear = Kernels::linear(); +/// let rbf = Kernels::rbf().with_gamma(0.5); +/// let poly = Kernels::polynomial().with_degree(3.0).with_gamma(0.5).with_coef0(1.0); +/// let sigmoid = Kernels::sigmoid().with_gamma(0.2).with_coef0(0.0); +/// ``` #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Clone)] -pub struct Kernels; +#[derive(Debug, Clone, PartialEq)] +pub enum Kernels { + /// Linear kernel (default). + /// + /// Computes the standard dot product between vectors. + Linear, -impl Kernels { - /// Return a default linear - pub fn linear() -> LinearKernel { - LinearKernel - } - /// Return a default RBF - pub fn rbf() -> RBFKernel { - RBFKernel::default() - } - /// Return a default polynomial - pub fn polynomial() -> PolynomialKernel { - PolynomialKernel::default() - } - /// Return a default sigmoid - pub fn sigmoid() -> SigmoidKernel { - SigmoidKernel::default() - } -} + /// Radial Basis Function (RBF) kernel. + /// + /// Formula: K(x, y) = exp(-gamma * ||x-y||²) + RBF { + /// Controls the width of the Gaussian RBF kernel. + /// + /// Larger values of gamma lead to higher bias and lower variance. + /// This parameter is inversely proportional to the radius of influence + /// of samples selected by the model as support vectors. + gamma: Option, + }, -/// Linear Kernel -#[allow(clippy::derive_partial_eq_without_eq)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Clone, PartialEq, Eq, Default)] -pub struct LinearKernel; + /// Polynomial kernel. + /// + /// Formula: K(x, y) = (gamma * + coef0)^degree + Polynomial { + /// The degree of the polynomial kernel. + /// + /// Integer values are typical (2 = quadratic, 3 = cubic), but any positive real value is valid. + /// Higher degree values create decision boundaries with higher complexity. + degree: Option, -/// Radial basis function (Gaussian) kernel -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Default, Clone, PartialEq)] -pub struct RBFKernel { - /// kernel coefficient - pub gamma: Option, -} + /// Kernel coefficient for the dot product. + /// + /// Controls the influence of higher-degree versus lower-degree terms in the polynomial. + /// If None, a default value will be used. + gamma: Option, -#[allow(dead_code)] -impl RBFKernel { - /// assign gamma parameter to kernel (required) - /// ```rust - /// use smartcore::svm::RBFKernel; - /// let knl = RBFKernel::default().with_gamma(0.7); - /// ``` - pub fn with_gamma(mut self, gamma: f64) -> Self { - self.gamma = Some(gamma); - self - } -} + /// Independent term in the polynomial kernel. + /// + /// Controls the influence of higher-degree versus lower-degree terms. + /// If None, a default value of 1.0 will be used. + coef0: Option, + }, -/// Polynomial kernel -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Clone, PartialEq)] -pub struct PolynomialKernel { - /// degree of the polynomial - pub degree: Option, - /// kernel coefficient - pub gamma: Option, - /// independent term in kernel function - pub coef0: Option, -} + /// Sigmoid kernel. + /// + /// Formula: K(x, y) = tanh(gamma * + coef0) + Sigmoid { + /// Kernel coefficient for the dot product. + /// + /// Controls the scaling of the dot product in the sigmoid function. + /// If None, a default value will be used. 
+ gamma: Option, -impl Default for PolynomialKernel { - fn default() -> Self { - Self { - gamma: Option::None, - degree: Option::None, - coef0: Some(1f64), - } - } + /// Independent term in the sigmoid kernel. + /// + /// Acts as a threshold/bias term in the sigmoid function. + /// If None, a default value of 1.0 will be used. + coef0: Option, + }, } -impl PolynomialKernel { - /// set parameters for kernel - /// ```rust - /// use smartcore::svm::PolynomialKernel; - /// let knl = PolynomialKernel::default().with_params(3.0, 0.7, 1.0); - /// ``` - pub fn with_params(mut self, degree: f64, gamma: f64, coef0: f64) -> Self { - self.degree = Some(degree); - self.gamma = Some(gamma); - self.coef0 = Some(coef0); - self - } - /// set gamma parameter for kernel - /// ```rust - /// use smartcore::svm::PolynomialKernel; - /// let knl = PolynomialKernel::default().with_gamma(0.7); - /// ``` - pub fn with_gamma(mut self, gamma: f64) -> Self { - self.gamma = Some(gamma); - self - } - /// set degree parameter for kernel - /// ```rust - /// use smartcore::svm::PolynomialKernel; - /// let knl = PolynomialKernel::default().with_degree(3.0, 100); - /// ``` - pub fn with_degree(self, degree: f64, n_features: usize) -> Self { - self.with_params(degree, 1f64, 1f64 / n_features as f64) +impl Kernels { + /// Create a linear kernel. + /// + /// The linear kernel computes the dot product between two vectors: + /// K(x, y) = + pub fn linear() -> Self { + Kernels::Linear } -} -/// Sigmoid (hyperbolic tangent) kernel -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Clone, PartialEq)] -pub struct SigmoidKernel { - /// kernel coefficient - pub gamma: Option, - /// independent term in kernel function - pub coef0: Option, -} + /// Create an RBF kernel with unspecified gamma. + /// + /// The RBF kernel is defined as: + /// K(x, y) = exp(-gamma * ||x-y||²) + /// + /// You should specify gamma using `with_gamma()` before using this kernel. + pub fn rbf() -> Self { + Kernels::RBF { gamma: None } + } -impl Default for SigmoidKernel { - fn default() -> Self { - Self { - gamma: Option::None, - coef0: Some(1f64), + /// Create a polynomial kernel with default parameters. + /// + /// The polynomial kernel is defined as: + /// K(x, y) = (gamma * + coef0)^degree + /// + /// Default values: + /// - gamma: None (must be specified) + /// - degree: None (must be specified) + /// - coef0: 1.0 + pub fn polynomial() -> Self { + Kernels::Polynomial { + gamma: None, + degree: None, + coef0: Some(1.0), } } -} -impl SigmoidKernel { - /// set parameters for kernel - /// ```rust - /// use smartcore::svm::SigmoidKernel; - /// let knl = SigmoidKernel::default().with_params(0.7, 1.0); - /// ``` - pub fn with_params(mut self, gamma: f64, coef0: f64) -> Self { - self.gamma = Some(gamma); - self.coef0 = Some(coef0); - self - } - /// set gamma parameter for kernel - /// ```rust - /// use smartcore::svm::SigmoidKernel; - /// let knl = SigmoidKernel::default().with_gamma(0.7); - /// ``` - pub fn with_gamma(mut self, gamma: f64) -> Self { - self.gamma = Some(gamma); - self + /// Create a sigmoid kernel with default parameters. 
+    ///
+    /// The sigmoid kernel is defined as:
+    /// K(x, y) = tanh(gamma * <x, y> + coef0)
+    ///
+    /// Default values:
+    /// - gamma: None (must be specified)
+    /// - coef0: 1.0
+    ///
+    pub fn sigmoid() -> Self {
+        Kernels::Sigmoid {
+            gamma: None,
+            coef0: Some(1.0),
+        }
     }
-}

-#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde)]
-impl Kernel for LinearKernel {
-    fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
-        Ok(x_i.dot(x_j))
+    /// Set the `gamma` parameter for RBF, polynomial, or sigmoid kernels.
+    ///
+    /// The gamma parameter has different interpretations depending on the kernel:
+    /// - For RBF: Controls the width of the Gaussian. Larger values mean tighter fit.
+    /// - For Polynomial: Scaling factor for the dot product.
+    /// - For Sigmoid: Scaling factor for the dot product.
+    ///
+    pub fn with_gamma(self, gamma: f64) -> Self {
+        match self {
+            Kernels::RBF { .. } => Kernels::RBF { gamma: Some(gamma) },
+            Kernels::Polynomial { degree, coef0, .. } => Kernels::Polynomial {
+                gamma: Some(gamma),
+                degree,
+                coef0,
+            },
+            Kernels::Sigmoid { coef0, .. } => Kernels::Sigmoid {
+                gamma: Some(gamma),
+                coef0,
+            },
+            other => other,
+        }
     }
-}

-#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde)]
-impl Kernel for RBFKernel {
-    fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
-        if self.gamma.is_none() {
-            return Err(Failed::because(
-                FailedError::ParametersError,
-                "gamma should be set, use {Kernel}::default().with_gamma(..)",
-            ));
+    /// Set the `degree` parameter for the polynomial kernel.
+    ///
+    /// The degree parameter controls the flexibility of the decision boundary.
+    /// Higher degrees create more complex boundaries but may lead to overfitting.
+    ///
+    pub fn with_degree(self, degree: f64) -> Self {
+        match self {
+            Kernels::Polynomial { gamma, coef0, .. } => Kernels::Polynomial {
+                degree: Some(degree),
+                gamma,
+                coef0,
+            },
+            other => other,
         }
-        let v_diff = x_i.sub(x_j);
-        Ok((-self.gamma.unwrap() * v_diff.mul(&v_diff).sum()).exp())
     }
-}

-#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde)]
-impl Kernel for PolynomialKernel {
-    fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
-        if self.gamma.is_none() || self.coef0.is_none() || self.degree.is_none() {
-            return Err(Failed::because(
-                FailedError::ParametersError, "gamma, coef0, degree should be set,
-                use {Kernel}::default().with_{parameter}(..)")
-            );
+    /// Set the `coef0` parameter for polynomial or sigmoid kernels.
+    ///
+    /// The coef0 parameter is the independent term in the kernel function:
+    /// - For Polynomial: Controls the influence of higher-degree vs. lower-degree terms.
+    /// - For Sigmoid: Acts as a threshold/bias term.
+    ///
+    pub fn with_coef0(self, coef0: f64) -> Self {
+        match self {
+            Kernels::Polynomial { degree, gamma, .. } => Kernels::Polynomial {
+                degree,
+                gamma,
+                coef0: Some(coef0),
+            },
+            Kernels::Sigmoid { gamma, .. } => Kernels::Sigmoid {
+                gamma,
+                coef0: Some(coef0),
+            },
+            other => other,
         }
-        let dot = x_i.dot(x_j);
-        Ok((self.gamma.unwrap() * dot + self.coef0.unwrap()).powf(self.degree.unwrap()))
-    }
 }

-#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde)]
-impl Kernel for SigmoidKernel {
+/// Implementation of the [`Kernel`] trait for the [`Kernels`] enum in smartcore.
+///
+/// This method computes the value of the kernel function between two feature vectors `x_i` and `x_j`,
+/// according to the variant and parameters of the [`Kernels`] enum.
+/// This enables flexible and type-safe selection of kernel functions for SVM and SVR models in smartcore.
+///
+/// # Supported Kernels
+///
+/// - [`Kernels::Linear`]: Computes the standard dot product between `x_i` and `x_j`.
+/// - [`Kernels::RBF`]: Computes the Radial Basis Function (Gaussian) kernel. Requires `gamma`.
+/// - [`Kernels::Polynomial`]: Computes the polynomial kernel. Requires `degree`, `gamma`, and `coef0`.
+/// - [`Kernels::Sigmoid`]: Computes the sigmoid kernel. Requires `gamma` and `coef0`.
+///
+/// # Parameters
+///
+/// - `x_i`: First input vector (feature vector).
+/// - `x_j`: Second input vector (feature vector).
+///
+/// # Returns
+///
+/// - `Ok(f64)`: The computed kernel value.
+/// - `Err(Failed)`: If any required kernel parameter is missing.
+///
+/// # Errors
+///
+/// Returns `Err(Failed)` if a required parameter (such as `gamma`, `degree`, or `coef0`)
+/// is `None` for the selected kernel variant.
+///
+/// # Example
+///
+/// ```
+/// use smartcore::svm::Kernels;
+/// use smartcore::svm::Kernel;
+///
+/// let x = vec![1.0, 2.0, 3.0];
+/// let y = vec![4.0, 5.0, 6.0];
+/// let kernel = Kernels::rbf().with_gamma(0.5);
+/// let value = kernel.apply(&x, &y).unwrap();
+/// ```
+///
+/// # Notes
+///
+/// - This implementation follows smartcore's philosophy: pure Rust, no macros, no unsafe code,
+///   and an accessible, pythonic API surface for both ML practitioners and Rust beginners.
+/// - All kernel parameters must be set before calling `apply`; missing parameters will result in an error.
+///
+/// See the [`Kernels`] enum documentation for more details on each kernel type and its parameters.
+#[cfg_attr(feature = "serde", typetag::serde)]
+impl Kernel for Kernels {
     fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
-        if self.gamma.is_none() || self.coef0.is_none() {
-            return Err(Failed::because(
-                FailedError::ParametersError, "gamma, coef0, degree should be set,
-                use {Kernel}::default().with_{parameter}(..)")
-            );
+        match self {
+            Kernels::Linear => Ok(x_i.dot(x_j)),
+            Kernels::RBF { gamma } => {
+                let gamma = gamma.ok_or_else(|| {
+                    Failed::because(FailedError::ParametersError, "gamma not set")
+                })?;
+                let v_diff = x_i.sub(x_j);
+                Ok((-gamma * v_diff.mul(&v_diff).sum()).exp())
+            }
+            Kernels::Polynomial {
+                degree,
+                gamma,
+                coef0,
+            } => {
+                let degree = degree.ok_or_else(|| {
+                    Failed::because(FailedError::ParametersError, "degree not set")
+                })?;
+                let gamma = gamma.ok_or_else(|| {
+                    Failed::because(FailedError::ParametersError, "gamma not set")
+                })?;
+                let coef0 = coef0.ok_or_else(|| {
+                    Failed::because(FailedError::ParametersError, "coef0 not set")
+                })?;
+                let dot = x_i.dot(x_j);
+                Ok((gamma * dot + coef0).powf(degree))
+            }
+            Kernels::Sigmoid { gamma, coef0 } => {
+                let gamma = gamma.ok_or_else(|| {
+                    Failed::because(FailedError::ParametersError, "gamma not set")
+                })?;
+                let coef0 = coef0.ok_or_else(|| {
+                    Failed::because(FailedError::ParametersError, "coef0 not set")
+                })?;
+                let dot = x_i.dot(x_j);
+                Ok((gamma * dot + coef0).tanh())
+            }
+        }
-        let dot = x_i.dot(x_j);
-        Ok(self.gamma.unwrap() * dot + self.coef0.unwrap().tanh())
     }
 }
@@ -247,6 +331,18 @@ mod tests {
     use super::*;
     use crate::svm::Kernels;

+    #[test]
+    fn rbf_kernel() {
+        let v1 = vec![1., 2., 3.];
+        let v2 = vec![4., 5., 6.];
+        let result = Kernels::rbf()
+            .with_gamma(0.055)
+            .apply(&v1, &v2)
+            .unwrap()
+            .abs();
+        assert!((0.2265f64 - result).abs() < 1e-4);
+    }
+
     #[cfg_attr(
         all(target_arch = "wasm32", not(target_os = "wasi")),
         wasm_bindgen_test::wasm_bindgen_test
     )]
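A note on the hunk above: with the enum-based `apply`, a kernel whose parameters were never set now fails with a `ParametersError` instead of panicking on an internal `unwrap()`. A minimal doc-test-style sketch of the caller-side behaviour, assuming only the API shown in this series:

```rust
use smartcore::svm::{Kernel, Kernels};

let x = vec![1.0, 2.0, 3.0];
let y = vec![4.0, 5.0, 6.0];

// gamma was never set: `apply` returns an Err instead of panicking
assert!(Kernels::rbf().apply(&x, &y).is_err());

// once gamma is provided, the same call succeeds
assert!(Kernels::rbf().with_gamma(0.5).apply(&x, &y).is_ok());
```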
@@ -264,7 +360,7 @@ mod tests {
         wasm_bindgen_test::wasm_bindgen_test
     )]
     #[test]
-    fn rbf_kernel() {
+    fn test_rbf_kernel() {
         let v1 = vec![1., 2., 3.];
         let v2 = vec![4., 5., 6.];

@@ -287,7 +383,10 @@ mod tests {
         let v2 = vec![4., 5., 6.];

         let result = Kernels::polynomial()
-            .with_params(3.0, 0.5, 1.0)
+            .with_gamma(0.5)
+            .with_degree(3.0)
+            .with_coef0(1.0)
+            //.with_params(3.0, 0.5, 1.0)
             .apply(&v1, &v2)
             .unwrap()
             .abs();
@@ -305,7 +404,8 @@ mod tests {
         let v2 = vec![4., 5., 6.];

         let result = Kernels::sigmoid()
-            .with_params(0.01, 0.1)
+            .with_gamma(0.01)
+            .with_coef0(0.1)
             .apply(&v1, &v2)
             .unwrap()
             .abs();
diff --git a/src/svm/search/mod.rs b/src/svm/search/mod.rs
index 6d86feb5..d61b8326 100644
--- a/src/svm/search/mod.rs
+++ b/src/svm/search/mod.rs
@@ -1,3 +1,5 @@
+//! SVC and SVR grid search parameters
+
 /// SVC search parameters
 pub mod svc_params;
 /// SVC search parameters
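The test updates above double as a migration guide: the one-shot `with_params` setters are gone in favour of chainable per-parameter builders on `Kernels`. A hedged before/after sketch (the `PolynomialKernel` line reflects the pre-series API and is shown commented):

```rust
use smartcore::svm::Kernels;

// before this series (struct-based API):
// let knl = PolynomialKernel::default().with_params(3.0, 0.5, 1.0);

// after (enum-based API): each parameter is set explicitly
let knl = Kernels::polynomial()
    .with_degree(3.0)
    .with_gamma(0.5)
    .with_coef0(1.0);
```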
diff --git a/src/svm/search/svr_params.rs b/src/svm/search/svr_params.rs
index 03d0ecef..8a819354 100644
--- a/src/svm/search/svr_params.rs
+++ b/src/svm/search/svr_params.rs
@@ -1,112 +1,293 @@
-// /// SVR grid search parameters
-// #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-// #[derive(Debug, Clone)]
-// pub struct SVRSearchParameters<T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> {
-//     /// Epsilon in the epsilon-SVR model.
-//     pub eps: Vec<T>,
-//     /// Regularization parameter.
-//     pub c: Vec<T>,
-//     /// Tolerance for stopping eps.
-//     pub tol: Vec<T>,
-//     /// The kernel function.
-//     pub kernel: Vec<K>,
-//     /// Unused parameter.
-//     m: PhantomData<M>,
-// }
-
-// /// SVR grid search iterator
-// pub struct SVRSearchParametersIterator<T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> {
-//     svr_search_parameters: SVRSearchParameters<T, M, K>,
-//     current_eps: usize,
-//     current_c: usize,
-//     current_tol: usize,
-//     current_kernel: usize,
-// }
-
-// impl<T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> IntoIterator
-//     for SVRSearchParameters<T, M, K>
-// {
-//     type Item = SVRParameters<T, M, K>;
-//     type IntoIter = SVRSearchParametersIterator<T, M, K>;
-
-//     fn into_iter(self) -> Self::IntoIter {
-//         SVRSearchParametersIterator {
-//             svr_search_parameters: self,
-//             current_eps: 0,
-//             current_c: 0,
-//             current_tol: 0,
-//             current_kernel: 0,
-//         }
-//     }
-// }
-
-// impl<T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> Iterator
-//     for SVRSearchParametersIterator<T, M, K>
-// {
-//     type Item = SVRParameters<T, M, K>;
-
-//     fn next(&mut self) -> Option<Self::Item> {
-//         if self.current_eps == self.svr_search_parameters.eps.len()
-//             && self.current_c == self.svr_search_parameters.c.len()
-//             && self.current_tol == self.svr_search_parameters.tol.len()
-//             && self.current_kernel == self.svr_search_parameters.kernel.len()
-//         {
-//             return None;
-//         }
-
-//         let next = SVRParameters::<T, M, K> {
-//             eps: self.svr_search_parameters.eps[self.current_eps],
-//             c: self.svr_search_parameters.c[self.current_c],
-//             tol: self.svr_search_parameters.tol[self.current_tol],
-//             kernel: self.svr_search_parameters.kernel[self.current_kernel].clone(),
-//             m: PhantomData,
-//         };
-
-//         if self.current_eps + 1 < self.svr_search_parameters.eps.len() {
-//             self.current_eps += 1;
-//         } else if self.current_c + 1 < self.svr_search_parameters.c.len() {
-//             self.current_eps = 0;
-//             self.current_c += 1;
-//         } else if self.current_tol + 1 < self.svr_search_parameters.tol.len() {
-//             self.current_eps = 0;
-//             self.current_c = 0;
-//             self.current_tol += 1;
-//         } else if self.current_kernel + 1 < self.svr_search_parameters.kernel.len() {
-//             self.current_eps = 0;
-//             self.current_c = 0;
-//             self.current_tol = 0;
-//             self.current_kernel += 1;
-//         } else {
-//             self.current_eps += 1;
-//             self.current_c += 1;
-//             self.current_tol += 1;
-//             self.current_kernel += 1;
-//         }
-
-//         Some(next)
-//     }
-// }
-
-// impl<T: RealNumber, M: Matrix<T>> Default for SVRSearchParameters<T, M, LinearKernel> {
-//     fn default() -> Self {
-//         let default_params: SVRParameters<T, M, LinearKernel> = SVRParameters::default();
-
-//         SVRSearchParameters {
-//             eps: vec![default_params.eps],
-//             c: vec![default_params.c],
-//             tol: vec![default_params.tol],
-//             kernel: vec![default_params.kernel],
-//             m: PhantomData,
-//         }
-//     }
-// }
-
-// #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-// #[derive(Debug)]
-// #[cfg_attr(
-//     feature = "serde",
-//     serde(bound(
-//         serialize = "M::RowVector: Serialize, K: Serialize, T: Serialize",
-//         deserialize = "M::RowVector: Deserialize<'de>, K: Deserialize<'de>, T: Deserialize<'de>",
-//     ))
-// )]
+//! # SVR Grid Search Parameters
+//!
+//! This module provides utilities for defining and iterating over grid search parameter spaces
+//! for Support Vector Regression (SVR) models in [smartcore](https://github.com/smartcorelib/smartcore).
+//!
+//! The main struct, [`SVRSearchParameters`], allows users to specify multiple values for each
+//! SVR hyperparameter (epsilon, regularization parameter C, tolerance, and kernel function).
+//! The provided iterator yields all possible combinations (the Cartesian product) of these parameters,
+//! enabling exhaustive grid search for hyperparameter tuning.
+//!
+//! ## Example
+//! ```
+//! use smartcore::svm::Kernels;
+//! use smartcore::svm::search::svr_params::SVRSearchParameters;
+//! use smartcore::linalg::basic::matrix::DenseMatrix;
+//!
+//! let params = SVRSearchParameters::<f64, DenseMatrix<f64>> {
+//!     eps: vec![0.1, 0.2],
+//!     c: vec![1.0, 10.0],
+//!     tol: vec![1e-3],
+//!     kernel: vec![Kernels::linear(), Kernels::rbf().with_gamma(0.5)],
+//!     m: std::marker::PhantomData,
+//! };
+//!
+//! // for param_set in params.into_iter() {
+//! //     Use param_set (of type svr::SVRParameters) to fit and evaluate your SVR model.
+//! // }
+//! ```
+//!
+//! ## Note
+//! This module is intended for use with smartcore version 0.4 or later. The API is not compatible with older versions.
+#[cfg(feature = "serde")]
+use serde::{Deserialize, Serialize};
+
+use crate::linalg::basic::arrays::Array2;
+use crate::numbers::basenum::Number;
+use crate::numbers::floatnum::FloatNumber;
+use crate::numbers::realnum::RealNumber;
+use crate::svm::{svr, Kernels};
+use std::marker::PhantomData;
+
+/// ## SVR grid search parameters
+/// A struct representing a grid of hyperparameters for SVR grid search in smartcore.
+///
+/// Each field is a vector of possible values for the corresponding SVR hyperparameter.
+/// The [`IntoIterator`] implementation yields every possible combination of these parameters
+/// as an `svr::SVRParameters` struct, suitable for use in model selection routines.
+///
+/// # Type Parameters
+/// - `T`: Numeric type for parameters (e.g., `f64`)
+/// - `M`: Matrix type implementing [`Array2`]
+///
+/// # Fields
+/// - `eps`: Vector of epsilon values for the epsilon-insensitive loss in SVR.
+/// - `c`: Vector of regularization parameters (C) for SVR.
+/// - `tol`: Vector of tolerance values for the stopping criterion.
+/// - `kernel`: Vector of kernel function variants (see [`Kernels`]).
+/// - `m`: Phantom data for the matrix type parameter.
+///
+/// # Example
+/// ```
+/// use smartcore::svm::Kernels;
+/// use smartcore::svm::search::svr_params::SVRSearchParameters;
+/// use smartcore::linalg::basic::matrix::DenseMatrix;
+///
+/// let params = SVRSearchParameters::<f64, DenseMatrix<f64>> {
+///     eps: vec![0.1, 0.2],
+///     c: vec![1.0, 10.0],
+///     tol: vec![1e-3],
+///     kernel: vec![Kernels::linear(), Kernels::rbf().with_gamma(0.5)],
+///     m: std::marker::PhantomData,
+/// };
+/// ```
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[derive(Debug, Clone)]
+pub struct SVRSearchParameters<T: Number, M: Array2<T>> {
+    /// Epsilon in the epsilon-SVR model.
+    pub eps: Vec<T>,
+    /// Regularization parameter.
+    pub c: Vec<T>,
+    /// Tolerance for the stopping criterion.
+    pub tol: Vec<T>,
+    /// The kernel function.
+    pub kernel: Vec<Kernels>,
+    /// Unused parameter.
+    pub m: PhantomData<M>,
+}
+
+/// SVR grid search iterator
+pub struct SVRSearchParametersIterator<T: Number, M: Array2<T>> {
+    svr_search_parameters: SVRSearchParameters<T, M>,
+    current_eps: usize,
+    current_c: usize,
+    current_tol: usize,
+    current_kernel: usize,
+}
+
+impl<T: Number, M: Array2<T>> IntoIterator for SVRSearchParameters<T, M> {
+    type Item = svr::SVRParameters<T>;
+    type IntoIter = SVRSearchParametersIterator<T, M>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        SVRSearchParametersIterator {
+            svr_search_parameters: self,
+            current_eps: 0,
+            current_c: 0,
+            current_tol: 0,
+            current_kernel: 0,
+        }
+    }
+}
+
+impl<T: Number, M: Array2<T>> Iterator for SVRSearchParametersIterator<T, M> {
+    type Item = svr::SVRParameters<T>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.current_eps == self.svr_search_parameters.eps.len()
+            && self.current_c == self.svr_search_parameters.c.len()
+            && self.current_tol == self.svr_search_parameters.tol.len()
+            && self.current_kernel == self.svr_search_parameters.kernel.len()
+        {
+            return None;
+        }
+
+        let next = svr::SVRParameters::<T> {
+            eps: self.svr_search_parameters.eps[self.current_eps],
+            c: self.svr_search_parameters.c[self.current_c],
+            tol: self.svr_search_parameters.tol[self.current_tol],
+            kernel: Some(self.svr_search_parameters.kernel[self.current_kernel].clone()),
+        };
+
+        if self.current_eps + 1 < self.svr_search_parameters.eps.len() {
+            self.current_eps += 1;
+        } else if self.current_c + 1 < self.svr_search_parameters.c.len() {
+            self.current_eps = 0;
+            self.current_c += 1;
+        } else if self.current_tol + 1 < self.svr_search_parameters.tol.len() {
+            self.current_eps = 0;
+            self.current_c = 0;
+            self.current_tol += 1;
+        } else if self.current_kernel + 1 < self.svr_search_parameters.kernel.len() {
+            self.current_eps = 0;
+            self.current_c = 0;
+            self.current_tol = 0;
+            self.current_kernel += 1;
+        } else {
+            self.current_eps += 1;
+            self.current_c += 1;
+            self.current_tol += 1;
+            self.current_kernel += 1;
+        }
+
+        Some(next)
+    }
+}
+
+impl<T: Number, M: Array2<T>> Default for SVRSearchParameters<T, M> {
+    fn default() -> Self {
+        let default_params: svr::SVRParameters<T> = svr::SVRParameters::default();
+
+        SVRSearchParameters {
+            eps: vec![default_params.eps],
+            c: vec![default_params.c],
+            tol: vec![default_params.tol],
+            kernel: vec![default_params.kernel.unwrap_or_else(Kernels::linear)],
+            m: PhantomData,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::linalg::basic::matrix::DenseMatrix;
+    use crate::svm::Kernels;
+
+    type T = f64;
+    type M = DenseMatrix<f64>;
+
+    #[test]
+    fn test_default_parameters() {
+        let params = SVRSearchParameters::<T, M>::default();
+        assert_eq!(params.eps.len(), 1);
+        assert_eq!(params.c.len(), 1);
+        assert_eq!(params.tol.len(), 1);
+        assert_eq!(params.kernel.len(), 1);
+        // Check that the default kernel is linear
+        assert_eq!(params.kernel[0], Kernels::linear());
+    }
+
+    #[test]
+    fn test_single_grid_iteration() {
+        let params = SVRSearchParameters::<T, M> {
+            eps: vec![0.1],
+            c: vec![1.0],
+            tol: vec![1e-3],
+            kernel: vec![Kernels::rbf().with_gamma(0.5)],
+            m: PhantomData,
+        };
+        let mut iter = params.into_iter();
+        let param = iter.next().unwrap();
+        assert_eq!(param.eps, 0.1);
+        assert_eq!(param.c, 1.0);
+        assert_eq!(param.tol, 1e-3);
+        assert_eq!(param.kernel, Some(Kernels::rbf().with_gamma(0.5)));
+        assert!(iter.next().is_none());
+    }
+
+    #[test]
+    fn test_cartesian_grid_iteration() {
+        let params = SVRSearchParameters::<T, M> {
+            eps: vec![0.1, 0.2],
+            c: vec![1.0, 2.0],
+            tol: vec![1e-3],
+            kernel: vec![Kernels::linear(), Kernels::rbf().with_gamma(0.5)],
+            m: PhantomData,
+        };
+        let expected_count =
+            params.eps.len() * params.c.len() * params.tol.len() * params.kernel.len();
+        let results: Vec<_> = params.into_iter().collect();
+        assert_eq!(results.len(), expected_count);
+
+        // Check that all parameter combinations are present
+        let mut seen = vec![];
+        for p in &results {
+            seen.push((p.eps, p.c, p.tol, p.kernel.clone().unwrap()));
+        }
+        for &eps in &[0.1, 0.2] {
+            for &c in &[1.0, 2.0] {
+                for &tol in &[1e-3] {
+                    for kernel in &[Kernels::linear(), Kernels::rbf().with_gamma(0.5)] {
+                        assert!(seen.contains(&(eps, c, tol, kernel.clone())));
+                    }
+                }
+            }
+        }
+    }
+
+    #[test]
+    fn test_empty_grid() {
+        let params = SVRSearchParameters::<T, M> {
+            eps: vec![],
+            c: vec![],
+            tol: vec![],
+            kernel: vec![],
+            m: PhantomData,
+        };
+        let mut iter = params.into_iter();
+        assert!(iter.next().is_none());
+    }
+
+    #[test]
+    fn test_kernel_enum_variants() {
+        let lin = Kernels::linear();
+        let rbf = Kernels::rbf().with_gamma(0.2);
+        let poly = Kernels::polynomial()
+            .with_degree(2.0)
+            .with_gamma(1.0)
+            .with_coef0(0.5);
+        let sig = Kernels::sigmoid().with_gamma(0.3).with_coef0(0.1);
+
+        assert_eq!(lin, Kernels::Linear);
+        match rbf {
+            Kernels::RBF { gamma } => assert_eq!(gamma, Some(0.2)),
+            _ => panic!("Not RBF"),
+        }
+        match poly {
+            Kernels::Polynomial {
+                degree,
+                gamma,
+                coef0,
+            } => {
+                assert_eq!(degree, Some(2.0));
+                assert_eq!(gamma, Some(1.0));
+                assert_eq!(coef0, Some(0.5));
+            }
+            _ => panic!("Not Polynomial"),
+        }
+        match sig {
+            Kernels::Sigmoid { gamma, coef0 } => {
+                assert_eq!(gamma, Some(0.3));
+                assert_eq!(coef0, Some(0.1));
+            }
+            _ => panic!("Not Sigmoid"),
+        }
+    }
+}
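Before moving on to `svr.rs`: the `next()` implementation above advances `eps` fastest and `kernel` slowest, like an odometer, so the grid is enumerated in a fixed, predictable order. A small sketch of the expected yield order, using only the fields defined above:

```rust
use smartcore::linalg::basic::matrix::DenseMatrix;
use smartcore::svm::search::svr_params::SVRSearchParameters;
use smartcore::svm::Kernels;
use std::marker::PhantomData;

let params = SVRSearchParameters::<f64, DenseMatrix<f64>> {
    eps: vec![0.1, 0.2],
    c: vec![1.0, 2.0],
    tol: vec![1e-3],
    kernel: vec![Kernels::linear()],
    m: PhantomData,
};

// eps varies fastest, then c; tol and kernel are single-valued here
let order: Vec<(f64, f64)> = params.into_iter().map(|p| (p.eps, p.c)).collect();
assert_eq!(order, vec![(0.1, 1.0), (0.2, 1.0), (0.1, 2.0), (0.2, 2.0)]);
```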
diff --git a/src/svm/svr.rs b/src/svm/svr.rs
index 4ce0aa28..e912743b 100644
--- a/src/svm/svr.rs
+++ b/src/svm/svr.rs
@@ -51,9 +51,9 @@
 //!
 //! let knl = Kernels::linear();
 //! let params = &SVRParameters::default().with_eps(2.0).with_c(10.0).with_kernel(knl);
-//! // let svr = SVR::fit(&x, &y, params).unwrap();
+//! let svr = SVR::fit(&x, &y, params).unwrap();
 //!
-//! // let y_hat = svr.predict(&x).unwrap();
+//! let y_hat = svr.predict(&x).unwrap();
 //! ```
 //!
 //! ## References:
@@ -80,11 +80,12 @@ use crate::error::{Failed, FailedError};
 use crate::linalg::basic::arrays::{Array1, Array2, MutArray};
 use crate::numbers::basenum::Number;
 use crate::numbers::floatnum::FloatNumber;
-use crate::svm::Kernel;
+use crate::svm::{Kernel, Kernels};
+
+/// SVR Parameters
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[derive(Debug)]
-/// SVR Parameters
 pub struct SVRParameters<T: Number> {
     /// Epsilon in the epsilon-SVR model.
     pub eps: T,
@@ -97,7 +98,7 @@ pub struct SVRParameters<T: Number> {
         all(feature = "serde", target_arch = "wasm32"),
         serde(skip_serializing, skip_deserializing)
     )]
-    pub kernel: Option<Box<dyn Kernel>>,
+    pub kernel: Option<Kernels>,
 }

 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@@ -160,8 +161,8 @@ impl<T: Number> SVRParameters<T> {
         self
     }
     /// The kernel function.
-    pub fn with_kernel<K: Kernel + 'static>(mut self, kernel: K) -> Self {
-        self.kernel = Some(Box::new(kernel));
+    pub fn with_kernel(mut self, kernel: Kernels) -> Self {
+        self.kernel = Some(kernel);
         self
     }
 }
@@ -597,25 +598,25 @@ mod tests {
     use super::*;
     use crate::linalg::basic::matrix::DenseMatrix;
     use crate::metrics::mean_squared_error;
+    use crate::svm::search::svr_params::SVRSearchParameters;
     use crate::svm::Kernels;

-    // #[test]
-    // fn search_parameters() {
-    //     let parameters: SVRSearchParameters<f64, DenseMatrix<f64>, LinearKernel> =
-    //         SVRSearchParameters {
-    //             eps: vec![0., 1.],
-    //             kernel: vec![LinearKernel {}],
-    //             ..Default::default()
-    //         };
-    //     let mut iter = parameters.into_iter();
-    //     let next = iter.next().unwrap();
-    //     assert_eq!(next.eps, 0.);
-    //     assert_eq!(next.kernel, LinearKernel {});
-    //     let next = iter.next().unwrap();
-    //     assert_eq!(next.eps, 1.);
-    //     assert_eq!(next.kernel, LinearKernel {});
-    //     assert!(iter.next().is_none());
-    // }
+    #[test]
+    fn search_parameters() {
+        let parameters: SVRSearchParameters<f64, DenseMatrix<f64>> = SVRSearchParameters {
+            eps: vec![0., 1.],
+            kernel: vec![Kernels::linear()],
+            ..Default::default()
+        };
+        let mut iter = parameters.into_iter();
+        let next = iter.next().unwrap();
+        assert_eq!(next.eps, 0.);
+        assert_eq!(next.kernel, Some(Kernels::linear()));
+        let next = iter.next().unwrap();
+        assert_eq!(next.eps, 1.);
+        assert_eq!(next.kernel, Some(Kernels::linear()));
+        assert!(iter.next().is_none());
+    }

     #[cfg_attr(
         all(target_arch = "wasm32", not(target_os = "wasi")),
         wasm_bindgen_test::wasm_bindgen_test
@@ -648,7 +649,7 @@ mod tests {
             114.2, 115.7, 116.9,
         ];

-        let knl = Kernels::linear();
+        let knl: Kernels = Kernels::linear();
         let y_hat = SVR::fit(
             &x,
             &y,
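With the doctest re-enabled in the hunk above, the end-to-end flow is worth spelling out. A hedged sketch with toy data rather than the dataset used in the module doc, and with module paths as used elsewhere in this series:

```rust
use smartcore::linalg::basic::matrix::DenseMatrix;
use smartcore::svm::svr::{SVR, SVRParameters};
use smartcore::svm::Kernels;

let x = DenseMatrix::from_2d_array(&[
    &[1.0, 2.0],
    &[2.0, 3.0],
    &[3.0, 4.0],
    &[4.0, 5.0],
])
.unwrap();
let y: Vec<f64> = vec![2.0, 3.0, 4.0, 5.0];

// the kernel is now a plain `Kernels` value rather than a boxed trait object
let params = SVRParameters::default()
    .with_eps(0.1)
    .with_c(10.0)
    .with_kernel(Kernels::linear());

let svr = SVR::fit(&x, &y, &params).unwrap();
let y_hat = svr.predict(&x).unwrap();
assert_eq!(y_hat.len(), 4);
```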
-#[cfg_attr(feature = "serde", typetag::serde)] +#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde)] impl Kernel for Kernels { fn apply(&self, x_i: &Vec, x_j: &Vec) -> Result { match self { From 421a25b5b72501cac555b7845b9da6e67f3d14ef Mon Sep 17 00:00:00 2001 From: Lorenzo Mec-iS Date: Sun, 1 Jun 2025 18:03:00 +0100 Subject: [PATCH 13/14] fix typetag --- src/svm/mod.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/svm/mod.rs b/src/svm/mod.rs index 114da948..648e8946 100644 --- a/src/svm/mod.rs +++ b/src/svm/mod.rs @@ -42,7 +42,10 @@ use crate::linalg::basic::arrays::{Array1, ArrayView1}; /// Defines a kernel function. /// This is a object-safe trait. -#[cfg_attr(all(feature = "serde", not(target_arch = "wasm32")), typetag::serde(tag = "type"))] +#[cfg_attr( + all(feature = "serde", not(target_arch = "wasm32")), + typetag::serde(tag = "type") +)] pub trait Kernel: Debug { #[allow(clippy::ptr_arg)] /// Apply kernel function to x_i and x_j From 855fc823a328e42b529e60d7a83f79329286e1b2 Mon Sep 17 00:00:00 2001 From: Lorenzo Mec-iS Date: Sun, 1 Jun 2025 18:05:58 +0100 Subject: [PATCH 14/14] Bump to version 0.4.2 --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 6ce1952d..3c1b8ab9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,7 +2,7 @@ name = "smartcore" description = "Machine Learning in Rust." homepage = "https://smartcorelib.org" -version = "0.4.1" +version = "0.4.2" authors = ["smartcore Developers"] edition = "2021" license = "Apache-2.0"