diff --git a/src/svm/mod.rs b/src/svm/mod.rs
index febfeadd..a30fe876 100644
--- a/src/svm/mod.rs
+++ b/src/svm/mod.rs
@@ -29,7 +29,6 @@ pub mod svr;
 // pub mod search;
 
 use core::fmt::Debug;
-use std::marker::PhantomData;
 
 #[cfg(feature = "serde")]
 use serde::ser::{SerializeStruct, Serializer};
@@ -41,22 +40,22 @@ use crate::linalg::basic::arrays::{Array1, ArrayView1};
 
 /// Defines a kernel function.
 /// This is an object-safe trait.
-pub trait Kernel<'a> {
+pub trait Kernel {
     #[allow(clippy::ptr_arg)]
     /// Apply kernel function to x_i and x_j
     fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed>;
     /// Return a serializable name
-    fn name(&self) -> &'a str;
+    fn name(&self) -> &'static str;
 }
 
-impl<'a> Debug for dyn Kernel<'_> + 'a {
+impl Debug for dyn Kernel {
     fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
         write!(f, "Kernel")
     }
 }
 
 #[cfg(feature = "serde")]
-impl<'a> Serialize for dyn Kernel<'_> + 'a {
+impl Serialize for dyn Kernel {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,
@@ -72,21 +71,21 @@ impl<'a> Serialize for dyn Kernel<'_> + 'a {
 #[derive(Debug, Clone)]
 pub struct Kernels {}
 
-impl<'a> Kernels {
+impl Kernels {
     /// Return a default linear
-    pub fn linear() -> LinearKernel<'a> {
+    pub fn linear() -> LinearKernel {
         LinearKernel::default()
     }
     /// Return a default RBF
-    pub fn rbf() -> RBFKernel<'a> {
+    pub fn rbf() -> RBFKernel {
         RBFKernel::default()
     }
     /// Return a default polynomial
-    pub fn polynomial() -> PolynomialKernel<'a> {
+    pub fn polynomial() -> PolynomialKernel {
         PolynomialKernel::default()
     }
     /// Return a default sigmoid
-    pub fn sigmoid() -> SigmoidKernel<'a> {
+    pub fn sigmoid() -> SigmoidKernel {
         SigmoidKernel::default()
     }
 }
@@ -94,39 +93,19 @@ impl<'a> Kernels {
 /// Linear Kernel
 #[allow(clippy::derive_partial_eq_without_eq)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[derive(Debug, Clone, PartialEq)]
-pub struct LinearKernel<'a> {
-    phantom: PhantomData<&'a ()>,
-}
-
-impl<'a> Default for LinearKernel<'a> {
-    fn default() -> Self {
-        Self {
-            phantom: PhantomData,
-        }
-    }
-}
+#[derive(Debug, Clone, PartialEq, Eq, Default)]
+pub struct LinearKernel;
 
 /// Radial basis function (Gaussian) kernel
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[derive(Debug, Clone, PartialEq)]
-pub struct RBFKernel<'a> {
+#[derive(Debug, Default, Clone, PartialEq)]
+pub struct RBFKernel {
     /// kernel coefficient
     pub gamma: Option<f64>,
-    phantom: PhantomData<&'a ()>,
-}
-
-impl<'a> Default for RBFKernel<'a> {
-    fn default() -> Self {
-        Self {
-            gamma: Option::None,
-            phantom: PhantomData,
-        }
-    }
 }
 
 #[allow(dead_code)]
-impl<'a> RBFKernel<'a> {
+impl RBFKernel {
     /// assign gamma parameter to kernel (required)
     /// ```rust
     /// use smartcore::svm::RBFKernel;
@@ -141,29 +120,26 @@ impl<'a> RBFKernel<'a> {
 /// Polynomial kernel
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[derive(Debug, Clone, PartialEq)]
-pub struct PolynomialKernel<'a> {
+pub struct PolynomialKernel {
     /// degree of the polynomial
     pub degree: Option<f64>,
     /// kernel coefficient
     pub gamma: Option<f64>,
     /// independent term in kernel function
     pub coef0: Option<f64>,
-    phantom: PhantomData<&'a ()>,
 }
 
-impl<'a> Default for PolynomialKernel<'a> {
+impl Default for PolynomialKernel {
     fn default() -> Self {
         Self {
             gamma: Option::None,
             degree: Option::None,
             coef0: Some(1f64),
-            phantom: PhantomData,
         }
     }
 }
 
-#[allow(dead_code)]
-impl<'a> PolynomialKernel<'a> {
+impl PolynomialKernel {
     /// set parameters for kernel
     /// ```rust
     /// use smartcore::svm::PolynomialKernel;
@@ -197,26 +173,23 @@ impl<'a> PolynomialKernel<'a> {
 /// Sigmoid (hyperbolic tangent) kernel
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[derive(Debug, Clone, PartialEq)]
-pub struct SigmoidKernel<'a> {
+pub struct SigmoidKernel {
     /// kernel coefficient
     pub gamma: Option<f64>,
     /// independent term in kernel function
     pub coef0: Option<f64>,
-    phantom: PhantomData<&'a ()>,
 }
 
-impl<'a> Default for SigmoidKernel<'a> {
+impl Default for SigmoidKernel {
     fn default() -> Self {
         Self {
             gamma: Option::None,
             coef0: Some(1f64),
-            phantom: PhantomData,
         }
     }
 }
 
-#[allow(dead_code)]
-impl<'a> SigmoidKernel<'a> {
+impl SigmoidKernel {
     /// set parameters for kernel
     /// ```rust
     /// use smartcore::svm::SigmoidKernel;
@@ -238,16 +211,16 @@
     }
 }
 
-impl<'a> Kernel<'a> for LinearKernel<'a> {
+impl Kernel for LinearKernel {
     fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
         Ok(x_i.dot(x_j))
     }
-    fn name(&self) -> &'a str {
+    fn name(&self) -> &'static str {
         "Linear"
     }
 }
 
-impl<'a> Kernel<'a> for RBFKernel<'a> {
+impl Kernel for RBFKernel {
     fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
         if self.gamma.is_none() {
             return Err(Failed::because(
@@ -258,12 +231,12 @@ impl<'a> Kernel<'a> for RBFKernel<'a> {
         let v_diff = x_i.sub(x_j);
         Ok((-self.gamma.unwrap() * v_diff.mul(&v_diff).sum()).exp())
     }
-    fn name(&self) -> &'a str {
+    fn name(&self) -> &'static str {
         "RBF"
     }
 }
 
-impl<'a> Kernel<'a> for PolynomialKernel<'a> {
+impl Kernel for PolynomialKernel {
     fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
         if self.gamma.is_none() || self.coef0.is_none() || self.degree.is_none() {
             return Err(Failed::because(
@@ -274,12 +247,12 @@ impl<'a> Kernel<'a> for PolynomialKernel<'a> {
         let dot = x_i.dot(x_j);
         Ok((self.gamma.unwrap() * dot + self.coef0.unwrap()).powf(self.degree.unwrap()))
     }
-    fn name(&self) -> &'a str {
+    fn name(&self) -> &'static str {
         "Polynomial"
     }
 }
 
-impl<'a> Kernel<'a> for SigmoidKernel<'a> {
+impl Kernel for SigmoidKernel {
    fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
         if self.gamma.is_none() || self.coef0.is_none() {
             return Err(Failed::because(
@@ -290,7 +263,7 @@ impl<'a> Kernel<'a> for SigmoidKernel<'a> {
         let dot = x_i.dot(x_j);
         Ok(self.gamma.unwrap() * dot + self.coef0.unwrap().tanh())
     }
-    fn name(&self) -> &'a str {
+    fn name(&self) -> &'static str {
         "Sigmoid"
     }
 }
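The net effect of the `mod.rs` changes: `Kernel` is now an ordinary object-safe trait with no lifetime parameter, and the concrete kernels are plain value types, so they can be stored as `Box<dyn Kernel>` without lifetime plumbing. A minimal sketch of exercising the reworked trait; the `main` wrapper and the vector values are illustrative, not taken from the crate's tests:

```rust
use smartcore::svm::{Kernel, Kernels};

fn main() {
    let x_i = vec![1.0, 2.0, 3.0];
    let x_j = vec![4.0, 5.0, 6.0];

    // Kernels are plain values now, so boxing them as trait objects
    // no longer requires a lifetime parameter on `Kernel`.
    let kernels: Vec<Box<dyn Kernel>> = vec![
        Box::new(Kernels::linear()),
        Box::new(Kernels::rbf().with_gamma(0.5)),
    ];

    for k in &kernels {
        // `apply` returns Err if a required parameter (e.g. gamma) was not set.
        println!("{} -> {:?}", k.name(), k.apply(&x_i, &x_j));
    }
}
```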
diff --git a/src/svm/svc.rs b/src/svm/svc.rs
index 716f5219..9cb140d7 100644
--- a/src/svm/svc.rs
+++ b/src/svm/svc.rs
@@ -58,7 +58,7 @@
 //!               1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1];
 //!
 //! let knl = Kernels::linear();
-//! let params = &SVCParameters::default().with_c(200.0).with_kernel(&knl);
+//! let params = &SVCParameters::default().with_c(200.0).with_kernel(knl);
 //! let svc = SVC::fit(&x, &y, params).unwrap();
 //!
 //! let y_hat = svc.predict(&x).unwrap();
@@ -91,15 +91,9 @@ use crate::rand_custom::get_rng_impl;
 use crate::svm::Kernel;
 
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 /// SVC Parameters
-pub struct SVCParameters<
-    'a,
-    TX: Number + RealNumber,
-    TY: Number + Ord,
-    X: Array2<TX>,
-    Y: Array1<TY>,
-> {
+pub struct SVCParameters<TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>> {
     /// Number of epochs.
     pub epoch: usize,
     /// Regularization parameter.
@@ -108,7 +102,7 @@ pub struct SVCParameters<
     pub tol: TX,
     #[cfg_attr(feature = "serde", serde(skip_deserializing))]
     /// The kernel function.
-    pub kernel: Option<&'a dyn Kernel<'a>>,
+    pub kernel: Option<Box<dyn Kernel>>,
     /// Unused parameter.
     m: PhantomData<(X, Y, TY)>,
     /// Controls the pseudo random number generation for shuffling the data for probability estimates
@@ -129,7 +123,7 @@ pub struct SVC<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y:
     classes: Option<Vec<TY>>,
     instances: Option<Vec<Vec<TX>>>,
     #[cfg_attr(feature = "serde", serde(skip))]
-    parameters: Option<&'a SVCParameters<'a, TX, TY, X, Y>>,
+    parameters: Option<&'a SVCParameters<TX, TY, X, Y>>,
     w: Option<Vec<TX>>,
     b: Option<TX>,
     phantomdata: PhantomData<(X, Y)>,
@@ -155,7 +149,7 @@ struct Cache<TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1
 struct Optimizer<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>> {
     x: &'a X,
     y: &'a Y,
-    parameters: &'a SVCParameters<'a, TX, TY, X, Y>,
+    parameters: &'a SVCParameters<TX, TY, X, Y>,
     svmin: usize,
     svmax: usize,
     gmin: TX,
@@ -165,8 +159,8 @@ struct Optimizer<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y
     recalculate_minmax_grad: bool,
 }
 
-impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>>
-    SVCParameters<'a, TX, TY, X, Y>
+impl<TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>>
+    SVCParameters<TX, TY, X, Y>
 {
     /// Number of epochs.
     pub fn with_epoch(mut self, epoch: usize) -> Self {
@@ -184,8 +178,8 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1
         self
     }
     /// The kernel function.
-    pub fn with_kernel(mut self, kernel: &'a (dyn Kernel<'a>)) -> Self {
-        self.kernel = Some(kernel);
+    pub fn with_kernel<K: Kernel + 'static>(mut self, kernel: K) -> Self {
+        self.kernel = Some(Box::new(kernel));
         self
     }
 
@@ -196,8 +190,8 @@ }
 }
 
-impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>> Default
-    for SVCParameters<'a, TX, TY, X, Y>
+impl<TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>> Default
+    for SVCParameters<TX, TY, X, Y>
 {
     fn default() -> Self {
         SVCParameters {
@@ -212,7 +206,7 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1
 }
 
 impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>>
-    SupervisedEstimatorBorrow<'a, X, Y, SVCParameters<'a, TX, TY, X, Y>> for SVC<'a, TX, TY, X, Y>
+    SupervisedEstimatorBorrow<'a, X, Y, SVCParameters<TX, TY, X, Y>> for SVC<'a, TX, TY, X, Y>
 {
     fn new() -> Self {
         Self {
@@ -227,7 +221,7 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1
     fn fit(
         x: &'a X,
         y: &'a Y,
-        parameters: &'a SVCParameters<'a, TX, TY, X, Y>,
+        parameters: &'a SVCParameters<TX, TY, X, Y>,
     ) -> Result<Self, Failed> {
         SVC::fit(x, y, parameters)
     }
@@ -251,7 +245,7 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX> + 'a, Y: Array
     pub fn fit(
         x: &'a X,
         y: &'a Y,
-        parameters: &'a SVCParameters<'a, TX, TY, X, Y>,
+        parameters: &'a SVCParameters<TX, TY, X, Y>,
     ) -> Result<SVC<'a, TX, TY, X, Y>, Failed> {
         let (n, _) = x.shape();
 
@@ -447,7 +441,7 @@
     fn new(
         x: &'a X,
         y: &'a Y,
-        parameters: &'a SVCParameters<'a, TX, TY, X, Y>,
+        parameters: &'a SVCParameters<TX, TY, X, Y>,
     ) -> Optimizer<'a, TX, TY, X, Y> {
         let (n, _) = x.shape();
 
@@ -979,7 +973,7 @@ mod tests {
         let knl = Kernels::linear();
         let params = SVCParameters::default()
             .with_c(200.0)
-            .with_kernel(&knl)
+            .with_kernel(knl)
             .with_seed(Some(100));
 
         let y_hat = SVC::fit(&x, &y, &params)
@@ -1018,7 +1012,7 @@ mod tests {
             &y,
             &SVCParameters::default()
                 .with_c(200.0)
-                .with_kernel(&Kernels::linear()),
+                .with_kernel(Kernels::linear()),
         )
         .and_then(|lr| lr.decision_function(&x2))
         .unwrap();
@@ -1073,7 +1067,7 @@ mod tests {
             &y,
             &SVCParameters::default()
                 .with_c(1.0)
-                .with_kernel(&Kernels::rbf().with_gamma(0.7)),
+                .with_kernel(Kernels::rbf().with_gamma(0.7)),
         )
         .and_then(|lr| lr.predict(&x))
         .unwrap();
@@ -1122,7 +1116,7 @@ mod tests {
         ];
 
         let knl = Kernels::linear();
-        let params = SVCParameters::default().with_kernel(&knl);
+        let params = SVCParameters::default().with_kernel(knl);
         let svc = SVC::fit(&x, &y, &params).unwrap();
 
         // serialization
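With the lifetime gone from `SVCParameters`, `with_kernel` takes the kernel by value and boxes it internally, which is what the updated doc-test and unit tests above reflect. A short usage sketch under the same crate layout; the toy matrix, labels, and `main` wrapper are assumptions for illustration, not the crate's own example data:

```rust
use smartcore::linalg::basic::matrix::DenseMatrix;
use smartcore::svm::svc::{SVCParameters, SVC};
use smartcore::svm::Kernels;

fn main() {
    // Tiny linearly separable toy set (illustrative values only).
    let x = DenseMatrix::from_2d_array(&[
        &[0.0, 0.0],
        &[0.2, 0.1],
        &[5.0, 5.2],
        &[5.1, 4.9],
    ]);
    let y: Vec<i32> = vec![-1, -1, 1, 1];

    // The kernel is moved into the parameters and boxed, so it no longer
    // has to outlive `params` as the old `&'a dyn Kernel<'a>` field did.
    let knl = Kernels::linear();
    let params = SVCParameters::default().with_c(200.0).with_kernel(knl);

    let y_hat = SVC::fit(&x, &y, &params)
        .and_then(|svc| svc.predict(&x))
        .unwrap();
    println!("{:?}", y_hat);
}
```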
diff --git a/src/svm/svr.rs b/src/svm/svr.rs
index cf35bde1..7a39a56b 100644
--- a/src/svm/svr.rs
+++ b/src/svm/svr.rs
@@ -50,7 +50,7 @@
 //!             100.0, 101.2, 104.6, 108.4, 110.8, 112.6, 114.2, 115.7, 116.9];
 //!
 //! let knl = Kernels::linear();
-//! let params = &SVRParameters::default().with_eps(2.0).with_c(10.0).with_kernel(&knl);
+//! let params = &SVRParameters::default().with_eps(2.0).with_c(10.0).with_kernel(knl);
 //! // let svr = SVR::fit(&x, &y, params).unwrap();
 //!
 //! // let y_hat = svr.predict(&x).unwrap();
@@ -83,9 +83,9 @@ use crate::numbers::floatnum::FloatNumber;
 use crate::svm::Kernel;
 
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 /// SVR Parameters
-pub struct SVRParameters<'a, T: Number + FloatNumber + PartialOrd> {
+pub struct SVRParameters<T: Number + FloatNumber + PartialOrd> {
     /// Epsilon in the epsilon-SVR model.
     pub eps: T,
     /// Regularization parameter.
@@ -94,7 +94,7 @@ pub struct SVRParameters<'a, T: Number + FloatNumber + PartialOrd> {
     pub tol: T,
     #[cfg_attr(feature = "serde", serde(skip_deserializing))]
     /// The kernel function.
-    pub kernel: Option<&'a dyn Kernel<'a>>,
+    pub kernel: Option<Box<dyn Kernel>>,
 }
 
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@@ -103,7 +103,7 @@ pub struct SVRParameters<'a, T: Number + FloatNumber + PartialOrd> {
 pub struct SVR<'a, T: Number + FloatNumber + PartialOrd, X: Array2<T>, Y: Array1<T>> {
     instances: Option<Vec<Vec<T>>>,
     #[cfg_attr(feature = "serde", serde(skip_deserializing))]
-    parameters: Option<&'a SVRParameters<'a, T>>,
+    parameters: Option<&'a SVRParameters<T>>,
     w: Option<Vec<T>>,
     b: T,
     phantom: PhantomData<(X, Y)>,
@@ -123,7 +123,7 @@ struct SupportVector<T: Number + FloatNumber> {
 struct Optimizer<'a, T: Number + FloatNumber + PartialOrd> {
     tol: T,
     c: T,
-    parameters: Option<&'a SVRParameters<'a, T>>,
+    parameters: Option<&'a SVRParameters<T>>,
     svmin: usize,
     svmax: usize,
     gmin: T,
@@ -140,7 +140,7 @@ struct Cache<T: Clone> {
     data: Vec<RefCell<Option<Vec<T>>>>,
 }
 
-impl<'a, T: Number + FloatNumber + PartialOrd> SVRParameters<'a, T> {
+impl<T: Number + FloatNumber + PartialOrd> SVRParameters<T> {
     /// Epsilon in the epsilon-SVR model.
     pub fn with_eps(mut self, eps: T) -> Self {
         self.eps = eps;
@@ -157,13 +157,13 @@ impl<'a, T: Number + FloatNumber + PartialOrd> SVRParameters<'a, T> {
         self
     }
     /// The kernel function.
-    pub fn with_kernel(mut self, kernel: &'a (dyn Kernel<'a>)) -> Self {
-        self.kernel = Some(kernel);
+    pub fn with_kernel<K: Kernel + 'static>(mut self, kernel: K) -> Self {
+        self.kernel = Some(Box::new(kernel));
         self
     }
 }
 
-impl<'a, T: Number + FloatNumber + PartialOrd> Default for SVRParameters<'a, T> {
+impl<T: Number + FloatNumber + PartialOrd> Default for SVRParameters<T> {
     fn default() -> Self {
         SVRParameters {
             eps: T::from_f64(0.1).unwrap(),
@@ -175,7 +175,7 @@ impl<'a, T: Number + FloatNumber + PartialOrd> Default for SVRParameters<'a, T>
 }
 
 impl<'a, T: Number + FloatNumber + PartialOrd, X: Array2<T>, Y: Array1<T>>
-    SupervisedEstimatorBorrow<'a, X, Y, SVRParameters<'a, T>> for SVR<'a, T, X, Y>
+    SupervisedEstimatorBorrow<'a, X, Y, SVRParameters<T>> for SVR<'a, T, X, Y>
 {
     fn new() -> Self {
         Self {
@@ -186,7 +186,7 @@ impl<'a, T: Number + FloatNumber + PartialOrd, X: Array2<T>, Y: Array1<T>>
             phantom: PhantomData,
         }
     }
-    fn fit(x: &'a X, y: &'a Y, parameters: &'a SVRParameters<'a, T>) -> Result<Self, Failed> {
+    fn fit(x: &'a X, y: &'a Y, parameters: &'a SVRParameters<T>) -> Result<Self, Failed> {
         SVR::fit(x, y, parameters)
     }
 }
@@ -208,7 +208,7 @@ impl<'a, T: Number + FloatNumber + PartialOrd, X: Array2<T>, Y: Array1<T>> SVR<'
     pub fn fit(
         x: &'a X,
         y: &'a Y,
-        parameters: &'a SVRParameters<'a, T>,
+        parameters: &'a SVRParameters<T>,
     ) -> Result<SVR<'a, T, X, Y>, Failed> {
         let (n, _) = x.shape();
 
@@ -324,7 +324,7 @@ impl<'a, T: Number + FloatNumber + PartialOrd> Optimizer<'a, T> {
     fn new<X: Array2<T>, Y: Array1<T>>(
         x: &'a X,
         y: &'a Y,
-        parameters: &'a SVRParameters<'a, T>,
+        parameters: &'a SVRParameters<T>,
     ) -> Optimizer<'a, T> {
         let (n, _) = x.shape();
 
@@ -655,7 +655,7 @@ mod tests {
             &SVRParameters::default()
                 .with_eps(2.0)
                 .with_c(10.0)
-                .with_kernel(&knl),
+                .with_kernel(knl),
         )
         .and_then(|lr| lr.predict(&x))
         .unwrap();
@@ -697,7 +697,7 @@ mod tests {
         ];
 
         let knl = Kernels::rbf().with_gamma(0.7);
-        let params = SVRParameters::default().with_kernel(&knl);
+        let params = SVRParameters::default().with_kernel(knl);
         let svr = SVR::fit(&x, &y, &params).unwrap();
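The same pattern applies to `SVR`: dropping the `'a` lifetime means `SVRParameters` owns its kernel, and losing the `Clone` derive is the direct consequence of `Box<dyn Kernel>` not being cloneable. A hedged end-to-end sketch mirroring the updated tests; the regression data and `main` wrapper are illustrative assumptions:

```rust
use smartcore::linalg::basic::matrix::DenseMatrix;
use smartcore::svm::svr::{SVRParameters, SVR};
use smartcore::svm::Kernels;

fn main() {
    // Small 1-D regression toy set (illustrative values only).
    let x = DenseMatrix::from_2d_array(&[&[1.0], &[2.0], &[3.0], &[4.0], &[5.0]]);
    let y: Vec<f64> = vec![1.1, 1.9, 3.2, 3.9, 5.1];

    // As in the SVC tests, the kernel is moved into the parameters and boxed;
    // `SVRParameters` can no longer be `Clone` because `Box<dyn Kernel>` is not.
    let params = SVRParameters::default()
        .with_eps(0.5)
        .with_c(10.0)
        .with_kernel(Kernels::rbf().with_gamma(0.7));

    let y_hat = SVR::fit(&x, &y, &params)
        .and_then(|svr| svr.predict(&x))
        .unwrap();
    println!("{:?}", y_hat);
}
```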