
Use Box in SVM and remove lifetimes #228

Merged · 3 commits · Nov 4, 2022
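
Summary: the `Kernel` trait loses its `'a` lifetime parameter and stays object-safe (`name()` now returns `&'static str`), the concrete kernel structs (`LinearKernel`, `RBFKernel`, `PolynomialKernel`, `SigmoidKernel`) drop their `PhantomData<&'a ()>` fields, and `SVCParameters` owns its kernel as `Option<Box<dyn Kernel>>` instead of borrowing `&'a dyn Kernel<'a>`. Call sites therefore pass kernels by value to `with_kernel` (e.g. `with_kernel(Kernels::linear())` rather than `with_kernel(&knl)`); short usage sketches follow each file's diff below.
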
83 changes: 28 additions & 55 deletions src/svm/mod.rs
@@ -29,7 +29,6 @@ pub mod svr;
// pub mod search;

use core::fmt::Debug;
use std::marker::PhantomData;

#[cfg(feature = "serde")]
use serde::ser::{SerializeStruct, Serializer};
@@ -41,22 +40,22 @@ use crate::linalg::basic::arrays::{Array1, ArrayView1};

/// Defines a kernel function.
/// This is a object-safe trait.
pub trait Kernel<'a> {
pub trait Kernel {
#[allow(clippy::ptr_arg)]
/// Apply kernel function to x_i and x_j
fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed>;
/// Return a serializable name
fn name(&self) -> &'a str;
fn name(&self) -> &'static str;
}

impl<'a> Debug for dyn Kernel<'_> + 'a {
impl Debug for dyn Kernel {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
write!(f, "Kernel<f64>")
}
}

#[cfg(feature = "serde")]
impl<'a> Serialize for dyn Kernel<'_> + 'a {
impl Serialize for dyn Kernel {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
@@ -72,61 +71,41 @@ impl<'a> Serialize for dyn Kernel<'_> + 'a {
#[derive(Debug, Clone)]
pub struct Kernels {}

impl<'a> Kernels {
impl Kernels {
/// Return a default linear
pub fn linear() -> LinearKernel<'a> {
pub fn linear() -> LinearKernel {
LinearKernel::default()
}
/// Return a default RBF
pub fn rbf() -> RBFKernel<'a> {
pub fn rbf() -> RBFKernel {
RBFKernel::default()
}
/// Return a default polynomial
pub fn polynomial() -> PolynomialKernel<'a> {
pub fn polynomial() -> PolynomialKernel {
PolynomialKernel::default()
}
/// Return a default sigmoid
pub fn sigmoid() -> SigmoidKernel<'a> {
pub fn sigmoid() -> SigmoidKernel {
SigmoidKernel::default()
}
}

/// Linear Kernel
#[allow(clippy::derive_partial_eq_without_eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, PartialEq)]
pub struct LinearKernel<'a> {
phantom: PhantomData<&'a ()>,
}

impl<'a> Default for LinearKernel<'a> {
fn default() -> Self {
Self {
phantom: PhantomData,
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct LinearKernel;

/// Radial basis function (Gaussian) kernel
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, PartialEq)]
pub struct RBFKernel<'a> {
#[derive(Debug, Default, Clone, PartialEq)]
pub struct RBFKernel {
/// kernel coefficient
pub gamma: Option<f64>,
phantom: PhantomData<&'a ()>,
}

impl<'a> Default for RBFKernel<'a> {
fn default() -> Self {
Self {
gamma: Option::None,
phantom: PhantomData,
}
}
}

#[allow(dead_code)]
impl<'a> RBFKernel<'a> {
impl RBFKernel {
/// assign gamma parameter to kernel (required)
/// ```rust
/// use smartcore::svm::RBFKernel;
@@ -141,29 +120,26 @@ impl<'a> RBFKernel<'a> {
/// Polynomial kernel
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, PartialEq)]
pub struct PolynomialKernel<'a> {
pub struct PolynomialKernel {
/// degree of the polynomial
pub degree: Option<f64>,
/// kernel coefficient
pub gamma: Option<f64>,
/// independent term in kernel function
pub coef0: Option<f64>,
phantom: PhantomData<&'a ()>,
}

impl<'a> Default for PolynomialKernel<'a> {
impl Default for PolynomialKernel {
fn default() -> Self {
Self {
gamma: Option::None,
degree: Option::None,
coef0: Some(1f64),
phantom: PhantomData,
}
}
}

#[allow(dead_code)]
impl<'a> PolynomialKernel<'a> {
impl PolynomialKernel {
/// set parameters for kernel
/// ```rust
/// use smartcore::svm::PolynomialKernel;
@@ -197,26 +173,23 @@ impl<'a> PolynomialKernel<'a> {
/// Sigmoid (hyperbolic tangent) kernel
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, PartialEq)]
pub struct SigmoidKernel<'a> {
pub struct SigmoidKernel {
/// kernel coefficient
pub gamma: Option<f64>,
/// independent term in kernel function
pub coef0: Option<f64>,
phantom: PhantomData<&'a ()>,
}

impl<'a> Default for SigmoidKernel<'a> {
impl Default for SigmoidKernel {
fn default() -> Self {
Self {
gamma: Option::None,
coef0: Some(1f64),
phantom: PhantomData,
}
}
}

#[allow(dead_code)]
impl<'a> SigmoidKernel<'a> {
impl SigmoidKernel {
/// set parameters for kernel
/// ```rust
/// use smartcore::svm::SigmoidKernel;
@@ -238,16 +211,16 @@ impl<'a> SigmoidKernel<'a> {
}
}

impl<'a> Kernel<'a> for LinearKernel<'a> {
impl Kernel for LinearKernel {
fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
Ok(x_i.dot(x_j))
}
fn name(&self) -> &'a str {
fn name(&self) -> &'static str {
"Linear"
}
}

impl<'a> Kernel<'a> for RBFKernel<'a> {
impl Kernel for RBFKernel {
fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
if self.gamma.is_none() {
return Err(Failed::because(
@@ -258,12 +231,12 @@ impl<'a> Kernel<'a> for RBFKernel<'a> {
let v_diff = x_i.sub(x_j);
Ok((-self.gamma.unwrap() * v_diff.mul(&v_diff).sum()).exp())
}
fn name(&self) -> &'a str {
fn name(&self) -> &'static str {
"RBF"
}
}

impl<'a> Kernel<'a> for PolynomialKernel<'a> {
impl Kernel for PolynomialKernel {
fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
if self.gamma.is_none() || self.coef0.is_none() || self.degree.is_none() {
return Err(Failed::because(
@@ -274,12 +247,12 @@ impl<'a> Kernel<'a> for PolynomialKernel<'a> {
let dot = x_i.dot(x_j);
Ok((self.gamma.unwrap() * dot + self.coef0.unwrap()).powf(self.degree.unwrap()))
}
fn name(&self) -> &'a str {
fn name(&self) -> &'static str {
"Polynomial"
}
}

impl<'a> Kernel<'a> for SigmoidKernel<'a> {
impl Kernel for SigmoidKernel {
fn apply(&self, x_i: &Vec<f64>, x_j: &Vec<f64>) -> Result<f64, Failed> {
if self.gamma.is_none() || self.coef0.is_none() {
return Err(Failed::because(
@@ -290,7 +263,7 @@ impl<'a> Kernel<'a> for SigmoidKernel<'a> {
let dot = x_i.dot(x_j);
Ok(self.gamma.unwrap() * dot + self.coef0.unwrap().tanh())
}
fn name(&self) -> &'a str {
fn name(&self) -> &'static str {
"Sigmoid"
}
}
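
With the lifetime gone, `Kernel` is an ordinary object-safe trait, so kernels built by the `Kernels` factory can be stored and called through `Box<dyn Kernel>`. A minimal sketch, assuming this branch's API (`with_gamma` is taken from the test usage further down; the input vectors are made up for illustration):

```rust
use smartcore::svm::{Kernel, Kernels};

fn main() {
    // Heterogeneous kernels behind the now object-safe trait; no lifetime parameter.
    let kernels: Vec<Box<dyn Kernel>> = vec![
        Box::new(Kernels::linear()),
        Box::new(Kernels::rbf().with_gamma(0.7)),
    ];

    let a = vec![1.0_f64, 2.0, 3.0];
    let b = vec![4.0_f64, 5.0, 6.0];

    for k in &kernels {
        // `apply` returns Err(Failed) if a required parameter (e.g. gamma) was never set.
        println!("{}: {:?}", k.name(), k.apply(&a, &b));
    }
}
```
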
46 changes: 20 additions & 26 deletions src/svm/svc.rs
@@ -58,7 +58,7 @@
//! 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1];
//!
//! let knl = Kernels::linear();
//! let params = &SVCParameters::default().with_c(200.0).with_kernel(&knl);
//! let params = &SVCParameters::default().with_c(200.0).with_kernel(knl);
//! let svc = SVC::fit(&x, &y, params).unwrap();
//!
//! let y_hat = svc.predict(&x).unwrap();
@@ -91,15 +91,9 @@ use crate::rand_custom::get_rng_impl;
use crate::svm::Kernel;

#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone)]
#[derive(Debug)]
/// SVC Parameters
pub struct SVCParameters<
'a,
TX: Number + RealNumber,
TY: Number + Ord,
X: Array2<TX>,
Y: Array1<TY>,
> {
pub struct SVCParameters<TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>> {
/// Number of epochs.
pub epoch: usize,
/// Regularization parameter.
@@ -108,7 +102,7 @@ pub struct SVCParameters<
pub tol: TX,
#[cfg_attr(feature = "serde", serde(skip_deserializing))]
/// The kernel function.
pub kernel: Option<&'a dyn Kernel<'a>>,
pub kernel: Option<Box<dyn Kernel>>,
/// Unused parameter.
m: PhantomData<(X, Y, TY)>,
/// Controls the pseudo random number generation for shuffling the data for probability estimates
@@ -129,7 +123,7 @@ pub struct SVC<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y:
classes: Option<Vec<TY>>,
instances: Option<Vec<Vec<TX>>>,
#[cfg_attr(feature = "serde", serde(skip))]
parameters: Option<&'a SVCParameters<'a, TX, TY, X, Y>>,
parameters: Option<&'a SVCParameters<TX, TY, X, Y>>,
w: Option<Vec<TX>>,
b: Option<TX>,
phantomdata: PhantomData<(X, Y)>,
@@ -155,7 +149,7 @@ struct Cache<TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1
struct Optimizer<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>> {
x: &'a X,
y: &'a Y,
parameters: &'a SVCParameters<'a, TX, TY, X, Y>,
parameters: &'a SVCParameters<TX, TY, X, Y>,
svmin: usize,
svmax: usize,
gmin: TX,
@@ -165,8 +159,8 @@ struct Optimizer<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y
recalculate_minmax_grad: bool,
}

impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>>
SVCParameters<'a, TX, TY, X, Y>
impl<TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>>
SVCParameters<TX, TY, X, Y>
{
/// Number of epochs.
pub fn with_epoch(mut self, epoch: usize) -> Self {
@@ -184,8 +178,8 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>
self
}
/// The kernel function.
pub fn with_kernel(mut self, kernel: &'a (dyn Kernel<'a>)) -> Self {
self.kernel = Some(kernel);
pub fn with_kernel<K: Kernel + 'static>(mut self, kernel: K) -> Self {
self.kernel = Some(Box::new(kernel));
self
}

@@ -196,8 +190,8 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>
}
}

impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>> Default
for SVCParameters<'a, TX, TY, X, Y>
impl<TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>> Default
for SVCParameters<TX, TY, X, Y>
{
fn default() -> Self {
SVCParameters {
@@ -212,7 +206,7 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>
}

impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>>
SupervisedEstimatorBorrow<'a, X, Y, SVCParameters<'a, TX, TY, X, Y>> for SVC<'a, TX, TY, X, Y>
SupervisedEstimatorBorrow<'a, X, Y, SVCParameters<TX, TY, X, Y>> for SVC<'a, TX, TY, X, Y>
{
fn new() -> Self {
Self {
@@ -227,7 +221,7 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>
fn fit(
x: &'a X,
y: &'a Y,
parameters: &'a SVCParameters<'a, TX, TY, X, Y>,
parameters: &'a SVCParameters<TX, TY, X, Y>,
) -> Result<Self, Failed> {
SVC::fit(x, y, parameters)
}
@@ -251,7 +245,7 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX> + 'a, Y: Array
pub fn fit(
x: &'a X,
y: &'a Y,
parameters: &'a SVCParameters<'a, TX, TY, X, Y>,
parameters: &'a SVCParameters<TX, TY, X, Y>,
) -> Result<SVC<'a, TX, TY, X, Y>, Failed> {
let (n, _) = x.shape();

@@ -447,7 +441,7 @@ impl<'a, TX: Number + RealNumber, TY: Number + Ord, X: Array2<TX>, Y: Array1<TY>
fn new(
x: &'a X,
y: &'a Y,
parameters: &'a SVCParameters<'a, TX, TY, X, Y>,
parameters: &'a SVCParameters<TX, TY, X, Y>,
) -> Optimizer<'a, TX, TY, X, Y> {
let (n, _) = x.shape();

@@ -979,7 +973,7 @@ mod tests {
let knl = Kernels::linear();
let params = SVCParameters::default()
.with_c(200.0)
.with_kernel(&knl)
.with_kernel(knl)
.with_seed(Some(100));

let y_hat = SVC::fit(&x, &y, &params)
@@ -1018,7 +1012,7 @@ mod tests {
&y,
&SVCParameters::default()
.with_c(200.0)
.with_kernel(&Kernels::linear()),
.with_kernel(Kernels::linear()),
)
.and_then(|lr| lr.decision_function(&x2))
.unwrap();
@@ -1073,7 +1067,7 @@ mod tests {
&y,
&SVCParameters::default()
.with_c(1.0)
.with_kernel(&Kernels::rbf().with_gamma(0.7)),
.with_kernel(Kernels::rbf().with_gamma(0.7)),
)
.and_then(|lr| lr.predict(&x))
.unwrap();
@@ -1122,7 +1116,7 @@
];

let knl = Kernels::linear();
let params = SVCParameters::default().with_kernel(&knl);
let params = SVCParameters::default().with_kernel(knl);
let svc = SVC::fit(&x, &y, &params).unwrap();

// serialization
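
Taken together, the classifier call site after this change looks like the sketch below, modeled on the updated doc-test and tests above. It assumes the 0.3-dev API at the time of this PR; the data set is a tiny made-up two-class sample rather than the Iris-style fixture the real tests use:

```rust
use smartcore::linalg::basic::matrix::DenseMatrix;
use smartcore::svm::svc::{SVCParameters, SVC};
use smartcore::svm::Kernels;

fn main() {
    // Minimal two-class data set, for illustration only.
    let x = DenseMatrix::from_2d_array(&[
        &[5.1, 3.5],
        &[4.9, 3.0],
        &[6.4, 3.2],
        &[6.9, 3.1],
    ]);
    let y: Vec<i32> = vec![-1, -1, 1, 1];

    // The kernel is moved into the parameters; no `&knl` borrow and no lifetime to manage.
    let params = SVCParameters::default()
        .with_c(200.0)
        .with_kernel(Kernels::linear());

    let y_hat = SVC::fit(&x, &y, &params)
        .and_then(|svc| svc.predict(&x))
        .unwrap();

    println!("{:?}", y_hat);
}
```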