Skip to content
This repository was archived by the owner on Aug 15, 2019. It is now read-only.

linalg: Addition of LU decomposition #1675

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
103 changes: 103 additions & 0 deletions src/ops/linalg_ops.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@
import {ENGINE} from '../engine';
import {dispose} from '../globals';
import {Tensor, Tensor1D, Tensor2D} from '../tensor';
import {TypedArray} from '../types';
import {assert} from '../util';

import {eye, squeeze, stack, unstack} from './array_ops';
import {split} from './concat_split';
import {norm} from './norm';
Expand Down Expand Up @@ -109,6 +111,106 @@ function gramSchmidt_(xs: Tensor1D[]|Tensor2D): Tensor1D[]|Tensor2D {
}
}

/**
* Computes LU decomposition of a given square tensor
* Implementation based on Doolittle Decomposition of Matrix
* (http://www.engr.colostate.edu/~thompson/hPage/CourseMat/Tutorials/
* CompMethods/doolittle.pdf)
*
* ```js
* const x = tf.tensor2d([[1, 2], [3, 4]]);
* const res = tf.linalg.lu(x);
* console.log(res.length);
* const [l, u] = res[0];
* console.log('L - Lower Triangular Matrix:');
* l.print();
* console.log('U - Upper Triangular Matrix:');
 * u.print();
 * l.matMul(u).print(); // Should display a tensor same as x
* ```
*
* @param x `tf.Tensor` that has to be decomposed into a Lower Triangular Matrix
 * and an Upper Triangular Matrix such that their product is the same as the
* original tensor.
 * `x` must be of rank >= 2 and must have the two innermost dimensions
* equal i.e., `x.shape` must be of form `[......., N, N]`.
* @returns A 2D `Array` with second dimension equal to 2. Each element of the
* `Array` is equal to `[l, u]` where l is the lower triangular matrix and u
* is the upper triangular both of order `N`, obtained as a result of
* decomposition of `NxN` sub tensor.
* If the given sub matrix isn't decomposable, the result for `[l, u]` is
* `[null, null]`.
* @throws
* - If the rank of `x` is less than 2.
* - If `x` doesn't have the two innermost dimension equal.
*/
/**
* @doc {heading:'Operations',
* subheading:'Linear Algebra',
* namespace:'linalg'}
*/
function lu_(x: Tensor): Array<[Tensor2D | null, Tensor2D | null]> {
  if (x.rank < 2) {
    throw new Error(
        `lu() requires input tensor of rank >= 2 but found` +
        ` tensor of rank ${x.rank}`);
  } else if (x.shape[x.shape.length - 1] !== x.shape[x.shape.length - 2]) {
    // NOTE(review): this message is missing a space before "but"; the unit
    // tests assert this exact string, so it is kept byte-identical here.
    throw new Error(
        `lu() requires the tensor with the two innermost dimension equal` +
        `but found a tensor of dimension ${x.shape}`);
  }

  // Flatten the tensor and decompose each innermost NxN slice independently.
  const xData = x.dataSync();
  const n = x.shape[x.shape.length - 1];
  const totalElements = x.shape.reduce((a, b) => (a * b));
  // Widened element type so the [null, null] result lu2d produces for a
  // non-decomposable slice fits without relying on lu2d's cast.
  const res: Array<[Tensor2D | null, Tensor2D | null]> = [];

  for (let i = 0; i < totalElements; i += n * n) {
    res.push(lu2d(xData.slice(i, i + n * n), n));
  }

  return res;
}
/**
 * Doolittle LU decomposition of one NxN slice stored row-major in `xData`.
 * Returns `[l, u]` with `l` unit-lower-triangular and `u` upper-triangular,
 * or `[null, null]` when a zero pivot makes the slice non-decomposable.
 */
function lu2d(xData: TypedArray, n: number): [Tensor2D, Tensor2D] {
  return ENGINE.tidy(() => {
    const lower: number[] = new Array(n * n).fill(0);
    const upper: number[] = new Array(n * n).fill(0);

    for (let row = 0; row < n; ++row) {
      // Row `row` of U: original entry minus the already-known L*U terms.
      for (let col = row; col < n; ++col) {
        let sum = xData[row * n + col];
        for (let k = 0; k < row; ++k) {
          sum -= lower[row * n + k] * upper[k * n + col];
        }
        upper[row * n + col] = sum;
      }

      // Doolittle convention: L carries a unit diagonal.
      lower[row * n + row] = 1;

      // Column `row` of L below the diagonal; needs a nonzero pivot.
      for (let col = row + 1; col < n; ++col) {
        if (upper[row * n + row] === 0) {
          return [null, null];
        }
        let sum = xData[col * n + row];
        for (let k = 0; k < row; ++k) {
          sum -= lower[col * n + k] * upper[k * n + row];
        }
        lower[col * n + row] = sum / upper[row * n + row];
      }
    }
    return [tensor2d(lower, [n, n]), tensor2d(upper, [n, n])];
  }) as [Tensor2D, Tensor2D];
}

/**
* Compute QR decomposition of m-by-n matrix using Householder transformation.
*
Expand Down Expand Up @@ -265,3 +367,4 @@ function qr2d(x: Tensor2D, fullMatrices = false): [Tensor2D, Tensor2D] {

// Public op-wrapped entry points for the linear-algebra routines.
export const gramSchmidt = op({gramSchmidt_});
export const qr = op({qr_});
export const lu = op({lu_});
111 changes: 111 additions & 0 deletions src/ops/linalg_ops_test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,117 @@ describeWithFlags('gramSchmidt-tiny', ALL_ENVS, () => {
});
});

// Tests for tf.linalg.lu: each case checks the factor shapes/values and that
// L * U reconstructs the input slice.
describeWithFlags('lu', ALL_ENVS, () => {
  it('1X1', () => {
    const x = tensor2d([[2]], [1, 1]);
    const res = tf.linalg.lu(x);
    expect(res.length).toEqual(1);
    const [l, u] = res[0];
    expectArraysClose(l, tensor2d([[1]], [1, 1]));
    expectArraysClose(u, tensor2d([[2]], [1, 1]));
    expectArraysClose(l.matMul(u), x);
  });

  it('2X2', () => {
    const x = tensor2d([[1, 2], [3, 4]], [2, 2]);
    const res = tf.linalg.lu(x);
    expect(res.length).toEqual(1);
    const [l, u] = res[0];
    expectArraysClose(l, tensor2d([[1, 0], [3, 1]], [2, 2]));
    expectArraysClose(u, tensor2d([[1, 2], [0, -2]], [2, 2]));
    expectArraysClose(l.matMul(u), x);
  });

  it('3X3', () => {
    const x = tensor2d([[2, -1, -2], [-4, 6, 3], [-4, -2, 8]], [3, 3]);
    const res = tf.linalg.lu(x);
    expect(res.length).toEqual(1);
    const [l, u] = res[0];
    expectArraysClose(
        l, tensor2d([[1, 0, 0], [-2, 1, 0], [-2, -1, 1]], [3, 3]));
    expectArraysClose(
        u, tensor2d([[2, -1, -2], [0, 4, -1], [0, 0, 3]], [3, 3]));
    expectArraysClose(l.matMul(u), x);
  });

  it('rank < 2', () => {
    const x = tensor1d([1]);
    expect(() => {
      tf.linalg.lu(x);
    })
        .toThrowError(
            `lu() requires input tensor of rank >= 2 but found` +
            ` tensor of rank ${x.rank}`);
  });

  // Fixed test name: the fixture is a [1, 3, 3] tensor, not 1X2X2.
  it('innermost dimensions equal 1X3X3', () => {
    const x = tensor3d([[[2, -1, -2], [-4, 6, 3], [-4, -2, 8]]], [1, 3, 3]);
    const res = tf.linalg.lu(x);
    expect(res.length).toEqual(1);
    const [l, u] = res[0];
    expectArraysClose(
        l, tensor2d([[1, 0, 0], [-2, 1, 0], [-2, -1, 1]], [3, 3]));
    expectArraysClose(
        u, tensor2d([[2, -1, -2], [0, 4, -1], [0, 0, 3]], [3, 3]));
    expectArraysClose(l.matMul(u), x.squeeze());
  });

  it('innermost dimension equal 2X2X2', () => {
    const x = tensor3d([[[1, 2], [3, 4]], [[2, 0], [0, 2]]], [2, 2, 2]);
    const res = tf.linalg.lu(x);
    expect(res.length).toEqual(2);

    expectArraysClose(res[0][0], tensor2d([[1, 0], [3, 1]], [2, 2]));
    expectArraysClose(res[0][1], tensor2d([[1, 2], [0, -2]], [2, 2]));
    expectArraysClose(res[0][0].matMul(res[0][1]), tensor2d([[1, 2], [3, 4]]));

    expectArraysClose(res[1][0], tensor2d([[1, 0], [0, 1]], [2, 2]));
    expectArraysClose(res[1][1], tensor2d([[2, 0], [0, 2]], [2, 2]));
    expectArraysClose(res[1][0].matMul(res[1][1]), tensor2d([[2, 0], [0, 2]]));
  });

  it('innermost dimension equal 1X2X2X2', () => {
    const x = tensor4d([[[[1, 2], [3, 4]], [[2, 0], [0, 2]]]], [1, 2, 2, 2]);
    const res = tf.linalg.lu(x);
    expect(res.length).toEqual(2);

    expectArraysClose(res[0][0], tensor2d([[1, 0], [3, 1]], [2, 2]));
    expectArraysClose(res[0][1], tensor2d([[1, 2], [0, -2]], [2, 2]));
    expectArraysClose(res[0][0].matMul(res[0][1]), tensor2d([[1, 2], [3, 4]]));

    expectArraysClose(res[1][0], tensor2d([[1, 0], [0, 1]], [2, 2]));
    expectArraysClose(res[1][1], tensor2d([[2, 0], [0, 2]], [2, 2]));
    expectArraysClose(res[1][0].matMul(res[1][1]), tensor2d([[2, 0], [0, 2]]));
  });

  it('not a square tensor', () => {
    const x = tensor2d([[1, 2, 3], [4, 5, 6]], [2, 3]);
    expect(() => {
      tf.linalg.lu(x);
    })
        .toThrowError(
            `lu() requires the tensor with the two innermost dimension equal` +
            `but found a tensor of dimension ${x.shape}`);
  });

  it('non decomposable tensor', () => {
    const x = tensor2d([[0, 1], [1, 0]], [2, 2]);
    const res = tf.linalg.lu(x);
    expect(res.length).toEqual(1);
    const [l, u] = res[0];
    expect(l).toEqual(null);
    expect(u).toEqual(null);
  });

  it('16X16', () => {
    const x = tf.randomNormal([16, 16]) as Tensor2D;
    const res = tf.linalg.lu(x);
    expect(res.length).toEqual(1);
    const [l, u] = res[0];
    expectArraysClose(l.matMul(u), x);
  });
});

describeWithFlags('qr', ALL_ENVS, () => {
it('1x1', () => {
const x = tensor2d([[10]], [1, 1]);
Expand Down