KrisLibrary 1.0.0

This is the complete list of members for Math3D::Matrix2, including all inherited members.
add(const Matrix2 &a, const Matrix2 &b) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
col(int j) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
col(int j) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
col1() (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
col1() const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
col2() (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
col2() const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
data | Math3D::Matrix2 | |
determinant() const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
div(const Matrix2 &a, Real b) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
get(Matrix2 &) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
get(Real[2][2]) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
get(Real[4]) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
get(Vector2 &xb, Vector2 &yb) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getCol(int j, Vector2 &v) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getCol1(Vector2 &v) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getCol2(Vector2 &v) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getInverse(Matrix2 &) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getNegative(Matrix2 &) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getRow(int i, Vector2 &v) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getRow1(Vector2 &v) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getRow2(Vector2 &v) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getTranspose(Matrix2 &) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getXBasis() const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
getYBasis() const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
inplaceColScale(Real sx, Real sy) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
inplaceDiv(Real s) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
inplaceInverse() (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
inplaceMul(Real s) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
inplaceNegative() (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
inplaceRowScale(Real sx, Real sy) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
inplaceTranspose() (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
isEqual(const Matrix2 &, Real eps=Zero) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
isIdentity(Real eps=Zero) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
isInvertible(Real eps=Zero) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
isZero(Real eps=Zero) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
Matrix2() (defined in Math3D::Matrix2) | Math3D::Matrix2 | |
Matrix2(const Matrix2 &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | |
Matrix2(Real) (defined in Math3D::Matrix2) | Math3D::Matrix2 | explicit |
Matrix2(const Real[2][2]) (defined in Math3D::Matrix2) | Math3D::Matrix2 | explicit |
Matrix2(const Real *) (defined in Math3D::Matrix2) | Math3D::Matrix2 | explicit |
Matrix2(const Vector2 &xb, const Vector2 &yb) (defined in Math3D::Matrix2) | Math3D::Matrix2 | |
maxAbsElement(int *i=NULL, int *j=NULL) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
maxElement(int *i=NULL, int *j=NULL) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
minAbsElement(int *i=NULL, int *j=NULL) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
minElement(int *i=NULL, int *j=NULL) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
mul(const Matrix2 &a, const Matrix2 &b) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
mul(const Matrix2 &a, Real b) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
mul(const Vector2 &a, Vector2 &out) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
mulTranspose(const Vector2 &a, Vector2 &out) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
mulTransposeA(const Matrix2 &a, const Matrix2 &b) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
mulTransposeB(const Matrix2 &a, const Matrix2 &b) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator const Real *() const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator!=(const Matrix2 &) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | |
operator()(int i, int j) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator()(int i, int j) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator*=(const Matrix2 &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator*=(Real scale) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator+=(const Matrix2 &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator-=(const Matrix2 &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator/=(Real scale) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator=(const Matrix2 &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
operator==(const Matrix2 &) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | |
Read(File &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | |
set(const Matrix2 &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
set(Real) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
set(const Real[2][2]) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
set(const Real *) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
set(const Vector2 &xb, const Vector2 &yb) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setCol(int j, const Vector2 &v) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setCol1(const Vector2 &v) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setCol2(const Vector2 &v) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setDiagonal(const Vector2 &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setIdentity() (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setInverse(const Matrix2 &) | Math3D::Matrix2 | inline |
setNegative(const Matrix2 &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setOuterProduct(const Vector2 &a, const Vector2 &b) | Math3D::Matrix2 | inline |
setRotate(Real rads) | Math3D::Matrix2 | inline |
setRow(int i, const Vector2 &v) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setRow1(const Vector2 &v) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setRow2(const Vector2 &v) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setScale(Real s) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setScale(Real sx, Real sy) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setTranspose(const Matrix2 &) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
setZero() (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
sub(const Matrix2 &a, const Matrix2 &b) (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
trace() const (defined in Math3D::Matrix2) | Math3D::Matrix2 | inline |
Write(File &) const (defined in Math3D::Matrix2) | Math3D::Matrix2 | |