G+Smo 24.08.0 — Geometry + Simulation Modules
This is the complete list of members for the class template gsTensorBasis< d, T >, including all inherited members.
active(const gsMatrix< T > &u) const | gsFunctionSet< T > | inline |
active_cwise(const gsMatrix< T > &u, gsVector< index_t, d > &low, gsVector< index_t, d > &upp) const | gsTensorBasis< d, T > | |
active_into(const gsMatrix< T > &u, gsMatrix< index_t > &result) const | gsTensorBasis< d, T > | virtual |
activeCoefs_into(const gsVector< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
allBoundary() const | gsTensorBasis< d, T > | virtual |
anchor_into(index_t i, gsMatrix< T > &result) const | gsTensorBasis< d, T > | virtual |
anchors() const | gsBasis< T > | inline |
anchors_into(gsMatrix< T > &result) const | gsTensorBasis< d, T > | virtual |
basis(const index_t k) const | gsFunctionSet< T > | |
begin() const | gsTensorBasis< d, T > | inline |
begin() | gsTensorBasis< d, T > | inline |
boundary(boxSide const &s) const | gsBasis< T > | inline |
boundaryBasis(boxSide const &s) | gsBasis< T > | |
boundaryOffset(boxSide const &s, index_t offset) const | gsTensorBasis< d, T > | virtual |
clone() | gsFunctionSet< T > | |
coefSlice(short_t dir, index_t k) const | gsTensorBasis< d, T > | |
collocationMatrix(gsMatrix< T > const &u) const | gsBasis< T > | inline |
component(short_t dir) | gsTensorBasis< d, T > | inlinevirtual |
component(short_t dir) const | gsTensorBasis< d, T > | inlinevirtual |
componentBasis(boxComponent b) const | gsBasis< T > | virtual |
componentBasis_withIndices(boxComponent b, gsMatrix< index_t > &indices, bool noBoundary=true) const | gsBasis< T > | virtual |
compute(const gsMatrix< T > &in, gsFuncData< T > &out) const | gsFunctionSet< T > | virtual |
connectivity(const gsMatrix< T > &nodes, gsMesh< T > &mesh) const | gsTensorBasis< d, T > | virtual |
connectivityAtAnchors(gsMesh< T > &mesh) const | gsBasis< T > | inlinevirtual |
create() const | gsBasis< T > | virtual |
degree(short_t i) const | gsTensorBasis< d, T > | inlinevirtual |
degreeDecrease(short_t const &i=1, short_t const dir=-1) | gsTensorBasis< d, T > | inlinevirtual |
degreeElevate(short_t const &i=1, short_t const dir=-1) | gsTensorBasis< d, T > | inlinevirtual |
degreeIncrease(short_t const &i=1, short_t const dir=-1) | gsTensorBasis< d, T > | inlinevirtual |
degreeReduce(short_t const &i=1, short_t const dir=-1) | gsTensorBasis< d, T > | inlinevirtual |
deriv(const gsMatrix< T > &u) const | gsFunctionSet< T > | |
deriv2(const gsMatrix< T > &u) const | gsFunctionSet< T > | |
deriv2_into(const gsMatrix< T > &u, gsMatrix< T > &result) const | gsTensorBasis< d, T > | virtual |
deriv2Func(const gsMatrix< T > &u, const gsMatrix< T > &coefs) const | gsBasis< T > | inline |
deriv2Func_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
deriv2Single(index_t i, const gsMatrix< T > &u) const | gsBasis< T > | inline |
deriv2Single_into(index_t i, const gsMatrix< T > &u, gsMatrix< T > &result) const | gsTensorBasis< d, T > | virtual |
deriv_into(const gsMatrix< T > &u, gsMatrix< T > &result) const | gsTensorBasis< d, T > | virtual |
derivFunc(const gsMatrix< T > &u, const gsMatrix< T > &coefs) const | gsBasis< T > | inline |
derivFunc_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
derivSingle(index_t i, const gsMatrix< T > &u) const | gsBasis< T > | inline |
derivSingle_into(index_t i, const gsMatrix< T > &u, gsMatrix< T > &result) const | gsTensorBasis< d, T > | virtual |
detail() const | gsBasis< T > | inlinevirtual |
Dim | gsTensorBasis< d, T > | static |
domain() const | gsBasis< T > | virtual |
domainDim() const | gsTensorBasis< d, T > | inlinevirtual |
elementIndex(const gsVector< T > &u) const | gsTensorBasis< d, T > | inlinevirtual |
elementInSupportOf(index_t j) const | gsTensorBasis< d, T > | virtual |
elevateContinuity(int const &i=1) | gsBasis< T > | virtual |
end() const | gsTensorBasis< d, T > | inline |
end() | gsTensorBasis< d, T > | inline |
eval(const gsMatrix< T > &u) const | gsFunctionSet< T > | |
eval_into(const gsMatrix< T > &u, gsMatrix< T > &result) const | gsTensorBasis< d, T > | virtual |
eval_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsTensorBasis< d, T > | virtual |
evalAllDers(const gsMatrix< T > &u, int n, bool sameElement=false) const | gsFunctionSet< T > | |
evalAllDers_into(const gsMatrix< T > &u, int n, std::vector< gsMatrix< T > > &result, bool sameElement=false) const | gsTensorBasis< d, T > | virtual |
evalAllDersFunc_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, const unsigned n, std::vector< gsMatrix< T > > &result, bool sameElement=false) const | gsBasis< T > | virtual |
evalAllDersSingle_into(index_t i, const gsMatrix< T > &u, int n, gsMatrix< T > &result) const | gsBasis< T > | virtual |
evalDerSingle_into(index_t i, const gsMatrix< T > &u, int n, gsMatrix< T > &result) const | gsBasis< T > | virtual |
evalFunc(const gsMatrix< T > &u, const gsMatrix< T > &coefs) const | gsBasis< T > | inline |
evalFunc_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
evalSingle(index_t i, const gsMatrix< T > &u) const | gsBasis< T > | inline |
evalSingle_into(index_t i, const gsMatrix< T > &u, gsMatrix< T > &result) const | gsTensorBasis< d, T > | virtual |
function(index_t i) const | gsBasis< T > | |
getComponentsForSide(boxSide const &s, std::vector< Basis_t * > &rr) const | gsTensorBasis< d, T > | |
getMaxCellLength() const | gsTensorBasis< d, T > | virtual |
getMinCellLength() const | gsTensorBasis< d, T > | virtual |
gsTensorBasis(const gsTensorBasis &o) | gsTensorBasis< d, T > | |
gsTensorBasis(Basis_t *x, Basis_t *y) | gsTensorBasis< d, T > | |
gsTensorBasis(Basis_t *x, Basis_t *y, Basis_t *z) | gsTensorBasis< d, T > | |
gsTensorBasis(Basis_t *x, Basis_t *y, Basis_t *z, Basis_t *w) | gsTensorBasis< d, T > | |
gsTensorBasis(iterator it) | gsTensorBasis< d, T > | inlineexplicit |
index(unsigned i, unsigned j, unsigned k=0) const | gsTensorBasis< d, T > | inline |
index(gsVector< index_t, d > const &v) const | gsTensorBasis< d, T > | inline |
indexOnBoundary(const gsVector< index_t, d > &ind) const | gsTensorBasis< d, T > | inline |
indexOnBoundary(const index_t m) const | gsTensorBasis< d, T > | inline |
interpolateAtAnchors(gsMatrix< T > const &vals) const | gsTensorBasis< d, T > | virtual |
interpolateData(gsMatrix< T > const &vals, gsMatrix< T > const &pts) const | gsBasis< T > | inline |
interpolateGrid(gsMatrix< T > const &vals, std::vector< gsMatrix< T > >const &grid) const | gsTensorBasis< d, T > | |
isActive(const index_t i, const gsVector< T > &u) const | gsTensorBasis< d, T > | virtual |
isRational() const | gsBasis< T > | inlinevirtual |
iterator typedef | gsTensorBasis< d, T > | |
jacobianFunc_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
laplacian(const gsMatrix< T > &u) const | gsBasis< T > | inlinevirtual |
linearCombination_into(const gsMatrix< T > &coefs, const gsMatrix< index_t > &actives, const gsMatrix< T > &values, gsMatrix< T > &result, bool sameElement=false) | gsBasis< T > | static |
makeDomainIterator() const | gsTensorBasis< d, T > | virtual |
makeDomainIterator(const boxSide &s) const | gsTensorBasis< d, T > | virtual |
makeGeometry(gsMatrix< T > coefs) const =0 | gsBasis< T > | pure virtual |
makeNonRational() const | gsBasis< T > | inlinevirtual |
matchWith(const boundaryInterface &bi, const gsBasis< T > &other, gsMatrix< index_t > &bndThis, gsMatrix< index_t > &bndOther, index_t offset) const | gsTensorBasis< d, T > | virtual |
maxDegree() const | gsTensorBasis< d, T > | inlinevirtual |
minDegree() const | gsTensorBasis< d, T > | inlinevirtual |
nPieces() const | gsFunctionSet< T > | inlinevirtual |
numActive(const gsMatrix< T > &u) const | gsBasis< T > | inline |
numActive_into(const gsMatrix< T > &u, gsVector< index_t > &result) const | gsBasis< T > | virtual |
numElements(boxSide const &s=boundary::none) const | gsTensorBasis< d, T > | inlinevirtual |
numElements_cwise(gsVector< unsigned > &result) const | gsTensorBasis< d, T > | inline |
piece(const index_t k) const | gsBasis< T > | inlinevirtual |
print(std::ostream &os) const =0 | gsTensorBasis< d, T > | pure virtual |
reduceContinuity(int const &i=1) | gsBasis< T > | virtual |
refine(gsMatrix< T > const &boxes, int refExt=0) | gsBasis< T > | virtual |
refineElements(std::vector< index_t > const &elements) | gsTensorBasis< d, T > | virtual |
refineElements_withCoefs(gsMatrix< T > &coefs, std::vector< index_t > const &boxes) | gsBasis< T > | virtual |
reverse() | gsBasis< T > | virtual |
Scalar_t typedef | gsTensorBasis< d, T > | |
setDegree(short_t const &i) | gsBasis< T > | |
setDegreePreservingMultiplicity(short_t const &i) | gsBasis< T > | |
size() const | gsTensorBasis< d, T > | inlinevirtual |
size(short_t k) const | gsTensorBasis< d, T > | inline |
size_cwise(gsVector< index_t, s > &result) const | gsTensorBasis< d, T > | inline |
source() const | gsBasis< T > | inlinevirtual |
source() | gsBasis< T > | inlinevirtual |
stride(short_t dir) const | gsTensorBasis< d, T > | inline |
stride_cwise(gsVector< index_t, d > &result) const | gsTensorBasis< d, T > | inline |
support() const | gsTensorBasis< d, T > | virtual |
support(const index_t &i) const | gsTensorBasis< d, T > | virtual |
supportInterval(index_t dir) const | gsBasis< T > | |
targetDim() const | gsFunctionSet< T > | inlinevirtual |
tensorIndex(const index_t &m) const | gsTensorBasis< d, T > | inline |
tensorize(const gsBasis &other) const | gsBasis< T > | virtual |
totalDegree() const | gsTensorBasis< d, T > | inlinevirtual |
uniformCoarsen(int numKnots=1) | gsTensorBasis< d, T > | inlinevirtual |
uniformCoarsen_withCoefs(gsMatrix< T > &coefs, int numKnots=1) | gsBasis< T > | virtual |
uniformCoarsen_withTransfer(gsSparseMatrix< T, RowMajor > &transfer, int numKnots=1) | gsTensorBasis< d, T > | virtual |
uniformRefine(int numKnots=1, int mul=1, int dir=-1) | gsTensorBasis< d, T > | inlinevirtual |
uniformRefine_withCoefs(gsMatrix< T > &coefs, int numKnots=1, int mul=1, int dir=-1) | gsTensorBasis< d, T > | virtual |
uniformRefine_withTransfer(gsSparseMatrix< T, RowMajor > &transfer, int numKnots=1, int mul=1) | gsTensorBasis< d, T > | virtual |
weights() const | gsBasis< T > | inlinevirtual |
weights() | gsBasis< T > | inlinevirtual |