G+Smo 24.08.0 (Geometry + Simulation Modules)
This is the complete list of members for gsHBSplineBasis< d, T >, including all inherited members. Each row gives the member, the class that declares it, and its attributes; a short usage sketch follows the table.
_diadicIndexToKnotIndex(const index_t level, gsVector< index_t, d > &diadicIndex) const | gsHTensorBasis< d, T > | protected |
_knotIndexToDiadicIndex(const index_t level, const index_t dir, index_t &knotIndex) const | gsHTensorBasis< d, T > | protected |
active(const gsMatrix< T > &u) const | gsFunctionSet< T > | inline |
active_into(const gsMatrix< T > &u, gsMatrix< index_t > &result) const | gsHTensorBasis< d, T > | virtual |
activeBoundaryFunctionsOfLevel(const unsigned level, const boxSide &s, std::vector< bool > &actives) const | gsHTensorBasis< d, T > | |
activeCoefs_into(const gsVector< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
addLevel(const gsTensorBSplineBasis< d, T > &next_basis) | gsHTensorBasis< d, T > | |
allBoundary() const | gsHTensorBasis< d, T > | virtual |
anchor_into(index_t i, gsMatrix< T > &result) const | gsHTensorBasis< d, T > | inline virtual |
anchors() const | gsBasis< T > | inline |
anchors_into(gsMatrix< T > &result) const | gsHTensorBasis< d, T > | inline virtual |
basis(const index_t k) const | gsFunctionSet< T > | |
basisSlice(index_t dir_fixed, T par) const | gsHBSplineBasis< d, T > | |
boundary(boxSide const &s) const | gsBasis< T > | inline |
boundaryBasis(boxSide const &s) | gsHBSplineBasis< d, T > | |
boundaryOffset(boxSide const &s, index_t offset) const | gsHTensorBasis< d, T > | virtual |
clone() | gsFunctionSet< T > | |
coarsening(const std::vector< gsSortedVector< index_t > > &old, const std::vector< gsSortedVector< index_t > > &n, const gsSparseMatrix< T, RowMajor > &transfer) const | gsHBSplineBasis< d, T > | private virtual |
collocationMatrix(gsMatrix< T > const &u) const | gsBasis< T > | inline |
component(short_t i) | gsHTensorBasis< d, T > | inline virtual |
component(short_t i) const | gsHTensorBasis< d, T > | inline virtual |
componentBasis(boxComponent b) const | gsBasis< T > | virtual |
componentBasis_withIndices(boxComponent b, gsMatrix< index_t > &indices, bool noBoundary=true) const | gsBasis< T > | virtual |
compute(const gsMatrix< T > &in, gsFuncData< T > &out) const | gsFunctionSet< T > | virtual |
connectivity(const gsMatrix< T > &nodes, gsMesh< T > &mesh) const | gsHTensorBasis< d, T > | virtual |
connectivityAtAnchors(gsMesh< T > &mesh) const | gsBasis< T > | inline virtual |
create() const | gsBasis< T > | virtual |
createMoreLevels(int numLevels) const | gsHTensorBasis< d, T > | protected |
degree(short_t i) const | gsHTensorBasis< d, T > | inline virtual |
gismo::gsBasis::degreeDecrease(short_t const &i=1, short_t const dir=-1) | gsBasis< T > | virtual |
gismo::gsBasis::degreeElevate(short_t const &i=1, short_t const dir=-1) | gsBasis< T > | virtual |
gismo::gsBasis::degreeIncrease(short_t const &i=1, short_t const dir=-1) | gsBasis< T > | virtual |
gismo::gsBasis::degreeReduce(short_t const &i=1, short_t const dir=-1) | gsBasis< T > | virtual |
deriv(const gsMatrix< T > &u) const | gsFunctionSet< T > | |
deriv2(const gsMatrix< T > &u) const | gsFunctionSet< T > | |
deriv2_into(const gsMatrix< T > &u, gsMatrix< T > &result) const | gsHBSplineBasis< d, T > | virtual |
deriv2Func(const gsMatrix< T > &u, const gsMatrix< T > &coefs) const | gsBasis< T > | inline |
deriv2Func_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
deriv2Single(index_t i, const gsMatrix< T > &u) const | gsBasis< T > | inline |
deriv2Single_into(index_t i, const gsMatrix< T > &u, gsMatrix< T > &result) const | gsHBSplineBasis< d, T > | virtual |
deriv_into(const gsMatrix< T > &u, gsMatrix< T > &result) const | gsHBSplineBasis< d, T > | virtual |
derivFunc(const gsMatrix< T > &u, const gsMatrix< T > &coefs) const | gsBasis< T > | inline |
derivFunc_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
derivSingle(index_t i, const gsMatrix< T > &u) const | gsBasis< T > | inline |
derivSingle_into(index_t i, const gsMatrix< T > &u, gsMatrix< T > &result) const | gsHBSplineBasis< d, T > | virtual |
detail() const | gsBasis< T > | inline virtual |
Dim | gsHTensorBasis< d, T > | static |
dim() const | gsHTensorBasis< d, T > | inline virtual |
domain() const | gsBasis< T > | virtual |
domainBoundariesIndices(std::vector< std::vector< std::vector< std::vector< index_t > > > > &result) const | gsHTensorBasis< d, T > | |
domainBoundariesParams(std::vector< std::vector< std::vector< std::vector< T > > > > &result) const | gsHTensorBasis< d, T > | |
domainDim() const | gsHBSplineBasis< d, T > | inlinevirtual |
elementIndex(const gsVector< T > &u) const | gsBasis< T > | virtual |
elementInSupportOf(index_t j) const | gsHTensorBasis< d, T > | inline virtual |
elevateContinuity(int const &i=1) | gsBasis< T > | virtual |
eval(const gsMatrix< T > &u) const | gsFunctionSet< T > | |
eval_into(const gsMatrix< T > &u, gsMatrix< T > &result) const | gsHBSplineBasis< d, T > | virtual |
evalAllDers(const gsMatrix< T > &u, int n, bool sameElement=false) const | gsFunctionSet< T > | |
evalAllDers_into(const gsMatrix< T > &u, int n, std::vector< gsMatrix< T > > &result, bool sameElement=false) const | gsFunctionSet< T > | virtual |
evalAllDersFunc_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, const unsigned n, std::vector< gsMatrix< T > > &result, bool sameElement=false) const | gsBasis< T > | virtual |
evalAllDersSingle_into(index_t i, const gsMatrix< T > &u, int n, gsMatrix< T > &result) const | gsBasis< T > | virtual |
evalDerSingle_into(index_t i, const gsMatrix< T > &u, int n, gsMatrix< T > &result) const | gsBasis< T > | virtual |
evalFunc(const gsMatrix< T > &u, const gsMatrix< T > &coefs) const | gsBasis< T > | inline |
evalFunc_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
evalSingle(index_t i, const gsMatrix< T > &u) const | gsBasis< T > | inline |
evalSingle_into(index_t i, const gsMatrix< T > &u, gsMatrix< T > &result) const | gsHBSplineBasis< d, T > | virtual |
flatTensorIndexesToHierachicalIndexes(gsSortedVector< int > &indexes, const int level) const | gsHTensorBasis< d, T > | |
flatTensorIndexOf(const index_t i) const | gsHTensorBasis< d, T > | inline |
flatTensorIndexOf(const index_t i, const index_t level) const | gsHTensorBasis< d, T > | inline |
flatTensorIndexToHierachicalIndex(index_t index, const int level) const | gsHTensorBasis< d, T > | |
function(index_t i) const | gsBasis< T > | |
GeometryType typedef | gsHBSplineBasis< d, T > | |
getBases() const | gsHTensorBasis< d, T > | inline |
getBoxesAlongSlice(int dir, T par, std::vector< index_t > &boxes) const | gsHTensorBasis< d, T > | protected |
getLevelAtIndex(const point &Pt) const | gsHTensorBasis< d, T > | inline |
getLevelAtPoint(const gsMatrix< T > &Pt) const | gsHTensorBasis< d, T > | inline |
getLevelUniqueSpanAtPoints(const gsMatrix< T > &Pt, gsVector< index_t > &lvl, gsMatrix< index_t > &loIdx) const | gsHTensorBasis< d, T > | inline |
getMaxCellLength() const | gsBasis< T > | virtual |
getMinCellLength() const | gsBasis< T > | virtual |
gsHBSplineBasis(gsBasis< T > const &tbasis, bool manualLevels=false) | gsHBSplineBasis< d, T > | inline |
gsHTensorBasis() | gsHTensorBasis< d, T > | inline |
gsHTensorBasis(gsTensorBSplineBasis< d, T > const &tbasis, gsMatrix< T > const &boxes) | gsHTensorBasis< d, T > | inline |
gsHTensorBasis(gsTensorBSplineBasis< d, T > const &tbasis, gsMatrix< T > const &boxes, const std::vector< index_t > &levels) | gsHTensorBasis< d, T > | inline |
gsHTensorBasis(const gsHTensorBasis &o) | gsHTensorBasis< d, T > | inline |
increaseMultiplicity(index_t lvl, int dir, T knotValue, int mult=1) | gsHTensorBasis< d, T > | virtual |
increaseMultiplicity(index_t lvl, int dir, const std::vector< T > &knotValue, int mult=1) | gsHTensorBasis< d, T > | virtual |
initialize() | gsHBSplineBasis< d, T > | private |
interpolateAtAnchors(gsMatrix< T > const &vals) const | gsBasis< T > | inline virtual |
interpolateData(gsMatrix< T > const &vals, gsMatrix< T > const &pts) const | gsBasis< T > | inline |
isActive(const index_t i, const gsVector< T > &u) const | gsBasis< T > | virtual |
isRational() const | gsBasis< T > | inline virtual |
jacobianFunc_into(const gsMatrix< T > &u, const gsMatrix< T > &coefs, gsMatrix< T > &result) const | gsBasis< T > | virtual |
knot(int lvl, int k, int i) const | gsHTensorBasis< d, T > | inline |
laplacian(const gsMatrix< T > &u) const | gsBasis< T > | inline virtual |
levelOf(index_t i) const | gsHTensorBasis< d, T > | inline |
linearCombination_into(const gsMatrix< T > &coefs, const gsMatrix< index_t > &actives, const gsMatrix< T > &values, gsMatrix< T > &result, bool sameElement=false) | gsBasis< T > | static |
m_bases | gsHTensorBasis< d, T > | mutable protected |
m_tree | gsHTensorBasis< d, T > | protected |
m_uIndices | gsHTensorBasis< d, T > | protected |
m_xmatrix | gsHTensorBasis< d, T > | protected |
m_xmatrix_offset | gsHTensorBasis< d, T > | protected |
makeCompressed() | gsHTensorBasis< d, T > | |
makeDomainIterator() const | gsHTensorBasis< d, T > | inline virtual |
makeDomainIterator(const boxSide &s) const | gsHTensorBasis< d, T > | inline virtual |
makeGeometry(gsMatrix< T > coefs) const =0 | gsBasis< T > | pure virtual |
makeNonRational() const | gsBasis< T > | inline virtual |
manualLevels() const | gsHTensorBasis< d, T > | inline |
matchWith(const boundaryInterface &bi, const gsBasis< T > &other, gsMatrix< index_t > &bndThis, gsMatrix< index_t > &bndOther, index_t offset) const | gsHTensorBasis< d, T > | virtual |
maxDegree() const | gsHTensorBasis< d, T > | inline virtual |
maxLevel() const | gsHTensorBasis< d, T > | inline |
minDegree() const | gsHTensorBasis< d, T > | inline virtual |
needLevel(int maxLevel) const | gsHTensorBasis< d, T > | protected |
nPieces() const | gsFunctionSet< T > | inline virtual |
numActive(const gsMatrix< T > &u) const | gsBasis< T > | inline |
numActive_into(const gsMatrix< T > &u, gsVector< index_t > &result) const | gsHTensorBasis< d, T > | inline virtual |
numBreaks(int lvl, int k) const | gsHTensorBasis< d, T > | inline |
numElements(boxSide const &s=0) const | gsHTensorBasis< d, T > | inline virtual |
numKnots(int lvl, int k) const | gsHTensorBasis< d, T > | inline |
numLevels() const | gsHTensorBasis< d, T > | inline |
only_insert_box(point const &k1, point const &k2, int lvl) | gsHTensorBasis< d, T > | |
piece(const index_t k) const | gsBasis< T > | inline virtual |
print(std::ostream &os) const | gsHBSplineBasis< d, T > | virtual |
printBases(std::ostream &os=gsInfo) const | gsHTensorBasis< d, T > | inline |
printBasic(std::ostream &os=gsInfo) const | gsHTensorBasis< d, T > | inline |
printSpaces(std::ostream &os=gsInfo) const | gsHTensorBasis< d, T > | inline |
Ptr typedef | gsHBSplineBasis< d, T > | |
reduceContinuity(int const &i=1) | gsHTensorBasis< d, T > | inline virtual |
refine(gsMatrix< T > const &boxes, int refExt) | gsHTensorBasis< d, T > | virtual |
refine(gsMatrix< T > const &boxes) | gsHTensorBasis< d, T > | virtual |
refineBasisFunction(const index_t i) | gsHTensorBasis< d, T > | |
refineElements(std::vector< index_t > const &boxes) | gsHTensorBasis< d, T > | virtual |
refineElements_withCoefs(gsMatrix< T > &coefs, std::vector< index_t > const &boxes) | gsHTensorBasis< d, T > | virtual |
refineSide(const boxSide side, index_t lvl) | gsHTensorBasis< d, T > | |
reverse() | gsBasis< T > | virtual |
setActiveToLvl(int level, std::vector< CMatrix > &x_matrix_lvl) const | gsHTensorBasis< d, T > | |
setDegree(short_t const &i) | gsBasis< T > | |
setDegreePreservingMultiplicity(short_t const &i) | gsBasis< T > | |
size() const | gsHTensorBasis< d, T > | virtual |
source() const | gsBasis< T > | inline virtual |
source() | gsBasis< T > | inline virtual |
support() const | gsHTensorBasis< d, T > | virtual |
support(const index_t &i) const | gsHTensorBasis< d, T > | virtual |
supportInterval(index_t dir) const | gsBasis< T > | |
targetDim() const | gsFunctionSet< T > | inline virtual |
tensorize(const gsBasis &other) const | gsBasis< T > | virtual |
tensorLevel(index_t i) const | gsHTensorBasis< d, T > | inline |
testPartitionOfUnity(const index_t npts=100, const T tol=1e-12) const | gsHTensorBasis< d, T > | |
totalDegree() const | gsBasis< T > | virtual |
transfer(const std::vector< gsSortedVector< index_t > > &old, gsSparseMatrix< T > &result) | gsHTensorBasis< d, T > | |
transferbyLvl(std::vector< gsSparseMatrix< T > > &result) | gsHBSplineBasis< d, T > | |
tree() const | gsHTensorBasis< d, T > | inline |
tree() | gsHTensorBasis< d, T > | inline |
treeSize() const | gsHTensorBasis< d, T > | inline |
uniformCoarsen(int numKnots=1) | gsHTensorBasis< d, T > | virtual |
uniformCoarsen_withCoefs(gsMatrix< T > &coefs, int numKnots=1) | gsHTensorBasis< d, T > | virtual |
uniformCoarsen_withTransfer(gsSparseMatrix< T, RowMajor > &transfer, int numKnots=1) | gsBasis< T > | virtual |
uniformRefine(int numKnots=1, int mul=1, int dir=-1) | gsHTensorBasis< d, T > | virtual |
uniformRefine_withCoefs(gsMatrix< T > &coefs, int numKnots=1, int mul=1, int dir=-1) | gsHTensorBasis< d, T > | virtual |
uniformRefine_withTransfer(gsSparseMatrix< T, RowMajor > &transfer, int numKnots=1, int mul=1) | gsBasis< T > | virtual |
unrefineElements(std::vector< index_t > const &boxes) | gsHTensorBasis< d, T > | virtual |
update_structure() | gsHTensorBasis< d, T > | protected virtual |
uPtr typedef | gsHBSplineBasis< d, T > | |
weights() const | gsBasis< T > | inline virtual |
weights() | gsBasis< T > | inline virtual |
~gsHTensorBasis() | gsHTensorBasis< d, T > | inline virtual |
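Most of the members above are inherited from gsHTensorBasis< d, T > and gsBasis< T >. As orientation, the following minimal sketch (not taken from this page) shows typical usage: constructing a gsHBSplineBasis from a tensor-product B-spline basis via the constructor listed above, refining a box of the parameter domain with refine(), and querying the result with numLevels(), size(), active_into() and eval_into(). The knot-vector and tensor-basis setup uses standard G+Smo constructors that are assumed here and may need adjusting for your installed version.

```cpp
#include <gismo.h>

using namespace gismo;

int main()
{
    // Level-0 basis: a bi-quadratic tensor-product B-spline basis on [0,1]^2
    // (3 interior knots per direction, end-knot multiplicity 3 => degree 2).
    gsKnotVector<real_t> kv(0.0, 1.0, 3, 3);
    gsTensorBSplineBasis<2, real_t> tbasis(kv, kv);

    // Wrap it into a hierarchical B-spline basis (see the constructor above).
    gsHBSplineBasis<2, real_t> hb(tbasis);

    // refine(boxes): each box is given by a pair of columns (lower corner,
    // upper corner) in the parameter domain; here one box [0,0.5] x [0,0.5].
    gsMatrix<real_t> box(2, 2);
    box << 0.0, 0.5,
           0.0, 0.5;
    hb.refine(box);

    gsInfo << "levels: " << hb.numLevels()
           << ", functions: " << hb.size() << "\n";

    // Query active functions and their values at a parameter point.
    gsMatrix<real_t> u(2, 1);
    u << 0.25, 0.25;
    gsMatrix<index_t> act;
    gsMatrix<real_t>  val;
    hb.active_into(u, act);
    hb.eval_into(u, val);
    gsInfo << "active at (0.25, 0.25): " << act.transpose() << "\n";

    return 0;
}
```

For refinement specified in index form rather than parameter values, refineElements() (listed above) takes boxes encoded as level and knot-span indices; refine(boxes, refExt) additionally extends each box by refExt cells.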