DecisionTree Implementation of a Decision Tree
virtual | ~DecisionTree() |
virtual void* | TMVA::BinaryTree::AddXMLTo(void* parent) const |
void | ApplyValidationSample(const TMVA::DecisionTree::EventConstList* validationSample) const |
TMVA::BinaryTree | TMVA::BinaryTree::BinaryTree() |
TMVA::BinaryTree | TMVA::BinaryTree::BinaryTree(const TMVA::BinaryTree&) |
UInt_t | BuildTree(const TMVA::DecisionTree::EventConstList& eventSample, TMVA::DecisionTreeNode* node = __null) |
Double_t | CheckEvent(const TMVA::Event*, Bool_t UseYesNoLeaf = kFALSE) const |
void | CheckEventWithPrunedTree(const TMVA::Event*) const |
static TClass* | Class() |
virtual const char* | ClassName() const |
UInt_t | CleanTree(TMVA::DecisionTreeNode* node = __null) |
void | ClearTree() |
UInt_t | CountLeafNodes(TMVA::Node* n = __null) |
UInt_t | TMVA::BinaryTree::CountNodes(TMVA::Node* n = __null) |
static TMVA::DecisionTree* | CreateFromXML(void* node, UInt_t tmva_Version_Code = 262657) |
virtual TMVA::DecisionTreeNode* | CreateNode(UInt_t) const |
virtual TMVA::BinaryTree* | CreateTree() const |
TMVA::DecisionTree | DecisionTree() |
TMVA::DecisionTree | DecisionTree(const TMVA::DecisionTree& d) |
TMVA::DecisionTree | DecisionTree(TMVA::SeparationBase* sepType, Float_t minSize, Int_t nCuts, TMVA::DataSetInfo* = __null, UInt_t cls = 0, Bool_t randomisedTree = kFALSE, Int_t useNvars = 0, Bool_t usePoissonNvars = kFALSE, UInt_t nMaxDepth = 9999999, Int_t iSeed = fgRandomSeed, Float_t purityLimit = 0.5, Int_t treeID = 0) |
void | DescendTree(TMVA::Node* n = __null) |
Bool_t | DoRegression() const |
void | FillEvent(const TMVA::Event& event, TMVA::DecisionTreeNode* node) |
void | FillTree(const TMVA::DecisionTree::EventList& eventSample) |
TMVA::Types::EAnalysisType | GetAnalysisType() |
TMVA::DecisionTreeNode* | GetEventNode(const TMVA::Event& e) const |
vector<Double_t> | GetFisherCoefficients(const TMVA::DecisionTree::EventConstList& eventSample, UInt_t nFisherVars, UInt_t* mapVarInFisher) |
TMVA::Node* | TMVA::BinaryTree::GetLeftDaughter(TMVA::Node* n) |
UInt_t | TMVA::BinaryTree::GetNNodes() const |
Int_t | GetNNodesBeforePruning() |
TMVA::Node* | GetNode(ULong_t sequence, UInt_t depth) |
Double_t | GetNodePurityLimit() const |
Double_t | GetPruneStrength() const |
void | GetRandomisedVariables(Bool_t* useVariable, UInt_t* variableMap, UInt_t& nVars) |
TMVA::Node* | TMVA::BinaryTree::GetRightDaughter(TMVA::Node* n) |
virtual TMVA::DecisionTreeNode* | GetRoot() const |
Double_t | GetSumWeights(const TMVA::DecisionTree::EventConstList* validationSample) const |
UInt_t | TMVA::BinaryTree::GetTotalTreeDepth() const |
Int_t | GetTreeID() |
vector<Double_t> | GetVariableImportance() |
Double_t | GetVariableImportance(UInt_t ivar) |
virtual TClass* | IsA() const |
TMVA::DecisionTree& | operator=(const TMVA::DecisionTree&) |
virtual void | TMVA::BinaryTree::Print(ostream& os) const |
void | PruneNode(TMVA::DecisionTreeNode* node) |
void | PruneNodeInPlace(TMVA::DecisionTreeNode* node) |
Double_t | PruneTree(const TMVA::DecisionTree::EventConstList* validationSample = __null) |
virtual void | TMVA::BinaryTree::Read(istream& istr, UInt_t tmva_Version_Code = 262657) |
virtual void | TMVA::BinaryTree::ReadXML(void* node, UInt_t tmva_Version_Code = 262657) |
void | SetAnalysisType(TMVA::Types::EAnalysisType t) |
void | SetMinLinCorrForFisher(Double_t min) |
void | SetNodePurityLimit(Double_t p) |
void | SetNVars(Int_t n) |
void | SetParentTreeInNodes(TMVA::Node* n = __null) |
void | SetPruneMethod(TMVA::DecisionTree::EPruneMethod m = kCostComplexityPruning) |
void | SetPruneStrength(Double_t p) |
void | TMVA::BinaryTree::SetRoot(TMVA::Node* r) |
void | TMVA::BinaryTree::SetTotalTreeDepth(Int_t depth) |
void | TMVA::BinaryTree::SetTotalTreeDepth(TMVA::Node* n = __null) |
void | SetTreeID(Int_t treeID) |
void | SetUseExclusiveVars(Bool_t t = kTRUE) |
void | SetUseFisherCuts(Bool_t t = kTRUE) |
virtual void | ShowMembers(TMemberInspector& insp) const |
virtual void | Streamer(TBuffer&) |
void | StreamerNVirtual(TBuffer& ClassDef_StreamerNVirtual_b) |
Double_t | TestPrunedTreeQuality(const TMVA::DecisionTreeNode* dt = __null, Int_t mode = 0) const |
Double_t | TrainNode(const TMVA::DecisionTree::EventConstList& eventSample, TMVA::DecisionTreeNode* node) |
Double_t | TrainNodeFast(const TMVA::DecisionTree::EventConstList& eventSample, TMVA::DecisionTreeNode* node) |
Double_t | TrainNodeFull(const TMVA::DecisionTree::EventConstList& eventSample, TMVA::DecisionTreeNode* node) |
void | TMVA::BinaryTree::DeleteNode(TMVA::Node*) |
TMVA::MsgLogger& | TMVA::BinaryTree::Log() const |
Double_t | SamplePurity(TMVA::DecisionTree::EventList eventSample) |
static TMVA::DecisionTree::EPruneMethod | kCostComplexityPruning | |
static TMVA::DecisionTree::EPruneMethod | kExpectedErrorPruning | |
static TMVA::DecisionTree::EPruneMethod | kNoPruning |
UInt_t | TMVA::BinaryTree::fDepth | maximal depth in tree reached |
UInt_t | TMVA::BinaryTree::fNNodes | total number of nodes in the tree (counted) |
TMVA::Node* | TMVA::BinaryTree::fRoot | the root node of the tree |
TMVA::Types::EAnalysisType | fAnalysisType | kClassification(=0=false) or kRegression(=1=true) |
TMVA::DataSetInfo* | fDataSetInfo | |
UInt_t | fMaxDepth | max depth |
Double_t | fMinLinCorrForFisher | the minimum linear correlation between two variables demanded for use in the Fisher criterion in node splitting
Double_t | fMinNodeSize | min fraction of training events in node |
Double_t | fMinSepGain | min number of separation gain to perform node splitting |
Double_t | fMinSize | min number of events in node |
TRandom3* | fMyTrandom | random number generator for randomised trees |
Int_t | fNCuts | number of grid point in variable cut scans |
Int_t | fNNodesBeforePruning | remember this one (in case of pruning, it allows one to monitor the number of nodes before/after pruning)
Double_t | fNodePurityLimit | purity limit to decide whether a node is signal |
UInt_t | fNvars | number of variables used to separate S and B |
TMVA::DecisionTree::EPruneMethod | fPruneMethod | method used for pruning
Double_t | fPruneStrength | a parameter to set the "amount" of pruning..needs to be adjusted |
Bool_t | fRandomisedTree | choose at each node splitting a random set of variables |
TMVA::RegressionVariance* | fRegType | the separation criterion used in regression
TMVA::SeparationBase* | fSepType | the separation criterion
UInt_t | fSigClass | class which is treated as signal when building the tree |
Int_t | fTreeID | just an ID number given to the tree; makes debugging easier as the tree knows which one it is.
Bool_t | fUseExclusiveVars | individual variables already used in the Fisher criterion are no longer analysed individually for node splitting
Bool_t | fUseFisherCuts | use multivariate splits using the Fisher criterion
Int_t | fUseNvars | the number of variables used in randomised trees; |
Bool_t | fUsePoissonNvars | use "fUseNvars" not as a fixed number but as the mean of a Poisson distribution in each split
Bool_t | fUseSearchTree | cut scan done with binary trees or simple event loop. |
vector<Double_t> | fVariableImportance | the relative importance of the different variables |
static const Int_t | fgDebugLevel | debug level determining some printout/control plots etc. |
static const Int_t | fgRandomSeed | set nonzero for debugging and zero for random seeds |
Inheritance Chart: | |||||||||
|
the constructor needed for the "reading" of the decision tree from weight files
the constructor needed for constructing the decision tree via training with events
Retrieves the address of the root node
{ return dynamic_cast<TMVA::DecisionTreeNode*>(fRoot); }
building of a tree by recursively splitting the nodes UInt_t BuildTree( const EventList & eventSample, DecisionTreeNode *node = NULL);
{ return TrainNodeFast( eventSample, node ); }
determine the way how a node is split (which variable, which cut value)
fill a tree with a given structure already (just see how many signal/bkg events end up in each node)
fill the existing decision tree structure by filling events in from the top node and see where they happen to end up
returns: 1 = Signal (right), -1 = Bkg (left)
{ fPruneMethod = m; }
recursive pruning of the tree, validation sample required for automatic pruning
manage the pruning strength parameter (if < 0, automate the pruning process)
{ fPruneStrength = p; }
apply pruning validation sample to a decision tree
return the misclassification rate of a pruned tree
pass a single validation event through a pruned decision tree
calculate the normalization factor for a pruning validation sample
retrieve node from the tree. Its position (up to a maximal tree depth of 64)
is coded as a sequence of left-right moves starting from the root, coded as
0-1 bit patterns stored in the "long-integer" together with the depth
prune a node from the tree without deleting its descendants; allows one to effectively prune a tree many times without making deep copies
utility function: calculates the purity S/(S+B) out of the number of signal and background events collected from a given event sample