
class TMultiLayerPerceptron: public TObject


 TMultiLayerPerceptron

 This class describes a neural network.
 There are facilities to train the network and use the output.

 The input layer is made of inactive neurons (returning the
 normalized input), the hidden layers are made of sigmoid neurons,
 and the output neurons are linear.

 The basic input is a TTree and two (training and test) TEventLists.
 For classification jobs, a branch (which may be in a TFriend) must
 contain the expected output.
 Six learning methods are available: kStochastic, kBatch,
 kSteepestDescent, kRibierePolak, kFletcherReeves and kBFGS.

 This implementation is *inspired* by the mlpfit package by
 J. Schwindling et al.
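
 A minimal usage sketch (not taken from the class documentation; the tree,
 the branch names "x", "y", "type" and the layout string are illustrative
 assumptions):

   #include "TTree.h"
   #include "TMultiLayerPerceptron.h"

   // Sketch: "tree" is assumed to have branches x, y (inputs) and type
   // (the expected output for a classification job).
   void TrainSketch(TTree *tree)
   {
      // Layout string: "inputs : hidden layer size : output"
      TMultiLayerPerceptron mlp("x,y:4:type", tree,
                                "Entry$%2==0",   // training set: even entries
                                "Entry$%2==1");  // test set: odd entries
      mlp.SetLearningMethod(TMultiLayerPerceptron::kBFGS);
      mlp.Train(100, "text,graph,update=10");    // 100 epochs with progress output
      mlp.DrawResult(0, "test");                 // output neuron 0 vs. truth on the test set
      mlp.Export("NNfunction", "C++");           // export as a standalone C++ class
   }

 The same network could also be built with TEventList arguments instead of
 the selection strings, and trained with any of the other learning methods.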


Function Members (Methods)

public:
virtual ~TMultiLayerPerceptron()
void TObject::AbstractMethod(const char* method) const
virtual void TObject::AppendPad(Option_t* option = "")
virtual void TObject::Browse(TBrowser* b)
static TClass* Class()
virtual const char* TObject::ClassName() const
virtual void TObject::Clear(Option_t* = "")
virtual TObject* TObject::Clone(const char* newname = "") const
virtual Int_t TObject::Compare(const TObject* obj) const
void ComputeDEDw() const
virtual void TObject::Copy(TObject& object) const
virtual void TObject::Delete(Option_t* option = "") *MENU*
virtual Int_t TObject::DistancetoPrimitive(Int_t px, Int_t py)
virtual void Draw(Option_t* option = "")
virtual void TObject::DrawClass() const *MENU*
virtual TObject* TObject::DrawClone(Option_t* option = "") const *MENU*
void DrawResult(Int_t index = 0, Option_t* option = "test") const
virtual void TObject::Dump() const *MENU*
Bool_t DumpWeights(Option_t* filename = "-") const
virtual void TObject::Error(const char* method, const char* msgfmt) const
Double_t Evaluate(Int_t index, Double_t* params) const
virtual void TObject::Execute(const char* method, const char* params, Int_t* error = 0)
virtual void TObject::Execute(TMethod* method, TObjArray* params, Int_t* error = 0)
virtual void TObject::ExecuteEvent(Int_t event, Int_t px, Int_t py)
void Export(Option_t* filename = "NNfunction", Option_t* language = "C++") const
virtual void TObject::Fatal(const char* method, const char* msgfmt) const
virtual TObject* TObject::FindObject(const char* name) const
virtual TObject* TObject::FindObject(const TObject* obj) const
Double_t GetDelta() const
virtual Option_t* TObject::GetDrawOption() const
static Long_t TObject::GetDtorOnly()
Double_t GetEpsilon() const
Double_t GetError(Int_t event) const
Double_t GetError(TMultiLayerPerceptron::EDataSet set) const
Double_t GetEta() const
Double_t GetEtaDecay() const
virtual const char* TObject::GetIconName() const
TMultiLayerPerceptron::ELearningMethod GetLearningMethod() const
virtual const char* TObject::GetName() const
virtual char* TObject::GetObjectInfo(Int_t px, Int_t py) const
static Bool_t TObject::GetObjectStat()
virtual Option_t* TObject::GetOption() const
Int_t GetReset() const
TString GetStructure() const
Double_t GetTau() const
virtual const char* TObject::GetTitle() const
TNeuron::ENeuronType GetType() const
virtual UInt_t TObject::GetUniqueID() const
virtual Bool_t TObject::HandleTimer(TTimer* timer)
virtual ULong_t TObject::Hash() const
virtual void TObject::Info(const char* method, const char* msgfmt) const
virtual Bool_t TObject::InheritsFrom(const char* classname) const
virtual Bool_t TObject::InheritsFrom(const TClass* cl) const
virtual void TObject::Inspect() const *MENU*
void TObject::InvertBit(UInt_t f)
virtual TClass* IsA() const
virtual Bool_t TObject::IsEqual(const TObject* obj) const
virtual Bool_t TObject::IsFolder() const
Bool_t TObject::IsOnHeap() const
virtual Bool_t TObject::IsSortable() const
Bool_t TObject::IsZombie() const
Bool_t LoadWeights(Option_t* filename = "")
virtual void TObject::ls(Option_t* option = "") const
void TObject::MayNotUse(const char* method) const
virtual Bool_t TObject::Notify()
void TObject::Obsolete(const char* method, const char* asOfVers, const char* removedFromVers) const
void TObject::operator delete(void* ptr)
void TObject::operator delete(void* ptr, void* vp)
void TObject::operator delete[](void* ptr)
void TObject::operator delete[](void* ptr, void* vp)
void* TObject::operator new(size_t sz)
void* TObject::operator new(size_t sz, void* vp)
void* TObject::operator new[](size_t sz)
void* TObject::operator new[](size_t sz, void* vp)
virtual void TObject::Paint(Option_t* option = "")
virtual void TObject::Pop()
virtual void TObject::Print(Option_t* option = "") const
void Randomize() const
virtual Int_t TObject::Read(const char* name)
virtual void TObject::RecursiveRemove(TObject* obj)
void TObject::ResetBit(UInt_t f)
Double_t Result(Int_t event, Int_t index = 0) const
virtual void TObject::SaveAs(const char* filename = "", Option_t* option = "") const *MENU*
virtual void TObject::SavePrimitive(ostream& out, Option_t* option = "")
void TObject::SetBit(UInt_t f)
void TObject::SetBit(UInt_t f, Bool_t set)
void SetData(TTree*)
void SetDelta(Double_t delta)
virtual void TObject::SetDrawOption(Option_t* option = "") *MENU*
static void TObject::SetDtorOnly(void* obj)
void SetEpsilon(Double_t eps)
void SetEta(Double_t eta)
void SetEtaDecay(Double_t ed)
void SetEventWeight(const char*)
void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method)
static void TObject::SetObjectStat(Bool_t stat)
void SetReset(Int_t reset)
void SetTau(Double_t tau)
void SetTestDataSet(TEventList* test)
void SetTestDataSet(const char* test)
void SetTrainingDataSet(TEventList* train)
void SetTrainingDataSet(const char* train)
virtual void TObject::SetUniqueID(UInt_t uid)
virtual void ShowMembers(TMemberInspector& insp) const
virtual void Streamer(TBuffer&)
void StreamerNVirtual(TBuffer& ClassDef_StreamerNVirtual_b)
virtual void TObject::SysError(const char* method, const char* msgfmt) const
Bool_t TObject::TestBit(UInt_t f) const
Int_t TObject::TestBits(UInt_t f) const
TMultiLayerPerceptron()
TMultiLayerPerceptron(const char* layout, TTree* data = 0, const char* training = "Entry$%2==0", const char* test = "", TNeuron::ENeuronType type = TNeuron::kSigmoid, const char* extF = "", const char* extD = "")
TMultiLayerPerceptron(const char* layout, TTree* data, TEventList* training, TEventList* test, TNeuron::ENeuronType type = TNeuron::kSigmoid, const char* extF = "", const char* extD = "")
TMultiLayerPerceptron(const char* layout, const char* weight, TTree* data = 0, const char* training = "Entry$%2==0", const char* test = "", TNeuron::ENeuronType type = TNeuron::kSigmoid, const char* extF = "", const char* extD = "")
TMultiLayerPerceptron(const char* layout, const char* weight, TTree* data, TEventList* training, TEventList* test, TNeuron::ENeuronType type = TNeuron::kSigmoid, const char* extF = "", const char* extD = "")
void Train(Int_t nEpoch, Option_t* option = "text", Double_t minE = 0)
virtual void TObject::UseCurrentStyle()
virtual void TObject::Warning(const char* method, const char* msgfmt) const
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0)
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0) const

Data Members

public:
static TMultiLayerPerceptron::ELearningMethod kBFGS
static TMultiLayerPerceptron::ELearningMethod kBatch
static TObject::(anonymous) TObject::kBitMask
static TObject::EStatusBits TObject::kCanDelete
static TObject::EStatusBits TObject::kCannotPick
static TMultiLayerPerceptron::ELearningMethod kFletcherReeves
static TObject::EStatusBits TObject::kHasUUID
static TObject::EStatusBits TObject::kInvalidObject
static TObject::(anonymous) TObject::kIsOnHeap
static TObject::EStatusBits TObject::kIsReferenced
static TObject::EStatusBits TObject::kMustCleanup
static TObject::EStatusBits TObject::kNoContextMenu
static TObject::(anonymous) TObject::kNotDeleted
static TObject::EStatusBits TObject::kObjInCanvas
static TObject::(anonymous) TObject::kOverwrite
static TMultiLayerPerceptron::ELearningMethod kRibierePolak
static TObject::(anonymous) TObject::kSingleKey
static TMultiLayerPerceptron::ELearningMethod kSteepestDescent
static TMultiLayerPerceptron::ELearningMethod kStochastic
static TMultiLayerPerceptron::EDataSet kTest
static TMultiLayerPerceptron::EDataSet kTraining
static TObject::(anonymous) TObject::kWriteDelete
static TObject::(anonymous) TObject::kZombie
private:
Int_t fCurrentTree ! index of the current tree in a chain
Double_t fCurrentTreeWeight ! weight of the current tree in a chain
Double_t fDelta ! Delta - used in stochastic minimisation - Default=0.
Double_t fEpsilon ! Epsilon - used in stochastic minimisation - Default=0.
Double_t fEta ! Eta - used in stochastic minimisation - Default=0.1
Double_t fEtaDecay ! EtaDecay - Eta *= EtaDecay at each epoch - Default=1.
TTreeFormula* fEventWeight ! formula representing the event weight
TObjArray fFirstLayer Collection of the input neurons; subset of fNetwork
Double_t fLastAlpha ! internal parameter used in line search
TObjArray fLastLayer Collection of the output neurons; subset of fNetwork
TMultiLayerPerceptron::ELearningMethod fLearningMethod ! The Learning Method
TTreeFormulaManager* fManager ! TTreeFormulaManager for the weight and neurons
TObjArray fNetwork Collection of all the neurons in the network
TNeuron::ENeuronType fOutType Type of output neurons
Int_t fReset ! number of epochs between two resets of the search direction to the steepest descent - Default=50
TString fStructure String containing the network structure
TObjArray fSynapses Collection of all the synapses in the network
Double_t fTau ! Tau - used in line search - Default=3.
TEventList* fTest ! EventList defining the events in the test dataset
Bool_t fTestOwner ! internal flag whether one has to delete fTest or not
TEventList* fTraining ! EventList defining the events in the training dataset
Bool_t fTrainingOwner ! internal flag whether one has to delete fTraining or not
TNeuron::ENeuronType fType Type of hidden neurons
TString fWeight String containing the event weight
TString fextD String containing the derivative name
TString fextF String containing the function name
TTree* fData ! pointer to the tree used as data source

Class Charts

Inheritance Chart:
TObject
TMultiLayerPerceptron

Function documentation

TMultiLayerPerceptron()
TMultiLayerPerceptron(const char* layout, TTree* data = 0, const char* training = "Entry$%2==0", const char* test = "", TNeuron::ENeuronType type = TNeuron::kSigmoid, const char* extF = "", const char* extD = "")
TMultiLayerPerceptron(const char* layout, TTree* data, TEventList* training, TEventList* test, TNeuron::ENeuronType type = TNeuron::kSigmoid, const char* extF = "", const char* extD = "")
virtual ~TMultiLayerPerceptron()
void SetData(TTree* )
void SetTrainingDataSet(TEventList* train)
void SetTestDataSet(TEventList* test)
void SetTrainingDataSet(const char* train)
void SetTestDataSet(const char* test)
void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method)
void SetEventWeight(const char* )
void Train(Int_t nEpoch, Option_t* option = "text", Double_t minE = 0)
Double_t Result(Int_t event, Int_t index = 0) const
Double_t GetError(Int_t event) const
Double_t GetError(TMultiLayerPerceptron::EDataSet set) const
void ComputeDEDw() const
void Randomize() const
void SetEta(Double_t eta)
void SetEpsilon(Double_t eps)
void SetDelta(Double_t delta)
void SetEtaDecay(Double_t ed)
void SetTau(Double_t tau)
void SetReset(Int_t reset)
Double_t GetEta() const
{ return fEta; }
Double_t GetEpsilon() const
{ return fEpsilon; }
Double_t GetDelta() const
{ return fDelta; }
Double_t GetEtaDecay() const
{ return fEtaDecay; }
TMultiLayerPerceptron::ELearningMethod GetLearningMethod() const
{ return fLearningMethod; }
Double_t GetTau() const
{ return fTau; }
Int_t GetReset() const
{ return fReset; }
TString GetStructure() const
{ return fStructure; }
TNeuron::ENeuronType GetType() const
{ return fType; }
void DrawResult(Int_t index = 0, Option_t* option = "test") const
Bool_t DumpWeights(Option_t* filename = "-") const
Bool_t LoadWeights(Option_t* filename = "")
Double_t Evaluate(Int_t index, Double_t* params) const
void Export(Option_t* filename = "NNfunction", Option_t* language = "C++") const
void Draw(Option_t* option = "")
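
 A short sketch of the standalone evaluation path (illustrative, assuming a
 trained network "mlp" with two input neurons):

   #include <cstdio>
   #include "TMultiLayerPerceptron.h"

   void EvaluateSketch(TMultiLayerPerceptron &mlp)
   {
      mlp.DumpWeights("weights.txt");          // save the current weights to a text file
      mlp.LoadWeights("weights.txt");          // weights can be reloaded into a compatible network

      Double_t params[2] = {0.5, -1.2};        // one value per input neuron
      Double_t out = mlp.Evaluate(0, params);  // response of output neuron 0
      printf("network response: %g\n", out);
   }
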
void AttachData()
void BuildNetwork()
void GetEntry(Int_t ) const
 (it is a deliberate choice not to make the learning functions const, even though that would be possible)
void MLP_Stochastic(Double_t* )
void MLP_Batch(Double_t* )
Bool_t LineSearch(Double_t* , Double_t* )
void SteepestDir(Double_t* )
void ConjugateGradientsDir(Double_t* , Double_t )
void SetGammaDelta(TMatrixD& , TMatrixD& , Double_t* )
bool GetBFGSH(TMatrixD& , TMatrixD& , TMatrixD& )
void BFGSDir(TMatrixD& , Double_t* )
Double_t DerivDir(Double_t* )
Double_t GetCrossEntropyBinary() const
Double_t GetCrossEntropy() const
Double_t GetSumSquareError() const
TMultiLayerPerceptron(const TMultiLayerPerceptron& )
TMultiLayerPerceptron& operator=(const TMultiLayerPerceptron& )
void ExpandStructure()
void BuildFirstLayer(TString& )
void BuildHiddenLayers(TString& )
void BuildOneHiddenLayer(const TString& sNumNodes, Int_t& layer, Int_t& prevStart, Int_t& prevStop, Bool_t lastLayer)
void Shuffle(Int_t* , Int_t ) const
void MLP_Line(Double_t* , Double_t* , Double_t )