TarzaNN
TarzaNN neural network simulator
C:/Users/albertlr/projects/TarzaNN/TarzaNN/NetworkFactory.h
Go to the documentation of this file.
00001 /****************************************************************************
00002  **
00003  ** Copyright (C) 2002-2012 Laboratory for Active and Attentive Vision (LAAV), Department of Computer Science and Engineering, York University, Toronto, ON, Canada.
00004  ** All rights reserved.
00005  **
00006  ** This file is part of the TarzaNN Neural Network Simulator.
00007  **
00008  ** This file may be distributed and/or modified under the terms of the
00009  ** GNU General Public License version 2 as published by the Free Software
00010  ** Foundation and appearing in the file LICENSE.GPL included in the
00011  ** packaging of this file.
00012  **
00013  ** See http://www.tarzann.org/gpl/ for GPL licensing information.
00014  **
00015  ** Contact info@tarzann.org if any conditions of this licensing are
00016  ** not clear to you.
00017  **
00018  ** This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
00019  ** WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
00020  **
00021  ****************************************************************************/
00022 
00023 #ifndef NetworkFactory_H
00024 #define NetworkFactory_H
00025 
#include <stack>
#include <vector>

#include "Network.h"
00029 
00030 using namespace std;
00031 typedef vector<FeaturePlaneAbstract*> FPList;
00032 
00038 class NetworkFactory{
00039 public:
00040     NetworkFactory(Network* nn);
00041     ~NetworkFactory(void);
00042 
00044     void createLayer(float weight, int dirs, int scales, QString layerName, int layerType);
00045 
00047     void tmpltInputFeaturePlane( QString* fpName, QString* fileName, QString* taskFileName, float min_activation, float max_activation, bool scale, bool visible, bool isST);
00048     void tmpltInputLearningFeaturePlane( QString* fpName, QString* fileName, QString* taskFileName, float min_activation, float max_activation, bool scale, bool visible, bool isST, int levels);
00050     void tmpltInputScales( QString* fpName, QString* inputBaseName, int w, int h, int scales, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00052     void tmpltLGN( QString* fpName, QString* inputBaseName, int w, int h, int scales, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00054     void tmpltV1Edges( QString* fpName, QString* inputBaseName, int w, int h, int scales, int angles, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00056     void tmpltV2EndStopped( QString* fpName, QString* inputBaseName, int w, int h, int scales, int angles, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00058     void tmpltV2Circle( QString* fpName, QString* inputBaseName, int w, int h, int scales, int angles, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00060     void tmpltV2Plus( QString* fpName, QString* inputBaseName, int w, int h, int scales, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00062     void tmpltV2X( QString* fpName, QString* inputBaseName, int w, int h, int scales, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00063 
00064 
00066     void tmpltMultiInputFeaturePlane( QString* fpName, QString* fileName, QString* taskFileName, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00068     void tmpltV1Motion( QString* fpName, QString* inputBaseName, int w, int h, int scales, int angles, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00070     void tmpltMT_T( QString* fpName, QString* inputBaseName, int w, int h, int scales, int angles, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00072     void tmpltMT_R( QString* fpName, QString* inputBaseName, int w, int h, int scales, int angles, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00074     void tmpltMST_T( QString* fpName, QString* inputBaseName, int w, int h, int scales, int angles, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00076     void tmpltMST_R( QString* fpName, QString* inputBaseName, int w, int h, int scales, int angles, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA);
00077 
00079     void tmpltSTSOMLayer( QString* fpName, QString* inputBaseName, int w, int h, int scales, int angles, int neuronType, float nParam1, float nParam2, float wta_theta, float min_activation, float max_activation, bool scale, bool visible, bool isWTA, bool learning);
00080 
00081 private:
00082     std::stack<FPList*> layers;
00083     int layerCount;
00084     Network* net;
00085 
00086     int getAngleDegrees(int angle, int angles){
00087         return (int)(angle*180/angles);
00088     }
00089 };
00090 
00091 #endif //NetworkFactory_H
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Defines