forked from btgraham/SparseConvNet-archived
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathSparseConvNet.h
More file actions
58 lines (53 loc) · 3.46 KB
/
SparseConvNet.h
File metadata and controls
58 lines (53 loc) · 3.46 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
//Ben Graham, University of Warwick, 2015, b.graham@warwick.ac.uk
//SparseConvNet is free software: you can redistribute it and/or modify
//it under the terms of the GNU General Public License as published by
//the Free Software Foundation, either version 3 of the License, or
//(at your option) any later version.
//SparseConvNet is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//GNU General Public License for more details.
#pragma once
#include "SpatiallySparseDataset.h"
#include <memory>
#include <string>
#include <fstream>
class SparseConvNetCUDA;
// Public host-side handle for a spatially-sparse convolutional network.
// Pimpl idiom: all state lives in the forward-declared SparseConvNetCUDA,
// owned through a unique_ptr, so this header stays free of CUDA details.
// (Destructor is declared here and must be defined in the .cpp, where
// SparseConvNetCUDA is complete, for unique_ptr's deleter to instantiate.)
class SparseConvNet {
private:
// Owned implementation object (presumably the CUDA backend — definitions
// are in the corresponding .cpp, not visible here).
std::unique_ptr<SparseConvNetCUDA> cnn;
public:
// dimension: spatial dimensionality of the input (e.g. 2 for images);
// nInputFeatures/nClasses: input channels / output classes;
// nTop: presumably top-k accuracy reporting — confirm in the .cpp.
SparseConvNet(int dimension, int nInputFeatures, int nClasses, int nTop=1);
~SparseConvNet();
// Convolution + max-pooling layer (MP). filterStride/poolStride control the
// strides; minActiveInputs gates sparse sites — semantics defined in the .cpp.
void addLeNetLayerMP(int nFeatures, int filterSize, int filterStride, int poolSize, int poolStride, ActivationFunction activationFn=RELU, float dropout=0.0f, int minActiveInputs=1);
// Convolution + fractional max-pooling, pseudo-random overlapping variant
// (POFMP); fmpShrink is the fractional pooling shrink factor.
void addLeNetLayerPOFMP(int nFeatures, int filterSize, int filterStride, int poolSize, float fmpShrink, ActivationFunction activationFn=RELU, float dropout=0.0f, int minActiveInputs=1);
// Convolution + fractional max-pooling, random overlapping variant (ROFMP).
void addLeNetLayerROFMP(int nFeatures, int filterSize, int filterStride, int poolSize, float fmpShrink, ActivationFunction activationFn=RELU, float dropout=0.0f, int minActiveInputs=1);
// Pools whatever spatial extent remains down to a single site.
void addTerminalPoolingLayer(int poolSize);
void addSoftmaxLayer();
// Layer used for the index-learning (unsupervised pretraining) mode; pairs
// with processIndexLearnerDataset below.
void addIndexLearnerLayer();
// One pass over dataset; learningRate=0 presumably means evaluate-only —
// confirm against the .cpp. Returns a float (likely error/loss metric).
float processDataset(SpatiallySparseDataset &dataset, int batchSize=100, float learningRate=0, float momentum=0.99);
// Test-time evaluation with nReps repetitions per sample; optionally writes
// predictions and a confusion matrix to the named files.
void processDatasetRepeatTest(SpatiallySparseDataset &dataset, int batchSize=100, int nReps=12, std::string predictionsFilename="",std::string header="",std::string confusionMatrixFilename="");
float processIndexLearnerDataset(SpatiallySparseDataset &dataset, int batchSize=100, float learningRate=0.0,float momentum=0.99);
// Dumps top-level feature vectors for the dataset (feature extraction).
void processDatasetDumpTopLevelFeatures(SpatiallySparseDataset &dataset, int batchSize, int reps=1);
// firstNlayers defaults to a large sentinel, i.e. load all layers.
void loadWeights(std::string baseName, int epoch, int firstNlayers=1000000);
void saveWeights(std::string baseName, int epoch);
// NOTE(review): takes the dataset by value (a copy) — intentional or not,
// changing it here would break the out-of-line definition, so left as-is.
void calculateInputRegularizingConstants(SpatiallySparseDataset dataset);
};
// Triangular-lattice variant of SparseConvNet with the same pimpl structure
// and a subset of its layer types (MP pooling only; no FMP variants).
class SparseConvTriangLeNet {
private:
// FIX: was std::auto_ptr, which is deprecated since C++11 and removed in
// C++17; use std::unique_ptr to match the sibling SparseConvNet class.
// Safe here: the destructor is declared and defined out-of-line where
// SparseConvNetCUDA is a complete type.
std::unique_ptr<SparseConvNetCUDA> cnn;
public:
// dimension: spatial dimensionality; nInputFeatures/nClasses: input
// channels / output classes; nTop: presumably top-k reporting — confirm
// against the .cpp definition.
SparseConvTriangLeNet(int dimension, int nInputFeatures, int nClasses, int nTop=1);
~SparseConvTriangLeNet();
// Convolution + max-pooling layer; minActiveInputs gates sparse sites.
void addLeNetLayerMP(int nFeatures, int filterSize, int filterStride, int poolSize, int poolStride, ActivationFunction activationFn=RELU, float dropout=0.0f, int minActiveInputs=1);
// Pools the remaining spatial extent down to a single site.
void addTerminalPoolingLayer(int poolSize);
void addSoftmaxLayer();
// Layer for index-learning mode; pairs with processIndexLearnerDataset.
void addIndexLearnerLayer();
// One pass over dataset; learningRate=0 presumably means evaluate-only.
float processDataset(SpatiallySparseDataset &dataset, int batchSize=100, float learningRate=0, float momentum=0.99);
// Test-time evaluation with nReps repetitions; optionally writes
// predictions and a confusion matrix to the named files.
void processDatasetRepeatTest(SpatiallySparseDataset &dataset, int batchSize=100, int nReps=12, std::string predictionsFilename="",std::string header="",std::string confusionMatrixFilename="");
float processIndexLearnerDataset(SpatiallySparseDataset &dataset, int batchSize=100, float learningRate=0, float momentum=0.99);
// Dumps top-level feature vectors for the dataset (feature extraction).
void processDatasetDumpTopLevelFeatures(SpatiallySparseDataset &dataset, int batchSize, int reps=1);
// firstNlayers defaults to a large sentinel, i.e. load all layers.
void loadWeights(std::string baseName, int epoch, int firstNlayers=1000000);
void saveWeights(std::string baseName, int epoch);
// NOTE(review): takes the dataset by value — kept as-is to match the
// out-of-line definition in the .cpp.
void calculateInputRegularizingConstants(SpatiallySparseDataset dataset);
};