// SparseConvNet.cu  (forked from btgraham/SparseConvNet-archived)
// Forwarding implementations for the public wrapper classes SparseConvNet and
// SparseConvTriangLeNet; all real work is delegated to SparseConvNetCUDA.
#include "SparseConvNet.h"
#include "SparseConvNetCUDA.h"
#include "utilities.h"
SparseConvNet::SparseConvNet(int dimension, int nInputFeatures, int nClasses, int nTop)
: cnn(new SparseConvNetCUDA(dimension, nInputFeatures, nClasses, nTop)) {
}
SparseConvNet::~SparseConvNet(){
}
void SparseConvNet::addLeNetLayerMP(int nFeatures, int filterSize, int filterStride, int poolSize, int poolStride, ActivationFunction activationFn, float dropout, int minActiveInputs){
cnn->addLeNetLayerMP(nFeatures, filterSize, filterStride, poolSize, poolStride, activationFn, dropout, minActiveInputs);
}
void SparseConvNet::addLeNetLayerPOFMP(int nFeatures, int filterSize, int filterStride, int poolSize, float fmpShrink, ActivationFunction activationFn, float dropout, int minActiveInputs){
cnn->addLeNetLayerPOFMP(nFeatures, filterSize, filterStride, poolSize, fmpShrink, activationFn, dropout, minActiveInputs);
}
void SparseConvNet::addLeNetLayerROFMP(int nFeatures, int filterSize, int filterStride, int poolSize, float fmpShrink, ActivationFunction activationFn, float dropout, int minActiveInputs){
cnn->addLeNetLayerROFMP(nFeatures, filterSize, filterStride, poolSize, fmpShrink, activationFn, dropout, minActiveInputs);
}
void SparseConvNet::addTerminalPoolingLayer(int poolSize){
cnn->addTerminalPoolingLayer(poolSize, ipow(poolSize,cnn->dimension));
}
void SparseConvNet::addSoftmaxLayer(){
cnn->addSoftmaxLayer();
}
void SparseConvNet::addIndexLearnerLayer(){
cnn->addIndexLearnerLayer();
}
float SparseConvNet::processDataset(SpatiallySparseDataset &dataset, int batchSize, float learningRate, float momentum) {
return cnn->processDataset(dataset,batchSize,learningRate,momentum);
}
void SparseConvNet::processDatasetRepeatTest(SpatiallySparseDataset &dataset, int batchSize, int nReps, std::string predictionsFilename,std::string header,std::string confusionMatrixFilename){
cnn->processDatasetRepeatTest(dataset,batchSize,nReps,predictionsFilename,header,confusionMatrixFilename);
}
float SparseConvNet::processIndexLearnerDataset(SpatiallySparseDataset &dataset, int batchSize, float learningRate, float momentum){
return cnn->processIndexLearnerDataset(dataset,batchSize,learningRate,momentum);
}
void SparseConvNet::processDatasetDumpTopLevelFeatures(SpatiallySparseDataset &dataset, int batchSize, int reps){
cnn->processDatasetDumpTopLevelFeatures(dataset,batchSize,reps);
}
void SparseConvNet::loadWeights(std::string baseName, int epoch, int firstNlayers){
cnn->loadWeights(baseName,epoch,firstNlayers);
}
void SparseConvNet::saveWeights(std::string baseName, int epoch){
cnn->saveWeights(baseName,epoch);
}
void SparseConvNet::calculateInputRegularizingConstants(SpatiallySparseDataset dataset){
cnn->calculateInputRegularizingConstants(dataset);
}
SparseConvTriangLeNet::SparseConvTriangLeNet(int dimension, int nInputFeatures, int nClasses, int nTop)
: cnn(new SparseConvNetCUDA(dimension, nInputFeatures, nClasses, nTop)) {
}
SparseConvTriangLeNet::~SparseConvTriangLeNet(){
}
void SparseConvTriangLeNet::addLeNetLayerMP(int nFeatures, int filterSize, int filterStride, int poolSize, int poolStride, ActivationFunction activationFn, float dropout, int minActiveInputs){
cnn->addTriangularLeNetLayerMP(nFeatures, filterSize, filterStride, poolSize, poolStride, activationFn, dropout, minActiveInputs);
}
void SparseConvTriangLeNet::addTerminalPoolingLayer(int poolSize) {
cnn->addTerminalPoolingLayer(poolSize, triangleSize(poolSize,cnn->dimension));
}
void SparseConvTriangLeNet::addSoftmaxLayer(){
cnn->addSoftmaxLayer();
}
void SparseConvTriangLeNet::addIndexLearnerLayer(){
cnn->addIndexLearnerLayer();
}
float SparseConvTriangLeNet::processDataset(SpatiallySparseDataset &dataset, int batchSize, float learningRate, float momentum){
return cnn->processDataset(dataset,batchSize,learningRate, momentum);
}
void SparseConvTriangLeNet::processDatasetRepeatTest(SpatiallySparseDataset &dataset, int batchSize, int nReps, std::string predictionsFilename,std::string header,std::string confusionMatrixFilename){
cnn->processDatasetRepeatTest(dataset,batchSize,nReps,predictionsFilename,header,confusionMatrixFilename);
}
float SparseConvTriangLeNet::processIndexLearnerDataset(SpatiallySparseDataset &dataset, int batchSize, float learningRate,float momentum){
return cnn->processIndexLearnerDataset(dataset,batchSize,learningRate,momentum);
}
void SparseConvTriangLeNet::processDatasetDumpTopLevelFeatures(SpatiallySparseDataset &dataset, int batchSize, int reps){
cnn->processDatasetDumpTopLevelFeatures(dataset,batchSize,reps);
}
void SparseConvTriangLeNet::loadWeights(std::string baseName, int epoch, int firstNlayers){
cnn->loadWeights(baseName,epoch,firstNlayers);
}
void SparseConvTriangLeNet::saveWeights(std::string baseName, int epoch){
cnn->saveWeights(baseName,epoch);
}
void SparseConvTriangLeNet::calculateInputRegularizingConstants(SpatiallySparseDataset dataset){
cnn->calculateInputRegularizingConstants(dataset);
}