-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathlinearclassifiers.v
More file actions
119 lines (93 loc) · 3.44 KB
/
linearclassifiers.v
File metadata and controls
119 lines (93 loc) · 3.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
Set Implicit Arguments.
Unset Strict Implicit.
Require Import mathcomp.ssreflect.ssreflect.
From mathcomp Require Import all_ssreflect.
Require Import NArith.
Require Import List. Import ListNotations.
Require Import Extraction.
Require Import MLCert.float32 MLCert.learners MLCert.extraction_hs MLCert.monads.
(* Linear threshold classifiers: a hypothesis is a pair of an
   n-dimensional float32 weight vector and a float32 bias; prediction
   thresholds the affine score [f32_dot w a + b] at zero. *)
Section LinearThresholdClassifier.
Variable n : nat. (*the dimensionality*)
Definition A := float32_arr n. (*examples*)
Definition B := bool. (*labels*)
Definition Weights := float32_arr n.
Definition Bias := float32.
Definition Params := (Weights * Bias)%type.
Section predict.
Open Scope f32_scope.
(* predict (w, b) a = true iff the score f32_dot w a + b is strictly
   positive (operators interpreted in f32_scope). *)
Definition predict (p : Params) (a : A) : B :=
let: (w, b) := p in
f32_dot w a + b > 0.
End predict.
End LinearThresholdClassifier.
(* The Perceptron learner: prediction is linear thresholding (predict
   above); update nudges the hypothesis toward misclassified examples. *)
Module Perceptron.
Section Learner.
Variable n : nat.
Notation A := (A n).
Notation B := B.
Notation Params := (Params n).
(* Hyperparameters: alpha is the learning rate used in update. *)
Record Hypers : Type :=
mkHypers {
alpha : float32;
}.
Open Scope f32_scope.
(* Perceptron update rule: if the current hypothesis p already predicts
   the example's label, return p unchanged; otherwise shift weight i by
   alpha*label*example_i and the bias by label.
   NOTE(review): label : bool appears in float32 arithmetic here, which
   presumably relies on a bool->float32 coercion declared in
   MLCert.float32 — confirm. *)
Definition update (h:Hypers) (example_label:A*B) (p:Params) : Params :=
let: (example, label) := example_label in
let: predicted_label := predict p example in
if Bool.eqb predicted_label label then p
else let: (w, b) := p in
(f32_map2 (fun x1 x2 => x1 + (alpha h)*label*x2) w example, b+label).
(* Package predict and update as a Learner.t instance; the predictor
   ignores the hyperparameters. *)
Definition Learner : Learner.t A B Hypers Params :=
Learner.mk
(fun h => @predict n)
update.
End Learner.
End Perceptron.
Require Import Reals Fourier.
Require Import OUVerT.bigops OUVerT.dist OUVerT.chernoff OUVerT.learning.
(* Generalization bound for the Perceptron learner, instantiating the
   generic bound (main_bound) from OUVerT.learning over the finite
   hypothesis class of 32-bit-float parameters. *)
Section PerceptronGeneralization.
Variable n : nat. (*The dimensionality*)
Notation A := (float32_arr_finType n).
Notation B := bool_finType.
(* d is the example distribution: nonnegative and summing to 1 over the
   finite type A * B. *)
Variable d : A * B -> R.
Variable d_dist : big_sum (enum [finType of A * B]) d = 1.
Variable d_nonneg : forall x, 0 <= d x.
Variable m : nat. (*The number of training samples*)
Variable m_gt0 : (0 < m)%nat.
Variable epochs : nat.
Variable hypers : Perceptron.Hypers.
(*accuracy is 0-1 accuracy applied to Perceptron's prediction function*)
Notation Params := [finType of A * float32_finType].
Definition accuracy :=
@accuracy01 A _ m Params (Learner.predict (Perceptron.Learner n) hypers).
(* The hypothesis class is finite: n 32-bit weights plus a 32-bit bias
   give |Params| = 2^(n*32) * 2^32 = 2^(n*32 + 32). *)
Lemma card_Params : INR #|Params| = 2^(n*32 + 32).
Proof. by rewrite pow_add card_prod mult_INR float32_card float32_arr_card !pow_INR. Qed.
(* Side condition required by the generic bound: no hypothesis has
   expected accuracy exactly 0 or 1 under d. *)
Variable
(not_perfectly_learnable :
forall p : Params, 0 < expVal d m_gt0 accuracy p < 1).
(* The generic bound main_bound, with #|Params| rewritten to the closed
   form 2^(n*32 + 32): the quantity bounded by main is at most
   |Params| * exp(-2*eps^2*m). NOTE(review): see OUVerT.learning for the
   precise probabilistic statement main denotes — confirm there. *)
Lemma perceptron_bound eps (eps_gt0 : 0 < eps) init :
@main A B Params Perceptron.Hypers (Perceptron.Learner n)
hypers m m_gt0 epochs d eps init (fun _ => 1) <=
2^(n*32 + 32) * exp (-2%R * eps^2 * mR m).
Proof.
rewrite -card_Params.
apply: Rle_trans; first by apply: main_bound.
apply: Rle_refl.
Qed.
End PerceptronGeneralization.
(* Extraction-ready instantiation of the Perceptron training loop:
   specializes extractible_main to a list-Foldable training set and the
   trivial (ret) sampling monad, so it can be extracted to Haskell. *)
Section PerceptronExtraction.
Variable n : nat. (*The dimensionality*)
Notation A := (float32_arr n).
Notation B := bool.
Variable d : A * B -> R.
Variable m : nat. (*The number of training samples*)
Variable epochs : nat.
Notation Params := ((A * float32)%type).
Variable hypers : Perceptron.Hypers.
(* perceptron r: the extractible training pipeline at result type r. *)
Definition perceptron (r:Type) :=
@extractible_main A B Params Perceptron.Hypers
(Perceptron.Learner n) hypers epochs _ (@list_Foldable (A*B)%type) r
(fun T => ret T).
End PerceptronExtraction.
(* Extract the perceptron pipeline to Haskell, writing hs/Perceptron.hs. *)
Extraction Language Haskell.
Extraction "hs/Perceptron.hs" perceptron.