diff --git a/synthesis/bin/Main.ml b/synthesis/bin/Main.ml
index b274a34c..45c5f3f1 100644
--- a/synthesis/bin/Main.ml
+++ b/synthesis/bin/Main.ml
@@ -527,6 +527,202 @@ let onf_real : Command.t =
ONFReal.spec
ONFReal.run
+module ToOBT = struct
+ let spec = Command.Spec.(
+ empty
+ +> flag "-DEBUG" no_arg ~doc:"print debugging statements"
+ +> flag "--thrift" no_arg ~doc:"parse & write bmv2/thrift commands"
+ +> flag "-i" no_arg ~doc:"interactive mode"
+ +> flag "-data" (required string) ~doc:"the input log"
+ +> flag "-p" no_arg ~doc:"show_result_at_end"
+ +> anon ("p4file" %: string)
+ +> anon ("log-edits" %: string)
+ +> anon ("phys-edits" %: string)
+ +> anon ("fvs" %: string)
+ +> anon ("assume" %: string)
+ +> flag "-I" (listed string) ~doc:"
add directory to include search path for file"
+ ++ opt_flags)
+
+let run debug thrift_mode interactive data print
+ p4file log_edits phys_edits fvs assume inc
+ widening
+ do_slice
+ semantic_slicing
+ edits_depth
+ search_width
+ monotonic
+ injection
+ fastcx
+ vcache
+ ecache
+ shortening
+ above
+ minimize
+ hints
+ only_holes
+ allow_annotations
+ nlp
+ unique_edits
+ domain
+ restrict_mask
+ no_defaults
+ no_deletes
+ use_all_cexs
+ reach_restrict
+ reach_filter
+ hot_start
+ () =
+ let res = Benchmark.to_obt
+ Parameters.({
+ widening;
+ do_slice;
+ semantic_slicing;
+ edits_depth;
+ search_width;
+ debug;
+ thrift_mode;
+ monotonic;
+ interactive;
+ injection;
+ fastcx;
+ vcache;
+ ecache;
+ shortening;
+ above;
+ minimize;
+ hints = Option.is_some hints;
+ hint_type = Option.value hints ~default:"exact";
+ only_holes;
+ allow_annotations;
+ nlp;
+ unique_edits;
+ domain;
+ restrict_mask;
+ no_defaults;
+ no_deletes;
+ use_all_cexs;
+ reach_restrict;
+ reach_filter;
+ timeout = None;
+ hot_start})
+ data p4file log_edits phys_edits fvs assume inc
+ in
+ match res with
+ | None -> Core.Printf.printf "no example could be found\n"
+ | Some r when print ->
+ Core.Printf.printf "Edits\n";
+ List.iter r ~f:(fun edit ->
+ Tables.Edit.to_string edit
+ |> Core.Printf.printf "%s\n%!"
+ )
+ | Some _ -> ()
+
+end
+
+let to_obt : Command.t =
+ Command.basic_spec
+    ~summary: "Run the to-obt benchmark: map rules from the real p4 program onto one big table"
+ ToOBT.spec
+ ToOBT.run
+
+module FromOBT = struct
+ let spec = Command.Spec.(
+ empty
+ +> flag "-DEBUG" no_arg ~doc:"print debugging statements"
+ +> flag "--thrift" no_arg ~doc:"parse & write bmv2/thrift commands"
+ +> flag "-i" no_arg ~doc:"interactive mode"
+ +> flag "-data" (required string) ~doc:"the input log"
+ +> flag "-p" no_arg ~doc:"show_result_at_end"
+ +> anon ("p4file" %: string)
+ +> anon ("log-edits" %: string)
+ +> anon ("phys-edits" %: string)
+ +> anon ("fvs" %: string)
+ +> anon ("assume" %: string)
+ +> flag "-I" (listed string) ~doc:" add directory to include search path for file"
+ ++ opt_flags)
+
+let run debug thrift_mode interactive data print
+ p4file log_edits phys_edits fvs assume inc
+ widening
+ do_slice
+ semantic_slicing
+ edits_depth
+ search_width
+ monotonic
+ injection
+ fastcx
+ vcache
+ ecache
+ shortening
+ above
+ minimize
+ hints
+ only_holes
+ allow_annotations
+ nlp
+ unique_edits
+ domain
+ restrict_mask
+ no_defaults
+ no_deletes
+ use_all_cexs
+ reach_restrict
+ reach_filter
+ hot_start
+ () =
+ let res = Benchmark.from_obt
+ Parameters.({
+ widening;
+ do_slice;
+ semantic_slicing;
+ edits_depth;
+ search_width;
+ debug;
+ thrift_mode;
+ monotonic;
+ interactive;
+ injection;
+ fastcx;
+ vcache;
+ ecache;
+ shortening;
+ above;
+ minimize;
+ hints = Option.is_some hints;
+ hint_type = Option.value hints ~default:"exact";
+ only_holes;
+ allow_annotations;
+ nlp;
+ unique_edits;
+ domain;
+ restrict_mask;
+ no_defaults;
+ no_deletes;
+ use_all_cexs;
+ reach_restrict;
+ reach_filter;
+ timeout = None;
+ hot_start})
+ data p4file log_edits phys_edits fvs assume inc
+ in
+ match res with
+ | None -> Core.Printf.printf "no example could be found\n"
+ | Some r when print ->
+ Core.Printf.printf "Edits\n";
+ List.iter r ~f:(fun edit ->
+ Tables.Edit.to_string edit
+ |> Core.Printf.printf "%s\n%!"
+ )
+ | Some _ -> ()
+
+end
+
+let from_obt : Command.t =
+ Command.basic_spec
+    ~summary: "Run the from-obt benchmark: map one-big-table rules onto the real p4 program"
+ FromOBT.spec
+ FromOBT.run
+
module Equality = struct
let spec = Command.Spec.(
empty
@@ -1417,6 +1613,8 @@ let main : Command.t =
; ("onf-real", onf_real)
; ("obt", obt)
; ("obt-real", obt_real)
+ ; ("to-obt", to_obt)
+ ; ("from-obt", from_obt)
; ("eq", equality)
; ("eq-real", equality_real)
; ("wp", wp_cmd)]
diff --git a/synthesis/lib/Benchmark.ml b/synthesis/lib/Benchmark.ml
index 45efc4c6..825b8a66 100644
--- a/synthesis/lib/Benchmark.ml
+++ b/synthesis/lib/Benchmark.ml
@@ -225,6 +225,63 @@ let rec basic_onf_ipv4_real params data_file log_p4 phys_p4 log_edits_file phys_
assert (implements params (ProfData.zero ()) (problem ()) = `Yes);
measure params None problem (onos_to_edits var_mapping data_file "routing_v6" "hdr.ipv6.dst_addr")
+and to_obt params data_file p4_file log_edits_file phys_edits_file fvs_file _ inc =
+ let var_mapping = parse_fvs fvs_file in
+ let fvs = List.map var_mapping ~f:snd in
+ let assume = Skip (* parse_file assume_file *) in
+
+ (* let print_fvs = printf "fvs = %s" (Sexp.to_string ([%sexp_of: (string * int) list] fvs)) in *)
+
+ let log = (assume %:% Encode.encode_from_p4 inc p4_file false)
+ |> Encode.unify_names var_mapping |> zero_init fvs |> drop_handle fvs in
+
+ let phys = OneBigTable.mk_one_big_table (ConstantProp.propogate log) |> zero_init fvs |> drop_handle fvs in
+ (* |> CompilerOpts.optimize fvs *)
+ (* let maxN n = Bigint.(of_int_exn n ** of_int_exn 2 - one) in *)
+ (* let fvs = parse_fvs fvs in *)
+ let log_edits = Runtime.parse_whippersnapper log log_edits_file in
+ let phys_edits = Runtime.parse phys phys_edits_file in
+
+ let problem =
+ Problem.make
+ ~log ~phys ~fvs
+ ~log_inst:Instance.(update_list params empty log_edits)
+ ~phys_inst:Instance.(update_list params empty phys_edits)
+ ~log_edits:[]
+ in
+ (* Core.Printf.printf "\n\n------------\n%s\n----------\n" (Problem.to_string params problem); *)
+ assert (implements params (ProfData.zero ()) (problem ()) = `Yes);
+ measure params None problem (List.map (Runtime.parse_whippersnapper log data_file) ~f:(fun r -> [r]))
+
+and from_obt params data_file p4_file log_edits_file phys_edits_file fvs_file _ inc =
+ let var_mapping = parse_fvs fvs_file in
+ let fvs = List.map var_mapping ~f:snd in
+ let assume = Skip (* parse_file assume_file *) in
+
+ (* let print_fvs = printf "fvs = %s" (Sexp.to_string ([%sexp_of: (string * int) list] fvs)) in *)
+
+ let phys = (assume %:% Encode.encode_from_p4 inc p4_file false)
+ |> Encode.unify_names var_mapping |> zero_init fvs |> drop_handle fvs in
+
+ let log = OneBigTable.mk_one_big_table (ConstantProp.propogate phys) |> zero_init fvs |> drop_handle fvs in
+ (* |> CompilerOpts.optimize fvs *)
+ (* let maxN n = Bigint.(of_int_exn n ** of_int_exn 2 - one) in *)
+ (* let fvs = parse_fvs fvs in *)
+ let log_edits = Runtime.parse_whippersnapper log log_edits_file in
+ let phys_edits = Runtime.parse phys phys_edits_file in
+
+ let problem =
+ Problem.make
+ ~log ~phys ~fvs
+ ~log_inst:Instance.(update_list params empty log_edits)
+ ~phys_inst:Instance.(update_list params empty phys_edits)
+ ~log_edits:[]
+ in
+ (* Core.Printf.printf "\n\n------------\n%s\n----------\n" (Problem.to_string params problem); *)
+ assert (implements params (ProfData.zero ()) (problem ()) = `Yes);
+ measure params None problem (List.map (Runtime.parse log data_file) ~f:(fun r -> [r]))
+
+
and zero_init fvs cmd =
let fvs = StringSet.of_list @@ fsts @@ fvs in
let vs = free_of_cmd `Var cmd
diff --git a/synthesis/lib/FastCX.ml b/synthesis/lib/FastCX.ml
index f00cdd27..1425048d 100644
--- a/synthesis/lib/FastCX.ml
+++ b/synthesis/lib/FastCX.ml
@@ -41,8 +41,8 @@ let is_reachable encode_tag params problem fvs in_pkt tbl_name keys =
passive_hoare_triple ~fvs
(Packet.to_test in_pkt ~fvs)
trunc
- (Hole.match_holes_table encode_tag tbl_name keys
- %&% Instance.negate_rows phys_inst tbl_name)
+ (Hole.match_holes_table encode_tag tbl_name keys)
+ (* %&% Instance.negate_rows phys_inst tbl_name) *)
let hits_pred params (_: ProfData.t ref) prog inst edits e : test =
diff --git a/synthesis/lib/Manip.ml b/synthesis/lib/Manip.ml
index 0135990b..8de9d219 100644
--- a/synthesis/lib/Manip.ml
+++ b/synthesis/lib/Manip.ml
@@ -138,6 +138,376 @@ let rec wp negs c phi =
* wp ~no_negations (holify (List.map scope ~f:fst) a) phi) ~c:(mkAnd) ~init:(Some True)
* %&% wp ~no_negations dflt phi *)
+let freshen v sz i = (v ^ "$" ^ string_of_int i, sz)
+
+let rec indexVars_expr e (sub : ((int * int) StringMap.t)) =
+ let binop f (e1,e2) =
+ let (e1', sub1) = indexVars_expr e1 sub in
+ let (e2', sub2) = indexVars_expr e2 sub in
+ f e1' e2', StringMap.merge sub1 sub2
+ ~f:(fun ~key -> function
+ | `Both (i1, i2) -> if i1 = i2
+ then Some i1
+ else failwith @@ Printf.sprintf "collision on %s: (%d,%d) <> (%d,%d)"
+ key (fst i1) (snd i1) (fst i2) (snd i2)
+ | `Left i | `Right i -> Some i
+ )
+ in
+ match e with
+ | Value _ -> (e, sub)
+ | Var (x,sz) ->
+ begin match StringMap.find sub x with
+ | None -> (Var (freshen x sz 0), StringMap.set sub ~key:x ~data:(0,sz))
+ | Some (i,_) -> (Var (freshen x sz i), sub)
+ end
+ | Hole (x, sz) ->
+ begin match StringMap.find sub x with
+ | None -> (Hole (x,sz), sub) (*"couldn't find "^x^" in substitution map " |> failwith*)
+ | Some (i,_) -> (Hole (freshen x sz i), sub)
+ end
+ | Cast (i,e) ->
+ let e', sub' = indexVars_expr e sub in
+ mkCast i e', sub'
+ | Slice {hi;lo;bits} ->
+ let e', sub' = indexVars_expr bits sub in
+ mkSlice hi lo e', sub'
+ | Plus es | Minus es | Times es | Mask es | Xor es | BOr es | Shl es | Concat es | SatPlus es | SatMinus es
+ -> binop (ctor_for_binexpr e) es
+
+let rec indexVars b sub =
+ let binop_t op a b = op (indexVars a sub) (indexVars b sub) in
+ let binop_e f (e1,e2) =
+ let (e1',_) = indexVars_expr e1 sub in
+ let (e2',_) = indexVars_expr e2 sub in
+ f e1' e2'
+ in
+ match b with
+ | True | False -> b
+ | Neg b -> !%(indexVars b sub)
+ | And (a,b) -> binop_t (%&%) a b
+ | Or (a,b) -> binop_t (%+%) a b
+ | Impl (a,b) -> binop_t (%=>%) a b
+ | Iff (a,b) -> binop_t (%<=>%) a b
+ | Eq es -> binop_e (%=%) es
+ | Le es -> binop_e (%<=%) es
+
+
+
+
+let rec passify_aux sub c : ((int * int) StringMap.t * cmd) =
+ match c with
+ | Skip -> (sub, Skip)
+ | Assume b ->
+ (sub, Assume (indexVars b sub))
+ | Assign (f,e) ->
+ begin match StringMap.find sub f with
+ | None ->
+ let sz = size_of_expr e in
+ (StringMap.set sub ~key:f ~data:(1, sz)
+ , Assume (Var (freshen f sz 1) %=% fst(indexVars_expr e sub)))
+ | Some (idx, sz) ->
+ (StringMap.set sub ~key:f ~data:(idx + 1,sz)
+ , Assume (Var (freshen f sz (idx + 1)) %=% fst(indexVars_expr e sub)))
+ end
+ | Seq (c1, c2) ->
+ let (sub1, c1') = passify_aux sub c1 in
+ let (sub2, c2') = passify_aux sub1 c2 in
+ (sub2, c1' %:% c2')
+ | Select (Total, _) -> failwith "Don't know what to do for if total"
+ | Select (typ, ss) ->
+ let sub_lst = List.map ss ~f:(fun (t,c) ->
+ let sub', c' = passify_aux sub c in
+ (sub', (indexVars t sub, c'))) in
+ let merged_subst =
+ List.fold sub_lst ~init:StringMap.empty
+ ~f:(fun acc (sub', _) ->
+ StringMap.merge acc sub'
+ ~f:(fun ~key:_ ->
+ function
+ | `Left i -> Some i
+ | `Right i -> Some i
+ | `Both ((i,sz),(j,_)) -> Some (max i j, sz)))
+ in
+ let rewriting sub =
+ StringMap.fold sub ~init:Skip
+ ~f:(fun ~key:v ~data:(idx,_) acc ->
+ let merged_idx,sz = StringMap.find_exn merged_subst v in
+ if merged_idx > idx then
+ Assume (Var(freshen v sz merged_idx)
+ %=% Var(freshen v sz idx))
+ %:% acc
+ else acc
+ )
+ in
+ let ss' =
+ List.filter_map sub_lst ~f:(fun (sub', (t', c')) ->
+ let rc = rewriting sub' in
+ Some (t', c' %:% rc)
+ )
+ in
+ (merged_subst, mkSelect typ ss')
+
+ | Apply _ ->
+ failwith ("Cannot passify (yet) table applications "^(string_of_cmd c ~depth:0))
+
+let passify fvs c =
+ let init_sub =
+ List.fold fvs ~init:StringMap.empty ~f:(fun sub (v,sz) ->
+ StringMap.set sub ~key:v ~data:(0,sz)
+ )
+ in
+ (* Printf.printf "active : \n %s \n" (string_of_cmd c); *)
+ passify_aux init_sub c
+
+
+let rec good_wp c =
+ match c with
+ | Skip -> True
+ | Assume b -> b
+ | Seq (c1,c2) -> good_wp c1 %&% good_wp c2
+ | Select(Total, _) -> failwith "Totality eludes me"
+ | Select(Partial, ss) ->
+ List.fold ss ~init:False
+ ~f:(fun cond (t,c) -> cond %+% (t %&% good_wp (c)))
+ | Select(Ordered, ss) ->
+ List.fold ss ~init:(False,False)
+ ~f:(fun (cond, misses) (t,c) ->
+ (cond %+% (
+ t %&% !%(misses) %&% good_wp c
+ )
+ , t %+% misses))
+ |> fst
+ | Assign _ -> failwith "ERROR: PROGRAM NOT IN PASSIVE FORM! Assignments should have been removed"
+ | Apply _ -> failwith "Tables should be applied at this stage"
+
+let rec bad_wp c =
+ match c with
+ | Skip -> False
+ | Assume _ -> False
+ | Seq(c1,c2) -> bad_wp c1 %+% (good_wp c1 %&% bad_wp c2)
+ | Select (Total, _) -> failwith "totality eludes me "
+ | Select (Partial, ss) ->
+    List.fold ss ~init:(False)
+ ~f:(fun acc (t,c) -> acc %+% (t %&% bad_wp (c)))
+ | Select(Ordered, ss) ->
+ List.fold ss ~init:(False,False)
+ ~f:(fun (cond, misses) (t,c) ->
+ (cond %+% (
+ t %&% !%(misses) %&% bad_wp c
+ )
+ , t %+% misses))
+ |> fst
+ | Assign _ -> failwith "ERROR: PROGRAM NOT IN PASSIVE FORM! Assignments should have been removed"
+ | Apply _ -> failwith "Tables should be applied at this stage"
+
+
+let good_execs fvs c =
+ let merged_sub, passive_c = passify fvs c in
+ (* Printf.printf "passive : \n %s\n" (string_of_cmd passive_c); *)
+ (* let vc = good_wp passive_c in *)
+ (* Printf.printf "good_executions:\n %s\n%!" (string_of_test vc); *)
+ (merged_sub, passive_c, good_wp passive_c, bad_wp passive_c)
+
+
+let inits fvs sub =
+ StringMap.fold sub ~init:[]
+ ~f:(fun ~key:v ~data:(_,sz) vs ->
+ if List.exists fvs ~f:(fun (x,_) -> x = v)
+ then (freshen v sz 0) :: vs
+ else vs)
+ |> List.dedup_and_sort ~compare:(fun (u,_) (v,_) -> Stdlib.compare u v)
+
+let rec apply_init_expr (e : expr) =
+ let unop op e = op @@ apply_init_expr e in
+ let binop op (e,e') = op (apply_init_expr e) (apply_init_expr e') in
+ match e with
+ | Value _ | Hole _ -> e
+ | Var (v, sz) -> Var (freshen v sz 0)
+ | Cast (sz, e) -> unop (mkCast sz) e
+ | Slice {hi;lo;bits} -> unop (mkSlice hi lo) bits
+ | Plus es
+ | Times es
+ | Minus es
+ | Mask es
+ | Xor es
+ | BOr es
+ | Shl es
+ | Concat es
+ | SatPlus es
+ | SatMinus es -> binop (ctor_for_binexpr e) es
+
+let rec apply_init_test t =
+ let ebinop op (e,e') = op (apply_init_expr e) (apply_init_expr e') in
+ let tbinop op (t,t') = op (apply_init_test t) (apply_init_test t') in
+ let tunop op t = op @@ apply_init_test t in
+ match t with
+ | True | False -> t
+ | Eq es -> ebinop mkEq es
+ | Le es -> ebinop mkLe es
+ | And ts -> tbinop mkAnd ts
+  | Or ts -> tbinop (%+%) ts
+  | Impl ts -> tbinop (%=>%) ts
+  | Iff ts -> tbinop (%<=>%) ts
+ | Neg t -> tunop mkNeg t
+
+let finals fvs sub =
+ StringMap.fold sub ~init:[]
+ ~f:(fun ~key:v ~data:(i,sz) vs ->
+ if List.exists fvs ~f:(fun (x,_) -> x = v)
+ then (freshen v sz i) :: vs
+ else vs)
+ |> List.sort ~compare:(fun (u,_) (v,_) -> Stdlib.compare u v)
+
+let apply_finals_sub_packet pkt sub =
+ StringMap.fold pkt ~init:StringMap.empty
+ ~f:(fun ~key ~data acc ->
+ let key =
+ match StringMap.find sub key with
+ | Some (i,_) -> fst (freshen key (size_of_value data) i)
+ | None -> key
+ in
+ let data = Value data in
+ StringMap.set acc ~key ~data
+ )
+
+
+let rec apply_finals_sub_expr e sub =
+ let unop op e = op @@ apply_finals_sub_expr e sub in
+ let binop op (e,e') = op (apply_finals_sub_expr e sub) (apply_finals_sub_expr e' sub) in
+ match e with
+ | Value _ | Hole _ -> e
+ | Var (v, sz) ->
+ begin match StringMap.find sub v with
+ | Some (i,_) -> Var (freshen v sz i)
+ | None -> Var (v,sz)
+ end
+ | Cast (sz, e) -> unop (mkCast sz) e
+ | Slice {hi;lo;bits} -> unop (mkSlice hi lo) bits
+ | Plus es
+ | Times es
+ | Minus es
+ | Mask es
+ | Xor es
+ | BOr es
+ | Shl es
+ | Concat es
+ | SatPlus es
+ | SatMinus es -> binop (ctor_for_binexpr e) es
+
+
+let rec apply_finals_sub_test t sub =
+ let ebinop op (e,e') = op (apply_finals_sub_expr e sub) (apply_finals_sub_expr e' sub) in
+ let tbinop op (t,t') = op (apply_finals_sub_test t sub) (apply_finals_sub_test t' sub) in
+ let tunop op t = op @@ apply_finals_sub_test t sub in
+ match t with
+ | True | False -> t
+ | Eq es -> ebinop mkEq es
+ | Le es -> ebinop mkLe es
+ | And ts -> tbinop mkAnd ts
+  | Or ts -> tbinop (%+%) ts
+  | Impl ts -> tbinop (%=>%) ts
+  | Iff ts -> tbinop (%<=>%) ts
+ | Neg t -> tunop mkNeg t
+
+let zip_eq_exn xs ys =
+ List.fold2_exn xs ys ~init:True ~f:(fun acc x y -> acc %&% (Var x %=% Var y) )
+
+let prepend_str = Printf.sprintf "%s%s"
+
+let rec prepend_expr pfx e =
+ let binop op (e,e') = op (prepend_expr pfx e) (prepend_expr pfx e') in
+ match e with
+ | Value _ -> e
+ | Var (v,sz) -> Var(prepend_str pfx v, sz)
+ | Hole(v, sz) -> Var(prepend_str pfx v, sz)
+ | Cast (i,e) -> mkCast i @@ prepend_expr pfx e
+ | Slice {hi;lo;bits} -> mkSlice hi lo @@ prepend_expr pfx bits
+ | Plus es | Minus es | Times es | Mask es | Xor es | BOr es | Shl es | Concat es | SatPlus es | SatMinus es
+ -> binop (ctor_for_binexpr e) es
+
+let rec prepend_test pfx b =
+ match b with
+ | True | False -> b
+ | Neg b -> !%(prepend_test pfx b)
+ | Eq(e1,e2) -> prepend_expr pfx e1 %=% prepend_expr pfx e2
+ | Le(e1,e2) -> prepend_expr pfx e1 %<=% prepend_expr pfx e2
+ | And(b1,b2) -> prepend_test pfx b1 %&% prepend_test pfx b2
+ | Or(b1,b2) -> prepend_test pfx b1 %+% prepend_test pfx b2
+ | Impl(b1,b2) -> prepend_test pfx b1 %=>% prepend_test pfx b2
+ | Iff (b1,b2) -> prepend_test pfx b1 %<=>% prepend_test pfx b2
+
+let rec prepend pfx c =
+ match c with
+ | Skip -> Skip
+ | Assign(f,e) -> Assign(prepend_str pfx f, prepend_expr pfx e)
+ | Assume b -> prepend_test pfx b |> Assume
+ | Seq(c1,c2) -> prepend pfx c1 %:% prepend pfx c2
+ | Select(typ, cs) ->
+ List.map cs ~f:(fun (t,c) -> (prepend_test pfx t, prepend pfx c))
+ |> mkSelect typ
+ | Apply t ->
+ Apply {name = prepend_str pfx t.name;
+ keys = List.map t.keys ~f:(fun (k,sz,v_opt) -> (prepend_str pfx k, sz,v_opt));
+ actions = List.map t.actions ~f:(fun (n, scope, act) -> (n, List.map scope ~f:(fun (x,sz) -> (prepend_str pfx x, sz)), prepend pfx act));
+ default = prepend pfx t.default}
+
+
+let equivalent ?neg:(neg = True) (data : ProfData.t ref) eq_fvs l p =
+ (* Printf.printf "assuming %s\n%!" (string_of_test neg); *)
+ let l = Assume neg %:% l in
+ let p = Assume neg %:% p in
+ let phys_prefix = "phys_" in
+ let p' = prepend phys_prefix p in
+ let st = Time.now () in
+ let fvs =
+ free_of_cmd `Hole l
+ @ free_of_cmd `Var l
+ @ free_of_cmd `Hole p
+ @ free_of_cmd `Var p
+ in
+ ProfData.update_time !data.prefixing_time st;
+ let prefix_list = List.map ~f:(fun (x,sz) -> (phys_prefix ^ x, sz)) in
+ let fvs_p = prefix_list fvs in
+ let eq_fvs_p = prefix_list eq_fvs in
+
+ let st = Time.now() in
+ let sub_l, _, gl, _ = good_execs fvs l in
+ let sub_p, _, gp, _ = good_execs fvs_p p' in
+ ProfData.update_time !data.good_execs_time st;
+ let st = Time.now () in
+ let lin = inits eq_fvs sub_l in
+ let pin = inits eq_fvs_p sub_p in
+ let lout = finals eq_fvs sub_l in
+ let pout = finals eq_fvs_p sub_p in
+ let in_eq = zip_eq_exn lin pin in
+ let out_eq = zip_eq_exn lout pout in
+ ProfData.update_time !data.ingress_egress_time st;
+ ((gl %&% gp %&% in_eq) %=>% out_eq)
+
+
+let hoare_triple_passified_relabelled assum good_n conseq =
+ mkImplies assum @@ mkImplies good_n conseq
+
+let hoare_triple_passified sub assum good_n conseq =
+ hoare_triple_passified_relabelled
+ (apply_init_test assum)
+ good_n
+ (apply_finals_sub_test conseq sub)
+
+
+let passive_hoare_triple_pkt ~fvs in_pkt cmd out_pkt =
+ let sub, _, good_N, _ = good_execs fvs cmd in
+ hoare_triple_passified sub
+ (Packet.to_test in_pkt ~fvs)
+ good_N
+ (Packet.to_test out_pkt ~fvs)
+
+let passive_hoare_triple ~fvs assum cmd conseq =
+ let sub, _, good_N, _ = good_execs fvs cmd in
+ hoare_triple_passified sub
+ assum
+ good_N
+ conseq
(** [fill_holes(|_value|_test]) replace the applies the substitution
diff --git a/synthesis/lib/OneBigTable.ml b/synthesis/lib/OneBigTable.ml
index 1c088bbc..68e4eadf 100644
--- a/synthesis/lib/OneBigTable.ml
+++ b/synthesis/lib/OneBigTable.ml
@@ -24,30 +24,62 @@ let combine_actions (act1 : string * (string * size) list * cmd) (act2 : string
let new_p2' = List.map new_p2 ~f:(fun (_, v, w) -> v, w) in
n1 ^ "_" ^ n2, new_p1' @ new_p2', new_c1 %:% new_c2
+let combine_all_actions actions1 actions2 =
+ List.map
+ (cross actions1 actions2 |> List.concat)
+ ~f:(fun (x, y) -> combine_actions x y)
+
type only_apply = {keys:(string * size * value option) list;
actions: ((string * (string * size) list * cmd) list);
default: cmd}
+let rec replace_apply_with_def c =
+ match c with
+ | Skip
+ | Assign _
+ | Assume _ -> c
+ | Seq(c1, c2) -> Seq(replace_apply_with_def c1, replace_apply_with_def c2)
+ | Select(st, tc) -> Select(st, List.map tc ~f:(fun (t, c) -> (t, replace_apply_with_def c)))
+ | Apply a -> a.default
+
+let rec has_apply c =
+ match c with
+ | Skip
+ | Assign _
+ | Assume _ -> false
+ | Seq(c1, c2) -> has_apply c1 || has_apply c2
+ | Select(_, tc) -> List.fold (List.map tc ~f:(fun (_, c) -> has_apply c)) ~init:false ~f:(||)
+ | Apply _ -> true
+
+let empty_only_apply = {keys = []; actions = []; default = Skip}
+
let rec mk_one_big_table' (tbl : only_apply) c =
match c with
| Skip -> tbl
- | Assign _ ->
+ | Assign _
+ | Assume _ ->
{ tbl with
actions = List.map tbl.actions ~f:(fun (n, p, act) -> (n, p, act %:% c));
default = tbl.default %:% c }
- | Assume _ -> failwith "Assume not handled"
| Seq(c1, c2) -> mk_one_big_table' (mk_one_big_table' tbl c1) c2
- | Select(_, tcl) ->
+ | Select(_, tcl) when has_apply c ->
let free = List.map tcl
~f:(fun (t, _) -> List.map (free_vars_of_test t) ~f:(fun(f, s) -> (f, s, None))) |> List.concat in
- let es = List.map tcl ~f:snd in
+ let es_tbl = List.map tcl ~f:snd |> List.map ~f:(mk_one_big_table' empty_only_apply) in
let tbl_keys = {tbl with keys = dedup (tbl.keys @ free)} in
- List.fold es ~init:tbl_keys ~f:mk_one_big_table'
+ let acts = ("DEFAULT", [], tbl_keys.default) :: tbl_keys.actions in
+ let es_tbl_acts = (List.map es_tbl ~f:(fun t -> "DEFAULT", [], t.default))
+ @ List.concat_map es_tbl ~f:(fun est -> est.actions) in
+ let new_acts = combine_all_actions acts es_tbl_acts in
+ { tbl_keys with actions = new_acts;
+ default = tbl_keys.default %:% replace_apply_with_def c }
+ | Select _ -> (* We can assume no Apply in the Select, given the previous case *)
+ { tbl with
+ actions = List.map tbl.actions ~f:(fun (n, p, act) -> (n, p, act %:% c));
+ default = tbl.default %:% c }
| Apply app_t ->
- let cross_actions = List.map
- (cross tbl.actions app_t.actions |> List.concat)
- ~f:(fun (x, y) -> combine_actions x y) in
+ let cross_actions = combine_all_actions tbl.actions app_t.actions in
let def_tbl_to_app_t = List.map app_t.actions ~f:(combine_actions ("DEFAULT", [], tbl.default)) in
let tbl_to_def_app_t = List.map tbl.actions ~f:(fun t -> combine_actions t ("DEFAULT", [], app_t.default)) in
{ keys = dedup (tbl.keys @ app_t.keys);
diff --git a/synthesis/lib/Runtime.ml b/synthesis/lib/Runtime.ml
index a7c58b07..f8dbdfc3 100644
--- a/synthesis/lib/Runtime.ml
+++ b/synthesis/lib/Runtime.ml
@@ -87,8 +87,13 @@ let matches_of_string ?sep:(sep=';') (keys : (string * int) list) (data_str : st
String.lsplit2 match_str ~on:'#'
|> Option.value ~default:(match_str, Printf.sprintf "%d" sz)
in
+ let value_str' = if String.contains value_str ':'
+ then value_str
+ |> String.substr_replace_all ~pattern:":" ~with_:""
+ |> Printf.sprintf "0x%s"
+ else value_str in
let size = int_of_string size_str in
- let value = Bigint.of_string value_str in
+ let value = Bigint.of_string value_str' in
Match.exact_ key (Int(value, size))
end
)
@@ -100,7 +105,7 @@ let parse program filename : Edit.t list =
match data with
| ["ADD"; tbl_nm; matches; action_data; action] ->
begin match get_schema_of_table tbl_nm program with
- | None -> failwith @@ Printf.sprintf "unrecognized table %s" tbl_nm
+ | None -> failwith @@ Printf.sprintf "unrecognized table %s in row %s" tbl_nm (List.reduce_exn data ~f:(^))
| Some (keys, _,_) ->
let keys = List.map keys ~f:(fun (k,sz,_) -> (k,sz)) in
Add (tbl_nm,
@@ -123,7 +128,6 @@ let parse program filename : Edit.t list =
edits
-
let parse_bmv2_entry cmd string : Edit.t =
match String.split string ~on:' ' with
| "table_add" :: tbl_name :: action_name :: cont ->
@@ -166,3 +170,5 @@ let parse_bmv2_entry cmd string : Edit.t =
let parse_bmv2 cmd filename : Edit.t list =
In_channel.read_lines filename
|> List.map ~f:(parse_bmv2_entry cmd)
+
+let parse_whippersnapper = parse_bmv2
diff --git a/synthesis/whippersnapper/bench_identity.py b/synthesis/whippersnapper/bench_identity.py
new file mode 100644
index 00000000..60b48981
--- /dev/null
+++ b/synthesis/whippersnapper/bench_identity.py
@@ -0,0 +1,297 @@
+#!/usr/bin/python
+
+import os
+import random
+import shutil
+import subprocess
+import sys
+import time
+
+import plotter as pl
+
+# generate a rule, generic functions
+
+# take a list of rule templates (see gen_rule). Generates num rules, using the rule templates as evenly as possible.
+def gen_rules(num, rule_temps):
+ res = []
+ for i in list(range(0, num)):
+ j = i % len(rule_temps);
+ rt = rule_temps[j];
+ r = gen_rule(rt);
+ res.append(r);
+ return res;
+
+# takes a list of strings and ints. Replaces every int n with a hex string of width n,
+# concatenates into a single list, and returns
+def gen_rule(rule_temp):
+ rule = map(adj_rule_piece, rule_temp);
+ return "".join(rule);
+
+def adj_rule_piece(p):
+ if isinstance(p, str):
+ return p;
+ else:
+ b = random.getrandbits(p);
+ b = str(b); #add_colons(str(hex(b))[2:]);
+ return b;
+
+def add_colons(s):
+ s2 = ""
+ for (i, c) in enumerate(s):
+ if i != 0 and i % 2 == 0:
+ s2 += ":"
+ s2 += c;
+ return s2;
+
+
+# fvs and rule gen
+
+def pipeline_fvs(i):
+ return "hdr.ethernet.dstAddr,hdr.ethernet.dstAddr,48\nstandard_metadata.egress_spec,standard_metadata.egress_spec,9"
+
+def pipeline_rules(ind):
+ res = []
+ for i in list(range(0, ind)):
+ (tbl_name, act) = ("forward_table", "forward") if i == 0 else ("table_" + str(i), "forward" + str(i));
+ res += [["table_add " + tbl_name + " " + act + " ", 48, " => ", 9]];
+ return res;
+
+def set_field_fvs(ind):
+ fvs = pipeline_fvs(ind);
+ fvs += "\nhdr.ptp.reserved2,hdr.ptp.reserved2,8\n";
+ for i in list(range(0, ind)):
+ fvs += "hdr.header_0.field_" + str(i) + ",hdr.header_0.field_" + str(i) + ",16\n"
+ return fvs;
+
+def set_field_rules(ind):
+ return [ ["table_add forward_table forward ", 48, " => ", 9]
+ , ["table_add test_tbl mod_headers ", 8, " =>"]];
+
+
+def add_header_fvs(ind):
+ fvs = "hdr.ptp.reserved2,hdr.ptp.reserved2,8\nstandard_metadata.egress_spec,standard_metadata.egress_spec,9\n";
+ fvs += "hdr.ethernet.dstAddr,hdr.ethernet.dstAddr,48\n";
+ for i in list(range(0, ind)):
+ fvs += "hdr.header_" + str(i) + ".field_0,hdr.header_" + str(i) + ".field_0,16\n"
+ return fvs;
+
+
+def add_header_rules(ind):
+ return [ ["table_add forward_table forward ", 48, " => ", 9]
+ , ["table_add test_tbl add_headers ", 8, " =>"]];
+
+
+def rm_header_fvs(ind):
+ return add_header_fvs(ind);
+
+def rm_header_rules(ind):
+ return [ ["table_add forward_table forward ", 48, " => ", 9]
+ , ["table_add test_tbl remove_headers ", 8, " =>"]];
+
+# whippersnapper commands
+def whippersnapper_cmds():
+ return { "set-field" : (["--operations"], [], set_field_fvs, set_field_rules)
+ , "add-header" : (["--headers"], [], add_header_fvs, add_header_rules)
+ , "rm-header" : (["--headers"], [], rm_header_fvs, rm_header_rules)
+ , "pipeline" : (["--tables"], ["--table-size", "16"], pipeline_fvs, pipeline_rules) }
+
+# generate the OBT, and the corresponding rules
+def rewrite_cmd(cmd):
+ try:
+ pieces = cmd.split(",");
+ if pieces[0] == "ADD":
+ params = pieces[2];
+ fparams = params.split(" ")[2::3];
+ fparams = ";".join(fparams);
+ pieces[2] = fparams;
+ #pieces = list(map(lambda s : s[s.find("0x"):] if s.find("0x") != -1 else s, pieces));
+ fcmd = ",".join(pieces);
+ return fcmd;
+ elif pieces[0] == "DEL":
+ return cmd;
+ elif pieces[0] == "":
+ return "";
+ except:
+ return cmd;
+
+def rewrite_cmds(cmds):
+ fcmds = list(map(rewrite_cmd, cmds.split("\n")));
+ fmcds = "\n".join(fcmds);
+ return fmcds;
+
+def non_cache_flags_pipeline():
+ return ["--reach-filter", "--hints", "exact"]
+
+def non_cache_flags_set_field():
+ return ["-w", "--restrict-mask"]; #hints
+
+def avenir_flags_pipeline():
+ x = ["--cache-edits", "1", "--cache-queries"]
+ x.extend(non_cache_flags_pipeline());
+ return x;
+
+def avenir_flags_set_field():
+ x = ["--cache-edits", "1", "--cache-queries"]
+ x.extend(non_cache_flags_set_field());
+ return x;
+
+def hot_start_flags_pipeline():
+ x = ["--hot-start"];
+ x.extend(avenir_flags_pipeline());
+ return x;
+
+def hot_start_flags_set_field():
+ x = ["--hot-start"];
+ x.extend(avenir_flags_set_field());
+ return x;
+
+def orig_to_obt(ws_cmd, fldr):
+ return ("whippersnapper/" + fldr + "/" + ws_cmd + "_res.csv");
+
+def avenir(ws_cmd, fldr, i, fn, num, timeout, rule_temps, fvs, flags):
+ edits_file = "whippersnapper/empty_edits.txt";
+ fvs_file = "output/fvs.txt";
+ assume_file = "whippersnapper/empty_assume.txt";
+ #commands_file = "output/commands.txt";
+ commands_no_def_file = "output/commands_no_def.txt"
+
+ with open(fvs_file, 'w') as fvsf:
+ fvsf.write(fvs);
+
+ rules = gen_rules(num, rule_temps);
+
+ with open(commands_no_def_file, 'w') as cmdnd:
+ rules_str = "\n".join(rules);
+ cmdnd.write(rules_str);
+ #with open(commands_file, 'r') as cmds:
+ # with open(commands_no_def_file, 'w') as cmdnd:
+ # for line in cmds:
+ # if not line.startswith("table_set_default"):
+ # cmdnd.write(line);
+
+
+ st_time = time.perf_counter();
+ try:
+ res = subprocess.run(["./avenir", "synth", "output/main16.p4", "output/main16.p4", edits_file
+ , edits_file, fvs_file, "-b", "100", "-data"
+ , commands_no_def_file, "-e", "25", "-p"] + flags
+ + ["-I1", "whippersnapper/p4includes"]
+ + ["-I2", "whippersnapper/p4includes"]
+ + ["-P4", "--thrift"]
+ , stdout = subprocess.PIPE, stderr = subprocess.PIPE, timeout = timeout);
+ end_time = time.perf_counter();
+ elapsed = end_time - st_time;
+
+ obt_commands = "output/obt_commands.txt";
+
+ cmds = res.stdout.decode('utf-8');
+ cmds = cmds.split("Target operations:\n")[1];
+ with open(obt_commands, 'w') as f:
+ f.write(cmds);
+ #except:
+ # print("no commands written");
+
+ with open(orig_to_obt(ws_cmd, fldr), "a") as res_file:
+ res_file.write(str(i) + "," + str(elapsed) + "\n")
+ except subprocess.TimeoutExpired:
+ return;
+
+def run_whippersnapper(ws_cmd, fldr, timeout, rule_num, mx, flags):
+ if not os.path.isdir("whippersnapper/" + fldr):
+ os.mkdir("whippersnapper/" + fldr)
+
+ if not os.path.isdir("whippersnapper/"+ fldr + "/" + ws_cmd):
+ os.mkdir("whippersnapper/" + fldr + "/" + ws_cmd)
+
+ mn = 1;
+ if os.path.exists(orig_to_obt(ws_cmd, fldr)):
+ with open(orig_to_obt(ws_cmd, fldr), 'r') as res_file:
+ res = res_file.read();
+ for lne in res.split("\n"):
+ if ',' in lne:
+ mn += 1;
+
+ for i in list(range(mn, int(mx))):
+ print(str(i));
+ (cmd_line1, cmd_line2, fvs, get_rule_temps) = whippersnapper_cmds()[ws_cmd];
+ subprocess.run(["p4benchmark", "--feature", ws_cmd] + cmd_line1 + [str(i)] + cmd_line2);
+ subprocess.run(["p4test", "--p4v", "14", "--pp", "output/main16.p4", "output/main.p4"]);
+ avenir(ws_cmd, fldr, i, "output", rule_num, timeout, get_rule_temps(i), fvs(i), flags);
+
+ shutil.move("output", "whippersnapper/" + fldr + "/"+ ws_cmd + "/output_" + str(i));
+
+
+# run the actual evaluation, using OBT as the logical program
+
+# plot
+
+def read_data(fn):
+ data_str = ""
+ with open(fn) as f:
+ data_str = f.read();
+ return data_str;
+
+def string_to_data(data_str):
+ data = {};
+ for line in data_str.split("\n"):
+ if "," in line:
+ pieces = line.split(",");
+ data[int(pieces[0])] = float(pieces[1]);
+
+ return data
+
+def get_data(mx, f):
+ s = read_data(f);
+ data = string_to_data(s);
+ data = { k: data[k] for k in data.keys() if k < mx };
+ return data;
+
+def adjust_hot(hot, cold):
+ for k in hot:
+ hot[k] = hot[k] - cold[k];
+
+def plot(rn, mx, ws_cmd, xlbl):
+ cache_data = get_data(mx, "whippersnapper/id_cache_" + str(rn) + "/" + ws_cmd + "_res.csv")
+ hot_data = get_data(mx, "whippersnapper/id_hot_cache_" + str(rn) + "/" + ws_cmd + "_res.csv")
+ no_cache_data = get_data (mx, "whippersnapper/id_no_cache_" + str(rn) + "/" + ws_cmd + "_res.csv")
+
+ adjust_hot(hot_data, cache_data)
+
+ pl.plot_series(cache_data, hot_data, no_cache_data, name = "whippersnapper/id_" + ws_cmd, xlabel = xlbl, ylabel = "synthesis time (s)");
+
+cmd = sys.argv[1];
+ws_cmd = sys.argv[2] if len(sys.argv) > 2 else None;
+
+if cmd == "generate" and os.path.exists("whippersnapper/" + ws_cmd + "_orig_to_obt_res.csv"):
+ print("output file already exists");
+ sys.exit();
+
+if cmd == "gen-all":
+ rule_num = int(sys.argv[2]);
+ timeout = int(sys.argv[3]);
+ max_pl = int(sys.argv[4]);
+ max_sf = int(sys.argv[5]);
+
+ run_whippersnapper("pipeline", "id_cache_" + str(rule_num), timeout, rule_num, max_pl, avenir_flags_pipeline());
+ run_whippersnapper("pipeline", "id_hot_cache_" + str(rule_num), timeout, rule_num, max_pl, hot_start_flags_pipeline());
+ run_whippersnapper("pipeline", "id_no_cache_" + str(rule_num), timeout, rule_num, max_pl, non_cache_flags_pipeline());
+
+ run_whippersnapper("set-field", "id_cache_" + str(rule_num), timeout, rule_num, max_sf, avenir_flags_set_field());
+ run_whippersnapper("set-field", "id_hot_cache_" + str(rule_num), timeout, rule_num, max_sf, hot_start_flags_set_field());
+ run_whippersnapper("set-field", "id_no_cache_" + str(rule_num), timeout, rule_num, max_sf, non_cache_flags_set_field());
+
+ plot(rule_num, max_pl, "pipeline", "# of tables");
+ plot(rule_num, max_sf, "set-field", "# of fields");
+
+elif cmd == "generate":
+ rule_num = int(sys.argv[3]);
+ mx = sys.argv[4];
+    flags = (non_cache_flags_pipeline() if ws_cmd == "pipeline" else non_cache_flags_set_field()) if "--no-cache" in sys.argv else (avenir_flags_pipeline() if ws_cmd == "pipeline" else avenir_flags_set_field());
+ fldr = "no_cache" if "--no-cache" in sys.argv else "cache";
+    run_whippersnapper(ws_cmd, fldr, 600, rule_num, mx, flags);  # 600s default timeout; run_whippersnapper expects (ws_cmd, fldr, timeout, rule_num, mx, flags)
+elif cmd == "plot":
+ if ws_cmd == "pipeline":
+ xlbl = "# of tables"
+ else:
+ xlbl = "# of fields"
+    plot(int(sys.argv[3]), int(sys.argv[4]), ws_cmd, xlbl);  # plot expects (rule_num, mx, ws_cmd, xlbl)
diff --git a/synthesis/whippersnapper/bench_obt.py b/synthesis/whippersnapper/bench_obt.py
new file mode 100644
index 00000000..f03b75f5
--- /dev/null
+++ b/synthesis/whippersnapper/bench_obt.py
@@ -0,0 +1,332 @@
+#!/usr/bin/python
+
+import os
+import random
+import shutil
+import subprocess
+import sys
+import time
+
+import plotter as pl
+
+# generate a rule, generic functions
+
+# take a list of rule templates (see gen_rule). Generates num rules, using the rule templates as evenly as possible.
+def gen_rules(num, rule_temps):
+    # Generate `num` concrete rules, cycling round-robin through the
+    # templates in `rule_temps` so each template is used as evenly as possible.
+    res = []
+    for i in list(range(0, num)):
+        j = i % len(rule_temps);  # round-robin template index
+        rt = rule_temps[j];
+        r = gen_rule(rt);
+        res.append(r);
+    return res;
+
+# takes a list of strings and ints. Replaces every int n with the decimal
+# string of an n-bit random value (the hex/colon formatting is commented out
+# in adj_rule_piece), concatenates everything into a single string, and returns it
+def gen_rule(rule_temp):
+    # Instantiate one rule template: string pieces pass through, int pieces
+    # become random values (see adj_rule_piece); the pieces are concatenated.
+    rule = map(adj_rule_piece, rule_temp);
+    return "".join(rule);
+
+def adj_rule_piece(p):
+    # Template piece: a string is literal text; an int p is a field width,
+    # replaced by the DECIMAL string of a random p-bit value.
+    if isinstance(p, str):
+        return p;
+    else:
+        b = random.getrandbits(p);
+        # Hex/colon-separated formatting was tried and disabled; decimal is
+        # what the command files currently contain.
+        b = str(b); #add_colons(str(hex(b))[2:]);
+        return b;
+
+def add_colons(s):
+    # Insert ':' between every pair of characters (MAC-address style),
+    # e.g. "a1b2c3" -> "a1:b2:c3".
+    # Currently unused: its only call site is commented out in adj_rule_piece.
+    s2 = ""
+    for (i, c) in enumerate(s):
+        if i != 0 and i % 2 == 0:
+            s2 += ":"
+        s2 += c;
+    return s2;
+
+
+# fvs and rule gen
+
+def pipeline_fvs(i):
+    # Free-variables (fvs) CSV for the "pipeline" feature.
+    # `i` is unused; the parameter exists so every *_fvs function shares the
+    # (index -> fvs string) shape expected by whippersnapper_cmds().
+    # NOTE(review): dstAddr appears as both columns of the first row — if the
+    # format is (logical,physical,width) this looks intentional, but confirm
+    # the first column was not meant to be srcAddr.
+    return "hdr.ethernet.dstAddr,hdr.ethernet.dstAddr,48\nstandard_metadata.egress_spec,standard_metadata.egress_spec,9"
+
+def pipeline_rules(ind):
+    # Rule templates for a pipeline of `ind` tables: one table_add per table,
+    # matching a random 48-bit key and setting a random 9-bit egress port.
+    # Table 0 uses the names whippersnapper gives the first table.
+    res = []
+    for i in list(range(0, ind)):
+        (tbl_name, act) = ("forward_table", "forward") if i == 0 else ("table_" + str(i), "forward" + str(i));
+        res += [["table_add " + tbl_name + " " + act + " ", 48, " => ", 9]];
+    return res;
+
+def set_field_fvs(ind):
+    # Free variables for "set-field": the pipeline fvs plus the ptp.reserved2
+    # selector and one 16-bit header_0 field per operation index.
+    fvs = pipeline_fvs(ind);
+    fvs += "\nhdr.ptp.reserved2,hdr.ptp.reserved2,8\n";
+    for i in list(range(0, ind)):
+        fvs += "hdr.header_0.field_" + str(i) + ",hdr.header_0.field_" + str(i) + ",16\n"
+    return fvs;
+
+def set_field_rules(ind):
+    # Rule templates for "set-field" (`ind` unused; kept for the uniform
+    # (index -> templates) interface used by whippersnapper_cmds()).
+    return [ ["table_add forward_table forward ", 48, " => ", 9]
+           , ["table_add test_tbl mod_headers ", 8, " =>"]];
+
+
+def add_header_fvs(ind):
+    # Free variables for "add-header": ptp selector, egress spec, dstAddr,
+    # plus the first 16-bit field of each of the `ind` generated headers.
+    fvs = "hdr.ptp.reserved2,hdr.ptp.reserved2,8\nstandard_metadata.egress_spec,standard_metadata.egress_spec,9\n";
+    fvs += "hdr.ethernet.dstAddr,hdr.ethernet.dstAddr,48\n";
+    for i in list(range(0, ind)):
+        fvs += "hdr.header_" + str(i) + ".field_0,hdr.header_" + str(i) + ".field_0,16\n"
+    return fvs;
+
+
+def add_header_rules(ind):
+    # Rule templates for "add-header" (`ind` unused; uniform interface).
+    return [ ["table_add forward_table forward ", 48, " => ", 9]
+           , ["table_add test_tbl add_headers ", 8, " =>"]];
+
+
+def rm_header_fvs(ind):
+    # "rm-header" uses the same free variables as "add-header".
+    return add_header_fvs(ind);
+
+def rm_header_rules(ind):
+    # Rule templates for "rm-header" (`ind` unused; uniform interface).
+    return [ ["table_add forward_table forward ", 48, " => ", 9]
+           , ["table_add test_tbl remove_headers ", 8, " =>"]];
+
+# whippersnapper commands
+def whippersnapper_cmds():
+    # Per-feature configuration table:
+    #   feature -> (p4benchmark args before the size, args after the size,
+    #               fvs generator, rule-template generator).
+    return { "set-field" : (["--operations"], [], set_field_fvs, set_field_rules)
+           , "add-header" : (["--headers"], [], add_header_fvs, add_header_rules)
+           , "rm-header" : (["--headers"], [], rm_header_fvs, rm_header_rules)
+           , "pipeline" : (["--tables"], ["--table-size", "16"], pipeline_fvs, pipeline_rules) }
+
+# generate the OBT, and the corresponding rules
+def rewrite_cmd(cmd):
+ try:
+ pieces = cmd.split(",");
+ if pieces[0] == "ADD":
+ params = pieces[2];
+ fparams = params.split(" ")[2::3];
+ fparams = ";".join(fparams);
+ pieces[2] = fparams;
+ #pieces = list(map(lambda s : s[s.find("0x"):] if s.find("0x") != -1 else s, pieces));
+ fcmd = ",".join(pieces);
+ return fcmd;
+ elif pieces[0] == "DEL":
+ return cmd;
+ elif pieces[0] == "":
+ return "";
+ except:
+ return cmd;
+
+def rewrite_cmds(cmds):
+    # Apply rewrite_cmd to every line of `cmds` and re-join with newlines.
+    # (`fmcds` is a transposition typo for `fcmds`, but it is used
+    # consistently, so the function behaves correctly.)
+    fcmds = list(map(rewrite_cmd, cmds.split("\n")));
+    fmcds = "\n".join(fcmds);
+    return fmcds;
+
+# avenir flag sets for the three configurations benchmarked per feature:
+#   non_cache_*  : baseline flags, no edit/query caching
+#   avenir_*     : baseline plus edit and query caches
+#   hot_start_*  : cached configuration plus --hot-start
+def non_cache_flags_pipeline():
+    return ["--reach-filter", "--hints", "exact"]
+
+def non_cache_flags_set_field():
+    return ["-w", "--restrict-mask"]; #hints
+
+def avenir_flags_pipeline():
+    x = ["--cache-edits", "1", "--cache-queries"]
+    x.extend(non_cache_flags_pipeline());
+    return x;
+
+def avenir_flags_set_field():
+    x = ["--cache-edits", "1", "--cache-queries"]
+    x.extend(non_cache_flags_set_field());
+    return x;
+
+def hot_start_flags_pipeline():
+    x = ["--hot-start"];
+    x.extend(avenir_flags_pipeline());
+    return x;
+
+def hot_start_flags_set_field():
+    x = ["--hot-start"];
+    x.extend(avenir_flags_set_field());
+    return x;
+
+def orig_to_obt(ws_cmd, fldr):
+    # Path of the CSV accumulating (index, elapsed-seconds) results for the
+    # original-program -> OBT direction of a given feature/configuration.
+    return ("whippersnapper/" + fldr + "/" + ws_cmd + "_orig_to_obt_res.csv");
+
+def rules_for_obt(ws_cmd, fldr, i, fn, num, rule_temps, fvs, flags):
+    # Generate `num` random rules for benchmark instance `i`, run
+    # `./avenir to-obt` on the whippersnapper-generated program, time the run,
+    # write the rewritten OBT commands, and append the timing to the CSV.
+    # `fn` is currently unused (callers pass "output").
+    edits_file = "whippersnapper/empty_edits.txt";
+    fvs_file = "output/fvs.txt";
+    assume_file = "whippersnapper/empty_assume.txt";
+    #commands_file = "output/commands.txt";
+    commands_no_def_file = "output/commands_no_def.txt"
+
+    with open(fvs_file, 'w') as fvsf:
+        fvsf.write(fvs);
+
+    rules = gen_rules(num, rule_temps);
+
+    with open(commands_no_def_file, 'w') as cmdnd:
+        rules_str = "\n".join(rules);
+        cmdnd.write(rules_str);
+    #with open(commands_file, 'r') as cmds:
+    #    with open(commands_no_def_file, 'w') as cmdnd:
+    #        for line in cmds:
+    #            if not line.startswith("table_set_default"):
+    #                cmdnd.write(line);
+
+
+    # Time the to-obt synthesis run; stdout is captured for command rewriting.
+    st_time = time.perf_counter();
+    res = subprocess.run(["./avenir", "to-obt", "output/main16.p4", edits_file
+                          , edits_file, fvs_file, assume_file, "-b", "100", "-data"
+                          , commands_no_def_file, "-e", "25", "-p"] + flags + ["-I", "whippersnapper/p4includes"], stdout = subprocess.PIPE, stderr = subprocess.PIPE);
+    end_time = time.perf_counter();
+    elapsed = end_time - st_time;
+
+    obt_commands = "output/obt_commands.txt";
+
+    cmds = res.stdout.decode('utf-8');
+    # NOTE(review): if avenir fails (no "Edits" header in stdout), [1] raises
+    # IndexError; the disabled try/except below suggests this was known.
+    cmds = cmds.split("Edits\n")[1];
+    with open(obt_commands, 'w') as f:
+        f.write(rewrite_cmds(cmds));
+    #except:
+    #    print("no commands written");
+
+    # Append (instance index, elapsed seconds) to the per-configuration CSV.
+    with open(orig_to_obt(ws_cmd, fldr), "a") as res_file:
+        res_file.write(str(i) + "," + str(elapsed) + "\n")
+
+def run_whippersnapper(ws_cmd, fldr, rule_num, mx, flags):
+    # Drive the full orig->OBT benchmark for feature `ws_cmd` under
+    # configuration folder `fldr`: for each size i in [mn, mx), generate a
+    # program with p4benchmark, convert P4_14 -> P4_16 with p4test, run
+    # rules_for_obt, and archive the output/ directory.
+    if not os.path.isdir("whippersnapper/" + fldr):
+        os.mkdir("whippersnapper/" + fldr)
+
+    if not os.path.isdir("whippersnapper/"+ fldr + "/" + ws_cmd):
+        os.mkdir("whippersnapper/" + fldr + "/" + ws_cmd)
+
+    # Resume support: start after the last instance already recorded in the
+    # results CSV (one data row per completed instance).
+    mn = 1;
+    if os.path.exists(orig_to_obt(ws_cmd, fldr)):
+        with open(orig_to_obt(ws_cmd, fldr), 'r') as res_file:
+            res = res_file.read();
+            for lne in res.split("\n"):
+                if ',' in lne:
+                    mn += 1;
+
+    for i in list(range(mn, int(mx))):
+        print(str(i));
+        (cmd_line1, cmd_line2, fvs, get_rule_temps) = whippersnapper_cmds()[ws_cmd];
+        # p4benchmark emits a P4_14 program into output/; p4test pretty-prints
+        # it as P4_16 (output/main16.p4) for avenir.
+        subprocess.run(["p4benchmark", "--feature", ws_cmd] + cmd_line1 + [str(i)] + cmd_line2);
+        subprocess.run(["p4test", "--p4v", "14", "--pp", "output/main16.p4", "output/main.p4"]);
+        rules_for_obt(ws_cmd, fldr, i, "output", rule_num, get_rule_temps(i), fvs(i), flags);
+
+        # Archive this instance's artifacts for the reverse (from-obt) pass.
+        shutil.move("output", "whippersnapper/" + fldr + "/"+ ws_cmd + "/output_" + str(i));
+
+
+# run the actual evaluation, using OBT as the logical program
+
+def run_avenir(ws_cmd, fldr, rn, ceil_mx, flags):
+    # Reverse direction: time `./avenir from-obt` for each archived instance
+    # of feature `ws_cmd` in configuration `fldr`, up to `ceil_mx`, and write
+    # all timings to <fldr>/<ws_cmd>_obt_to_orig_res.csv (overwriting).
+    mx = 1;
+    while os.path.isdir("whippersnapper/" + fldr + "/" + ws_cmd + "/output_" + str(mx)):
+        mx += 1;
+
+    mx = min(mx, ceil_mx);
+
+    res = ""
+    for i in list(range(1, int(mx))):
+        print(ws_cmd + " " + str(i));
+        output = "whippersnapper/" + fldr + "/" + ws_cmd + "/output_" + str(i) +"/";
+        # NOTE(review): the OBT commands are always read from the cache_<rn>
+        # run regardless of `fldr` — presumably intentional (one shared
+        # command set generated by the forward cache pass), but confirm.
+        commands_file = "whippersnapper/cache_" + str(rn) + "/" + ws_cmd + "/output_" + str(i) + "/obt_commands.txt";
+        edits_file = "whippersnapper/empty_edits.txt";
+        assume_file = "whippersnapper/empty_assume.txt";
+        fvs_file = output + "fvs.txt";
+
+        st_time = time.perf_counter();
+        subprocess.run(["./avenir", "from-obt", output + "main16.p4", edits_file, edits_file, fvs_file, assume_file, "-b", "100", "-data", commands_file, "-e", "25", "-p"] + flags + ["-I", "whippersnapper/p4includes"], stdout = subprocess.PIPE, stderr = subprocess.PIPE);
+        end_time = time.perf_counter();
+        elapsed = end_time - st_time;
+        res += str(i) + "," + str(elapsed) + "\n"
+
+    # "w" mode: a rerun replaces earlier results rather than appending.
+    with open("whippersnapper/" + fldr + "/" + ws_cmd + "_obt_to_orig_res.csv", "w") as res_file:
+        res_file.write(res);
+
+# plot
+
+def read_data(fn):
+    # Read the whole results file as one string.
+    data_str = ""
+    with open(fn) as f:
+        data_str = f.read();
+    return data_str;
+
+def string_to_data(data_str):
+    # Parse "index,seconds" CSV lines into {int index: float seconds};
+    # lines without a comma (blank/trailing) are skipped.
+    data = {};
+    for line in data_str.split("\n"):
+        if "," in line:
+            pieces = line.split(",");
+            data[int(pieces[0])] = float(pieces[1]);
+
+    return data
+
+def get_data(mx, f):
+    # Load results from file `f`, keeping only instances with index < mx.
+    s = read_data(f);
+    data = string_to_data(s);
+    data = { k: data[k] for k in data.keys() if k < mx };
+    return data;
+
+def adjust_hot(hot, cold):
+    # Subtract the cold-run time from each hot-start time, in place, so the
+    # hot series shows only the incremental cost.
+    # NOTE(review): raises KeyError if `hot` has an index absent from `cold`
+    # (e.g. runs truncated at different sizes) — confirm inputs always align.
+    for k in hot:
+        hot[k] = hot[k] - cold[k];
+
+def plot(rn, mx, ws_cmd, direction, xlbl):
+    # Plot the three series (cached, hot-start-adjusted, uncached) for feature
+    # `ws_cmd` in `direction` ("orig_to_obt" or "obt_to_orig"), truncated at
+    # x < mx, using the rule count `rn` to locate the configuration folders.
+    cache_data = get_data(mx, "whippersnapper/cache_" + str(rn) + "/" + ws_cmd + "_" + direction + "_res.csv")
+    hot_data = get_data(mx, "whippersnapper/hot_cache_" + str(rn) + "/" + ws_cmd + "_" + direction + "_res.csv")
+    no_cache_data = get_data (mx, "whippersnapper/no_cache_" + str(rn) + "/" + ws_cmd + "_" + direction + "_res.csv")
+
+    adjust_hot(hot_data, cache_data)
+
+    pl.plot_series(cache_data, hot_data, no_cache_data, name = "whippersnapper/" + ws_cmd + "_" + direction, xlabel = xlbl, ylabel = "synthesis time (s)");
+
+# CLI entry point: `cmd` selects the mode, `ws_cmd` (optional) names the
+# whippersnapper feature.
+cmd = sys.argv[1];
+ws_cmd = sys.argv[2] if len(sys.argv) > 2 else None;
+
+# Refuse to clobber an existing result file for "generate" runs.
+if cmd == "generate" and os.path.exists("whippersnapper/" + ws_cmd + "_orig_to_obt_res.csv"):
+    print("output file already exists");
+    sys.exit();
+
+if cmd == "gen-all":
+    # Forward direction (orig -> OBT) for all three configurations.
+    rule_num = int(sys.argv[2]);
+    max_pl = int(sys.argv[3]);
+    max_sf = int(sys.argv[4]);
+
+    run_whippersnapper("pipeline", "cache_" + str(rule_num) , rule_num, max_pl, avenir_flags_pipeline());
+    run_whippersnapper("pipeline", "hot_cache_" + str(rule_num), rule_num, max_pl, hot_start_flags_pipeline());
+    run_whippersnapper("pipeline", "no_cache_" + str(rule_num), rule_num, max_pl, non_cache_flags_pipeline());
+
+    run_whippersnapper("set-field", "cache_" + str(rule_num), rule_num, max_sf, avenir_flags_set_field());
+    run_whippersnapper("set-field", "hot_cache_" + str(rule_num), rule_num, max_sf, hot_start_flags_set_field());
+    run_whippersnapper("set-field", "no_cache_" + str(rule_num), rule_num, max_sf, non_cache_flags_set_field());
+
+    plot(rule_num, max_pl, "pipeline", "orig_to_obt", "# of tables");
+    plot(rule_num, max_sf, "set-field", "orig_to_obt", "# of fields");
+elif cmd == "gen-all-rev":
+    # Reverse direction (OBT -> orig) over the archived gen-all outputs.
+    rule_num = int(sys.argv[2]);
+    max_pl = int(sys.argv[3]);
+    max_sf = int(sys.argv[4]);
+
+    run_avenir("pipeline", "cache_" + str(rule_num) , rule_num, max_pl, avenir_flags_pipeline());
+    run_avenir("pipeline", "hot_cache_" + str(rule_num), rule_num, max_pl, hot_start_flags_pipeline());
+    run_avenir("pipeline", "no_cache_" + str(rule_num), rule_num, max_pl, non_cache_flags_pipeline());
+
+    run_avenir("set-field", "cache_" + str(rule_num), rule_num, max_sf, avenir_flags_set_field());
+    run_avenir("set-field", "hot_cache_" + str(rule_num), rule_num, max_sf, hot_start_flags_set_field());
+    run_avenir("set-field", "no_cache_" + str(rule_num), rule_num, max_sf, non_cache_flags_set_field());
+
+    plot(rule_num, max_pl, "pipeline", "obt_to_orig", "# of tables");
+    plot(rule_num, max_sf, "set-field", "obt_to_orig", "# of fields");
+
+
+elif cmd == "generate":
+    rule_num = int(sys.argv[3]);
+    mx = sys.argv[4];
+    # NOTE(review): non_cache_flags() and avenir_flags() are not defined in
+    # this file (only the *_pipeline / *_set_field variants exist) — this
+    # branch raises NameError as written; confirm intended helpers.
+    flags = non_cache_flags() if "--no-cache" in sys.argv else avenir_flags();
+    fldr = "no_cache" if "--no-cache" in sys.argv else "cache";
+    run_whippersnapper(ws_cmd, fldr, rule_num, mx, flags);
+elif cmd == "eval":
+    # NOTE(review): run_avenir takes 5 parameters; this 1-arg call raises
+    # TypeError — branch appears stale relative to the current signature.
+    run_avenir(ws_cmd);
+elif cmd == "plot":
+    if ws_cmd == "pipeline":
+        xlbl = "# of tables"
+    else:
+        xlbl = "# of fields"
+    # NOTE(review): plot takes 5 parameters (rn, mx, ws_cmd, direction, xlbl);
+    # this 2-arg call raises TypeError — branch appears stale.
+    plot(ws_cmd, xlbl);
diff --git a/synthesis/whippersnapper/empty_assume.txt b/synthesis/whippersnapper/empty_assume.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/synthesis/whippersnapper/empty_edits.txt b/synthesis/whippersnapper/empty_edits.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/synthesis/whippersnapper/p4includes/core.p4 b/synthesis/whippersnapper/p4includes/core.p4
new file mode 100644
index 00000000..03576ae4
--- /dev/null
+++ b/synthesis/whippersnapper/p4includes/core.p4
@@ -0,0 +1,81 @@
+/*
+Copyright 2013-present Barefoot Networks, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+/* This is the P4-16 core library, which declares some built-in P4 constructs using P4 */
+
+#ifndef _CORE_P4_
+#define _CORE_P4_
+
+/// Standard error codes. New error codes can be declared by users.
+error {
+ NoError, /// No error.
+ PacketTooShort, /// Not enough bits in packet for 'extract'.
+ NoMatch, /// 'select' expression has no matches.
+ StackOutOfBounds, /// Reference to invalid element of a header stack.
+ HeaderTooShort, /// Extracting too many bits into a varbit field.
+ ParserTimeout /// Parser execution time limit exceeded.
+}
+
+extern packet_in {
+ /// Read a header from the packet into a fixed-sized header @hdr and advance the cursor.
+ /// May trigger error PacketTooShort or StackOutOfBounds.
+ /// @T must be a fixed-size header type
+ void extract(out T hdr);
+ /// Read bits from the packet into a variable-sized header @variableSizeHeader
+ /// and advance the cursor.
+ /// @T must be a header containing exactly 1 varbit field.
+ /// May trigger errors PacketTooShort, StackOutOfBounds, or HeaderTooShort.
+ void extract(out T variableSizeHeader,
+ in bit<32> variableFieldSizeInBits);
+ /// Read bits from the packet without advancing the cursor.
+ /// @returns: the bits read from the packet.
+ /// T may be an arbitrary fixed-size type.
+ T lookahead();
+ /// Advance the packet cursor by the specified number of bits.
+ void advance(in bit<32> sizeInBits);
+ /// @return packet length in bytes. This method may be unavailable on
+ /// some target architectures.
+ bit<32> length();
+}
+
+extern packet_out {
+ /// Write @hdr into the output packet, advancing cursor.
+ /// @T can be a header type, a header stack, a header_union, or a struct
+ /// containing fields with such types.
+ void emit(in T hdr);
+}
+
+// TODO: remove from this file, convert to built-in
+/// Check a predicate @check in the parser; if the predicate is true do nothing,
+/// otherwise set the parser error to @toSignal, and transition to the `reject` state.
+extern void verify(in bool check, in error toSignal);
+
+/// Built-in action that does nothing.
+action NoAction() {}
+
+/// Standard match kinds for table key fields.
+/// Some architectures may not support all these match kinds.
+/// Architectures can declare additional match kinds.
+match_kind {
+ /// Match bits exactly.
+ exact,
+ /// Ternary match, using a mask.
+ ternary,
+ /// Longest-prefix match.
+ lpm
+}
+
+#endif /* _CORE_P4_ */
diff --git a/synthesis/whippersnapper/p4includes/v1model.p4 b/synthesis/whippersnapper/p4includes/v1model.p4
new file mode 100644
index 00000000..c10d2d79
--- /dev/null
+++ b/synthesis/whippersnapper/p4includes/v1model.p4
@@ -0,0 +1,261 @@
+/*
+Copyright 2013-present Barefoot Networks, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+/* P4-16 declaration of the P4 v1.0 switch model */
+
+#ifndef _V1_MODEL_P4_
+#define _V1_MODEL_P4_
+
+#include "core.p4"
+
+match_kind {
+ range,
+ // Used for implementing dynamic_action_selection
+ selector
+}
+
+// Are these correct?
+@metadata @name("standard_metadata")
+struct standard_metadata_t {
+ bit<9> ingress_port;
+ bit<9> egress_spec;
+ bit<9> egress_port;
+ bit<32> clone_spec;
+ bit<32> instance_type;
+ // The drop and recirculate_port fields are not used at all by the
+ // behavioral-model simple_switch software switch as of September
+ // 2018, and perhaps never was. They may be considered
+ // deprecated, at least for that P4 target device. simple_switch
+ // uses the value of the egress_spec field to determine whether a
+ // packet is dropped or not, and it is recommended to use the
+ // P4_14 drop() primitive action, or the P4_16 + v1model
+ // mark_to_drop() primitive action, to cause that field to be
+ // changed so the packet will be dropped.
+ bit<1> drop;
+ bit<16> recirculate_port;
+ bit<32> packet_length;
+ //
+ // @alias is used to generate the field_alias section of the BMV2 JSON.
+ // Field alias creates a mapping from the metadata name in P4 program to
+ // the behavioral model's internal metadata name. Here we use it to
+ // expose all metadata supported by simple switch to the user through
+ // standard_metadata_t.
+ //
+ // flattening fields that exist in bmv2-ss
+ // queueing metadata
+ @alias("queueing_metadata.enq_timestamp") bit<32> enq_timestamp;
+ @alias("queueing_metadata.enq_qdepth") bit<19> enq_qdepth;
+ @alias("queueing_metadata.deq_timedelta") bit<32> deq_timedelta;
+ @alias("queueing_metadata.deq_qdepth") bit<19> deq_qdepth;
+ // intrinsic metadata
+ @alias("intrinsic_metadata.ingress_global_timestamp") bit<48> ingress_global_timestamp;
+ @alias("intrinsic_metadata.egress_global_timestamp") bit<48> egress_global_timestamp;
+ @alias("intrinsic_metadata.lf_field_list") bit<32> lf_field_list;
+ @alias("intrinsic_metadata.mcast_grp") bit<16> mcast_grp;
+ @alias("intrinsic_metadata.resubmit_flag") bit<32> resubmit_flag;
+ @alias("intrinsic_metadata.egress_rid") bit<16> egress_rid;
+ /// Indicates that a verify_checksum() method has failed.
+ // 1 if a checksum error was found, otherwise 0.
+ bit<1> checksum_error;
+ @alias("intrinsic_metadata.recirculate_flag") bit<32> recirculate_flag;
+ /// Error produced by parsing
+ error parser_error;
+}
+
+enum CounterType {
+ packets,
+ bytes,
+ packets_and_bytes
+}
+
+enum MeterType {
+ packets,
+ bytes
+}
+
+extern counter {
+ counter(bit<32> size, CounterType type);
+ void count(in bit<32> index);
+}
+
+extern direct_counter {
+ direct_counter(CounterType type);
+ void count();
+}
+
+extern meter {
+ meter(bit<32> size, MeterType type);
+ void execute_meter(in bit<32> index, out T result);
+}
+
+extern direct_meter {
+ direct_meter(MeterType type);
+ void read(out T result);
+}
+
+extern register {
+ register(bit<32> size);
+ void read(out T result, in bit<32> index);
+ void write(in bit<32> index, in T value);
+}
+
+// used as table implementation attribute
+extern action_profile {
+ action_profile(bit<32> size);
+}
+
+// Get a random number in the range lo..hi
+extern void random(out T result, in T lo, in T hi);
+// If the type T is a named struct, the name is used
+// to generate the control-plane API.
+extern void digest(in bit<32> receiver, in T data);
+
+enum HashAlgorithm {
+ crc32,
+ crc32_custom,
+ crc16,
+ crc16_custom,
+ random,
+ identity,
+ csum16,
+ xor16
+}
+
+extern void mark_to_drop();
+extern void hash(out O result, in HashAlgorithm algo, in T base, in D data, in M max);
+
+extern action_selector {
+ action_selector(HashAlgorithm algorithm, bit<32> size, bit<32> outputWidth);
+}
+
+enum CloneType {
+ I2E,
+ E2E
+}
+
+@deprecated("Please use verify_checksum/update_checksum instead.")
+extern Checksum16 {
+ Checksum16();
+ bit<16> get(in D data);
+}
+
+/**
+Verifies the checksum of the supplied data.
+If this method detects that a checksum of the data is not correct it
+sets the standard_metadata checksum_error bit.
+@param T Must be a tuple type where all the fields are bit-fields or varbits.
+ The total dynamic length of the fields is a multiple of the output size.
+@param O Checksum type; must be bit type.
+@param condition If 'false' the verification always succeeds.
+@param data Data whose checksum is verified.
+@param checksum Expected checksum of the data; note that is must be a left-value.
+@param algo Algorithm to use for checksum (not all algorithms may be supported).
+ Must be a compile-time constant.
+*/
+extern void verify_checksum(in bool condition, in T data, inout O checksum, HashAlgorithm algo);
+/**
+Computes the checksum of the supplied data.
+@param T Must be a tuple type where all the fields are bit-fields or varbits.
+ The total dynamic length of the fields is a multiple of the output size.
+@param O Output type; must be bit type.
+@param condition If 'false' the checksum is not changed
+@param data Data whose checksum is computed.
+@param checksum Checksum of the data.
+@param algo Algorithm to use for checksum (not all algorithms may be supported).
+ Must be a compile-time constant.
+*/
+extern void update_checksum(in bool condition, in T data, inout O checksum, HashAlgorithm algo);
+
+/**
+Verifies the checksum of the supplied data including the payload.
+The payload is defined as "all bytes of the packet which were not parsed by the parser".
+If this method detects that a checksum of the data is not correct it
+sets the standard_metadata checksum_error bit.
+@param T Must be a tuple type where all the fields are bit-fields or varbits.
+ The total dynamic length of the fields is a multiple of the output size.
+@param O Checksum type; must be bit type.
+@param condition If 'false' the verification always succeeds.
+@param data Data whose checksum is verified.
+@param checksum Expected checksum of the data; note that is must be a left-value.
+@param algo Algorithm to use for checksum (not all algorithms may be supported).
+ Must be a compile-time constant.
+*/
+extern void verify_checksum_with_payload(in bool condition, in T data, inout O checksum, HashAlgorithm algo);
+/**
+Computes the checksum of the supplied data including the payload.
+The payload is defined as "all bytes of the packet which were not parsed by the parser".
+@param T Must be a tuple type where all the fields are bit-fields or varbits.
+ The total dynamic length of the fields is a multiple of the output size.
+@param O Output type; must be bit type.
+@param condition If 'false' the checksum is not changed
+@param data Data whose checksum is computed.
+@param checksum Checksum of the data.
+@param algo Algorithm to use for checksum (not all algorithms may be supported).
+ Must be a compile-time constant.
+*/
+extern void update_checksum_with_payload(in bool condition, in T data, inout O checksum, HashAlgorithm algo);
+
+extern void resubmit(in T data);
+extern void recirculate(in T data);
+extern void clone(in CloneType type, in bit<32> session);
+extern void clone3(in CloneType type, in bit<32> session, in T data);
+
+extern void truncate(in bit<32> length);
+
+// The name 'standard_metadata' is reserved
+
+// Architecture.
+// M should be a struct of structs
+// H should be a struct of headers or stacks
+
+parser Parser(packet_in b,
+ out H parsedHdr,
+ inout M meta,
+ inout standard_metadata_t standard_metadata);
+
+/* The only legal statements in the implementation of the
+VerifyChecksum control are: block statements, calls to the
+verify_checksum and verify_checksum_with_payload methods,
+and return statements. */
+control VerifyChecksum(inout H hdr,
+ inout M meta);
+@pipeline
+control Ingress(inout H hdr,
+ inout M meta,
+ inout standard_metadata_t standard_metadata);
+@pipeline
+control Egress(inout H hdr,
+ inout M meta,
+ inout standard_metadata_t standard_metadata);
+
+/* The only legal statements in the implementation of the
+ComputeChecksum control are: block statements, calls to the
+update_checksum and update_checksum_with_payload methods,
+and return statements. */
+control ComputeChecksum(inout H hdr,
+ inout M meta);
+@deparser
+control Deparser(packet_out b, in H hdr);
+
+package V1Switch(Parser p,
+ VerifyChecksum vr,
+ Ingress ig,
+ Egress eg,
+ ComputeChecksum ck,
+ Deparser dep
+ );
+
+#endif /* _V1_MODEL_P4_ */
diff --git a/synthesis/whippersnapper/p4includes2/core.p4 b/synthesis/whippersnapper/p4includes2/core.p4
new file mode 100644
index 00000000..03576ae4
--- /dev/null
+++ b/synthesis/whippersnapper/p4includes2/core.p4
@@ -0,0 +1,81 @@
+/*
+Copyright 2013-present Barefoot Networks, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+/* This is the P4-16 core library, which declares some built-in P4 constructs using P4 */
+
+#ifndef _CORE_P4_
+#define _CORE_P4_
+
+/// Standard error codes. New error codes can be declared by users.
+error {
+ NoError, /// No error.
+ PacketTooShort, /// Not enough bits in packet for 'extract'.
+ NoMatch, /// 'select' expression has no matches.
+ StackOutOfBounds, /// Reference to invalid element of a header stack.
+ HeaderTooShort, /// Extracting too many bits into a varbit field.
+ ParserTimeout /// Parser execution time limit exceeded.
+}
+
+extern packet_in {
+ /// Read a header from the packet into a fixed-sized header @hdr and advance the cursor.
+ /// May trigger error PacketTooShort or StackOutOfBounds.
+ /// @T must be a fixed-size header type
+ void extract(out T hdr);
+ /// Read bits from the packet into a variable-sized header @variableSizeHeader
+ /// and advance the cursor.
+ /// @T must be a header containing exactly 1 varbit field.
+ /// May trigger errors PacketTooShort, StackOutOfBounds, or HeaderTooShort.
+ void extract(out T variableSizeHeader,
+ in bit<32> variableFieldSizeInBits);
+ /// Read bits from the packet without advancing the cursor.
+ /// @returns: the bits read from the packet.
+ /// T may be an arbitrary fixed-size type.
+ T lookahead();
+ /// Advance the packet cursor by the specified number of bits.
+ void advance(in bit<32> sizeInBits);
+ /// @return packet length in bytes. This method may be unavailable on
+ /// some target architectures.
+ bit<32> length();
+}
+
+extern packet_out {
+ /// Write @hdr into the output packet, advancing cursor.
+ /// @T can be a header type, a header stack, a header_union, or a struct
+ /// containing fields with such types.
+ void emit(in T hdr);
+}
+
+// TODO: remove from this file, convert to built-in
+/// Check a predicate @check in the parser; if the predicate is true do nothing,
+/// otherwise set the parser error to @toSignal, and transition to the `reject` state.
+extern void verify(in bool check, in error toSignal);
+
+/// Built-in action that does nothing.
+action NoAction() {}
+
+/// Standard match kinds for table key fields.
+/// Some architectures may not support all these match kinds.
+/// Architectures can declare additional match kinds.
+match_kind {
+ /// Match bits exactly.
+ exact,
+ /// Ternary match, using a mask.
+ ternary,
+ /// Longest-prefix match.
+ lpm
+}
+
+#endif /* _CORE_P4_ */
diff --git a/synthesis/whippersnapper/p4includes2/v1model.p4 b/synthesis/whippersnapper/p4includes2/v1model.p4
new file mode 100644
index 00000000..c10d2d79
--- /dev/null
+++ b/synthesis/whippersnapper/p4includes2/v1model.p4
@@ -0,0 +1,261 @@
+/*
+Copyright 2013-present Barefoot Networks, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+/* P4-16 declaration of the P4 v1.0 switch model */
+
+#ifndef _V1_MODEL_P4_
+#define _V1_MODEL_P4_
+
+#include "core.p4"
+
+match_kind {
+ range,
+ // Used for implementing dynamic_action_selection
+ selector
+}
+
+// Are these correct?
+@metadata @name("standard_metadata")
+struct standard_metadata_t {
+ bit<9> ingress_port;
+ bit<9> egress_spec;
+ bit<9> egress_port;
+ bit<32> clone_spec;
+ bit<32> instance_type;
+ // The drop and recirculate_port fields are not used at all by the
+ // behavioral-model simple_switch software switch as of September
+ // 2018, and perhaps never was. They may be considered
+ // deprecated, at least for that P4 target device. simple_switch
+ // uses the value of the egress_spec field to determine whether a
+ // packet is dropped or not, and it is recommended to use the
+ // P4_14 drop() primitive action, or the P4_16 + v1model
+ // mark_to_drop() primitive action, to cause that field to be
+ // changed so the packet will be dropped.
+ bit<1> drop;
+ bit<16> recirculate_port;
+ bit<32> packet_length;
+ //
+ // @alias is used to generate the field_alias section of the BMV2 JSON.
+ // Field alias creates a mapping from the metadata name in P4 program to
+ // the behavioral model's internal metadata name. Here we use it to
+ // expose all metadata supported by simple switch to the user through
+ // standard_metadata_t.
+ //
+ // flattening fields that exist in bmv2-ss
+ // queueing metadata
+ @alias("queueing_metadata.enq_timestamp") bit<32> enq_timestamp;
+ @alias("queueing_metadata.enq_qdepth") bit<19> enq_qdepth;
+ @alias("queueing_metadata.deq_timedelta") bit<32> deq_timedelta;
+ @alias("queueing_metadata.deq_qdepth") bit<19> deq_qdepth;
+ // intrinsic metadata
+ @alias("intrinsic_metadata.ingress_global_timestamp") bit<48> ingress_global_timestamp;
+ @alias("intrinsic_metadata.egress_global_timestamp") bit<48> egress_global_timestamp;
+ @alias("intrinsic_metadata.lf_field_list") bit<32> lf_field_list;
+ @alias("intrinsic_metadata.mcast_grp") bit<16> mcast_grp;
+ @alias("intrinsic_metadata.resubmit_flag") bit<32> resubmit_flag;
+ @alias("intrinsic_metadata.egress_rid") bit<16> egress_rid;
+ /// Indicates that a verify_checksum() method has failed.
+ // 1 if a checksum error was found, otherwise 0.
+ bit<1> checksum_error;
+ @alias("intrinsic_metadata.recirculate_flag") bit<32> recirculate_flag;
+ /// Error produced by parsing
+ error parser_error;
+}
+
+enum CounterType {
+ packets,
+ bytes,
+ packets_and_bytes
+}
+
+enum MeterType {
+ packets,
+ bytes
+}
+
+extern counter {
+ counter(bit<32> size, CounterType type);
+ void count(in bit<32> index);
+}
+
+extern direct_counter {
+ direct_counter(CounterType type);
+ void count();
+}
+
+extern meter {
+ meter(bit<32> size, MeterType type);
+ void execute_meter(in bit<32> index, out T result);
+}
+
+extern direct_meter {
+ direct_meter(MeterType type);
+ void read(out T result);
+}
+
+extern register {
+ register(bit<32> size);
+ void read(out T result, in bit<32> index);
+ void write(in bit<32> index, in T value);
+}
+
+// used as table implementation attribute
+extern action_profile {
+ action_profile(bit<32> size);
+}
+
+// Get a random number in the range lo..hi
+extern void random(out T result, in T lo, in T hi);
+// If the type T is a named struct, the name is used
+// to generate the control-plane API.
+extern void digest(in bit<32> receiver, in T data);
+
+enum HashAlgorithm {
+ crc32,
+ crc32_custom,
+ crc16,
+ crc16_custom,
+ random,
+ identity,
+ csum16,
+ xor16
+}
+
+extern void mark_to_drop();
+extern void hash(out O result, in HashAlgorithm algo, in T base, in D data, in M max);
+
+extern action_selector {
+ action_selector(HashAlgorithm algorithm, bit<32> size, bit<32> outputWidth);
+}
+
+enum CloneType {
+ I2E,
+ E2E
+}
+
+@deprecated("Please use verify_checksum/update_checksum instead.")
+extern Checksum16 {
+ Checksum16();
+ bit<16> get<D>(in D data);
+}
+
+/**
+Verifies the checksum of the supplied data.
+If this method detects that a checksum of the data is not correct it
+sets the standard_metadata checksum_error bit.
+@param T Must be a tuple type where all the fields are bit-fields or varbits.
+ The total dynamic length of the fields is a multiple of the output size.
+@param O Checksum type; must be bit type.
+@param condition If 'false' the verification always succeeds.
+@param data Data whose checksum is verified.
+@param checksum Expected checksum of the data; note that it must be a left-value.
+@param algo Algorithm to use for checksum (not all algorithms may be supported).
+ Must be a compile-time constant.
+*/
+extern void verify_checksum<T, O>(in bool condition, in T data, inout O checksum, HashAlgorithm algo);
+/**
+Computes the checksum of the supplied data.
+@param T Must be a tuple type where all the fields are bit-fields or varbits.
+ The total dynamic length of the fields is a multiple of the output size.
+@param O Output type; must be bit type.
+@param condition If 'false' the checksum is not changed
+@param data Data whose checksum is computed.
+@param checksum Checksum of the data.
+@param algo Algorithm to use for checksum (not all algorithms may be supported).
+ Must be a compile-time constant.
+*/
+extern void update_checksum<T, O>(in bool condition, in T data, inout O checksum, HashAlgorithm algo);
+
+/**
+Verifies the checksum of the supplied data including the payload.
+The payload is defined as "all bytes of the packet which were not parsed by the parser".
+If this method detects that a checksum of the data is not correct it
+sets the standard_metadata checksum_error bit.
+@param T Must be a tuple type where all the fields are bit-fields or varbits.
+ The total dynamic length of the fields is a multiple of the output size.
+@param O Checksum type; must be bit type.
+@param condition If 'false' the verification always succeeds.
+@param data Data whose checksum is verified.
+@param checksum Expected checksum of the data; note that it must be a left-value.
+@param algo Algorithm to use for checksum (not all algorithms may be supported).
+ Must be a compile-time constant.
+*/
+extern void verify_checksum_with_payload<T, O>(in bool condition, in T data, inout O checksum, HashAlgorithm algo);
+/**
+Computes the checksum of the supplied data including the payload.
+The payload is defined as "all bytes of the packet which were not parsed by the parser".
+@param T Must be a tuple type where all the fields are bit-fields or varbits.
+ The total dynamic length of the fields is a multiple of the output size.
+@param O Output type; must be bit type.
+@param condition If 'false' the checksum is not changed
+@param data Data whose checksum is computed.
+@param checksum Checksum of the data.
+@param algo Algorithm to use for checksum (not all algorithms may be supported).
+ Must be a compile-time constant.
+*/
+extern void update_checksum_with_payload<T, O>(in bool condition, in T data, inout O checksum, HashAlgorithm algo);
+
+extern void resubmit<T>(in T data);
+extern void recirculate<T>(in T data);
+extern void clone(in CloneType type, in bit<32> session);
+extern void clone3<T>(in CloneType type, in bit<32> session, in T data);
+
+extern void truncate(in bit<32> length);
+
+// The name 'standard_metadata' is reserved
+
+// Architecture.
+// M should be a struct of structs
+// H should be a struct of headers or stacks
+
+parser Parser<H, M>(packet_in b,
+ out H parsedHdr,
+ inout M meta,
+ inout standard_metadata_t standard_metadata);
+
+/* The only legal statements in the implementation of the
+VerifyChecksum control are: block statements, calls to the
+verify_checksum and verify_checksum_with_payload methods,
+and return statements. */
+control VerifyChecksum<H, M>(inout H hdr,
+ inout M meta);
+@pipeline
+control Ingress<H, M>(inout H hdr,
+ inout M meta,
+ inout standard_metadata_t standard_metadata);
+@pipeline
+control Egress<H, M>(inout H hdr,
+ inout M meta,
+ inout standard_metadata_t standard_metadata);
+
+/* The only legal statements in the implementation of the
+ComputeChecksum control are: block statements, calls to the
+update_checksum and update_checksum_with_payload methods,
+and return statements. */
+control ComputeChecksum<H, M>(inout H hdr,
+ inout M meta);
+@deparser
+control Deparser<H>(packet_out b, in H hdr);
+
+package V1Switch<H, M>(Parser<H, M> p,
+ VerifyChecksum<H, M> vr,
+ Ingress<H, M> ig,
+ Egress<H, M> eg,
+ ComputeChecksum<H, M> ck,
+ Deparser<H> dep
+ );
+
+#endif /* _V1_MODEL_P4_ */
diff --git a/synthesis/whippersnapper/plotter.py b/synthesis/whippersnapper/plotter.py
new file mode 100644
index 00000000..3e04acee
--- /dev/null
+++ b/synthesis/whippersnapper/plotter.py
@@ -0,0 +1,143 @@
+# -*- coding: utf-8 -*-
+import csv
+import locale
+import time
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+from cycler import cycler
+from collections import defaultdict
+import numpy as np
+import math
+
+locale.setlocale(locale.LC_ALL, 'en_US.UTF8')
+from IPython.display import set_matplotlib_formats
+set_matplotlib_formats('pdf')
+
+onecolsize = (4, 1.5) # Tweak based on figure's appearance in the paper
+seaborn_colorblind = cycler('color', ['#0072B2', '#D55E00', '#009E73', '#CC79A7', '#F0E442', '#56B4E9'])
+seaborn_muted = cycler('color', ['#4878CF', '#6ACC65', '#D65F5F', '#B47CC7', '#C4AD66', '#77BEDB'])
+
+def setrcparams():
+ # setup matplotlib rcparams
+ plt.style.use(['seaborn-paper', 'seaborn-colorblind'])
+ # color cyclers
+ seaborn_colorblind = cycler('color', ['#0072B2', '#D55E00', '#009E73', '#CC79A7', '#F0E442', '#56B4E9'])
+ seaborn_muted = cycler('color', ['#4878CF', '#6ACC65', '#D65F5F', '#B47CC7', '#C4AD66', '#77BEDB'])
+
+ plt.rcParams['axes.prop_cycle'] = seaborn_colorblind + cycler(linestyle=['-', '--', '-.', '--', '-.','-'])
+
+ plt.rcParams['axes.axisbelow'] = True
+ plt.rcParams['axes.edgecolor'] = 'lightgray'
+ plt.rcParams['axes.facecolor'] = 'white'
+ plt.rcParams['axes.spines.left'] = False
+ plt.rcParams['axes.spines.bottom'] = False
+ plt.rcParams["axes.spines.right"] = False
+ plt.rcParams["axes.spines.top"] = False
+ plt.rcParams['axes.grid'] = True
+ plt.rcParams['axes.linewidth'] = 0.1
+
+
+ plt.rcParams['grid.alpha'] = 0.4
+ plt.rcParams['grid.color'] = 'gray'
+ plt.rcParams['grid.linestyle'] = ':'
+ plt.rcParams['grid.linewidth'] = 1.0
+
+ plt.rcParams['hatch.linewidth'] = 1.0
+
+ plt.rcParams['xtick.bottom'] = False
+ plt.rcParams['ytick.left'] = False
+ plt.rcParams['xtick.direction'] = 'in'
+ plt.rcParams['ytick.direction'] = 'in'
+
+
+ plt.rcParams['legend.edgecolor'] = 'none'
+ plt.rcParams['legend.framealpha'] = 0.4
+ plt.rcParams["legend.columnspacing"] = 0.4
+ plt.rcParams["legend.handletextpad"] = 0.2
+
+ plt.rcParams['savefig.bbox'] = 'tight'
+ plt.rcParams['savefig.format'] = 'pdf'
+ plt.rcParams['savefig.pad_inches'] = 0
+
+ plt.rcParams['figure.figsize'] = onecolsize
+
+ plt.rcParams['pdf.fonttype'] = 42
+ plt.rcParams['ps.fonttype'] = 42
+ plt.rcParams['pdf.compression'] = 9
+ # plt.rcParams['text.usetex'] = True
+ # plt.rcParams['pgf.texsystem']= "pdflatex"
+ # plt.rcParams["font.sans-serif"] = "Linux Libertine"
+ # plt.rcParams["text.latex.preamble"] = "\usepackage{libertine},\usepackage[libertine]{newtxmath},\usepackage[T1]{fontenc}"
+ # plt.rcParams["pgf.preamble"] = "\usepackage{libertine},\usepackage[libertine]{newtxmath},\usepackage[T1]{fontenc}"
+ # plt.rcParams["font.family"] = "sans-serif"
+
+# Load the base config
+setrcparams()
+
+# Override some of the common rcParams as needed
+plt.rcParams['axes.spines.left'] = True
+plt.rcParams['axes.spines.bottom'] = True
+plt.rcParams["legend.columnspacing"] = 0.8
+plt.rcParams["legend.handletextpad"] = 0.4
+
+# Example plot
+
+def read_data():
+ # Parse and extract data
+ data0 = dict()
+ data1 = dict()
+ for key in range(1, 100):
+ data0[key] = math.sqrt(key)
+ data1[key] = math.log(key)
+
+ return (data0, data1)
+
+def plot_series(data0, data1=None, data2=None, data3=None, name = "plot", xlabel = "LABELME", ylabel = "LABELME"):
+ print("adding data0")
+ fig = plt.figure(figsize=(3.7,1.0)) # Override fig size based on trial
+ xs = sorted(data0.keys())
+ ys = [data0[x] for x in xs]
+ plt.plot(xs, ys, c='#1B68DF', label='Cold cache', ls='-', zorder=2)
+ # plt.axhline(y=5, c='#4285F4', ls=':', label='y=5', zorder=1)
+
+ if data1:
+ print("adding data1")
+ xs = sorted(data1.keys())
+ ys = [data1[x] for x in xs]
+ plt.plot(xs, ys, c='#E33613', label='Hot cache', ls='-', zorder=3)
+ # plt.text(75, 1, "text", size="smaller")
+
+ if data2:
+ print("adding data2")
+ xs = sorted(data2.keys())
+ ys = [data2[x] for x in xs]
+ plt.plot(xs, ys, c = '#098837', label='No cache', ls='-', zorder=3)
+ # plt.text(75, 1, "text", size="smaller")
+
+ if data3:
+ print("adding data3")
+ xs = sorted(data3.keys())
+ ys = [data3[x] for x in xs]
+ plt.plot(xs, ys, label='E', ls='-', zorder=3)
+ plt.text(75, 1, "text", size="smaller")
+
+ # plt.annotate("annotation", xy=(25, 2.5), xytext=(40, 1), arrowprops=dict(arrowstyle="->"))
+
+ # plt.xlim(0, 10000)
+ # plt.ylim(0, 100)
+ print("labelling")
+ plt.xlabel(xlabel, labelpad=0)
+ plt.ylabel(ylabel)
+ plt.legend(loc='upper left', ncol=2, bbox_to_anchor=(0,1.1))
+ # plt.xticks(np.arange(0, 101, 25))
+ # plt.yticks(np.arange(0, 11, 2))
+ print("saving")
+ fig.savefig("%s.pdf" % name)
+ print("closing")
+ plt.close(fig)
+ print("done")
+
+
+def plot():
+ (data0, data1) = read_data()
+ plot_series(data0, data1)