Change hw6 to an unsolved version.
Signed-off-by: jmug <u.g.a.mariano@gmail.com>
This commit is contained in: parent 0c04936ccf, commit ee01a8f5b2
186 changed files with 9605 additions and 4019 deletions
2 hw6/.ocamlformat Normal file
@ -0,0 +1,2 @@
profile = janestreet
version = 0.26.1
@ -1,10 +1,2 @@
#use "topfind";;

#directory "_build"
#directory "_build/util"
#directory "_build/x86"
#directory "_build/grading"
#directory "_build/ll"

#load_rec "x86.cmo"
#load_rec "ll.cmo"
#cd "_build";;
#use_output "dune top";;
22 hw6/Makefile
@ -1,6 +1,6 @@
SUBMIT := $(shell cat submit_zip_contents.txt)
HWNAME := hw6
TIMESTAMP := $(shell /bin/date "+%Y-%m-%d-%H-%M-%S")
TIMESTAMP := $(shell /bin/date "+%Y-%m-%d-%H:%M:%S")
ZIPNAME := $(HWNAME)-submit-$(TIMESTAMP).zip

.PHONY: all oatc test clean zip
@ -19,13 +19,29 @@ test: oatc
./oatc --test

utop:
utop
dune utop

zip: $(SUBMIT)
zip '$(ZIPNAME)' $(SUBMIT)

clean:
dune clean
rm -rf oatc ocamlbin bin/main.exe printanalysis bin/printanalysis.exe
rm -rf oatc ocamlbin bin/main.exe printanalysis bin/printanalysis.exe a*.out

# make experiments FILE=foo.oat
# will create four executables, one for each liveness/register-allocation configuration (baseline, greedy, better, and clang)
oat_experiments: oatc
echo "Generating executables for $(FILE) with optimization $(OPT)"
./oatc -o a_baseline$(OPT).out --liveness trivial --regalloc none $(OPT) $(FILE) bin/runtime.c
./oatc -o a_greedy$(OPT).out --liveness dataflow --regalloc greedy $(OPT) $(FILE) bin/runtime.c
./oatc -o a_better$(OPT).out --liveness dataflow --regalloc better $(OPT) $(FILE) bin/runtime.c
./oatc -o a_clang$(OPT).out --clang $(FILE) $(OPT) bin/runtime.c

ll_experiments: oatc
echo "Generating executables for $(FILE) with optimization $(OPT)"
./oatc -o a_baseline$(OPT).out --liveness trivial --regalloc none $(OPT) $(FILE)
./oatc -o a_greedy$(OPT).out --liveness dataflow --regalloc greedy $(OPT) $(FILE)
./oatc -o a_better$(OPT).out --liveness dataflow --regalloc better $(OPT) $(FILE)
./oatc -o a_clang$(OPT).out --clang $(FILE) $(OPT)

#
BIN hw6/PerformanceExperiments.xlsx Normal file
Binary file not shown.
@ -1,23 +1,25 @@
# HW6: Dataflow Analysis and Optimization

The [instructions for this homework](doc/hw6-opt.html) are in the `doc` directory.

Quick Start:

1. open the folder in VSCode
2. start an OCaml sandbox terminal
3. run `make test` from the command line
4. open `bin/solver.ml`
1. clone this repository using `git clone`
2. open the folder in VSCode
3. start an OCaml sandbox terminal
4. run `make test` from the command line
5. open `bin/solver.ml`

See the general toolchain and project instructions on the course web site. The
course web pages have a link to the html version of the homework instructions.


Using ``oatc``
--------------

``oatc`` acts like the clang compiler. Given several .oat, .ll, .c, and .o
files, it will compile the .oat and .ll files to .s files (using the CS153
files, it will compile the .oat and .ll files to .s files (using the CIS 341
frontend and backend) and then combine the results with the .c and .o files to
produce an executable named a.out. You can also compile the .ll files using
clang instead of the CS153 backend, which can be useful for testing
clang instead of the CIS 341 backend, which can be useful for testing
purposes.


@ -25,7 +27,7 @@ purposes.

./oatc --test

* To compile oat files using the CS153 backend:
* To compile oat files using the 341 backend:

./oatc path/to/foo.oat

@ -55,8 +57,8 @@ purposes.
| --print-ll | echoes the ll program to the terminal |
| --print-x86 | echoes the resulting .s file to the terminal |
| --interpret-ll | runs the ll file through the reference interpreter and outputs the results to the console |
| --execute-x86 | runs the resulting a.out file natively (applies to either the 153 backend or clang-compiled code) |
| --clang | compiles to assembly using clang, not the CS153 backend |
| --execute-x86 | runs the resulting a.out file natively (applies to either the 341 backend or clang-compiled code) |
| --clang | compiles to assembly using clang, not the 341 backend |
| -v | generates verbose output, showing which commands are used for linking, etc. |
| -op ``<dirname>`` | change the output path [DEFAULT=output] |
| -o | change the generated executable's name [DEFAULT=a.out] |

@ -67,7 +69,7 @@ purposes.

* Example uses:

Run the test case hw4programs/fact.oat using the CS153 backend:
Run the test case hw4programs/fact.oat using the 341 backend:

./oatc --execute-x86 hw4programs/fact.oat bin/runtime.c
--------------------------------------------------------------- Executing: a.out
BIN hw6/a.out
Binary file not shown.
@ -33,36 +33,8 @@ type fact = SymPtr.t UidM.t
|
|||
- Other instructions do not define pointers
|
||||
|
||||
*)
|
||||
|
||||
let insn_flow ((u,i):uid * insn) (d:fact) : fact =
|
||||
(* define values *)
|
||||
let unique : SymPtr.t = Unique in
|
||||
let may_alias : SymPtr.t = MayAlias in
|
||||
let undef_alias : SymPtr.t = UndefAlias in
|
||||
|
||||
match i with
|
||||
| Alloca _ -> UidM.add u unique d
|
||||
| Load (ty, _) ->
|
||||
let is_ty_ptr_namedt = match ty with | Ptr t ->
|
||||
let r = begin match t with | Ptr t -> true | _ -> false end in r | _ -> false in
|
||||
if is_ty_ptr_namedt == true then
|
||||
UidM.add u may_alias d
|
||||
else d
|
||||
| Store (_, op, _) ->
|
||||
(* update ptr arg *)
|
||||
let is_op_uid = match op with | Const _ -> true | _ -> false in
|
||||
if is_op_uid == true then d else
|
||||
let op_uid = match op with | Id i -> i | Gid i -> i | _ -> failwith "Store error should be caught above" in
|
||||
if UidM.mem op_uid d == false then d else
|
||||
UidM.update (fun _ -> may_alias) op_uid d
|
||||
| Call (_, op, _) | Bitcast (_, op, _) | Gep (_, op, _) ->
|
||||
(* update ptr arg *)
|
||||
let op_uid = match op with | Id i -> i | Gid i -> i | _ -> failwith "Call is supposed to be a uid" in
|
||||
if UidM.mem op_uid d == true then
|
||||
(* update ptr returned *)
|
||||
let d1 = UidM.update (fun _ -> may_alias) op_uid d in UidM.add u may_alias d1
|
||||
else UidM.add u may_alias d
|
||||
| Binop _ | Icmp _ -> d
|
||||
failwith "Alias.insn_flow unimplemented"
|
||||
|
||||
|
||||
(* The flow function across terminators is trivial: they never change alias info *)
|
||||
|
|
@ -96,33 +68,8 @@ module Fact =
|
|||
It may be useful to define a helper function that knows how to take the
|
||||
meet of two SymPtr.t facts.
|
||||
*)
|
||||
let lattice (m1:SymPtr.t) (m2:SymPtr.t) : SymPtr.t =
|
||||
match m1, m2 with
|
||||
| MayAlias, _ -> MayAlias
|
||||
| _, MayAlias -> MayAlias
|
||||
| Unique, Unique -> Unique
|
||||
| Unique, UndefAlias -> Unique
|
||||
| UndefAlias, Unique -> Unique
|
||||
| UndefAlias, UndefAlias -> UndefAlias
|
||||
|
||||
let combine (ds : fact list) : fact =
|
||||
(* used LLM to understand how the UidM.t merge function could be useful through made-up examples, and what the inputs 'a option meant *)
|
||||
|
||||
(* PART 2: look at the facts, if we have non-None facts, we can merge them based on the lattice *)
|
||||
let look_at_facts _ a_opt b_opt =
|
||||
match a_opt, b_opt with
|
||||
| Some a, Some b -> Some (lattice a b)
|
||||
| Some a, None -> Some a
|
||||
| None, Some b -> Some b
|
||||
| _, _ -> failwith "look_at_facts: incorrect opts" in
|
||||
|
||||
(* PART 1: create combine function that looks at the facts *)
|
||||
let rec combine_function (fl : fact list) (acc : SymPtr.t UidM.t) : SymPtr.t UidM.t =
|
||||
match fl with
|
||||
| [] -> acc
|
||||
| hd :: tl -> let result = UidM.merge look_at_facts acc hd in combine_function tl result in
|
||||
|
||||
combine_function ds UidM.empty
|
||||
let combine (ds:fact list) : fact =
|
||||
failwith "Alias.Fact.combine not implemented"
|
||||
end
|
||||
|
||||
(* instantiate the general framework ---------------------------------------- *)
|
||||
|
|
|
|||
|
|
@ -4,6 +4,9 @@ open Llutil
|
|||
open X86
|
||||
module Platform = Util.Platform
|
||||
|
||||
(* Backend "Layout" compilation strategy ------------------------------------ *)
|
||||
|
||||
|
||||
(* allocated llvmlite function bodies --------------------------------------- *)
|
||||
|
||||
module Alloc = struct
|
||||
|
|
@ -250,7 +253,7 @@ let emit_mov (src:X86.operand) (dst:X86.operand) : x86stream =
|
|||
|
||||
ol ol' 2 2 3 2
|
||||
x <- y x <- y w <- x MOV x, w MOV x, w MOV x, w
|
||||
y <- y ==> ==> ------ ==> -------- ==> PUSH y ==> PUSH y
|
||||
z <- z ==> ==> ------ ==> -------- ==> PUSH y ==> PUSH y
|
||||
w <- x w <- x x <- y x <- y y <- z MOV z, y
|
||||
y <- z y <- z y <- z y <- z POP x POP x
|
||||
|
||||
|
|
|
|||
|
|
@ -87,7 +87,7 @@ let add_block (l:lbl) (block:block) (g:cfg) : cfg =
|
|||
module type AS_GRAPH_PARAMS =
|
||||
sig
|
||||
(* The type of dataflow facts and the combine operator. This just implements
|
||||
the FACT interface from cfg.ml *)
|
||||
the FACT interface from solver.ml *)
|
||||
type t
|
||||
val combine : t list -> t
|
||||
val to_string : t -> string
|
||||
|
|
@ -154,8 +154,8 @@ module AsGraph (D:AS_GRAPH_PARAMS) :
|
|||
|
||||
This choice means that we won't use the "exploded" control-flow graph, where
|
||||
each instruction is considered a node. The reason for this decision is two-fold:
|
||||
One: the edges of the cfg are defined in terms of block labesl.
|
||||
Two: we can speed up the dataflow analysis by propagating information across and
|
||||
One: the edges of the cfg are defined in terms of block labels.
|
||||
Two: we can speed up the dataflow analysis by propagating information across an
|
||||
entire block.
|
||||
The cost of this decision is that we have to re-calculate the flow information
|
||||
for individual instructions when we need it.
|
||||
|
|
|
|||
|
|
@ -30,73 +30,15 @@ type fact = SymConst.t UidM.t
|
|||
|
||||
|
||||
(* flow function across Ll instructions ------------------------------------- *)
|
||||
(* - Uid of a binop or icmp with const arguments is constant-out
|
||||
(* - Uid of a binop or icmp with const arguments is constant-out with
|
||||
result that is computed statically (see the Int64 module)
|
||||
- Uid of a binop or icmp with an UndefConst argument is UndefConst-out
|
||||
- Uid of a binop or icmp with an NonConst argument is NonConst-out
|
||||
- Uid of stores and void calls are UndefConst-out
|
||||
- Uid of all other instructions are NonConst-out
|
||||
*)
|
||||
|
||||
let compute_const_bop (bop:bop) (i1:int64) (i2:int64) : int64=
|
||||
match bop with
|
||||
| Add -> Int64.add i1 i2
|
||||
| Sub -> Int64.sub i1 i2
|
||||
| Mul -> Int64.mul i1 i2
|
||||
| And -> Int64.logand i1 i2
|
||||
| Or -> Int64.logor i1 i2
|
||||
| Xor -> Int64.logxor i1 i2
|
||||
| Shl -> Int64.shift_left i1 (Int64.to_int i2)
|
||||
| Ashr -> Int64.shift_right i1 (Int64.to_int i2)
|
||||
| Lshr -> Int64.shift_right_logical i1 (Int64.to_int i2)
|
||||
|
||||
let compute_const_cnd (cnd:cnd) (i1:int64) (i2:int64) =
|
||||
let result = match cnd with
|
||||
| Eq -> i1 == i2
|
||||
| Ne -> i1 != i2
|
||||
| Slt -> i1 < i2
|
||||
| Sle -> i1 <= i2
|
||||
| Sgt -> i1 > i2
|
||||
| Sge -> i1 >= i2
|
||||
in if result then 1L else 0L
|
||||
|
||||
let meet_facts (c1:SymConst.t) (c2:SymConst.t) (bop:bop option) (cnd:cnd option): SymConst.t =
|
||||
(* NonConst <= Const c <= UndefConst *)
|
||||
match c1, c2 with
|
||||
| NonConst, _ -> NonConst
|
||||
| _, NonConst -> NonConst
|
||||
| Const a, Const b ->
|
||||
begin match bop, cnd with
|
||||
| Some c, _ -> Const (compute_const_bop c a b)
|
||||
| _, Some c -> Const (compute_const_cnd c a b)
|
||||
| _ -> failwith "meet_facts self-error: did not supply a bop or a cnd" end
|
||||
| Const a, UndefConst -> Const a
|
||||
| UndefConst, Const b -> Const b
|
||||
| UndefConst, UndefConst -> UndefConst
|
||||
|
||||
let op_symconst (op:operand) (i:insn) (d:fact): SymConst.t =
|
||||
match op with
|
||||
| Const c -> Const c
|
||||
| Null -> NonConst
|
||||
| Id i | Gid i -> begin match UidM.find_opt i d with
|
||||
| Some c -> c | None -> UndefConst end
|
||||
|
||||
let insn_flow (u,i:uid * insn) (d:fact) : fact =
|
||||
let nonconst : SymConst.t = NonConst in
|
||||
let undefconst : SymConst.t = UndefConst in
|
||||
|
||||
match i with
|
||||
| Binop (bop, _, op1, op2) ->
|
||||
let op_symconst1 = op_symconst op1 i d in
|
||||
let op_symconst2 = op_symconst op2 i d in
|
||||
let symconst = meet_facts op_symconst1 op_symconst2 (Some bop) None in
|
||||
UidM.add u symconst d
|
||||
| Icmp (cnd, _, op1, op2) ->
|
||||
let op_symconst1 = op_symconst op1 i d in
|
||||
let op_symconst2 = op_symconst op2 i d in
|
||||
let symconst = meet_facts op_symconst1 op_symconst2 None (Some cnd) in
|
||||
UidM.add u symconst d
|
||||
| Store (_, _, _) | Call (Void, _, _) -> UidM.add u undefconst d
|
||||
| _ -> UidM.add u nonconst d
|
||||
failwith "Constprop.insn_flow unimplemented"
|
||||
|
||||
(* The flow function across terminators is trivial: they never change const info *)
|
||||
let terminator_flow (t:terminator) (d:fact) : fact = d
|
||||
|
|
@ -119,25 +61,10 @@ module Fact =
|
|||
let to_string : fact -> string =
|
||||
UidM.to_string (fun _ v -> SymConst.to_string v)
|
||||
|
||||
|
||||
(* The constprop analysis should take the meet over predecessors to compute the
|
||||
flow into a node. You may find the UidM.merge function useful *)
|
||||
|
||||
let combine (ds:fact list) : fact =
|
||||
(* merge function to call meet facts *)
|
||||
let merge_function _ a_opt b_opt =
|
||||
match a_opt, b_opt with
|
||||
| Some a, Some b -> if a == b then Some b else None
|
||||
| Some a, None -> Some a
|
||||
| None, Some b -> Some b
|
||||
| _, _ -> failwith "" in
|
||||
|
||||
(* combine function to call merge function *)
|
||||
let rec combine_function (fl : fact list) (acc : SymConst.t UidM.t) : SymConst.t UidM.t =
|
||||
match fl with
|
||||
| [] -> acc
|
||||
| hd :: tl -> let result = UidM.merge merge_function acc hd in combine_function tl result in
|
||||
combine_function ds UidM.empty
|
||||
failwith "Constprop.Fact.combine unimplemented"
|
||||
end
|
||||
|
||||
(* instantiate the general framework ---------------------------------------- *)
|
||||
|
|
@ -166,54 +93,11 @@ let analyze (g:Cfg.t) : Graph.t =
|
|||
let run (cg:Graph.t) (cfg:Cfg.t) : Cfg.t =
|
||||
let open SymConst in
|
||||
|
||||
|
||||
let cp_block (l:Ll.lbl) (cfg:Cfg.t) : Cfg.t =
|
||||
let b = Cfg.block cfg l in
|
||||
let cb = Graph.uid_out cg l in
|
||||
|
||||
let rec check_operand (op:operand) (insn:insn) =
|
||||
let op1_new = match op with
|
||||
| Id i | Gid i ->
|
||||
let fact = cb i in
|
||||
let symconst : SymConst.t = op_symconst op insn fact in
|
||||
let r = begin match symconst with
|
||||
| Const c -> Some c
|
||||
| _ -> None end in r
|
||||
| _ -> None in op1_new in
|
||||
|
||||
let rec iterate_instructions (uid_insn_list : (uid * insn) list) (new_uid_insn_list : (uid * insn) list) =
|
||||
match uid_insn_list with
|
||||
| [] -> new_uid_insn_list
|
||||
| hd :: tl ->
|
||||
let uid, insn = hd in
|
||||
(* we want to see if the value is a var = constant *)
|
||||
(* if this is the case, we'll want to check every other instruction and "propogate it" in there *)
|
||||
let new_uid_insn = match insn with
|
||||
| Binop (bop, ty, op1, op2) ->
|
||||
let check_op1 = check_operand op1 insn in
|
||||
let check_op2 = check_operand op2 insn in
|
||||
let new_op1 : operand = match check_op1 with | Some c -> Const c | _ -> op1 in
|
||||
let new_op2 : operand = match check_op2 with | Some c -> Const c | _ -> op2 in
|
||||
(uid, Binop (bop, ty, new_op1, new_op2))
|
||||
| _ -> failwith "nye"
|
||||
in iterate_instructions tl (new_uid_insn_list @ [new_uid_insn]) in
|
||||
|
||||
(* WE ALSO NEED TO DO THE TERMINATOR INSTRUCTION, SAME IDEA :) *)
|
||||
|
||||
|
||||
let new_uid_insns = iterate_instructions b.insns [] in
|
||||
let new_block = { insns = new_uid_insns; term = b.term } in
|
||||
|
||||
let remove_old_block = LblM.remove l cfg.blocks in
|
||||
let new_block_same_lbl = LblM.add l new_block cfg.blocks in
|
||||
|
||||
let new_cfg : Cfg.cfg = {
|
||||
blocks = new_block_same_lbl;
|
||||
preds = cfg.preds;
|
||||
ret_ty = cfg.ret_ty;
|
||||
args = cfg.args;
|
||||
} in
|
||||
|
||||
new_cfg
|
||||
failwith "Constprop.cp_block unimplemented"
|
||||
in
|
||||
|
||||
LblS.fold cp_block (Cfg.nodes cfg) cfg
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ open Ll
|
|||
open Datastructures
|
||||
|
||||
|
||||
(* expose a top-level analysis operation ------------------------------------ *)
|
||||
(* dce_block ---------------------------------------------------------------- *)
|
||||
(* TASK: This function should optimize a block by removing dead instructions
|
||||
- lb: a function from uids to the live-OUT set at the
|
||||
corresponding program point
|
||||
|
|
@ -21,28 +21,12 @@ open Datastructures
|
|||
|
||||
Hint: Consider using List.filter
|
||||
*)
|
||||
let dce_block (lb:uid -> Liveness.Fact.t)
|
||||
(ab:uid -> Alias.fact)
|
||||
(b:Ll.block) : Ll.block =
|
||||
failwith "Dce.dce_block unimplemented"
|
||||
|
||||
|
||||
let dce_block (lb:uid -> Liveness.Fact.t) (ab:uid -> Alias.fact) (b:Ll.block) : Ll.block =
|
||||
(* check by each instruction *)
|
||||
let is_not_dead (uid : uid) (insn: insn) : bool =
|
||||
match insn with
|
||||
| Call _ -> true
|
||||
| Store (_, _, ptr) ->
|
||||
(* not dead if live or mayalias *)
|
||||
(* if we're storing into a *)
|
||||
let ptr_uid = match ptr with | Id i -> i | Gid i -> i | _ -> failwith "Store must be an id" in
|
||||
let ptr_live = UidS.mem ptr_uid (lb uid) in
|
||||
let ptr_alias = UidM.find_opt ptr_uid (ab uid) in (* <= issue: ab ptr_uid returns "Not_Found"*)
|
||||
let ptr_alias = match ptr_alias with | Some alias -> (alias == MayAlias) | None -> false in
|
||||
ptr_live || ptr_alias
|
||||
|
||||
| _ -> if UidS.mem uid (lb uid) then true else false
|
||||
|
||||
in let result = List.filter (fun (uid, insn) -> is_not_dead uid insn) b.insns in
|
||||
let new_block : Ll.block = {insns = result; term = b.term} in new_block
|
||||
|
||||
|
||||
(* Run DCE on all the blocks of a given control-flow-graph. *)
|
||||
let run (lg:Liveness.Graph.t) (ag:Alias.Graph.t) (cfg:Cfg.t) : Cfg.t =
|
||||
|
||||
LblS.fold (fun l cfg ->
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@
|
|||
util
|
||||
x86
|
||||
ll
|
||||
studenttests
|
||||
gradedtests))
|
||||
|
||||
(executable
|
||||
|
|
|
|||
688
hw6/bin/frontend-break.ml
Normal file
688
hw6/bin/frontend-break.ml
Normal file
|
|
@ -0,0 +1,688 @@
|
|||
open Ll
|
||||
open Llutil
|
||||
open Ast
|
||||
|
||||
(* instruction streams ------------------------------------------------------ *)
|
||||
|
||||
(* As in the last project, we'll be working with a flattened representation
|
||||
of LLVMlite programs to make emitting code easier. This version
|
||||
additionally makes it possible to emit elements that will be gathered up and
|
||||
"hoisted" to specific parts of the constructed CFG
|
||||
- G of gid * Ll.gdecl: allows you to output global definitions in the middle
|
||||
of the instruction stream. You will find this useful for compiling string
|
||||
literals
|
||||
- E of uid * insn: allows you to emit an instruction that will be moved up
|
||||
to the entry block of the current function. This will be useful for
|
||||
compiling local variable declarations
|
||||
*)
|
||||
|
||||
type elt =
|
||||
| L of Ll.lbl (* block labels *)
|
||||
| I of uid * Ll.insn (* instruction *)
|
||||
| T of Ll.terminator (* block terminators *)
|
||||
| G of gid * Ll.gdecl (* hoisted globals (usually strings) *)
|
||||
| E of uid * Ll.insn (* hoisted entry block instructions *)
|
||||
|
||||
type stream = elt list
|
||||
let ( >@ ) x y = y @ x
|
||||
let ( >:: ) x y = y :: x
|
||||
let lift : (uid * insn) list -> stream = List.rev_map (fun (x,i) -> I (x,i))
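As a quick illustration of the combinators just defined (a stand-alone sketch, not part of this file), the stream stays in reverse order, with the most recently emitted element at the head:

(* re-stated locally so the sketch runs on its own *)
let ( >@ ) x y = y @ x
let ( >:: ) x y = y :: x

let () =
  (* cons "b" onto the front, then prepend the list ["c"; "d"] *)
  let s = [ "a" ] >:: "b" >@ [ "c"; "d" ] in
  assert (s = [ "c"; "d"; "b"; "a" ])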
|
||||
|
||||
(* Build a CFG and collection of global variable definitions from a stream *)
|
||||
let cfg_of_stream (code:stream) : Ll.cfg * (Ll.gid * Ll.gdecl) list =
|
||||
let gs, einsns, insns, term_opt, blks = List.fold_left
|
||||
(fun (gs, einsns, insns, term_opt, blks) e ->
|
||||
match e with
|
||||
| L l ->
|
||||
begin match term_opt with
|
||||
| None ->
|
||||
if (List.length insns) = 0 then (gs, einsns, [], None, blks)
|
||||
else failwith @@ Printf.sprintf "build_cfg: block labeled %s has\
|
||||
no terminator" l
|
||||
| Some term ->
|
||||
(gs, einsns, [], None, (l, {insns; term})::blks)
|
||||
end
|
||||
| T t -> (gs, einsns, [], Some (Llutil.Parsing.gensym "tmn", t), blks)
|
||||
| I (uid,insn) -> (gs, einsns, (uid,insn)::insns, term_opt, blks)
|
||||
| G (gid,gdecl) -> ((gid,gdecl)::gs, einsns, insns, term_opt, blks)
|
||||
| E (uid,i) -> (gs, (uid, i)::einsns, insns, term_opt, blks)
|
||||
) ([], [], [], None, []) code
|
||||
in
|
||||
match term_opt with
|
||||
| None -> failwith "build_cfg: entry block has no terminator"
|
||||
| Some term ->
|
||||
let insns = einsns @ insns in
|
||||
({insns; term}, blks), gs
|
||||
|
||||
|
||||
(* compilation contexts ----------------------------------------------------- *)
|
||||
|
||||
(* To compile OAT variables, we maintain a mapping of source identifiers to the
|
||||
corresponding LLVMlite operands. Bindings are added for global OAT variables
|
||||
and local variables that are in scope. *)
|
||||
|
||||
module Ctxt = struct
|
||||
|
||||
type t = (Ast.id * (Ll.ty * Ll.operand)) list
|
||||
let empty = []
|
||||
|
||||
(* Add a binding to the context *)
|
||||
let add (c:t) (id:id) (bnd:Ll.ty * Ll.operand) : t = (id,bnd)::c
|
||||
|
||||
(* Lookup a binding in the context *)
|
||||
let lookup (id:Ast.id) (c:t) : Ll.ty * Ll.operand =
|
||||
List.assoc id c
|
||||
|
||||
end
|
||||
|
||||
(* Mapping of identifiers representing struct definitions to
|
||||
* the corresponding name-to-name-to-type map.
|
||||
|
||||
Note: You will need to use these operations when compiling structures.
|
||||
*)
|
||||
module TypeCtxt = struct
|
||||
type t = (Ast.id * Ast.field list) list
|
||||
let empty = []
|
||||
|
||||
let add c id bnd = (id, bnd) :: c
|
||||
let lookup id c = List.assoc id c
|
||||
let lookup_field st_name f_name (c : t) =
|
||||
let rec lookup_field_aux f_name l =
|
||||
match l with
|
||||
| [] -> failwith "TypeCtxt.lookup_field: Not_found"
|
||||
| h :: t -> if h.fieldName = f_name then h.ftyp else lookup_field_aux f_name t in
|
||||
lookup_field_aux f_name (List.assoc st_name c)
|
||||
|
||||
let rec index_of f l i =
|
||||
match l with
|
||||
| [] -> None
|
||||
| h :: t -> if h.fieldName = f then Some i else index_of f t (i + 1)
|
||||
|
||||
(* Return the index of a field in the struct. *)
|
||||
let index_of_field_opt st f (c : t) =
|
||||
index_of f (List.assoc st c) 0
|
||||
|
||||
let index_of_field st f c =
|
||||
match index_of_field_opt st f c with
|
||||
| None -> failwith "index_of_field: Not found"
|
||||
| Some x -> x
|
||||
|
||||
(* Return a pair of base type and index into struct *)
|
||||
let rec lookup_field_name f (c : t) =
|
||||
match c with
|
||||
| [] -> failwith "lookup_field_name: Not found"
|
||||
| (id, field) :: t ->
|
||||
match index_of f field 0 with
|
||||
| None -> lookup_field_name f t
|
||||
| Some x -> List.(nth field x).ftyp, Int64.of_int x
|
||||
end
|
||||
|
||||
(* compiling OAT types ------------------------------------------------------ *)
|
||||
|
||||
(* The mapping of source types onto LLVMlite is straightforward. Booleans and ints
|
||||
are represented as the corresponding integer types. OAT strings are
|
||||
pointers to bytes (I8). Arrays are the most interesting type: they are
|
||||
represented as pointers to structs where the first component is the number
|
||||
of elements in the following array.
|
||||
|
||||
NOTE: structure types are named, so they compile to their named form
|
||||
*)
|
||||
|
||||
let rec cmp_ty (ct : TypeCtxt.t) : Ast.ty -> Ll.ty = function
|
||||
| Ast.TBool -> I1
|
||||
| Ast.TInt -> I64
|
||||
| Ast.TRef r -> Ptr (cmp_rty ct r)
|
||||
| Ast.TNullRef r -> Ptr (cmp_rty ct r)
|
||||
|
||||
|
||||
and cmp_ret_ty ct : Ast.ret_ty -> Ll.ty = function
|
||||
| Ast.RetVoid -> Void
|
||||
| Ast.RetVal t -> cmp_ty ct t
|
||||
|
||||
and cmp_fty ct (ts, r) : Ll.fty =
|
||||
List.map (cmp_ty ct) ts, cmp_ret_ty ct r
|
||||
|
||||
and cmp_rty ct : Ast.rty -> Ll.ty = function
|
||||
| Ast.RString -> I8
|
||||
| Ast.RArray u -> Struct [I64; Array(0, cmp_ty ct u)]
|
||||
| Ast.RStruct r -> Namedt r
|
||||
| Ast.RFun (ts, t) ->
|
||||
let args, ret = cmp_fty ct (ts, t) in
|
||||
Fun (args, ret)
|
||||
|
||||
let typ_of_binop : Ast.binop -> Ast.ty * Ast.ty * Ast.ty = function
|
||||
| Add | Mul | Sub | Shl | Shr | Sar | IAnd | IOr -> (TInt, TInt, TInt)
|
||||
| Eq | Neq | Lt | Lte | Gt | Gte -> (TInt, TInt, TBool)
|
||||
| And | Or -> (TBool, TBool, TBool)
|
||||
|
||||
let typ_of_unop : Ast.unop -> Ast.ty * Ast.ty = function
|
||||
| Neg | Bitnot -> (TInt, TInt)
|
||||
| Lognot -> (TBool, TBool)
|
||||
|
||||
|
||||
(* Some useful helper functions *)
|
||||
|
||||
(* Generate a fresh temporary identifier. Since OAT identifiers cannot begin
|
||||
with an underscore, these should not clash with any source variables *)
|
||||
let gensym : string -> string =
|
||||
let c = ref 0 in
|
||||
fun (s:string) -> incr c; Printf.sprintf "_%s%d" s (!c)
|
||||
|
||||
(* Amount of space an Oat type takes when stored in the stack, in bytes.
|
||||
Note that since structured values are manipulated by reference, all
|
||||
Oat values take 8 bytes on the stack.
|
||||
*)
|
||||
let size_oat_ty (t : Ast.ty) = 8L
|
||||
|
||||
|
||||
(* Amount of size that needs to be allocated to store a structure *)
|
||||
let rec size_oat_struct (l : Ast.field list) =
|
||||
match l with
|
||||
| [] -> 0L
|
||||
| f :: t -> Int64.(add (size_oat_struct t) (size_oat_ty f.ftyp))
|
||||
|
||||
(* Generate code to allocate an array of source type TRef (RArray t) of the
|
||||
given size. Note "size" is an operand whose value can be computed at
|
||||
runtime *)
|
||||
let oat_alloc_array ct (t:Ast.ty) (size:Ll.operand) : Ll.ty * operand * stream =
|
||||
let ans_id, arr_id = gensym "array", gensym "raw_array" in
|
||||
let ans_ty = cmp_ty ct @@ TRef (RArray t) in
|
||||
let arr_ty = Ptr I64 in
|
||||
ans_ty, Id ans_id, lift
|
||||
[ arr_id, Call(arr_ty, Gid "oat_alloc_array", [I64, size])
|
||||
; ans_id, Bitcast(arr_ty, Id arr_id, ans_ty) ]
|
||||
|
||||
|
||||
(* Allocates an oat structure on the
|
||||
heap and returns a target operand with the appropriate reference.
|
||||
|
||||
- generate a call to 'oat_malloc' and use bitcast to convert the
|
||||
resulting pointer to the right type
|
||||
|
||||
- make sure to calculate the correct amount of space to allocate!
|
||||
*)
|
||||
let oat_alloc_struct ct (id:Ast.id) : Ll.ty * operand * stream =
|
||||
let ret_id, arr_id = gensym "struct", gensym "raw_struct" in
|
||||
let ans_ty = cmp_ty ct (TRef (RStruct id)) in
|
||||
let arr_ty = Ptr I64 in
|
||||
ans_ty, Id ret_id, lift
|
||||
[ arr_id, Call(arr_ty, Gid "oat_malloc", [I64, Const (size_oat_struct (TypeCtxt.lookup id ct))])
|
||||
; ret_id, Bitcast(arr_ty, Id arr_id, ans_ty) ]
|
||||
|
||||
|
||||
let str_arr_ty s = Array(1 + String.length s, I8)
|
||||
let i1_op_of_bool b = Ll.Const (if b then 1L else 0L)
|
||||
let i64_op_of_int i = Ll.Const (Int64.of_int i)
|
||||
|
||||
let cmp_binop t (b : Ast.binop) : Ll.operand -> Ll.operand -> Ll.insn =
|
||||
let ib b op1 op2 = Ll.Binop (b, t, op1, op2) in
|
||||
let ic c op1 op2 = Ll.Icmp (c, t, op1, op2) in
|
||||
match b with
|
||||
| Ast.Add -> ib Ll.Add
|
||||
| Ast.Mul -> ib Ll.Mul
|
||||
| Ast.Sub -> ib Ll.Sub
|
||||
| Ast.And -> ib Ll.And
|
||||
| Ast.IAnd -> ib Ll.And
|
||||
| Ast.IOr -> ib Ll.Or
|
||||
| Ast.Or -> ib Ll.Or
|
||||
| Ast.Shl -> ib Ll.Shl
|
||||
| Ast.Shr -> ib Ll.Lshr
|
||||
| Ast.Sar -> ib Ll.Ashr
|
||||
|
||||
| Ast.Eq -> ic Ll.Eq
|
||||
| Ast.Neq -> ic Ll.Ne
|
||||
| Ast.Lt -> ic Ll.Slt
|
||||
| Ast.Lte -> ic Ll.Sle
|
||||
| Ast.Gt -> ic Ll.Sgt
|
||||
| Ast.Gte -> ic Ll.Sge
|
||||
|
||||
(* Compiles an expression exp in context c, outputting the Ll operand that will
|
||||
receive the value of the expression, and the stream of instructions
|
||||
implementing the expression.
|
||||
*)
|
||||
let rec cmp_exp (tc : TypeCtxt.t) (c:Ctxt.t) (exp:Ast.exp node) : Ll.ty * Ll.operand * stream =
|
||||
match exp.elt with
|
||||
| Ast.CInt i -> I64, Const i, []
|
||||
| Ast.CNull r -> cmp_ty tc (TNullRef r), Null, []
|
||||
| Ast.CBool b -> I1, i1_op_of_bool b, []
|
||||
|
||||
| Ast.CStr s ->
|
||||
let gid = gensym "str_arr" in
|
||||
let str_typ = str_arr_ty s in
|
||||
let uid = gensym "str" in
|
||||
Ptr I8, Id uid, []
|
||||
>:: G(gid, (str_typ, GString s))
|
||||
>:: I(uid, Gep(Ptr str_typ, Gid gid, [Const 0L; Const 0L;]))
|
||||
|
||||
| Ast.Bop (bop, e1, e2) ->
|
||||
let t, _, ret_ty = typ_of_binop bop in
|
||||
let ll_t = cmp_ty tc t in
|
||||
let op1, code1 = cmp_exp_as tc c e1 ll_t in
|
||||
let op2, code2 = cmp_exp_as tc c e2 ll_t in
|
||||
let ans_id = gensym "bop" in
|
||||
cmp_ty tc ret_ty, Id ans_id, code1 >@ code2 >:: I(ans_id, cmp_binop ll_t bop op1 op2)
|
||||
|
||||
| Ast.Uop (uop, e) ->
|
||||
let t, ret_ty = typ_of_unop uop in
|
||||
let op, code = cmp_exp_as tc c e (cmp_ty tc t) in
|
||||
let ans_id = gensym "unop" in
|
||||
let cmp_uop op = function
|
||||
| Ast.Neg -> Binop (Sub, I64, i64_op_of_int 0, op)
|
||||
| Ast.Lognot -> Icmp (Eq, I1, op, i1_op_of_bool false)
|
||||
| Ast.Bitnot -> Binop (Xor, I64, op, i64_op_of_int (-1)) in
|
||||
cmp_ty tc ret_ty, Id ans_id, code >:: I (ans_id, cmp_uop op uop)
|
||||
|
||||
| Ast.Id id ->
|
||||
let t, op = Ctxt.lookup id c in
|
||||
begin match t with
|
||||
| Ptr (Fun _) -> t, op, []
|
||||
| Ptr t ->
|
||||
let ans_id = gensym id in
|
||||
t, Id ans_id, [I(ans_id, Load(Ptr t, op))]
|
||||
| _ -> failwith "broken invariant: identifier not a pointer"
|
||||
end
|
||||
|
||||
(* compiles the length(e) expression. *)
|
||||
| Ast.Length e ->
|
||||
let arr_ty, arr_op, arr_code = cmp_exp tc c e in
|
||||
let _ = match arr_ty with
|
||||
| Ptr (Struct [_; Array (_,t)]) -> t
|
||||
| _ -> failwith "Length: indexed into non pointer" in
|
||||
let ptr_id, tmp_id = gensym "index_ptr", gensym "tmp" in
|
||||
let ans_id = gensym "len" in
|
||||
I64, (Id ans_id),
|
||||
arr_code >@ lift
|
||||
[
|
||||
ptr_id, Gep(arr_ty, arr_op, [i64_op_of_int 0; i64_op_of_int 0])
|
||||
; ans_id, Load(Ptr I64, Id ptr_id)]
|
||||
|
||||
|
||||
| Ast.Index (e, i) ->
|
||||
let ans_ty, ptr_op, code = cmp_exp_lhs tc c exp in
|
||||
let ans_id = gensym "index" in
|
||||
ans_ty, Id ans_id, code >:: I(ans_id, Load(Ptr ans_ty, ptr_op))
|
||||
|
||||
| Ast.Call (f, es) ->
|
||||
cmp_call tc c f es
|
||||
|
||||
| Ast.CArr (elt_ty, cs) ->
|
||||
let size_op = Ll.Const (Int64.of_int @@ List.length cs) in
|
||||
let arr_ty, arr_op, alloc_code = oat_alloc_array tc elt_ty size_op in
|
||||
let ll_elt_ty = cmp_ty tc elt_ty in
|
||||
let add_elt s (i, elt) =
|
||||
let elt_op, elt_code = cmp_exp_as tc c elt ll_elt_ty in
|
||||
let ind = gensym "ind" in
|
||||
s >@ elt_code >@ lift
|
||||
[ ind, Gep(arr_ty, arr_op, [Const 0L; Const 1L; i64_op_of_int i ])
|
||||
; gensym "store", Store (ll_elt_ty, elt_op, Id ind) ]
|
||||
in
|
||||
let ind_code = List.(fold_left add_elt [] @@ mapi (fun i e -> i, e) cs) in
|
||||
arr_ty, arr_op, alloc_code >@ ind_code
|
||||
|
||||
(* - the initializer is a loop that uses id as the index
|
||||
- on each iteration of the loop, the code evaluates e2 and assigns it
|
||||
to the index stored in id.
|
||||
*)
|
||||
| Ast.NewArr (elt_ty, e1, id, e2) ->
|
||||
let ptr_id = gensym "ptr_" in
|
||||
let bound_id = gensym "bnd_" in
|
||||
let _, size_op, size_code = cmp_exp tc c e1 in
|
||||
let arr_ty, arr_op, alloc_code = oat_alloc_array tc elt_ty size_op in
|
||||
let for_loop = (no_loc @@ Ast.For ([(id, no_loc (CInt 0L))],
|
||||
Some (no_loc @@ Bop (Lt, no_loc @@ Id id, no_loc @@ Id bound_id)),
|
||||
Some (no_loc @@ Assn (no_loc @@ Id id, no_loc @@ Bop (Add, no_loc @@ Id id, no_loc @@ CInt 1L))),
|
||||
[no_loc @@ Assn (no_loc @@ Index (no_loc @@ Id ptr_id, no_loc @@ Id id), e2)])) in
|
||||
let new_context = Ctxt.add c ptr_id (Ptr arr_ty, Id ptr_id) in
|
||||
let new_context = Ctxt.add new_context bound_id (Ptr I64, Id bound_id) in
|
||||
let _, assign_code = cmp_stmt tc new_context arr_ty for_loop None None in
|
||||
arr_ty, arr_op,
|
||||
size_code >@
|
||||
alloc_code >@
|
||||
[I (bound_id, Alloca(I64))] >@
|
||||
[I (gensym "store", Store (I64, size_op, Id bound_id))] >@
|
||||
[I (ptr_id, Alloca(arr_ty))] >@
|
||||
[I (gensym "store", Store (arr_ty, arr_op, Id ptr_id))] >@
|
||||
assign_code
|
||||
|
||||
(* For each field component of the struct
|
||||
- use the TypeCtxt operations to compute getelementptr indices
|
||||
- compile the initializer expression
|
||||
- store the resulting value into the structure
|
||||
*)
|
||||
| Ast.CStruct (id, l) ->
|
||||
let struct_ty, struct_op, alloc_code = oat_alloc_struct tc id in
|
||||
let add_elt s (fid, fexp) =
|
||||
let field_type = cmp_ty tc @@ TypeCtxt.lookup_field id fid tc in
|
||||
let index = TypeCtxt.index_of_field id fid tc in
|
||||
let elt_op, elt_code = cmp_exp_as tc c fexp field_type in
|
||||
let ind = gensym "ind" in
|
||||
s >@ elt_code >@ lift
|
||||
[ ind, Gep(struct_ty, struct_op, [Const 0L; i64_op_of_int index])
|
||||
; gensym "store", Store(field_type, elt_op, Id ind) ]
|
||||
in
|
||||
let ind_code = List.fold_left add_elt [] l in
|
||||
struct_ty, struct_op, alloc_code >@ ind_code
|
||||
|
||||
| Ast.Proj (e, id) ->
|
||||
let ans_ty, ptr_op, code = cmp_exp_lhs tc c exp in
|
||||
let ans_id = gensym "proj" in
|
||||
ans_ty, Id ans_id, code >:: I(ans_id, Load(Ptr ans_ty, ptr_op))
|
||||
|
||||
|
||||
and cmp_exp_lhs (tc : TypeCtxt.t) (c:Ctxt.t) (e:exp node) : Ll.ty * Ll.operand * stream =
|
||||
match e.elt with
|
||||
| Ast.Id x ->
|
||||
let t, op = Ctxt.lookup x c in
|
||||
t, op, []
|
||||
|
||||
| Ast.Proj (e, i) ->
|
||||
let src_ty, src_op, src_code = cmp_exp tc c e in
|
||||
let ret_ty, ret_index = TypeCtxt.lookup_field_name i tc in
|
||||
let gep_id = gensym "index" in
|
||||
let ret_op = Gep(src_ty, src_op, [Const 0L; Const ret_index]) in
|
||||
cmp_ty tc ret_ty, Id gep_id, src_code >:: I (gep_id, ret_op)
|
||||
|
||||
|
||||
| Ast.Index (e, i) ->
|
||||
let arr_ty, arr_op, arr_code = cmp_exp tc c e in
|
||||
let _, ind_op, ind_code = cmp_exp tc c i in
|
||||
let ans_ty = match arr_ty with
|
||||
| Ptr (Struct [_; Array (_,t)]) -> t
|
||||
| _ -> failwith "Index: indexed into non pointer" in
|
||||
let ptr_id, tmp_id, call_id = gensym "index_ptr", gensym "tmp", gensym "call" in
|
||||
ans_ty, (Id ptr_id),
|
||||
arr_code >@ ind_code >@ lift
|
||||
[tmp_id, Bitcast(arr_ty, arr_op, Ptr I64)
|
||||
;call_id, Call (Void, Gid "oat_assert_array_length", [Ptr I64, Id tmp_id; I64, ind_op ])
|
||||
;ptr_id, Gep(arr_ty, arr_op, [i64_op_of_int 0; i64_op_of_int 1; ind_op]) ]
|
||||
|
||||
|
||||
|
||||
| _ -> failwith "invalid lhs expression"
|
||||
|
||||
and cmp_call (tc : TypeCtxt.t) (c:Ctxt.t) (exp:Ast.exp node) (es:Ast.exp node list) : Ll.ty * Ll.operand * stream =
|
||||
let (t, op, s) = cmp_exp tc c exp in
|
||||
let (ts, rt) =
|
||||
match t with
|
||||
| Ptr (Fun (l, r)) -> l, r
|
||||
| _ -> failwith "nonfunction passed to cmp_call" in
|
||||
let args, args_code = List.fold_right2
|
||||
(fun e t (args, code) ->
|
||||
let arg_op, arg_code = cmp_exp_as tc c e t in
|
||||
(t, arg_op)::args, arg_code @ code
|
||||
) es ts ([],[]) in
|
||||
let res_id = gensym "result" in
|
||||
rt, Id res_id, s >@ args_code >:: I(res_id, Call(rt, op, args))
|
||||
|
||||
and cmp_exp_as (tc : TypeCtxt.t) (c:Ctxt.t) (e:Ast.exp node) (t:Ll.ty) : Ll.operand * stream =
|
||||
let from_t, op, code = cmp_exp tc c e in
|
||||
if from_t = t then op, code
|
||||
else let res_id = gensym "cast" in
|
||||
Id res_id, code >:: I(res_id, Bitcast(from_t, op, t))
|
||||
|
||||
(* Compile a statement in context c with return typ rt. Return a new context,
|
||||
possibly extended with new local bindings, and the instruction stream
|
||||
implementing the statement.
|
||||
|
||||
Left-hand-sides of assignment statements must either be OAT identifiers,
|
||||
or an index into some arbitrary expression of array type. Otherwise, the
|
||||
program is not well-formed and your compiler may throw an error.
|
||||
*)
|
||||
and cmp_stmt (tc : TypeCtxt.t) (c:Ctxt.t) (rt:Ll.ty) (stmt:Ast.stmt node) (lo : Ll.lbl option) (ls : Ll.lbl option) : Ctxt.t * stream =
|
||||
|
||||
match stmt.elt with
|
||||
| Ast.Decl (id, init) ->
|
||||
let ll_ty, init_op, init_code = cmp_exp tc c init in
|
||||
let res_id = gensym id in
|
||||
let c' = Ctxt.add c id (Ptr ll_ty, Id res_id) in
|
||||
c', init_code
|
||||
>:: E(res_id, Alloca ll_ty)
|
||||
>:: I(gensym "store", Store (ll_ty, init_op, Id res_id))
|
||||
|
||||
| Ast.Assn (path ,e) ->
|
||||
let _, pop, path_code = cmp_exp_lhs tc c path in
|
||||
let ll_ty, eop, exp_code = cmp_exp tc c e in
|
||||
c, path_code >@ exp_code >:: I(gensym "store", (Store (ll_ty, eop, pop)))
|
||||
|
||||
| Ast.If (guard, st1, st2) ->
|
||||
let guard_ty, guard_op, guard_code = cmp_exp tc c guard in
|
||||
let then_code = cmp_block tc c rt st1 lo ls in
|
||||
let else_code = cmp_block tc c rt st2 lo ls in
|
||||
let lt, le, lm = gensym "then", gensym "else", gensym "merge" in
|
||||
c, guard_code
|
||||
>:: T(Cbr (guard_op, lt, le))
|
||||
>:: L lt >@ then_code >:: T(Br lm)
|
||||
>:: L le >@ else_code >:: T(Br lm)
|
||||
>:: L lm
|
||||
|
||||
(* the 'if?' checked null downcast statement.
|
||||
- check whether the value computed by exp is null, if so jump to
|
||||
the 'null' block, otherwise take the 'notnull' block
|
||||
|
||||
- the identifier id is in scope in the 'notnull' block and so
|
||||
needs to be allocated (and added to the context)
|
||||
|
||||
- as in the if-then-else construct, you should jump to the common
|
||||
merge label after either block
|
||||
*)
|
||||
| Ast.Cast (typ, id, exp, notnull, null) ->
|
||||
let translated_typ = cmp_ty tc (TRef typ) in
|
||||
let guard_op, guard_code = cmp_exp_as tc c exp translated_typ in
|
||||
let res_id = gensym id in
|
||||
let c' = Ctxt.add c id (Ptr translated_typ, Id res_id) in
|
||||
let null_code = cmp_block tc c rt null lo ls in
|
||||
let notnull_code = cmp_block tc c' rt notnull lo ls in
|
||||
let cast_id = gensym "cast" in
|
||||
let ln, lnn, lm = gensym "null", gensym "notnull", gensym "merge" in
|
||||
c, guard_code
|
||||
>:: I(cast_id, Icmp(Eq, translated_typ, guard_op, Null))
|
||||
>:: T(Cbr (Id cast_id, ln, lnn))
|
||||
>:: L lnn
|
||||
>:: E(res_id, Alloca translated_typ)
|
||||
>:: I(gensym "store", Store (translated_typ, guard_op, Id res_id))
|
||||
>@ notnull_code >:: T(Br lm)
|
||||
>:: L ln >@ null_code >:: T(Br lm)
|
||||
>:: L lm
|
||||
|
||||
| Ast.While (guard, body) ->
|
||||
let guard_ty, guard_op, guard_code = cmp_exp tc c guard in
|
||||
let lcond, lbody, lpost = gensym "cond", gensym "body", gensym "post" in
|
||||
let body_code = cmp_block tc c rt body (Some lpost) (Some lcond) in
|
||||
c, []
|
||||
>:: T (Br lcond)
|
||||
>:: L lcond >@ guard_code >:: T (Cbr (guard_op, lbody, lpost))
|
||||
>:: L lbody >@ body_code >:: T (Br lcond)
|
||||
>:: L lpost
|
||||
|
||||
| Ast.For (inits, guard, after, body) ->
|
||||
let guard = match guard with Some e -> e | None -> no_loc (CBool true) in
|
||||
let after = match after with Some s -> [s] | None -> [] in
|
||||
let body = body @ after in
|
||||
let ds = List.map (fun d -> no_loc (Decl d)) inits in
|
||||
let stream = cmp_block tc c rt (ds @ [no_loc @@ Ast.While (guard, body)]) None None in
|
||||
c, stream
|
||||
|
||||
| Ast.Ret None ->
|
||||
c, [T (Ret(Void, None))]
|
||||
|
||||
| Ast.Ret (Some e) ->
|
||||
let op, code = cmp_exp_as tc c e rt in
|
||||
c, code >:: T(Ret (rt, Some op))
|
||||
|
||||
| Ast.SCall (f, es) ->
|
||||
let _, op, code = cmp_call tc c f es in
|
||||
c, code
|
||||
|
||||
(* Compile a series of statements *)
|
||||
and cmp_block (tc : TypeCtxt.t) (c:Ctxt.t) (rt:Ll.ty) (stmts:Ast.block) (lo:Ll.lbl option) ls : stream =
|
||||
snd @@ List.fold_left (fun (c, code) s ->
|
||||
let c, stmt_code = cmp_stmt tc c rt s lo ls in
|
||||
c, code >@ stmt_code
|
||||
) (c,[]) stmts
|
||||
|
||||
|
||||
|
||||
(* Construct the structure context for compilation. We could reuse
|
||||
the H component from the Typechecker rather than recomputing this
|
||||
information here, but we do it this way to make the two parts of
|
||||
the project less interdependent. *)
|
||||
let get_struct_defns (p:Ast.prog) : TypeCtxt.t =
|
||||
List.fold_right (fun d ts ->
|
||||
match d with
|
||||
| Ast.Gtdecl { elt=(id, fs) } ->
|
||||
TypeCtxt.add ts id fs
|
||||
| _ -> ts) p TypeCtxt.empty
|
||||
|
||||
|
||||
(* Adds each function identifier to the context at an
|
||||
appropriately translated type.
|
||||
|
||||
NOTE: The Gid of a function is just its source name
|
||||
*)
|
||||
let cmp_function_ctxt (tc : TypeCtxt.t) (c:Ctxt.t) (p:Ast.prog) : Ctxt.t =
|
||||
List.fold_left (fun c -> function
|
||||
| Ast.Gfdecl { elt={ frtyp; fname; args } } ->
|
||||
let ft = TRef (RFun (List.map fst args, frtyp)) in
|
||||
Ctxt.add c fname (cmp_ty tc ft, Gid fname)
|
||||
| _ -> c
|
||||
) c p
|
||||
|
||||
(* Populate a context with bindings for global variables
|
||||
mapping OAT identifiers to LLVMlite gids and their types.
|
||||
|
||||
Only a small subset of OAT expressions can be used as global initializers
|
||||
in well-formed programs. (The constructors starting with C and Id's
|
||||
for global function values).
|
||||
*)
|
||||
let cmp_global_ctxt (tc : TypeCtxt.t) (c:Ctxt.t) (p:Ast.prog) : Ctxt.t =
|
||||
let gexp_ty c = function
|
||||
| Id id -> fst (Ctxt.lookup id c)
|
||||
| CStruct (t, cs) -> Ptr (Namedt t)
|
||||
| CNull r -> cmp_ty tc (TNullRef r)
|
||||
| CBool b -> I1
|
||||
| CInt i -> I64
|
||||
| CStr s -> Ptr (str_arr_ty s)
|
||||
| CArr (u, cs) -> Ptr (Struct [I64; Array(List.length cs, cmp_ty tc u)])
|
||||
| x -> failwith ( "bad global initializer: " ^ (Astlib.string_of_exp (no_loc x)))
|
||||
in
|
||||
List.fold_left (fun c -> function
|
||||
| Ast.Gvdecl { elt={ name; init } } ->
|
||||
Ctxt.add c name (Ptr (gexp_ty c init.elt), Gid name)
|
||||
| _ -> c) c p
|
||||
|
||||
|
||||
(* Compile a function declaration in global context c. Return the LLVMlite cfg
|
||||
and a list of global declarations containing the string literals appearing
|
||||
in the function.
|
||||
*)
|
||||
let cmp_fdecl (tc : TypeCtxt.t) (c:Ctxt.t) (f:Ast.fdecl node) : Ll.fdecl * (Ll.gid * Ll.gdecl) list =
|
||||
let {frtyp; args; body} = f.elt in
|
||||
let add_arg (s_typ, s_id) (c,code,args) =
|
||||
let ll_id = gensym s_id in
|
||||
let ll_ty = cmp_ty tc s_typ in
|
||||
let alloca_id = gensym s_id in
|
||||
let c = Ctxt.add c s_id (Ptr ll_ty, Ll.Id alloca_id)in
|
||||
c, []
|
||||
>:: E(alloca_id, Alloca ll_ty)
|
||||
>:: I(gensym "store", Store(ll_ty, Id ll_id, Id alloca_id))
|
||||
>@ code,
|
||||
(ll_ty, ll_id)::args
|
||||
in
|
||||
let c, args_code, args = List.fold_right add_arg args (c,[],[]) in
|
||||
let ll_rty = cmp_ret_ty tc frtyp in
|
||||
let block_code = cmp_block tc c ll_rty body None None in
|
||||
let argtys, f_param = List.split args in
|
||||
let f_ty = (argtys, ll_rty) in
|
||||
let f_cfg, globals = cfg_of_stream (args_code >@ block_code) in
|
||||
{f_ty; f_param; f_cfg}, globals
|
||||
|
||||
|
||||
|
||||
(* Compile a global initializer, returning the resulting LLVMlite global
|
||||
declaration, and a list of additional global declarations.
|
||||
*)
|
||||
let rec cmp_gexp c (tc : TypeCtxt.t) (e:Ast.exp node) : Ll.gdecl * (Ll.gid * Ll.gdecl) list =
|
||||
match e.elt with
|
||||
| CNull r -> (cmp_ty tc (TNullRef r), GNull), []
|
||||
| CBool b -> (I1, (if b then GInt 1L else GInt 0L)), []
|
||||
| CInt i -> (I64, GInt i), []
|
||||
| Id id -> ((fst @@ Ctxt.lookup id c), GGid id), []
|
||||
|
||||
| CStr s ->
|
||||
let gid = gensym "str" in
|
||||
let ll_ty = str_arr_ty s in
|
||||
(Ptr ll_ty, GGid gid), [gid, (ll_ty, GString s)]
|
||||
|
||||
| CArr (u, cs) ->
|
||||
let elts, gs = List.fold_right
|
||||
(fun cst (elts, gs) ->
|
||||
let gd, gs' = cmp_gexp c tc cst in
|
||||
gd::elts, gs' @ gs) cs ([], [])
|
||||
in
|
||||
let len = List.length cs in
|
||||
let ll_u = cmp_ty tc u in
|
||||
let gid = gensym "global_arr" in
|
||||
let arr_t = Struct [ I64; Array(len, ll_u) ] in
|
||||
let arr_i = GStruct [ I64, GInt (Int64.of_int len); Array(len, ll_u), GArray elts ] in
|
||||
(Ptr arr_t, GGid gid), (gid, (arr_t, arr_i))::gs
|
||||
|
||||
| CStruct (id, cs) ->
|
||||
let fields = TypeCtxt.lookup id tc in
|
||||
let elts, gs =
|
||||
List.fold_right
|
||||
(fun fs (elts, gs) ->
|
||||
let gd, gs' = cmp_gexp c tc (snd (List.find (fun (xid, xname) -> xid = fs.fieldName) cs)) in
|
||||
(gd :: elts, gs' @ gs)) fields ([], []) in
|
||||
let gid = gensym "global_struct" in
|
||||
(Ptr (Namedt id), GGid gid), (gid, (Namedt id, GStruct elts)) :: gs
|
||||
|
||||
| _ -> failwith "bad global initializer"
|
||||
|
||||
(* Oat internals function context ------------------------------------------- *)
|
||||
let internals =
|
||||
[ "oat_malloc", Ll.Fun ([I64], Ptr I64)
|
||||
; "oat_alloc_array", Ll.Fun ([I64], Ptr I64)
|
||||
; "oat_assert_not_null", Ll.Fun ([Ptr I8], Void)
|
||||
; "oat_assert_array_length", Ll.Fun ([Ptr I64; I64], Void)
|
||||
]
|
||||
|
||||
(* Oat builtin function context --------------------------------------------- *)
|
||||
let builtins = List.map
|
||||
(fun (fname, ftyp) ->
|
||||
let args, ret = cmp_fty TypeCtxt.empty ftyp in
|
||||
(fname, Ll.Fun (args, ret)))
|
||||
Typechecker.builtins
|
||||
|
||||
|
||||
let tctxt_to_tdecls c =
|
||||
List.map (fun (i, l) -> i, Struct (List.map (fun f -> cmp_ty c f.ftyp) l)) c
|
||||
|
||||
(* Compile a OAT program to LLVMlite *)
|
||||
let cmp_prog (p:Ast.prog) : Ll.prog =
|
||||
let tc = get_struct_defns p in
|
||||
(* add built-in functions to context *)
|
||||
let init_ctxt =
|
||||
List.fold_left (fun c (i, t) -> Ctxt.add c i (Ll.Ptr t, Gid i))
|
||||
Ctxt.empty builtins
|
||||
in
|
||||
let fc = cmp_function_ctxt tc init_ctxt p in
|
||||
|
||||
(* build global variable context *)
|
||||
let c = cmp_global_ctxt tc fc p in
|
||||
(* compile functions and global variables *)
|
||||
let fdecls, gdecls =
|
||||
List.fold_right (fun d (fs, gs) ->
|
||||
match d with
|
||||
| Ast.Gvdecl { elt=gd } ->
|
||||
let ll_gd, gs' = cmp_gexp c tc gd.init in
|
||||
(fs, (gd.name, ll_gd)::gs' @ gs)
|
||||
| Ast.Gfdecl fd ->
|
||||
let fdecl, gs' = cmp_fdecl tc c fd in
|
||||
(fd.elt.fname,fdecl)::fs, gs' @ gs
|
||||
| Ast.Gtdecl _ ->
|
||||
fs, gs
|
||||
) p ([], [])
|
||||
in
|
||||
(* gather external declarations *)
|
||||
let edecls = internals @ builtins in
|
||||
{ tdecls = tctxt_to_tdecls tc; gdecls; fdecls; edecls }
|
||||
|
|
@ -46,9 +46,9 @@ let terminator_uses (t:terminator) : UidS.t = uids_of_ops (terminator_ops t)

(In our representation, there is one flow function for instructions
and another for terminators. *)
let insn_flow (u,i:uid * insn) (out:UidS.t) : UidS.t =
out |> UidS.remove u
|> UidS.union (insn_uses i)
let insn_flow ((u,i):uid * insn) (out:UidS.t) : UidS.t =
out |> UidS.remove u (* defs[n] = u for SSA, so compute (out[n] \ defs[n]) *)
|> UidS.union (insn_uses i) (* include use[n] = insn_uses i *)

let terminator_flow (t:terminator) (out:UidS.t) : UidS.t =
out |> UidS.union (terminator_uses t)
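A minimal stand-alone sketch of what this transfer computes, assuming a plain string set in place of the UidS module so it runs on its own:

module S = Set.Make (String)

(* in[n] = (out[n] \ defs[n]) ∪ uses[n], mirroring insn_flow above *)
let flow_sketch ~(def : string) ~(uses : string list) (out : S.t) : S.t =
  S.union (S.remove def out) (S.of_list uses)

let () =
  (* for  %u = add %a, %b  with out = {u, c}: the live-in set is {a, b, c} *)
  let live_in = flow_sketch ~def:"u" ~uses:[ "a"; "b" ] (S.of_list [ "u"; "c" ]) in
  assert (S.elements live_in = [ "a"; "b"; "c" ])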
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ module Opt = Oat.Opt
|
|||
module Backend = Oat.Backend
|
||||
|
||||
exception Ran_tests
|
||||
let suite = ref (Studenttests.provided_tests @ Gradedtests.graded_tests)
|
||||
let suite = ref (Gradedtests.graded_tests)
|
||||
|
||||
let execute_tests () =
|
||||
Platform.configure_os ();
|
||||
|
|
@ -16,7 +16,7 @@ let execute_tests () =
|
|||
raise Ran_tests
|
||||
|
||||
let args =
|
||||
[ ("-linux", Set Platform.linux, "use linux-style name mangling [must preceed --test on linux]")
|
||||
[ ("-linux", Set Platform.linux, "force linux-style name mangling [must preceed --test]")
|
||||
; ("--test", Unit execute_tests, "run the test suite, ignoring other files inputs")
|
||||
; ("-op", Set_string Platform.output_path, "set the path to the output files directory [default='output']")
|
||||
; ("-o", Set_string executable_filename, "set the name of the resulting executable [default='a.out']")
|
||||
|
|
@ -24,11 +24,10 @@ let args =
|
|||
; ("-c", Clear link, "stop after generating .o files; do not generate executables")
|
||||
; ("--print-ll", Set print_ll_flag, "prints the program's LL code (after lowering to clang code if --clang-malloc is set)")
|
||||
; ("--print-x86", Set print_x86_flag, "prints the program's assembly code")
|
||||
; ("--clang", Set clang, "compiles to assembly using clang, not the 153 backend (implies --clang-malloc)")
|
||||
; ("--clang", Set clang, "compiles to assembly using clang, not the CS131 backend (implies --clang-malloc)")
|
||||
; ("--execute-x86", Set execute_x86, "run the resulting executable file")
|
||||
; ("-v", Set Platform.verbose, "enables more verbose compilation output")
|
||||
; ("-O1", Unit (fun _ -> Opt.opt_level := 1), "enable optimization")
|
||||
; ("-O2", Unit (fun _ -> Opt.opt_level := 2), "enable additional optimization")
|
||||
; ("-O1", Set Opt.do_opt, "enable optimization")
|
||||
; ("--regalloc", Symbol (["none"; "greedy"; "better"], Backend.set_regalloc), " use the specified register allocator")
|
||||
; ("--liveness", Symbol (["trivial"; "dataflow"], Backend.set_liveness), " use the specified liveness analysis")
|
||||
; ("--print-regs", Set print_regs_flag, "prints the register usage statistics for x86 code")
|
||||
|
|
@ -43,8 +42,8 @@ let _ =
|
|||
Platform.create_output_dir ();
|
||||
try
|
||||
Arg.parse args (fun filename -> files := filename :: !files)
|
||||
"CS153 main test harness\n\
|
||||
USAGE: ./main.native [options] <files>\n\
|
||||
"CS131 main test harness\n\
|
||||
USAGE: ./oatc [options] <files>\n\
|
||||
see README for details about using the compiler";
|
||||
Platform.configure_os ();
|
||||
process_files !files
|
||||
|
|
|
|||
|
|
@ -2,16 +2,6 @@
|
|||
open Ll
|
||||
module Platform = Util.Platform
|
||||
|
||||
(*
|
||||
This file drives the optimization for the compilation. For the leaderboard,
|
||||
you may optionally implement additional optimizations by editing this file.
|
||||
|
||||
NOTE: your additional optimizations should run only if !opt_level = 2.
|
||||
|
||||
That is, your additional optimizations should be enabled only when the
|
||||
flag -O2 is passed to main.native.
|
||||
*)
|
||||
|
||||
(* dead code elimination ---------------------------------------------------- *)
|
||||
let dce (g:Cfg.t) : Cfg.t =
|
||||
let ag = Alias.analyze g in
|
||||
|
|
@ -32,22 +22,19 @@ let rec pass n (g:Cfg.t) =
|
|||
(* optimize an fdecl -------------------------------------------------------- *)
|
||||
(* runs (two) passes of dce followed by constant propagation on the supplied
|
||||
LL IR fdecl. *)
|
||||
let opt_fdecl (gid,fdecl:Ll.gid * Ll.fdecl) : Ll.gid * Ll.fdecl =
|
||||
let opt_fdecl_O1 (gid,fdecl:Ll.gid * Ll.fdecl) : Ll.gid * Ll.fdecl =
|
||||
let g = pass 2 (Cfg.of_ast fdecl) in
|
||||
gid, Cfg.to_ast g
|
||||
|
||||
(* optimization level, set by the main compiler driver *)
|
||||
let opt_level = ref 0
|
||||
(* flag for the main compiler driver *)
|
||||
let do_opt = ref false
|
||||
|
||||
(* optimize each fdecl in the program *)
|
||||
let optimize (p:Ll.prog) : Ll.prog =
|
||||
if !opt_level = 2 then
|
||||
(* OPTIONAL TASK: implement additional optimizations *)
|
||||
failwith "No -O2 optimizations implemented! This is an optional task."
|
||||
else if !opt_level = 1
|
||||
if !do_opt
|
||||
then begin
|
||||
Platform.verb @@ Printf.sprintf "..optimizing";
|
||||
{ p with Ll.fdecls = List.map opt_fdecl p.Ll.fdecls }
|
||||
{ p with Ll.fdecls = List.map opt_fdecl_O1 p.Ll.fdecls }
|
||||
end
|
||||
else p
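A minimal sketch of how such a flag is wired through OCaml's Arg module (the names below are copied from the diff, not taken verbatim from the repository):

let do_opt = ref false

let () =
  (* -O1 simply sets the bool ref; the driver then checks !do_opt as above *)
  let args = [ ("-O1", Arg.Set do_opt, "enable optimization") ] in
  Arg.parse args (fun _file -> ()) "usage: oatc [options] <files>";
  if !do_opt then print_endline "..optimizing"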
|
||||
|
||||
|
|
|
|||
|
|
@ -58,7 +58,7 @@ let opt_file opt fname =
|
|||
|
||||
let () =
|
||||
if not !Sys.interactive then begin
|
||||
Arg.parse args (fun f -> files := f::!files) "Usage";
|
||||
Arg.parse args (fun f -> files := f::!files) "Usage: printanalysis <opts> filename.ll";
|
||||
(if !do_live then List.iter (fun f -> do_file f print_live) !files);
|
||||
(if !do_cp then List.iter (fun f -> do_file f print_cp) !files);
|
||||
(if !do_alias then List.iter (fun f -> do_file f print_alias) !files);
|
||||
|
|
|
|||
|
|
@ -14,10 +14,10 @@ open Datastructures
"forward", but e.g. liveness instantiates the graph so that "forward"
here is "backward" in the control-flow graph.

This means that for a node n, the output information is explicitly
represented by the "find_fact" function:
out[n] = find_fact g n
The input information for [n] is implicitly represented by:
This means that for a node n, the output information is *explicitly*
represented by the "Graph.out" function:
out[n] = Graph.out g n
The input information for [n] is *implicitly* represented by:
in[n] = combine preds[n] (out[n])

*)

@ -46,18 +46,17 @@
(* lookup / modify the dataflow annotations associated with a node *)
val out : t -> node -> fact
val add_fact : node -> fact -> t -> t

(* printing *)
(*
val to_string : t -> string
val printer : Format.formatter -> t -> unit
*)
end

(* abstract dataflow lattice signature -------------------------------------- *)
(* The general algorithm works over a generic lattice of abstract "facts".
- facts can be combined (this is the 'meet' operation)
- facts can be compared *)
- facts can be compared:
for `compare x y` the result indicates:
< 0 : x is less than y
= 0 : x equals y
> 0 : x is greater than y
*)
module type FACT =
sig
type t
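A hypothetical FACT instance (not taken from the repository), assuming a set of names as the fact, n-ary union as combine, and ordinary set comparison for compare:

module SetFact = struct
  module S = Set.Make (String)
  type t = S.t
  (* meet over any number of incoming facts *)
  let combine (ds : t list) : t = List.fold_left S.union S.empty ds
  let compare = S.compare
  let to_string (s : t) = "{" ^ String.concat ", " (S.elements s) ^ "}"
end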
|
||||
|
|
@ -88,35 +87,9 @@ module type FACT =
|
|||
TASK: complete the [solve] function, which implements the above algorithm.
|
||||
*)
|
||||
module Make (Fact : FACT) (Graph : DFA_GRAPH with type fact := Fact.t) =
|
||||
(* I used ChatGPT here to help me understand functors and find some helper functions, like "choose, remove, union, and add" *)
|
||||
struct
|
||||
|
||||
let solve (g:Graph.t) : Graph.t =
|
||||
let worklist = Graph.nodes g in
|
||||
|
||||
let rec solve_helper g worklist =
|
||||
if Graph.NodeS.is_empty worklist then g else
|
||||
|
||||
(* choose a node from the worklist *)
|
||||
let current_node = Graph.NodeS.choose worklist in
|
||||
|
||||
(* find the node's predecessors and combine their flow facts *)
|
||||
let preds : Graph.NodeS.t = Graph.preds g current_node in
|
||||
let pred_fact = Graph.NodeS.fold
|
||||
(fun pred acc -> Fact.combine [Graph.out g pred; acc])
|
||||
preds (Fact.combine []) in
|
||||
|
||||
(* apply the flow function to the combined input to find the new output *)
|
||||
let out_fact = Graph.flow g current_node pred_fact in
|
||||
|
||||
(* if the output has changed, update the graph and add the node's successors to the worklist *)
|
||||
let is_zero = Fact.compare out_fact (Graph.out g current_node) in
|
||||
let new_worklist = Graph.NodeS.remove current_node worklist in
|
||||
if is_zero != 0 then let succs = Graph.succs g current_node in
|
||||
solve_helper (Graph.add_fact current_node out_fact g) (Graph.NodeS.union succs new_worklist)
|
||||
else (* it has not changed *)
|
||||
solve_helper g new_worklist
|
||||
in
|
||||
|
||||
let new_g = solve_helper g worklist in new_g
|
||||
failwith "TODO HW6: Solver.solve unimplemented"
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -2,22 +2,15 @@ open Ast
|
|||
open Astlib
|
||||
open Tctxt
|
||||
|
||||
(* This file is from HW5. You are welcome to replace it
|
||||
with your own solution from HW5 or ask the course staff for
|
||||
our version of the file. You do not need to submit this file.
|
||||
|
||||
*)
|
||||
|
||||
(* Error Reporting ---------------------------------------------------------- *)
|
||||
(* NOTE: Use type_error to report error messages for ill-typed programs. *)
|
||||
|
||||
exception TypeError of string
|
||||
|
||||
let type_error (l : 'a node) (err : string) =
|
||||
let type_error (l : 'a node) err =
|
||||
let (_, (s, e), _) = l.loc in
|
||||
raise (TypeError (Printf.sprintf "[%d, %d] %s" s e err))
|
||||
|
||||
let unimpl = "; replace typechecker.ml with your own solution from HW5 or contact course staff for a reference solution"
|
||||
|
||||
(* initial context: G0 ------------------------------------------------------ *)
|
||||
(* The Oat types of the Oat built-in functions *)
|
||||
|
|
@@ -54,12 +47,44 @@ let typ_of_unop : Ast.unop -> Ast.ty * Ast.ty = function
|
|||
(Don't forget about OCaml's 'and' keyword.)
|
||||
*)
|
||||
let rec subtype (c : Tctxt.t) (t1 : Ast.ty) (t2 : Ast.ty) : bool =
|
||||
failwith ("todo: subtype"^unimpl)
|
||||
match t1, t2 with
|
||||
| TInt, TInt -> true
|
||||
| TBool, TBool -> true
|
||||
| TNullRef x, TNullRef y
|
||||
| TRef x, TNullRef y
|
||||
| TRef x, TRef y -> subtype_ref c x y
|
||||
| _, _ -> false
|
||||
|
||||
(* Decides whether H |-r ref1 <: ref2 *)
|
||||
and subtype_ref (c : Tctxt.t) (t1 : Ast.rty) (t2 : Ast.rty) : bool =
|
||||
failwith ("todo: subtype_ref"^unimpl)
|
||||
match t1, t2 with
|
||||
| RString, RString -> true
|
||||
| RArray at1, RArray at2 -> at1 = at2
|
||||
| RFun (ts1, rt1), RFun (ts2, rt2) -> subtype_list c ts2 ts1 && subtype_ret c rt1 rt2
|
||||
| RStruct id1, RStruct id2 -> id1 = id2 || subtype_fields c id1 id2
|
||||
| _, _ -> false
|
||||
|
||||
and subtype_ret (c : Tctxt.t) (t1 : Ast.ret_ty) (t2 : Ast.ret_ty) : bool =
|
||||
match t1, t2 with
|
||||
| RetVoid, RetVoid -> true
|
||||
| RetVal v1, RetVal v2 -> subtype c v1 v2
|
||||
| _, _ -> false
|
||||
|
||||
and subtype_list c l1 l2 : bool =
|
||||
if List.length l1 != List.length l2 then false
|
||||
else List.fold_left2 (fun a x y -> a && subtype c x y) true l1 l2
|
||||
|
||||
(* fields n1 are a subtype of n2 if n2 is a prefix of n1 *)
|
||||
and subtype_fields c n1 n2 : bool =
|
||||
let fields1 = Tctxt.lookup_struct n1 c in
|
||||
let fields2 = Tctxt.lookup_struct n2 c in
|
||||
let rec helper l1 l2 =
|
||||
match (l1, l2) with
|
||||
| _, [] -> true
|
||||
| [], _ -> false
|
||||
| f1::t1, f2::t2 -> f1.fieldName = f2.fieldName && f1.ftyp = f2.ftyp
|
||||
&& helper t1 t2 in
|
||||
helper fields1 fields2
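For intuition about the prefix rule: a struct whose field list extends another struct's field list is a subtype of it (width subtyping). Below is a small, hedged check of that behaviour; the struct names are invented for the example, and it assumes the Ast.field record has exactly the fieldName/ftyp fields used above together with the Tctxt operations that appear later in this file.

(* Illustration only: "Point3" extends "Point", so the prefix check above
   makes TRef (RStruct "Point3") a subtype of TRef (RStruct "Point").
   Struct names here are made up for the example. *)
let _demo_width_subtyping =
  let c = Tctxt.add_struct Tctxt.empty "Point"
      [ { fieldName = "x"; ftyp = TInt }; { fieldName = "y"; ftyp = TInt } ] in
  let c = Tctxt.add_struct c "Point3"
      [ { fieldName = "x"; ftyp = TInt }
      ; { fieldName = "y"; ftyp = TInt }
      ; { fieldName = "z"; ftyp = TBool } ] in
  assert (subtype c (TRef (RStruct "Point3")) (TRef (RStruct "Point")))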
|
||||
|
||||
(* well-formed types -------------------------------------------------------- *)
|
||||
(* Implement a (set of) functions that check that types are well formed according
|
||||
|
|
@@ -77,14 +102,32 @@ and subtype_ref (c : Tctxt.t) (t1 : Ast.rty) (t2 : Ast.rty) : bool =
|
|||
- tc contains the structure definition context
|
||||
*)
|
||||
let rec typecheck_ty (l : 'a Ast.node) (tc : Tctxt.t) (t : Ast.ty) : unit =
|
||||
failwith ("todo: implement typecheck_ty"^unimpl)
|
||||
begin match t with
|
||||
| TBool -> ()
|
||||
| TInt -> ()
|
||||
| TNullRef r
|
||||
| TRef r -> typecheck_ref l tc r
|
||||
end
|
||||
|
||||
and typecheck_ref l tc (r:Ast.rty) : unit =
|
||||
begin match r with
|
||||
| RString -> ()
|
||||
|
||||
| RStruct id ->
|
||||
if Tctxt.lookup_struct_option id tc = None
|
||||
then type_error l "Unbound struct type" else ()
|
||||
|
||||
| RArray t -> typecheck_ty l tc t
|
||||
|
||||
| RFun (tl, rt) -> (typecheck_ret l tc rt); List.iter (typecheck_ty l tc) tl
|
||||
end
|
||||
|
||||
and typecheck_ret l tc (rt:Ast.ret_ty) : unit =
|
||||
begin match (rt:Ast.ret_ty) with
|
||||
| RetVoid -> ()
|
||||
| RetVal t -> typecheck_ty l tc t
|
||||
end
|
||||
|
||||
(* A helper function to determine whether a type allows the null value *)
|
||||
let is_nullable_ty (t : Ast.ty) : bool =
|
||||
match t with
|
||||
| TNullRef _ -> true
|
||||
| _ -> false
|
||||
|
||||
(* typechecking expressions ------------------------------------------------- *)
|
||||
(* Typechecks an expression in the typing context c, returns the type of the
|
||||
|
|
@@ -112,7 +155,121 @@ let is_nullable_ty (t : Ast.ty) : bool =
|
|||
|
||||
*)
|
||||
let rec typecheck_exp (c : Tctxt.t) (e : Ast.exp node) : Ast.ty =
|
||||
failwith ("todo: implement typecheck_exp"^unimpl)
|
||||
match e.elt with
|
||||
| CNull r -> TNullRef r
|
||||
| CBool b -> TBool
|
||||
| CInt i -> TInt
|
||||
| CStr s -> TRef RString
|
||||
|
||||
| Id i ->
|
||||
begin match Tctxt.lookup_option i c with
|
||||
| Some x -> x
|
||||
| None -> type_error e ("Unbound identifier " ^ i)
|
||||
end
|
||||
|
||||
| CArr (t, l) ->
|
||||
typecheck_ty e c t;
|
||||
let types_of = List.map (typecheck_exp c) l in
|
||||
if List.for_all (fun u -> subtype c u t) types_of then TRef (RArray t)
|
||||
else type_error e "Mismatched array type"
|
||||
|
||||
| NewArr(t, e1) ->
|
||||
begin match t with
|
||||
| TBool | TInt | TNullRef _ -> ()
|
||||
| TRef _ -> type_error e "Non-null types cannot be used with default-initialized arrays"
|
||||
end;
|
||||
let size_type = typecheck_exp c e1 in
|
||||
if size_type = TInt then
|
||||
TRef (RArray t)
|
||||
else type_error e "Array size not an int"
|
||||
|
||||
| NewArrInit (t, e1, id, e2) ->
|
||||
typecheck_ty e c t;
|
||||
let size_type = typecheck_exp c e1 in
|
||||
if size_type = TInt then
|
||||
let tc' =
|
||||
if List.exists (fun x -> fst x = id) c.locals
|
||||
then type_error e1 "Cannot redeclare variable"
|
||||
else Tctxt.add_local c id TInt
|
||||
in
|
||||
let t' = typecheck_exp tc' e2 in
|
||||
if subtype c t' t then TRef (RArray t)
|
||||
else type_error e2 "Initializer has incorrect type"
|
||||
else type_error e1 "Array size not an int"
|
||||
|
||||
| Bop (b, l, r) ->
|
||||
let ltyp = typecheck_exp c l in
|
||||
let rtyp = typecheck_exp c r in
|
||||
begin match b with
|
||||
| Eq | Neq -> if (subtype c ltyp rtyp) && (subtype c rtyp ltyp) then TBool else
|
||||
type_error e "== or != used with non type-compatible arguments"
|
||||
| _ ->
|
||||
let (bl, br, bres) = typ_of_binop b in
|
||||
if bl = ltyp then
|
||||
if br = rtyp then bres
|
||||
else type_error r "Incorrect type in binary expression"
|
||||
else type_error l "Incorrect type in binary expression"
|
||||
end
|
||||
|
||||
| Uop (u, e) ->
|
||||
let t = typecheck_exp c e in
|
||||
let (us, ures) = typ_of_unop u in
|
||||
if us = t then ures else type_error e "Incorrect type for unary operator"
|
||||
|
||||
| Index (e1, e2) ->
|
||||
let arr_t = typecheck_exp c e1 in
|
||||
let ind_t = typecheck_exp c e2 in
|
||||
if ind_t = TInt then
|
||||
match arr_t with
|
||||
| TRef (RArray t) -> t
|
||||
| _ -> type_error e1 ("Tried to compute index into type "
|
||||
^ (Astlib.string_of_ty arr_t))
|
||||
else type_error e2 "Index of array index operator not an int"
|
||||
|
||||
| Proj (s, id) ->
|
||||
let str_t = typecheck_exp c s in
|
||||
(match str_t with
|
||||
| TRef (RStruct sn) ->
|
||||
(match Tctxt.lookup_field_option sn id c with
|
||||
| None -> type_error e (id ^ " not member of struct " ^ sn)
|
||||
| Some t -> t)
|
||||
| _ -> type_error s "Cannot project from non-struct")
|
||||
|
||||
| CStruct (id, l) ->
|
||||
(match Tctxt.lookup_struct_option id c with
|
||||
| None -> type_error e (id ^ "not a struct type")
|
||||
| Some x ->
|
||||
let tc_field (id, node) = id, typecheck_exp c node in
|
||||
let field_types = List.map tc_field l in
|
||||
let struct_names = List.sort compare (List.map (fun x -> x.fieldName) x) in
|
||||
let local_names = List.sort compare (List.map fst field_types) in
|
||||
if struct_names <> local_names
|
||||
then type_error e "Mismatch of fields between struct definition and local declaration";
|
||||
List.iter (fun (id, ft) ->
|
||||
let t = (List.find (fun i -> i.fieldName = id) x).ftyp in
|
||||
if not (subtype c ft t) then type_error e (id ^ " field of struct incorrect")
|
||||
else ()) field_types;
|
||||
TRef (RStruct id))
|
||||
|
||||
| Length l ->
|
||||
let t = typecheck_exp c l in
|
||||
(match t with
|
||||
| TRef (RArray t) -> TInt
|
||||
| _ -> type_error l "Cannot take length of non-array")
|
||||
|
||||
| Call (f, args) ->
|
||||
let argtyps = List.map (typecheck_exp c) args in
|
||||
match (typecheck_exp c f) with
|
||||
| TRef (RFun (l, RetVal r)) ->
|
||||
if List.length l <> List.length argtyps
|
||||
then type_error e "Incorrect number of arguments"
|
||||
else List.iter2
|
||||
(fun arg l ->
|
||||
if not (subtype c arg l)
|
||||
then type_error e "Incorrect type of argument")
|
||||
argtyps l;
|
||||
r
|
||||
| _ -> type_error e "Need function argument for function call"
|
||||
|
||||
(* statements --------------------------------------------------------------- *)
|
||||
|
||||
|
|
@@ -152,7 +309,117 @@ let rec typecheck_exp (c : Tctxt.t) (e : Ast.exp node) : Ast.ty =
|
|||
block typecheck rules.
|
||||
*)
|
||||
let rec typecheck_stmt (tc : Tctxt.t) (s:Ast.stmt node) (to_ret:ret_ty) : Tctxt.t * bool =
|
||||
failwith ("todo: implement typecheck_stmt"^unimpl)
|
||||
match s.elt with
|
||||
| Assn (e1, e2) ->
|
||||
let () = begin match e1.elt with
|
||||
| Id x ->
|
||||
begin match Tctxt.lookup_local_option x tc with
|
||||
| Some _ -> ()
|
||||
| None ->
|
||||
begin match Tctxt.lookup_global_option x tc with
|
||||
| Some (TRef (RFun _)) ->
|
||||
type_error s ("cannot assign to global function " ^ x)
|
||||
| _ -> ()
|
||||
end
|
||||
end
|
||||
| _ -> ()
|
||||
end
|
||||
in
|
||||
let assn_to = typecheck_exp tc e1 in
|
||||
let assn_from = typecheck_exp tc e2 in
|
||||
if subtype tc assn_from assn_to
|
||||
then tc, false
|
||||
else type_error s "Mismatched types in assignment"
|
||||
|
||||
| Decl (id, exp) ->
|
||||
let exp_type = typecheck_exp tc exp in
|
||||
if List.exists (fun x -> fst x = id) tc.locals
|
||||
then type_error s "Cannot redeclare variable"
|
||||
else Tctxt.add_local tc id exp_type, false
|
||||
|
||||
| Ret r ->
|
||||
(match r, to_ret with
|
||||
| None, RetVoid -> tc, true
|
||||
| Some r, RetVal to_ret ->
|
||||
let t = typecheck_exp tc r in
|
||||
if subtype tc t to_ret then tc, true
|
||||
else type_error s "Returned incorrect type"
|
||||
| None, RetVal to_ret -> type_error s "Returned void in non-void function"
|
||||
| Some r, RetVoid -> type_error s "Returned non-void in void function")
|
||||
|
||||
| SCall (f, args) ->
|
||||
let argtyps = List.map (typecheck_exp tc) args in
|
||||
(match (typecheck_exp tc f) with
|
||||
| TNullRef (RFun (l, RetVoid)) | TRef (RFun (l, RetVoid)) ->
|
||||
if List.length l <> List.length argtyps
|
||||
then type_error s "Incorrect number of arguments"
|
||||
else List.iter2
|
||||
(fun arg l -> if not (subtype tc arg l)
|
||||
then type_error s "Incorrect type of argument")
|
||||
argtyps l;
|
||||
tc, false
|
||||
| _ -> type_error s "Need function argument for function call")
|
||||
|
||||
| If (e, b1, b2) ->
|
||||
let guard_type = typecheck_exp tc e in
|
||||
if guard_type <> TBool then type_error e "Incorrect type for guard"
|
||||
else
|
||||
let lft_ret = typecheck_block tc b1 to_ret in
|
||||
let rgt_ret = typecheck_block tc b2 to_ret in
|
||||
tc, lft_ret && rgt_ret
|
||||
|
||||
| Cast (r, id, exp, b1, b2) ->
|
||||
let exp_type = typecheck_exp tc exp in
|
||||
begin match exp_type with
|
||||
| TNullRef r' ->
|
||||
if subtype_ref tc r' r then
|
||||
let lft_ret = typecheck_block (Tctxt.add_local tc id (TRef r)) b1 to_ret in
|
||||
let rgt_ret = typecheck_block tc b2 to_ret in
|
||||
tc, lft_ret && rgt_ret
|
||||
else
|
||||
type_error exp "if? expression not a subtype of declared type"
|
||||
| _ -> type_error exp "if? expression has non-? type"
|
||||
end
|
||||
|
||||
| While (b, bl) ->
|
||||
let guard_type = typecheck_exp tc b in
|
||||
if guard_type <> TBool then type_error b "Incorrect type for guard"
|
||||
else
|
||||
let _ = typecheck_block tc bl to_ret in
|
||||
tc, false
|
||||
|
||||
| For (vs, guard, s, b) ->
|
||||
let updated_context =
|
||||
List.fold_left (fun c (id, e) ->
|
||||
let t = typecheck_exp c e in
|
||||
Tctxt.add_local c id t) tc vs in
|
||||
let _ =
|
||||
begin match guard with
|
||||
| None -> ()
|
||||
| Some b ->
|
||||
if TBool <> typecheck_exp updated_context b
|
||||
then type_error b "Incorrect type for guard"
|
||||
else ()
|
||||
end in
|
||||
let _ =
|
||||
begin match s with
|
||||
| None -> ()
|
||||
| Some s ->
|
||||
let (nc, rt) = typecheck_stmt updated_context s to_ret in
|
||||
if rt then type_error s "Cannot return in for loop increment"
|
||||
end in
|
||||
let _ = typecheck_block updated_context b to_ret in
|
||||
tc, false
|
||||
|
||||
and typecheck_block (tc : Tctxt.t) (b : Ast.block) (to_ret : Ast.ret_ty) : bool =
|
||||
match b with
|
||||
| [] -> false
|
||||
| [h] ->
|
||||
let c, r = typecheck_stmt tc h to_ret in r
|
||||
| h1 :: h2 :: t ->
|
||||
let new_context, r = typecheck_stmt tc h1 to_ret in
|
||||
if r then type_error h2 "Dead code"
|
||||
else typecheck_block new_context (h2 :: t) to_ret
|
||||
|
||||
|
||||
(* struct type declarations ------------------------------------------------- *)
|
||||
|
|
@@ -161,12 +428,12 @@ let rec typecheck_stmt (tc : Tctxt.t) (s:Ast.stmt node) (to_ret:ret_ty) : Tctxt.
|
|||
*)
|
||||
|
||||
(* Helper function to look for duplicate field names *)
|
||||
let rec check_dups (fs : field list) =
|
||||
let rec check_dups fs =
|
||||
match fs with
|
||||
| [] -> false
|
||||
| h :: t -> (List.exists (fun x -> x.fieldName = h.fieldName) t) || check_dups t
|
||||
|
||||
let typecheck_tdecl (tc : Tctxt.t) (id : id) (fs : field list) (l : 'a Ast.node) : unit =
|
||||
let typecheck_tdecl (tc : Tctxt.t) id fs (l : 'a Ast.node) : unit =
|
||||
if check_dups fs
|
||||
then type_error l ("Repeated fields in " ^ id)
|
||||
else List.iter (fun f -> typecheck_ty l tc f.ftyp) fs
|
||||
|
|
@@ -180,7 +447,14 @@ let typecheck_tdecl (tc : Tctxt.t) (id : id) (fs : field list) (l : 'a Ast.node
|
|||
- checks that the function actually returns
|
||||
*)
|
||||
let typecheck_fdecl (tc : Tctxt.t) (f : Ast.fdecl) (l : 'a Ast.node) : unit =
|
||||
failwith ("todo: typecheck_fdecl"^unimpl)
|
||||
let rec has_dups = function
|
||||
| [] -> false
|
||||
| ((_,x)::xs) -> List.exists (fun (_,y) -> x = y) xs
|
||||
in
|
||||
if has_dups f.args then type_error l "Duplicate parameter names";
|
||||
let updated = List.fold_left (fun c (t, i) -> Tctxt.add_local c i t) tc f.args in
|
||||
let returned = typecheck_block updated f.body f.frtyp in
|
||||
if not returned then type_error l "Need return statement"
|
||||
|
||||
(* creating the typchecking context ----------------------------------------- *)
|
||||
|
||||
|
|
@@ -211,13 +485,37 @@
|
|||
*)
|
||||
|
||||
let create_struct_ctxt (p:Ast.prog) : Tctxt.t =
|
||||
failwith ("todo: create_struct_ctxt"^unimpl)
|
||||
List.fold_left (fun c d ->
|
||||
match d with
|
||||
| Gtdecl ({elt=(id, fs)} as l) ->
|
||||
if List.exists (fun x -> id = fst x) c.structs then
|
||||
type_error l ("Redeclaration of struct " ^ id)
|
||||
else Tctxt.add_struct c id fs
|
||||
| _ -> c) Tctxt.empty p
|
||||
|
||||
let create_function_ctxt (tc:Tctxt.t) (p:Ast.prog) : Tctxt.t =
|
||||
failwith ("todo: create_function_ctxt"^unimpl)
|
||||
let builtins_context =
|
||||
List.fold_left
|
||||
(fun c (id, (args, ret)) -> Tctxt.add_global c id (TRef (RFun(args,ret))))
|
||||
tc builtins
|
||||
in
|
||||
List.fold_left (fun c d ->
|
||||
match d with
|
||||
| Gfdecl ({elt=f} as l) ->
|
||||
if List.exists (fun x -> fst x = f.fname) c.globals
|
||||
then type_error l ("Redeclaration of " ^ f.fname)
|
||||
else Tctxt.add_global c f.fname (TRef (RFun(List.map fst f.args, f.frtyp)))
|
||||
| _ -> c) builtins_context p
|
||||
|
||||
let create_global_ctxt (tc:Tctxt.t) (p:Ast.prog) : Tctxt.t =
|
||||
failwith ("todo: create_function_ctxt"^unimpl)
|
||||
List.fold_left (fun c d ->
|
||||
match d with
|
||||
| Gvdecl ({elt=decl} as l) ->
|
||||
let e = typecheck_exp c decl.init in
|
||||
if List.exists (fun x -> fst x = decl.name) c.globals
|
||||
then type_error l ("Redeclaration of " ^ decl.name)
|
||||
else Tctxt.add_global c decl.name e
|
||||
| _ -> c) tc p
|
||||
|
||||
|
||||
(* This function implements the |- prog and the H ; G |- prog
|
||||
|
|
|
|||
703
hw6/doc/_static/alabaster.css
vendored
|
|
@@ -1,703 +0,0 @@
|
|||
@import url("basic.css");
|
||||
|
||||
/* -- page layout ----------------------------------------------------------- */
|
||||
|
||||
body {
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
font-size: 17px;
|
||||
background-color: #fff;
|
||||
color: #000;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
|
||||
div.document {
|
||||
width: 85%;
|
||||
margin: 30px auto 0 auto;
|
||||
}
|
||||
|
||||
div.documentwrapper {
|
||||
float: left;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
div.bodywrapper {
|
||||
margin: 0 0 0 220px;
|
||||
}
|
||||
|
||||
div.sphinxsidebar {
|
||||
width: 220px;
|
||||
font-size: 14px;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
hr {
|
||||
border: 1px solid #B1B4B6;
|
||||
}
|
||||
|
||||
div.body {
|
||||
background-color: #fff;
|
||||
color: #3E4349;
|
||||
padding: 0 30px 0 30px;
|
||||
}
|
||||
|
||||
div.body > .section {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
div.footer {
|
||||
width: 85%;
|
||||
margin: 20px auto 30px auto;
|
||||
font-size: 14px;
|
||||
color: #888;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
div.footer a {
|
||||
color: #888;
|
||||
}
|
||||
|
||||
p.caption {
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
}
|
||||
|
||||
|
||||
div.relations {
|
||||
display: none;
|
||||
}
|
||||
|
||||
|
||||
div.sphinxsidebar a {
|
||||
color: #444;
|
||||
text-decoration: none;
|
||||
border-bottom: 1px dotted #999;
|
||||
}
|
||||
|
||||
div.sphinxsidebar a:hover {
|
||||
border-bottom: 1px solid #999;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper {
|
||||
padding: 18px 10px;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper p.logo {
|
||||
padding: 0;
|
||||
margin: -10px 0 0 0px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper h1.logo {
|
||||
margin-top: -10px;
|
||||
text-align: center;
|
||||
margin-bottom: 5px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper h1.logo-name {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper p.blurb {
|
||||
margin-top: 0;
|
||||
font-style: normal;
|
||||
}
|
||||
|
||||
div.sphinxsidebar h3,
|
||||
div.sphinxsidebar h4 {
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
color: #444;
|
||||
font-size: 24px;
|
||||
font-weight: normal;
|
||||
margin: 0 0 5px 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
div.sphinxsidebar h4 {
|
||||
font-size: 20px;
|
||||
}
|
||||
|
||||
div.sphinxsidebar h3 a {
|
||||
color: #444;
|
||||
}
|
||||
|
||||
div.sphinxsidebar p.logo a,
|
||||
div.sphinxsidebar h3 a,
|
||||
div.sphinxsidebar p.logo a:hover,
|
||||
div.sphinxsidebar h3 a:hover {
|
||||
border: none;
|
||||
}
|
||||
|
||||
div.sphinxsidebar p {
|
||||
color: #555;
|
||||
margin: 10px 0;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul {
|
||||
margin: 10px 0;
|
||||
padding: 0;
|
||||
color: #000;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul li.toctree-l1 > a {
|
||||
font-size: 120%;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul li.toctree-l2 > a {
|
||||
font-size: 110%;
|
||||
}
|
||||
|
||||
div.sphinxsidebar input {
|
||||
border: 1px solid #CCC;
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
div.sphinxsidebar hr {
|
||||
border: none;
|
||||
height: 1px;
|
||||
color: #AAA;
|
||||
background: #AAA;
|
||||
|
||||
text-align: left;
|
||||
margin-left: 0;
|
||||
width: 50%;
|
||||
}
|
||||
|
||||
div.sphinxsidebar .badge {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
div.sphinxsidebar .badge:hover {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
/* To address an issue with donation coming after search */
|
||||
div.sphinxsidebar h3.donation {
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
/* -- body styles ----------------------------------------------------------- */
|
||||
|
||||
a {
|
||||
color: #004B6B;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: #6D4100;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
div.body h1,
|
||||
div.body h2,
|
||||
div.body h3,
|
||||
div.body h4,
|
||||
div.body h5,
|
||||
div.body h6 {
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
font-weight: normal;
|
||||
margin: 30px 0px 10px 0px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; }
|
||||
div.body h2 { font-size: 180%; }
|
||||
div.body h3 { font-size: 150%; }
|
||||
div.body h4 { font-size: 130%; }
|
||||
div.body h5 { font-size: 100%; }
|
||||
div.body h6 { font-size: 100%; }
|
||||
|
||||
a.headerlink {
|
||||
color: #DDD;
|
||||
padding: 0 4px;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a.headerlink:hover {
|
||||
color: #444;
|
||||
background: #EAEAEA;
|
||||
}
|
||||
|
||||
div.body p, div.body dd, div.body li {
|
||||
line-height: 1.4em;
|
||||
}
|
||||
|
||||
div.admonition {
|
||||
margin: 20px 0px;
|
||||
padding: 10px 30px;
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.admonition tt.xref, div.admonition code.xref, div.admonition a tt {
|
||||
background-color: #FBFBFB;
|
||||
border-bottom: 1px solid #fafafa;
|
||||
}
|
||||
|
||||
div.admonition p.admonition-title {
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
font-weight: normal;
|
||||
font-size: 24px;
|
||||
margin: 0 0 10px 0;
|
||||
padding: 0;
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
div.admonition p.last {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.highlight {
|
||||
background-color: #fff;
|
||||
}
|
||||
|
||||
dt:target, .highlight {
|
||||
background: #FAF3E8;
|
||||
}
|
||||
|
||||
div.warning {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
}
|
||||
|
||||
div.danger {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
-moz-box-shadow: 2px 2px 4px #D52C2C;
|
||||
-webkit-box-shadow: 2px 2px 4px #D52C2C;
|
||||
box-shadow: 2px 2px 4px #D52C2C;
|
||||
}
|
||||
|
||||
div.error {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
-moz-box-shadow: 2px 2px 4px #D52C2C;
|
||||
-webkit-box-shadow: 2px 2px 4px #D52C2C;
|
||||
box-shadow: 2px 2px 4px #D52C2C;
|
||||
}
|
||||
|
||||
div.caution {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
}
|
||||
|
||||
div.attention {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
}
|
||||
|
||||
div.important {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.note {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.tip {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.hint {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.seealso {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.topic {
|
||||
background-color: #EEE;
|
||||
}
|
||||
|
||||
p.admonition-title {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
p.admonition-title:after {
|
||||
content: ":";
|
||||
}
|
||||
|
||||
pre, tt, code {
|
||||
font-family: monospace,serif;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
.hll {
|
||||
background-color: #FFC;
|
||||
margin: 0 -12px;
|
||||
padding: 0 12px;
|
||||
display: block;
|
||||
}
|
||||
|
||||
img.screenshot {
|
||||
}
|
||||
|
||||
tt.descname, tt.descclassname, code.descname, code.descclassname {
|
||||
font-size: 0.95em;
|
||||
}
|
||||
|
||||
tt.descname, code.descname {
|
||||
padding-right: 0.08em;
|
||||
}
|
||||
|
||||
img.screenshot {
|
||||
-moz-box-shadow: 2px 2px 4px #EEE;
|
||||
-webkit-box-shadow: 2px 2px 4px #EEE;
|
||||
box-shadow: 2px 2px 4px #EEE;
|
||||
}
|
||||
|
||||
table.docutils {
|
||||
border: 1px solid #888;
|
||||
-moz-box-shadow: 2px 2px 4px #EEE;
|
||||
-webkit-box-shadow: 2px 2px 4px #EEE;
|
||||
box-shadow: 2px 2px 4px #EEE;
|
||||
}
|
||||
|
||||
table.docutils td, table.docutils th {
|
||||
border: 1px solid #888;
|
||||
padding: 0.25em 0.7em;
|
||||
}
|
||||
|
||||
table.field-list, table.footnote {
|
||||
border: none;
|
||||
-moz-box-shadow: none;
|
||||
-webkit-box-shadow: none;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
table.footnote {
|
||||
margin: 15px 0;
|
||||
width: 100%;
|
||||
border: 1px solid #EEE;
|
||||
background: #FDFDFD;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
table.footnote + table.footnote {
|
||||
margin-top: -15px;
|
||||
border-top: none;
|
||||
}
|
||||
|
||||
table.field-list th {
|
||||
padding: 0 0.8em 0 0;
|
||||
}
|
||||
|
||||
table.field-list td {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
table.field-list p {
|
||||
margin-bottom: 0.8em;
|
||||
}
|
||||
|
||||
/* Cloned from
|
||||
* https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68
|
||||
*/
|
||||
.field-name {
|
||||
-moz-hyphens: manual;
|
||||
-ms-hyphens: manual;
|
||||
-webkit-hyphens: manual;
|
||||
hyphens: manual;
|
||||
}
|
||||
|
||||
table.footnote td.label {
|
||||
width: .1px;
|
||||
padding: 0.3em 0 0.3em 0.5em;
|
||||
}
|
||||
|
||||
table.footnote td {
|
||||
padding: 0.3em 0.5em;
|
||||
}
|
||||
|
||||
dl {
|
||||
margin-left: 0;
|
||||
margin-right: 0;
|
||||
margin-top: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
dl dd {
|
||||
margin-left: 30px;
|
||||
}
|
||||
|
||||
blockquote {
|
||||
margin: 0 0 0 30px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
ul, ol {
|
||||
/* Matches the 30px from the narrow-screen "li > ul" selector below */
|
||||
margin: 10px 0 10px 30px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
pre {
|
||||
background: #EEE;
|
||||
padding: 7px 30px;
|
||||
margin: 15px 0px;
|
||||
line-height: 1.3em;
|
||||
}
|
||||
|
||||
div.viewcode-block:target {
|
||||
background: #ffd;
|
||||
}
|
||||
|
||||
dl pre, blockquote pre, li pre {
|
||||
margin-left: 0;
|
||||
padding-left: 30px;
|
||||
}
|
||||
|
||||
tt, code {
|
||||
background-color: #ecf0f3;
|
||||
color: #222;
|
||||
/* padding: 1px 2px; */
|
||||
}
|
||||
|
||||
tt.xref, code.xref, a tt {
|
||||
background-color: #FBFBFB;
|
||||
border-bottom: 1px solid #fff;
|
||||
}
|
||||
|
||||
a.reference {
|
||||
text-decoration: none;
|
||||
border-bottom: 1px dotted #004B6B;
|
||||
}
|
||||
|
||||
/* Don't put an underline on images */
|
||||
a.image-reference, a.image-reference:hover {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
a.reference:hover {
|
||||
border-bottom: 1px solid #6D4100;
|
||||
}
|
||||
|
||||
a.footnote-reference {
|
||||
text-decoration: none;
|
||||
font-size: 0.7em;
|
||||
vertical-align: top;
|
||||
border-bottom: 1px dotted #004B6B;
|
||||
}
|
||||
|
||||
a.footnote-reference:hover {
|
||||
border-bottom: 1px solid #6D4100;
|
||||
}
|
||||
|
||||
a:hover tt, a:hover code {
|
||||
background: #EEE;
|
||||
}
|
||||
|
||||
|
||||
@media screen and (max-width: 870px) {
|
||||
|
||||
div.sphinxsidebar {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.document {
|
||||
width: 100%;
|
||||
|
||||
}
|
||||
|
||||
div.documentwrapper {
|
||||
margin-left: 0;
|
||||
margin-top: 0;
|
||||
margin-right: 0;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.bodywrapper {
|
||||
margin-top: 0;
|
||||
margin-right: 0;
|
||||
margin-bottom: 0;
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
ul {
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
li > ul {
|
||||
/* Matches the 30px from the "ul, ol" selector above */
|
||||
margin-left: 30px;
|
||||
}
|
||||
|
||||
.document {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.footer {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.bodywrapper {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.footer {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.github {
|
||||
display: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
@media screen and (max-width: 875px) {
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 20px 30px;
|
||||
}
|
||||
|
||||
div.documentwrapper {
|
||||
float: none;
|
||||
background: #fff;
|
||||
}
|
||||
|
||||
div.sphinxsidebar {
|
||||
display: block;
|
||||
float: none;
|
||||
width: 102.5%;
|
||||
margin: 50px -30px -20px -30px;
|
||||
padding: 10px 20px;
|
||||
background: #333;
|
||||
color: #FFF;
|
||||
}
|
||||
|
||||
div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p,
|
||||
div.sphinxsidebar h3 a {
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
div.sphinxsidebar a {
|
||||
color: #AAA;
|
||||
}
|
||||
|
||||
div.sphinxsidebar p.logo {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.document {
|
||||
width: 100%;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
div.footer {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.bodywrapper {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
div.body {
|
||||
min-height: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.rtd_doc_footer {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.document {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.footer {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.footer {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.github {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* misc. */
|
||||
|
||||
.revsys-inline {
|
||||
display: none!important;
|
||||
}
|
||||
|
||||
/* Make nested-list/multi-paragraph items look better in Releases changelog
|
||||
* pages. Without this, docutils' magical list fuckery causes inconsistent
|
||||
* formatting between different release sub-lists.
|
||||
*/
|
||||
div#changelog > div.section > ul > li > p:only-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
/* Hide fugly table cell borders in ..bibliography:: directive output */
|
||||
table.docutils.citation, table.docutils.citation td, table.docutils.citation th {
|
||||
border: none;
|
||||
/* Below needed in some edge cases; if not applied, bottom shadows appear */
|
||||
-moz-box-shadow: none;
|
||||
-webkit-box-shadow: none;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
|
||||
/* relbar */
|
||||
|
||||
.related {
|
||||
line-height: 30px;
|
||||
width: 100%;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.related.top {
|
||||
border-bottom: 1px solid #EEE;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.related.bottom {
|
||||
border-top: 1px solid #EEE;
|
||||
}
|
||||
|
||||
.related ul {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.related li {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
nav#rellinks {
|
||||
float: right;
|
||||
}
|
||||
|
||||
nav#rellinks li+li:before {
|
||||
content: "|";
|
||||
}
|
||||
|
||||
nav#breadcrumbs li+li:before {
|
||||
content: "\00BB";
|
||||
}
|
||||
|
||||
/* Hide certain items when printing */
|
||||
@media print {
|
||||
div.related {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
925
hw6/doc/_static/basic.css
vendored
|
|
@@ -1,925 +0,0 @@
|
|||
/*
|
||||
* basic.css
|
||||
* ~~~~~~~~~
|
||||
*
|
||||
* Sphinx stylesheet -- basic theme.
|
||||
*
|
||||
* :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
|
||||
* :license: BSD, see LICENSE for details.
|
||||
*
|
||||
*/
|
||||
|
||||
/* -- main layout ----------------------------------------------------------- */
|
||||
|
||||
div.clearer {
|
||||
clear: both;
|
||||
}
|
||||
|
||||
div.section::after {
|
||||
display: block;
|
||||
content: '';
|
||||
clear: left;
|
||||
}
|
||||
|
||||
/* -- relbar ---------------------------------------------------------------- */
|
||||
|
||||
div.related {
|
||||
width: 100%;
|
||||
font-size: 90%;
|
||||
}
|
||||
|
||||
div.related h3 {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.related ul {
|
||||
margin: 0;
|
||||
padding: 0 0 0 10px;
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
div.related li {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
div.related li.right {
|
||||
float: right;
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
/* -- sidebar --------------------------------------------------------------- */
|
||||
|
||||
div.sphinxsidebarwrapper {
|
||||
padding: 10px 5px 0 10px;
|
||||
}
|
||||
|
||||
div.sphinxsidebar {
|
||||
float: left;
|
||||
width: 230px;
|
||||
margin-left: -100%;
|
||||
font-size: 90%;
|
||||
word-wrap: break-word;
|
||||
overflow-wrap : break-word;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul {
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul ul,
|
||||
div.sphinxsidebar ul.want-points {
|
||||
margin-left: 20px;
|
||||
list-style: square;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul ul {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.sphinxsidebar form {
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
div.sphinxsidebar input {
|
||||
border: 1px solid #98dbcc;
|
||||
font-family: sans-serif;
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
div.sphinxsidebar #searchbox form.search {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
div.sphinxsidebar #searchbox input[type="text"] {
|
||||
float: left;
|
||||
width: 80%;
|
||||
padding: 0.25em;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
div.sphinxsidebar #searchbox input[type="submit"] {
|
||||
float: left;
|
||||
width: 20%;
|
||||
border-left: none;
|
||||
padding: 0.25em;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
|
||||
img {
|
||||
border: 0;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
/* -- search page ----------------------------------------------------------- */
|
||||
|
||||
ul.search {
|
||||
margin: 10px 0 0 20px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
ul.search li {
|
||||
padding: 5px 0 5px 20px;
|
||||
background-image: url(file.png);
|
||||
background-repeat: no-repeat;
|
||||
background-position: 0 7px;
|
||||
}
|
||||
|
||||
ul.search li a {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
ul.search li p.context {
|
||||
color: #888;
|
||||
margin: 2px 0 0 30px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
ul.keywordmatches li.goodmatch a {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
/* -- index page ------------------------------------------------------------ */
|
||||
|
||||
table.contentstable {
|
||||
width: 90%;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
table.contentstable p.biglink {
|
||||
line-height: 150%;
|
||||
}
|
||||
|
||||
a.biglink {
|
||||
font-size: 1.3em;
|
||||
}
|
||||
|
||||
span.linkdescr {
|
||||
font-style: italic;
|
||||
padding-top: 5px;
|
||||
font-size: 90%;
|
||||
}
|
||||
|
||||
/* -- general index --------------------------------------------------------- */
|
||||
|
||||
table.indextable {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
table.indextable td {
|
||||
text-align: left;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
table.indextable ul {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
list-style-type: none;
|
||||
}
|
||||
|
||||
table.indextable > tbody > tr > td > ul {
|
||||
padding-left: 0em;
|
||||
}
|
||||
|
||||
table.indextable tr.pcap {
|
||||
height: 10px;
|
||||
}
|
||||
|
||||
table.indextable tr.cap {
|
||||
margin-top: 10px;
|
||||
background-color: #f2f2f2;
|
||||
}
|
||||
|
||||
img.toggler {
|
||||
margin-right: 3px;
|
||||
margin-top: 3px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
div.modindex-jumpbox {
|
||||
border-top: 1px solid #ddd;
|
||||
border-bottom: 1px solid #ddd;
|
||||
margin: 1em 0 1em 0;
|
||||
padding: 0.4em;
|
||||
}
|
||||
|
||||
div.genindex-jumpbox {
|
||||
border-top: 1px solid #ddd;
|
||||
border-bottom: 1px solid #ddd;
|
||||
margin: 1em 0 1em 0;
|
||||
padding: 0.4em;
|
||||
}
|
||||
|
||||
/* -- domain module index --------------------------------------------------- */
|
||||
|
||||
table.modindextable td {
|
||||
padding: 2px;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
/* -- general body styles --------------------------------------------------- */
|
||||
|
||||
div.body {
|
||||
min-width: 360px;
|
||||
max-width: auto;
|
||||
}
|
||||
|
||||
div.body p, div.body dd, div.body li, div.body blockquote {
|
||||
-moz-hyphens: auto;
|
||||
-ms-hyphens: auto;
|
||||
-webkit-hyphens: auto;
|
||||
hyphens: auto;
|
||||
}
|
||||
|
||||
a.headerlink {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
a:visited {
|
||||
color: #551A8B;
|
||||
}
|
||||
|
||||
h1:hover > a.headerlink,
|
||||
h2:hover > a.headerlink,
|
||||
h3:hover > a.headerlink,
|
||||
h4:hover > a.headerlink,
|
||||
h5:hover > a.headerlink,
|
||||
h6:hover > a.headerlink,
|
||||
dt:hover > a.headerlink,
|
||||
caption:hover > a.headerlink,
|
||||
p.caption:hover > a.headerlink,
|
||||
div.code-block-caption:hover > a.headerlink {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
div.body p.caption {
|
||||
text-align: inherit;
|
||||
}
|
||||
|
||||
div.body td {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.first {
|
||||
margin-top: 0 !important;
|
||||
}
|
||||
|
||||
p.rubric {
|
||||
margin-top: 30px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
img.align-left, figure.align-left, .figure.align-left, object.align-left {
|
||||
clear: left;
|
||||
float: left;
|
||||
margin-right: 1em;
|
||||
}
|
||||
|
||||
img.align-right, figure.align-right, .figure.align-right, object.align-right {
|
||||
clear: right;
|
||||
float: right;
|
||||
margin-left: 1em;
|
||||
}
|
||||
|
||||
img.align-center, figure.align-center, .figure.align-center, object.align-center {
|
||||
display: block;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
img.align-default, figure.align-default, .figure.align-default {
|
||||
display: block;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
.align-left {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.align-center {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.align-default {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.align-right {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
/* -- sidebars -------------------------------------------------------------- */
|
||||
|
||||
div.sidebar,
|
||||
aside.sidebar {
|
||||
margin: 0 0 0.5em 1em;
|
||||
border: 1px solid #ddb;
|
||||
padding: 7px;
|
||||
background-color: #ffe;
|
||||
width: 40%;
|
||||
float: right;
|
||||
clear: right;
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
p.sidebar-title {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
nav.contents,
|
||||
aside.topic,
|
||||
div.admonition, div.topic, blockquote {
|
||||
clear: left;
|
||||
}
|
||||
|
||||
/* -- topics ---------------------------------------------------------------- */
|
||||
|
||||
nav.contents,
|
||||
aside.topic,
|
||||
div.topic {
|
||||
border: 1px solid #ccc;
|
||||
padding: 7px;
|
||||
margin: 10px 0 10px 0;
|
||||
}
|
||||
|
||||
p.topic-title {
|
||||
font-size: 1.1em;
|
||||
font-weight: bold;
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
/* -- admonitions ----------------------------------------------------------- */
|
||||
|
||||
div.admonition {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
padding: 7px;
|
||||
}
|
||||
|
||||
div.admonition dt {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
p.admonition-title {
|
||||
margin: 0px 10px 5px 0px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
div.body p.centered {
|
||||
text-align: center;
|
||||
margin-top: 25px;
|
||||
}
|
||||
|
||||
/* -- content of sidebars/topics/admonitions -------------------------------- */
|
||||
|
||||
div.sidebar > :last-child,
|
||||
aside.sidebar > :last-child,
|
||||
nav.contents > :last-child,
|
||||
aside.topic > :last-child,
|
||||
div.topic > :last-child,
|
||||
div.admonition > :last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.sidebar::after,
|
||||
aside.sidebar::after,
|
||||
nav.contents::after,
|
||||
aside.topic::after,
|
||||
div.topic::after,
|
||||
div.admonition::after,
|
||||
blockquote::after {
|
||||
display: block;
|
||||
content: '';
|
||||
clear: both;
|
||||
}
|
||||
|
||||
/* -- tables ---------------------------------------------------------------- */
|
||||
|
||||
table.docutils {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
border: 0;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
table.align-center {
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
table.align-default {
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
table caption span.caption-number {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
table caption span.caption-text {
|
||||
}
|
||||
|
||||
table.docutils td, table.docutils th {
|
||||
padding: 1px 8px 1px 5px;
|
||||
border-top: 0;
|
||||
border-left: 0;
|
||||
border-right: 0;
|
||||
border-bottom: 1px solid #aaa;
|
||||
}
|
||||
|
||||
th {
|
||||
text-align: left;
|
||||
padding-right: 5px;
|
||||
}
|
||||
|
||||
table.citation {
|
||||
border-left: solid 1px gray;
|
||||
margin-left: 1px;
|
||||
}
|
||||
|
||||
table.citation td {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
th > :first-child,
|
||||
td > :first-child {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
th > :last-child,
|
||||
td > :last-child {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
/* -- figures --------------------------------------------------------------- */
|
||||
|
||||
div.figure, figure {
|
||||
margin: 0.5em;
|
||||
padding: 0.5em;
|
||||
}
|
||||
|
||||
div.figure p.caption, figcaption {
|
||||
padding: 0.3em;
|
||||
}
|
||||
|
||||
div.figure p.caption span.caption-number,
|
||||
figcaption span.caption-number {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
div.figure p.caption span.caption-text,
|
||||
figcaption span.caption-text {
|
||||
}
|
||||
|
||||
/* -- field list styles ----------------------------------------------------- */
|
||||
|
||||
table.field-list td, table.field-list th {
|
||||
border: 0 !important;
|
||||
}
|
||||
|
||||
.field-list ul {
|
||||
margin: 0;
|
||||
padding-left: 1em;
|
||||
}
|
||||
|
||||
.field-list p {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.field-name {
|
||||
-moz-hyphens: manual;
|
||||
-ms-hyphens: manual;
|
||||
-webkit-hyphens: manual;
|
||||
hyphens: manual;
|
||||
}
|
||||
|
||||
/* -- hlist styles ---------------------------------------------------------- */
|
||||
|
||||
table.hlist {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
table.hlist td {
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
/* -- object description styles --------------------------------------------- */
|
||||
|
||||
.sig {
|
||||
font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
|
||||
}
|
||||
|
||||
.sig-name, code.descname {
|
||||
background-color: transparent;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.sig-name {
|
||||
font-size: 1.1em;
|
||||
}
|
||||
|
||||
code.descname {
|
||||
font-size: 1.2em;
|
||||
}
|
||||
|
||||
.sig-prename, code.descclassname {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.optional {
|
||||
font-size: 1.3em;
|
||||
}
|
||||
|
||||
.sig-paren {
|
||||
font-size: larger;
|
||||
}
|
||||
|
||||
.sig-param.n {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* C++ specific styling */
|
||||
|
||||
.sig-inline.c-texpr,
|
||||
.sig-inline.cpp-texpr {
|
||||
font-family: unset;
|
||||
}
|
||||
|
||||
.sig.c .k, .sig.c .kt,
|
||||
.sig.cpp .k, .sig.cpp .kt {
|
||||
color: #0033B3;
|
||||
}
|
||||
|
||||
.sig.c .m,
|
||||
.sig.cpp .m {
|
||||
color: #1750EB;
|
||||
}
|
||||
|
||||
.sig.c .s, .sig.c .sc,
|
||||
.sig.cpp .s, .sig.cpp .sc {
|
||||
color: #067D17;
|
||||
}
|
||||
|
||||
|
||||
/* -- other body styles ----------------------------------------------------- */
|
||||
|
||||
ol.arabic {
|
||||
list-style: decimal;
|
||||
}
|
||||
|
||||
ol.loweralpha {
|
||||
list-style: lower-alpha;
|
||||
}
|
||||
|
||||
ol.upperalpha {
|
||||
list-style: upper-alpha;
|
||||
}
|
||||
|
||||
ol.lowerroman {
|
||||
list-style: lower-roman;
|
||||
}
|
||||
|
||||
ol.upperroman {
|
||||
list-style: upper-roman;
|
||||
}
|
||||
|
||||
:not(li) > ol > li:first-child > :first-child,
|
||||
:not(li) > ul > li:first-child > :first-child {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
:not(li) > ol > li:last-child > :last-child,
|
||||
:not(li) > ul > li:last-child > :last-child {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
ol.simple ol p,
|
||||
ol.simple ul p,
|
||||
ul.simple ol p,
|
||||
ul.simple ul p {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
ol.simple > li:not(:first-child) > p,
|
||||
ul.simple > li:not(:first-child) > p {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
ol.simple p,
|
||||
ul.simple p {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
aside.footnote > span,
|
||||
div.citation > span {
|
||||
float: left;
|
||||
}
|
||||
aside.footnote > span:last-of-type,
|
||||
div.citation > span:last-of-type {
|
||||
padding-right: 0.5em;
|
||||
}
|
||||
aside.footnote > p {
|
||||
margin-left: 2em;
|
||||
}
|
||||
div.citation > p {
|
||||
margin-left: 4em;
|
||||
}
|
||||
aside.footnote > p:last-of-type,
|
||||
div.citation > p:last-of-type {
|
||||
margin-bottom: 0em;
|
||||
}
|
||||
aside.footnote > p:last-of-type:after,
|
||||
div.citation > p:last-of-type:after {
|
||||
content: "";
|
||||
clear: both;
|
||||
}
|
||||
|
||||
dl.field-list {
|
||||
display: grid;
|
||||
grid-template-columns: fit-content(30%) auto;
|
||||
}
|
||||
|
||||
dl.field-list > dt {
|
||||
font-weight: bold;
|
||||
word-break: break-word;
|
||||
padding-left: 0.5em;
|
||||
padding-right: 5px;
|
||||
}
|
||||
|
||||
dl.field-list > dd {
|
||||
padding-left: 0.5em;
|
||||
margin-top: 0em;
|
||||
margin-left: 0em;
|
||||
margin-bottom: 0em;
|
||||
}
|
||||
|
||||
dl {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
dd > :first-child {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
dd ul, dd table {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
dd {
|
||||
margin-top: 3px;
|
||||
margin-bottom: 10px;
|
||||
margin-left: 30px;
|
||||
}
|
||||
|
||||
.sig dd {
|
||||
margin-top: 0px;
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
.sig dl {
|
||||
margin-top: 0px;
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
dl > dd:last-child,
|
||||
dl > dd:last-child > :last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
dt:target, span.highlighted {
|
||||
background-color: #fbe54e;
|
||||
}
|
||||
|
||||
rect.highlighted {
|
||||
fill: #fbe54e;
|
||||
}
|
||||
|
||||
dl.glossary dt {
|
||||
font-weight: bold;
|
||||
font-size: 1.1em;
|
||||
}
|
||||
|
||||
.versionmodified {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.system-message {
|
||||
background-color: #fda;
|
||||
padding: 5px;
|
||||
border: 3px solid red;
|
||||
}
|
||||
|
||||
.footnote:target {
|
||||
background-color: #ffa;
|
||||
}
|
||||
|
||||
.line-block {
|
||||
display: block;
|
||||
margin-top: 1em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.line-block .line-block {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
margin-left: 1.5em;
|
||||
}
|
||||
|
||||
.guilabel, .menuselection {
|
||||
font-family: sans-serif;
|
||||
}
|
||||
|
||||
.accelerator {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.classifier {
|
||||
font-style: oblique;
|
||||
}
|
||||
|
||||
.classifier:before {
|
||||
font-style: normal;
|
||||
margin: 0 0.5em;
|
||||
content: ":";
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
abbr, acronym {
|
||||
border-bottom: dotted 1px;
|
||||
cursor: help;
|
||||
}
|
||||
|
||||
.translated {
|
||||
background-color: rgba(207, 255, 207, 0.2)
|
||||
}
|
||||
|
||||
.untranslated {
|
||||
background-color: rgba(255, 207, 207, 0.2)
|
||||
}
|
||||
|
||||
/* -- code displays --------------------------------------------------------- */
|
||||
|
||||
pre {
|
||||
overflow: auto;
|
||||
overflow-y: hidden; /* fixes display issues on Chrome browsers */
|
||||
}
|
||||
|
||||
pre, div[class*="highlight-"] {
|
||||
clear: both;
|
||||
}
|
||||
|
||||
span.pre {
|
||||
-moz-hyphens: none;
|
||||
-ms-hyphens: none;
|
||||
-webkit-hyphens: none;
|
||||
hyphens: none;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
div[class*="highlight-"] {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
td.linenos pre {
|
||||
border: 0;
|
||||
background-color: transparent;
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
table.highlighttable {
|
||||
display: block;
|
||||
}
|
||||
|
||||
table.highlighttable tbody {
|
||||
display: block;
|
||||
}
|
||||
|
||||
table.highlighttable tr {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
table.highlighttable td {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
table.highlighttable td.linenos {
|
||||
padding-right: 0.5em;
|
||||
}
|
||||
|
||||
table.highlighttable td.code {
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.highlight .hll {
|
||||
display: block;
|
||||
}
|
||||
|
||||
div.highlight pre,
|
||||
table.highlighttable pre {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
div.code-block-caption + div {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
div.code-block-caption {
|
||||
margin-top: 1em;
|
||||
padding: 2px 5px;
|
||||
font-size: small;
|
||||
}
|
||||
|
||||
div.code-block-caption code {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
table.highlighttable td.linenos,
|
||||
span.linenos,
|
||||
div.highlight span.gp { /* gp: Generic.Prompt */
|
||||
user-select: none;
|
||||
-webkit-user-select: text; /* Safari fallback only */
|
||||
-webkit-user-select: none; /* Chrome/Safari */
|
||||
-moz-user-select: none; /* Firefox */
|
||||
-ms-user-select: none; /* IE10+ */
|
||||
}
|
||||
|
||||
div.code-block-caption span.caption-number {
|
||||
padding: 0.1em 0.3em;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
div.code-block-caption span.caption-text {
|
||||
}
|
||||
|
||||
div.literal-block-wrapper {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
code.xref, a code {
|
||||
background-color: transparent;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.viewcode-link {
|
||||
float: right;
|
||||
}
|
||||
|
||||
.viewcode-back {
|
||||
float: right;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
|
||||
div.viewcode-block:target {
|
||||
margin: -1px -10px;
|
||||
padding: 0 10px;
|
||||
}
|
||||
|
||||
/* -- math display ---------------------------------------------------------- */
|
||||
|
||||
img.math {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
div.body div.math p {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
span.eqno {
|
||||
float: right;
|
||||
}
|
||||
|
||||
span.eqno a.headerlink {
|
||||
position: absolute;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
div.math:hover a.headerlink {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
/* -- printout stylesheet --------------------------------------------------- */
|
||||
|
||||
@media print {
|
||||
div.document,
|
||||
div.documentwrapper,
|
||||
div.bodywrapper {
|
||||
margin: 0 !important;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
div.sphinxsidebar,
|
||||
div.related,
|
||||
div.footer,
|
||||
#top-link {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
56
hw6/doc/_static/cs153-handout.css
vendored
|
|
@@ -1,56 +0,0 @@
|
|||
/* CS153 Style sheet for handouts */
|
||||
|
||||
h1, h2, h3, h4, h5, h6 {
|
||||
font-size: 1.5em;
|
||||
line-height: 1.5;
|
||||
font-weight: normal;
|
||||
}
|
||||
h1 {
|
||||
font-size: 2em;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 1.8em;
|
||||
}
|
||||
|
||||
tt,code,pre,.literal {
|
||||
font-family:monospace,serif;
|
||||
}
|
||||
|
||||
pre {
|
||||
background-color: #f7f7f7;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 4px;
|
||||
padding: 10px;
|
||||
white-space: pre-wrap; /* Wrap long lines */
|
||||
font-size: 14px;
|
||||
line-height: 1.4;
|
||||
color: #333;
|
||||
overflow: auto;
|
||||
page-break-inside:avoid
|
||||
|
||||
}
|
||||
|
||||
/* Add a bit of syntax highlighting for code */
|
||||
pre code {
|
||||
display: block;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-size: 14px;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
/* Style code blocks within pre tags */
|
||||
pre code {
|
||||
background-color: #f7f7f7;
|
||||
border: none;
|
||||
border-radius: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.docutils.literal {
|
||||
color: #e74c3c;
|
||||
white-space: normal;
|
||||
border: 1px solid #e1e4e5;
|
||||
}
|
||||
|
||||
105
hw6/doc/_static/custom.css
vendored
|
|
@@ -1,105 +0,0 @@
|
|||
.wy-side-nav-search, .wy-nav-top {
|
||||
background: #6ba339;
|
||||
}
|
||||
|
||||
.wy-nav-content {
|
||||
max-width: none;
|
||||
}
|
||||
|
||||
.wy-table-responsive table td {
|
||||
white-space: normal;
|
||||
}
|
||||
|
||||
/********************************************/
|
||||
/* LECTURE TABLE STYLES */
|
||||
/********************************************/
|
||||
|
||||
:root {
|
||||
--Color--border : #FFFFFF; /* border color */
|
||||
--Color--link : #DE3A0D; /* hyperlinks */ /* red */
|
||||
|
||||
--Color--white : #FFFFFF;
|
||||
|
||||
--Color--background : #FFFFFF;
|
||||
--Color--text : #000000;
|
||||
--Color--header : #000000;
|
||||
|
||||
--Color--menu-bg : #FFFFFF;
|
||||
--Color--menu-fg : #000000;
|
||||
--Color--menu-fg-hover : #FFFFFF;
|
||||
--Color--menu-bg-hover : #DE3A0D;
|
||||
|
||||
--Color--section-bg : #F7F7F7;
|
||||
--Color--section-fg : #121519;
|
||||
|
||||
--Color--week1 : #FFFFFF;
|
||||
--Color--week2 : #F7F7F7;
|
||||
--Color--dimweek1 : #CCCCCC;
|
||||
--Color--dimweek2 : #BBBBBB;
|
||||
--Color--dimhw : #AAAAAA;
|
||||
|
||||
--Color--note-bg : #FCD63A;
|
||||
--Color--note-border : #FCBF3A;
|
||||
--Color--notice-fg : #DE3A0D;
|
||||
|
||||
--Color--code-fg : #000000;
|
||||
--Color--code-bg : #FEFEFB;
|
||||
--Color--code-border : #E1EDB9;
|
||||
--Color--code-error : #A40000;
|
||||
|
||||
--Color--hdr-bg : #666666;
|
||||
--Color--hdr-fg : #000000;
|
||||
}
|
||||
|
||||
|
||||
.bright {
|
||||
color: var(--Color--white);
|
||||
}
|
||||
|
||||
td.date {
|
||||
color: #050505;
|
||||
}
|
||||
|
||||
div.hdr {
|
||||
color: var(--Color--white);
|
||||
}
|
||||
|
||||
|
||||
.week1 {
|
||||
background-color: var(--Color--week1);/*week1*/
|
||||
}
|
||||
|
||||
.week2 {
|
||||
background-color: var(--Color--week2);;/*week2*/
|
||||
}
|
||||
|
||||
tr.week2.elide > td.topic {
|
||||
background-color: var(--Color--week2);
|
||||
color: var(--Color--dimweek2);
|
||||
}
|
||||
|
||||
tr.week1.elide > td.topic {
|
||||
background-color: var(--Color--week1);
|
||||
color: var(--Color--dimweek1);
|
||||
}
|
||||
|
||||
tr.elide > td.slides > br {
|
||||
display:none;
|
||||
}
|
||||
|
||||
tr.elide > td.handout > br {
|
||||
display:none;
|
||||
}
|
||||
|
||||
.tr.elide.hw > .td > a {
|
||||
display:none;
|
||||
}
|
||||
|
||||
tr.elide > td.slides > a {
|
||||
display:none;
|
||||
}
|
||||
|
||||
tr.elide > td.handout > a {
|
||||
display:none;
|
||||
}
|
||||
|
||||
156
hw6/doc/_static/doctools.js
vendored
|
|
@@ -1,156 +0,0 @@
|
|||
/*
|
||||
* doctools.js
|
||||
* ~~~~~~~~~~~
|
||||
*
|
||||
* Base JavaScript utilities for all Sphinx HTML documentation.
|
||||
*
|
||||
* :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
|
||||
* :license: BSD, see LICENSE for details.
|
||||
*
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([
|
||||
"TEXTAREA",
|
||||
"INPUT",
|
||||
"SELECT",
|
||||
"BUTTON",
|
||||
]);
|
||||
|
||||
const _ready = (callback) => {
|
||||
if (document.readyState !== "loading") {
|
||||
callback();
|
||||
} else {
|
||||
document.addEventListener("DOMContentLoaded", callback);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Small JavaScript module for the documentation.
|
||||
*/
|
||||
const Documentation = {
|
||||
init: () => {
|
||||
Documentation.initDomainIndexTable();
|
||||
Documentation.initOnKeyListeners();
|
||||
},
|
||||
|
||||
/**
|
||||
* i18n support
|
||||
*/
|
||||
TRANSLATIONS: {},
|
||||
PLURAL_EXPR: (n) => (n === 1 ? 0 : 1),
|
||||
LOCALE: "unknown",
|
||||
|
||||
// gettext and ngettext don't access this so that the functions
|
||||
// can safely bound to a different name (_ = Documentation.gettext)
|
||||
gettext: (string) => {
|
||||
const translated = Documentation.TRANSLATIONS[string];
|
||||
switch (typeof translated) {
|
||||
case "undefined":
|
||||
return string; // no translation
|
||||
case "string":
|
||||
return translated; // translation exists
|
||||
default:
|
||||
return translated[0]; // (singular, plural) translation tuple exists
|
||||
}
|
||||
},
|
||||
|
||||
ngettext: (singular, plural, n) => {
|
||||
const translated = Documentation.TRANSLATIONS[singular];
|
||||
if (typeof translated !== "undefined")
|
||||
return translated[Documentation.PLURAL_EXPR(n)];
|
||||
return n === 1 ? singular : plural;
|
||||
},
|
||||
|
||||
addTranslations: (catalog) => {
|
||||
Object.assign(Documentation.TRANSLATIONS, catalog.messages);
|
||||
Documentation.PLURAL_EXPR = new Function(
|
||||
"n",
|
||||
`return (${catalog.plural_expr})`
|
||||
);
|
||||
Documentation.LOCALE = catalog.locale;
|
||||
},
|
||||
|
||||
/**
|
||||
* helper function to focus on search bar
|
||||
*/
|
||||
focusSearchBar: () => {
|
||||
document.querySelectorAll("input[name=q]")[0]?.focus();
|
||||
},
|
||||
|
||||
/**
|
||||
* Initialise the domain index toggle buttons
|
||||
*/
|
||||
initDomainIndexTable: () => {
|
||||
const toggler = (el) => {
|
||||
const idNumber = el.id.substr(7);
|
||||
const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`);
|
||||
if (el.src.substr(-9) === "minus.png") {
|
||||
el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`;
|
||||
toggledRows.forEach((el) => (el.style.display = "none"));
|
||||
} else {
|
||||
el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`;
|
||||
toggledRows.forEach((el) => (el.style.display = ""));
|
||||
}
|
||||
};
|
||||
|
||||
const togglerElements = document.querySelectorAll("img.toggler");
|
||||
togglerElements.forEach((el) =>
|
||||
el.addEventListener("click", (event) => toggler(event.currentTarget))
|
||||
);
|
||||
togglerElements.forEach((el) => (el.style.display = ""));
|
||||
if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler);
|
||||
},
|
||||
|
||||
initOnKeyListeners: () => {
|
||||
// only install a listener if it is really needed
|
||||
if (
|
||||
!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS &&
|
||||
!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
|
||||
)
|
||||
return;
|
||||
|
||||
document.addEventListener("keydown", (event) => {
|
||||
// bail for input elements
|
||||
if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
|
||||
// bail with special keys
|
||||
if (event.altKey || event.ctrlKey || event.metaKey) return;
|
||||
|
||||
if (!event.shiftKey) {
|
||||
switch (event.key) {
|
||||
case "ArrowLeft":
|
||||
if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
|
||||
|
||||
const prevLink = document.querySelector('link[rel="prev"]');
|
||||
if (prevLink && prevLink.href) {
|
||||
window.location.href = prevLink.href;
|
||||
event.preventDefault();
|
||||
}
|
||||
break;
|
||||
case "ArrowRight":
|
||||
if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
|
||||
|
||||
const nextLink = document.querySelector('link[rel="next"]');
|
||||
if (nextLink && nextLink.href) {
|
||||
window.location.href = nextLink.href;
|
||||
event.preventDefault();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// some keyboard layouts may need Shift to get /
|
||||
switch (event.key) {
|
||||
case "/":
|
||||
if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break;
|
||||
Documentation.focusSearchBar();
|
||||
event.preventDefault();
|
||||
}
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
// quick alias for translations
|
||||
const _ = Documentation.gettext;
|
||||
|
||||
_ready(Documentation.init);
|
||||
hw6/doc/_static/documentation_options.js (vendored): 13 lines deleted
hw6/doc/_static/file.png (vendored, binary, 286 B): deleted
hw6/doc/_static/language_data.js (vendored): 199 lines deleted
hw6/doc/_static/minus.png (vendored, binary, 90 B): deleted
hw6/doc/_static/plus.png (vendored, binary, 90 B): deleted
hw6/doc/_static/pygments.css (vendored): 84 lines deleted
hw6/doc/_static/searchtools.js (vendored): 574 lines deleted
hw6/doc/_static/sphinx_highlight.js (vendored): 154 lines deleted
@ -1,572 +0,0 @@

1. HW6: Dataflow Analysis and Optimizations (CS 153 2023)

1.1. Getting Started

Many of the files in this project are taken from the earlier projects. The
new files (only) and their uses are listed below. Those marked with `*` are
the only ones you should need to modify while completing this assignment.

    bin/datastructures.ml      set and map modules (enhanced with printing)
    bin/cfg.ml                 "view" of LL control-flow graphs as dataflow graphs
    bin/analysis.ml            helper functions for propagating dataflow facts
    bin/solver.ml            * the general-purpose iterative dataflow analysis solver
    bin/alias.ml             * alias analysis
    bin/dce.ml               * dead code elimination optimization
    bin/constprop.ml         * constant propagation analysis & optimization
    bin/liveness.ml            provided liveness analysis code
    bin/analysistests.ml       test cases (for liveness, constprop, alias)
    bin/opt.ml               * optimizer that runs dce and constprop (and more if you want)
    bin/backend.ml           * you will implement register allocation heuristics here
    bin/registers.ml           collects statistics about register usage
    bin/printanalysis.ml       a standalone program to print the results of an analysis

Note: You'll need to have menhir (http://gallium.inria.fr/~fpottier/menhir/)
and clang (https://clang.llvm.org/) installed on your system for this
assignment. If you have any difficulty installing these tools, please post on
Ed (https://edstem.org/us/courses/40936/discussion/) and/or contact the
course staff.

Note: As usual, running `oatc --test` will run the test suite. `oatc` also
now supports several new flags having to do with optimizations.

    -O1 : runs two iterations of (constprop followed by dce)
    --liveness {trivial|dataflow} : select which liveness analysis to use for register allocation
    --regalloc {none|greedy|better} : select which register allocator to use
    --print-regs : print a histogram of the registers used
<section id="overview">
|
||||
<h2><span class="section-number">1.2. </span>Overview<a class="headerlink" href="#overview" title="Link to this heading">¶</a></h2>
|
||||
<p>The Oat compiler we have developed so far produces very inefficient code,
|
||||
since it performs no optimizations at any stage of the compilation
|
||||
pipeline. In this project, you will implement several simple dataflow analyses
|
||||
and some optimizations at the level of our LLVMlite intermediate
|
||||
representation in order to improve code size and speed.</p>
|
||||
<section id="provided-code">
|
||||
<h3>Provided Code<a class="headerlink" href="#provided-code" title="Link to this heading">¶</a></h3>
|
||||
<p>The provided code makes extensive use of modules, module signatures, and
|
||||
functors. These aid in code reuse and abstraction. If you need a refresher on
|
||||
OCaml functors, we recommend reading through the <a class="reference external" href="https://dev.realworldocaml.org/functors.html">Functors Chapter</a> of Real World OCaml.</p>
|
||||
<p>In <code class="docutils literal notranslate"><span class="pre">datastructures.ml</span></code>, we provide you with a number of useful modules,
|
||||
module signatures, and functors for the assignment, including:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">OrdPrintT</span></code>: A module signature for a type that is both comparable and
|
||||
can be converted to a string for printing. This is used in conjunction with
|
||||
some of our other custom modules described below. Wrapper modules <code class="docutils literal notranslate"><span class="pre">Lbl</span></code>
|
||||
and <code class="docutils literal notranslate"><span class="pre">Uid</span></code> satisfying this signature are defined later in the file for the
|
||||
<code class="docutils literal notranslate"><span class="pre">Ll.lbl</span></code> and <code class="docutils literal notranslate"><span class="pre">Ll.uid</span></code> types.</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">SetS</span></code>: A module signature that extends OCaml’s
|
||||
built-in set to include string conversion and printing capabilities.</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">MakeSet</span></code>: A functor that creates an extended set (<code class="docutils literal notranslate"><span class="pre">SetS</span></code>) from a type
|
||||
that satisfies the <code class="docutils literal notranslate"><span class="pre">OrdPrintT</span></code> module signature. This is applied to the
|
||||
<code class="docutils literal notranslate"><span class="pre">Lbl</span></code> and <code class="docutils literal notranslate"><span class="pre">Uid</span></code> wrapper modules to create a label set module <code class="docutils literal notranslate"><span class="pre">LblS</span></code>
|
||||
and a UID set module <code class="docutils literal notranslate"><span class="pre">UidS</span></code>.</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">MapS</span></code>: A module signature that extends OCaml’s built-in maps to include
|
||||
string conversion and printing capabilities. Three additional helper
|
||||
functions are also included: <code class="docutils literal notranslate"><span class="pre">update</span></code> for updating the value associated
|
||||
with a particular key, <code class="docutils literal notranslate"><span class="pre">find_or</span></code> for performing a map look-up with a
|
||||
default value to be supplied when the key is not present, and <code class="docutils literal notranslate"><span class="pre">update_or</span></code>
|
||||
for updating the value associated with a key if it is present, or adding an
|
||||
entry with a default value if not.</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">MakeMap</span></code>: A functor that creates an extended map (<code class="docutils literal notranslate"><span class="pre">MapS</span></code>) from a type
|
||||
that satisfies the <code class="docutils literal notranslate"><span class="pre">OrdPrintT</span></code> module signature. This is applied to the
|
||||
<code class="docutils literal notranslate"><span class="pre">Lbl</span></code> and <code class="docutils literal notranslate"><span class="pre">Uid</span></code> wrapper modules to create a label map module <code class="docutils literal notranslate"><span class="pre">LblM</span></code>
|
||||
and a UID map module <code class="docutils literal notranslate"><span class="pre">UidM</span></code>. These map modules have fixed key types, but
|
||||
are polymorphic in the types of their values.</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
</section>
|
||||
</section>
|
||||
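For orientation, here is a small, hypothetical usage sketch of the kind of
set/map plumbing these modules provide. It uses stand-in modules built from
OCaml's standard `Set.Make`/`Map.Make` over string keys, and the argument
orders of `find_or` and `update_or` are assumed rather than taken from
`datastructures.ml`; consult the real signatures there.

```ocaml
(* Stand-ins for the course's UidS / LblM; the real modules in
   datastructures.ml also provide printing.  find_or / update_or are
   re-implemented here with an assumed argument order. *)
module UidS = Set.Make (String)
module LblM = Map.Make (String)

let find_or default m k =
  match LblM.find_opt k m with Some v -> v | None -> default

let update_or default f k m =
  LblM.update k (fun o -> Some (f (Option.value o ~default))) m

let () =
  (* map each block label to the set of uids live on entry to it *)
  let live_in : UidS.t LblM.t = LblM.empty in
  let live_in = update_or UidS.empty (UidS.add "%x") "entry" live_in in
  let entry_facts = find_or UidS.empty live_in "entry" in
  Printf.printf "live-in at entry: %d uid(s)\n" (UidS.cardinal entry_facts)
```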
<section id="task-i-dataflow-analysis">
|
||||
<h2><span class="section-number">1.3. </span>Task I: Dataflow Analysis<a class="headerlink" href="#task-i-dataflow-analysis" title="Link to this heading">¶</a></h2>
|
||||
<p>Your first task is to implement a version of the worklist algorithm for
|
||||
solving dataflow flow equations presented in lecture. Since we plan to
|
||||
implement several analyses, we’d like to reuse as much code as possible
|
||||
between each one. In lecture, we saw that each analysis differs only in the
|
||||
choice of the lattice, the flow function, the direction of the analysis,
|
||||
and how to compute the meet of facts flowing into a node. We can take
|
||||
advantage of this by writing a generic solver as an OCaml functor and
|
||||
instantiating it with these parameters.</p>
|
||||
<section id="the-algorithm">
|
||||
<h3>The Algorithm<a class="headerlink" href="#the-algorithm" title="Link to this heading">¶</a></h3>
|
||||
<p>Assuming only that we have a directed graph where each node is labeled with a
|
||||
<em>dataflow fact</em> and a <em>flow function</em>, we can compute a fixpoint of the flow
|
||||
on the graph as follows:</p>
|
||||
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>let w = new set with all nodes
|
||||
repeat until w is empty
|
||||
let n = w.pop()
|
||||
old_out = out[n]
|
||||
let in = combine(preds[n])
|
||||
out[n] := flow[n](in)
|
||||
if (!equal old_out out[n]),
|
||||
for all m in succs[n], w.add(m)
|
||||
end
|
||||
</pre></div>
|
||||
</div>
|
||||
<p>Here <code class="docutils literal notranslate"><span class="pre">equal</span></code>, <code class="docutils literal notranslate"><span class="pre">combine</span></code> and <code class="docutils literal notranslate"><span class="pre">flow</span></code> are abstract operations that will be
|
||||
instantiated with lattice equality, the meet operation and the flow function
|
||||
(e.g., defined by the gen and kill sets of the analysis),
|
||||
respectively. Similarly, <code class="docutils literal notranslate"><span class="pre">preds</span></code> and <code class="docutils literal notranslate"><span class="pre">succs</span></code> are the graph predecessors
|
||||
and successors in the <em>flow graph</em>, and do not correspond to the control flow
|
||||
of the program. They can be instantiated appropriately to create a forwards or
|
||||
backwards analysis.</p>
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>Don’t try to use OCaml’s polymorphic equality operator (<code class="docutils literal notranslate"><span class="pre">=</span></code>) to compare
|
||||
<code class="docutils literal notranslate"><span class="pre">old_out</span></code> and <code class="docutils literal notranslate"><span class="pre">out[n]</span></code> – that’s <em>reference equality</em>, not <em>structural
|
||||
equality</em>. Use the supplied <code class="docutils literal notranslate"><span class="pre">Fact.compare</span></code> instead.</p>
|
||||
</div>
|
||||
</section>
|
||||
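The following is a minimal, self-contained OCaml sketch of that loop,
assuming a simplified graph representation (integer node ids, plain Stdlib
maps and sets). It is not the `DFA_GRAPH`/`FACT` interface from `solver.ml`;
all record fields and parameter names here are made up for illustration.

```ocaml
(* Illustrative worklist solver over an assumed, simplified graph type.
   The homework's solver is instead a functor over DFA_GRAPH and FACT. *)
module IntS = Set.Make (Int)
module IntM = Map.Make (Int)

type 'fact graph =
  { preds : IntS.t IntM.t               (* flow-graph predecessors *)
  ; succs : IntS.t IntM.t               (* flow-graph successors *)
  ; flow  : ('fact -> 'fact) IntM.t     (* per-node flow function *)
  ; facts : 'fact IntM.t                (* current out[n] for each node *)
  }

let solve (equal : 'f -> 'f -> bool) (combine : 'f list -> 'f) (g : 'f graph)
  : 'f graph =
  let out g n = IntM.find n g.facts in
  let rec loop w g =
    match IntS.choose_opt w with
    | None -> g                                   (* worklist empty: fixpoint *)
    | Some n ->
      let w = IntS.remove n w in
      let old_out = out g n in
      (* meet of the facts flowing in from n's flow-graph predecessors *)
      let in_fact =
        combine (List.map (out g) (IntS.elements (IntM.find n g.preds)))
      in
      let new_out = (IntM.find n g.flow) in_fact in
      let g = { g with facts = IntM.add n new_out g.facts } in
      (* if the fact changed, n's successors must be revisited *)
      let w =
        if equal old_out new_out then w
        else IntS.union w (IntM.find n g.succs)
      in
      loop w g
  in
  (* start with every node on the worklist *)
  loop (IntM.fold (fun n _ acc -> IntS.add n acc) g.facts IntS.empty) g
```

In the real solver, `equal` would come from `Fact.compare`, `combine` from
the analysis's meet operation, and the per-node flow function from the
instruction stored at that node.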
<section id="getting-started-and-testing">
|
||||
<h3>Getting Started and Testing<a class="headerlink" href="#getting-started-and-testing" title="Link to this heading">¶</a></h3>
|
||||
<p>Be sure to review the comments in the <code class="docutils literal notranslate"><span class="pre">DFA_GRAPH</span></code> (<em>data flow analysis graph</em>)
|
||||
and <code class="docutils literal notranslate"><span class="pre">FACT</span></code> module signatures in <code class="docutils literal notranslate"><span class="pre">solver.ml</span></code>, which define the parameters of
|
||||
the solver. Make sure you understand what each declaration in the signature does
|
||||
– your solver will need to use each one (other than the printing functions)!
|
||||
It will also be helpful for you to understand the way that <code class="docutils literal notranslate"><span class="pre">cfg.ml</span></code> connects
|
||||
to the solver. Read the commentary there for more information.</p>
|
||||
</section>
|
||||
<section id="now-implement-the-solver">
|
||||
<h3>Now implement the solver<a class="headerlink" href="#now-implement-the-solver" title="Link to this heading">¶</a></h3>
|
||||
<p>Your first task is to fill in the <code class="docutils literal notranslate"><span class="pre">solve</span></code> function in the <code class="docutils literal notranslate"><span class="pre">Solver.Make</span></code>
|
||||
functor in <code class="docutils literal notranslate"><span class="pre">solver.ml</span></code>. The input to the function is a flow graph labeled
|
||||
with the initial facts. It should compute the fixpoint and return a graph with
|
||||
the corresponding labeling. You will find the set datatype from
|
||||
<code class="docutils literal notranslate"><span class="pre">datastructures.ml</span></code> useful for manipulating sets of nodes.</p>
|
||||
<p>To test your solver, we have provided a full implementation of a liveness
|
||||
analysis in <code class="docutils literal notranslate"><span class="pre">liveness.ml</span></code>. Once you’ve completed the solver, the liveness
|
||||
tests in the test suite should all be passing. These tests compare the output
|
||||
of your solver on a number of programs with pre-computed solutions in
|
||||
<code class="docutils literal notranslate"><span class="pre">analysistest.ml</span></code>. Each entry in this file describes the set of uids that
|
||||
are <strong>live-in</strong> at a label in a program from <code class="docutils literal notranslate"><span class="pre">./llprograms</span></code>. To debug,
|
||||
you can compare these with the output of the <code class="docutils literal notranslate"><span class="pre">Graph.to_string</span></code> function on
|
||||
the flow graphs you will be manipulating.</p>
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>The stand-alone program <code class="docutils literal notranslate"><span class="pre">printanalysis</span></code> can print out the results of a
|
||||
dataflow analysis for a given .ll program. You can build it by doing
|
||||
<code class="docutils literal notranslate"><span class="pre">make</span> <span class="pre">printanalysis</span></code>. It takes flags for each analysis (run with <code class="docutils literal notranslate"><span class="pre">--h</span></code>
|
||||
for a list).</p>
|
||||
</div>
|
||||
</section>
|
||||
</section>
|
||||
<section id="task-ii-alias-analysis-and-dead-code-elimination">
|
||||
<h2><span class="section-number">1.4. </span>Task II: Alias Analysis and Dead Code Elimination<a class="headerlink" href="#task-ii-alias-analysis-and-dead-code-elimination" title="Link to this heading">¶</a></h2>
|
||||
<p>The goal of this task is to implement a simple dead code elimination
|
||||
optimization that can also remove <code class="docutils literal notranslate"><span class="pre">store</span></code> instructions when we can prove
|
||||
that they have no effect on the result of the program. Though we already have
|
||||
a liveness analysis, it doesn’t give us enough information to eliminate
|
||||
<code class="docutils literal notranslate"><span class="pre">store</span></code> instructions: even if we know the UID of the destination pointer is
|
||||
dead after a store and is not used in a load in the rest of the program, we
|
||||
can not remove a store instruction because of <em>aliasing</em>. The problem is that
|
||||
there may be different UIDs that name the same stack slot. There are a number
|
||||
of ways this can happen after a pointer is returned by <code class="docutils literal notranslate"><span class="pre">alloca</span></code>:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p>The pointer is used as an argument to a <code class="docutils literal notranslate"><span class="pre">getelementptr</span></code> or <code class="docutils literal notranslate"><span class="pre">bitcast</span></code> instruction</p></li>
|
||||
<li><p>The pointer is stored into memory and then later loaded</p></li>
|
||||
<li><p>The pointer is passed as an argument to a function, which can manipulate it
|
||||
in arbitrary ways</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
<p>Some pointers are never aliased. For example, the code generated by the Oat
|
||||
frontend for local variables never creates aliases because the Oat language
|
||||
itself doesn’t have an “address of” operator. We can find such uses of
|
||||
<code class="docutils literal notranslate"><span class="pre">alloca</span></code> by applying a simple alias analysis.</p>
|
||||
<section id="alias-analysis">
|
||||
<h3>Alias Analysis<a class="headerlink" href="#alias-analysis" title="Link to this heading">¶</a></h3>
|
||||
<p>We have provided some code to get you started in <code class="docutils literal notranslate"><span class="pre">alias.ml</span></code>. You will have
|
||||
to fill in the flow function and lattice operations. The type of lattice
|
||||
elements, <code class="docutils literal notranslate"><span class="pre">fact</span></code>, is a map from UIDs to <em>symbolic pointers</em> of type
|
||||
<code class="docutils literal notranslate"><span class="pre">SymPtr.t</span></code>. Your analysis should compute, at every program point, the set of
|
||||
UIDs of pointer type that are in scope and, additionally, whether that pointer
|
||||
is the unique name for a stack slot according to the rules above. See the
|
||||
comments in <code class="docutils literal notranslate"><span class="pre">alias.ml</span></code> for details.</p>
|
||||
<blockquote>
|
||||
<div><ol class="arabic simple">
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Alias.insn_flow</span></code>: the flow function over instructions</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Alias.fact.combine</span></code>: the combine function for alias facts</p></li>
|
||||
</ol>
|
||||
</div></blockquote>
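<p>The sketch below shows the flavor of the case analysis involved, using
simplified stand-in types; the constructor names and the instruction
representation are illustrative and do not match the real
<code class="docutils literal notranslate"><span class="pre">SymPtr.t</span></code> or <code class="docutils literal notranslate"><span class="pre">Ll.insn</span></code> types
described in <code class="docutils literal notranslate"><span class="pre">alias.ml</span></code>.</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Illustrative only: a fresh alloca is the unique name of its slot; using the
   pointer in a gep/bitcast, storing it, or passing it to a call makes it
   potentially aliased. *)
type sym = Unique | MayAlias

type insn =
  | Alloca                    (* %uid = alloca ...                         *)
  | Gep of string             (* %uid = getelementptr ... %src ...         *)
  | Bitcast of string         (* %uid = bitcast ... %src ...               *)
  | Store of string           (* store ... %src, ... (the pointer escapes) *)
  | Call of string list       (* pointer arguments passed to a function    *)

module M = Map.Make (String)

let insn_flow ((uid : string), (i : insn)) (d : sym M.t) : sym M.t =
  match i with
  | Alloca -> M.add uid Unique d
  | Gep src | Bitcast src -> M.add uid MayAlias (M.add src MayAlias d)
  | Store src -> M.add src MayAlias d
  | Call args ->
    List.fold_left (fun d a -> M.add a MayAlias d) (M.add uid MayAlias d) args

(* The combine (meet) is pointwise: a uid that may alias along any incoming
   edge may alias. *)
let combine (ds : sym M.t list) : sym M.t =
  let join a b =
    match a, b with
    | Some MayAlias, _ | _, Some MayAlias -> Some MayAlias
    | Some x, _ -> Some x
    | None, y -> y
  in
  List.fold_left (M.merge (fun _ a b -> join a b)) M.empty ds
</pre></div>
</div>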
|
||||
</section>
|
||||
<section id="dead-code-elimination">
|
||||
<h3>Dead Code Elimination<a class="headerlink" href="#dead-code-elimination" title="Link to this heading">¶</a></h3>
|
||||
<p>Now we can use our liveness and alias analyses to implement a dead code
|
||||
elimination pass. We will simply compute the results of the analysis at each
|
||||
program point, then iterate over the blocks of the CFG removing any
|
||||
instructions that do not contribute to the output of the program.</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p>For all instructions except <code class="docutils literal notranslate"><span class="pre">store</span></code> and <code class="docutils literal notranslate"><span class="pre">call</span></code>, the instruction can
|
||||
be removed if the UID it defines is not live-out at the point of definition</p></li>
|
||||
<li><p>A <code class="docutils literal notranslate"><span class="pre">store</span></code> instruction can be removed if we know the UID of the destination
|
||||
pointer is not aliased and not live-out at the program point of the store</p></li>
|
||||
<li><p>A <code class="docutils literal notranslate"><span class="pre">call</span></code> instruction can never be removed</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
<p>Complete the dead-code elimination optimization in <code class="docutils literal notranslate"><span class="pre">dce.ml</span></code>, where you will
|
||||
only need to fill out the <code class="docutils literal notranslate"><span class="pre">dce_block</span></code> function that implements these rules.</p>
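<p>A minimal sketch of these three rules, over stand-in types (the real
<code class="docutils literal notranslate"><span class="pre">dce_block</span></code> pattern-matches on <code class="docutils literal notranslate"><span class="pre">Ll.insn</span></code> and consults the
per-instruction liveness and alias facts supplied in <code class="docutils literal notranslate"><span class="pre">dce.ml</span></code>):</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* [live_out u] and [unique u] stand in for lookups into the liveness and
   alias analysis results at the instruction being considered. *)
type insn =
  | Store of string          (* uid of the destination pointer *)
  | Call
  | Other

let keep ~live_out ~unique (def : string) (i : insn) : bool =
  match i with
  | Call -> true                                    (* never removed            *)
  | Store dst -> live_out dst || not (unique dst)   (* dead and unaliased: drop *)
  | Other -> live_out def                           (* keep only if live-out    *)

let dce_block ~live_out ~unique (b : (string * insn) list) =
  List.filter (fun (def, i) -> keep ~live_out ~unique def i) b
</pre></div>
</div>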
|
||||
</section>
|
||||
</section>
|
||||
<section id="task-iii-constant-propagation">
|
||||
<h2><span class="section-number">1.5. </span>Task III: Constant Propagation<a class="headerlink" href="#task-iii-constant-propagation" title="Link to this heading">¶</a></h2>
|
||||
<p>Programmers don’t often write dead code directly. However, dead code is often
|
||||
produced as a result of other optimizations that execute parts of the original
|
||||
program at compile time, for instance <em>constant propagation</em>. In this section
|
||||
you’ll implement a simple constant propagation analysis and constant folding
|
||||
optimization.</p>
|
||||
<p>Start by reading through the <code class="docutils literal notranslate"><span class="pre">constprop.ml</span></code>. Constant propagation is similar
|
||||
to the alias analysis from the previous section. Dataflow facts will be maps
|
||||
from UIDs to the type <code class="docutils literal notranslate"><span class="pre">SymConst.t</span></code>, which corresponds to the lattice from
|
||||
the lecture slides. Your analysis will compute the set of UIDs in scope at
|
||||
each program point, and the integer value of any UID that is computed as a
|
||||
result of a series of <code class="docutils literal notranslate"><span class="pre">binop</span></code> and <code class="docutils literal notranslate"><span class="pre">icmp</span></code> instructions on constant
|
||||
operands. More specifically:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p>The flow out of any <code class="docutils literal notranslate"><span class="pre">binop</span></code> or <code class="docutils literal notranslate"><span class="pre">icmp</span></code> whose operands have been
|
||||
determined to be constants is the incoming flow with the defined UID to
|
||||
<code class="docutils literal notranslate"><span class="pre">Const</span></code> with the expected constant value</p></li>
|
||||
<li><p>The flow out of any <code class="docutils literal notranslate"><span class="pre">binop</span></code> or <code class="docutils literal notranslate"><span class="pre">icmp</span></code> with a <code class="docutils literal notranslate"><span class="pre">NonConst</span></code> operand sets
|
||||
the defined UID to <code class="docutils literal notranslate"><span class="pre">NonConst</span></code></p></li>
|
||||
<li><p>Similarly, the flow out of any <code class="docutils literal notranslate"><span class="pre">binop</span></code> or <code class="docutils literal notranslate"><span class="pre">icmp</span></code> with a <code class="docutils literal notranslate"><span class="pre">UndefConst</span></code>
|
||||
operand sets the defined UID to <code class="docutils literal notranslate"><span class="pre">UndefConst</span></code></p></li>
|
||||
<li><p>A <code class="docutils literal notranslate"><span class="pre">store</span></code> or <code class="docutils literal notranslate"><span class="pre">call</span></code> of type <code class="docutils literal notranslate"><span class="pre">Void</span></code> sets the defined UID to
|
||||
<code class="docutils literal notranslate"><span class="pre">UndefConst</span></code></p></li>
|
||||
<li><p>All other instructions set the defined UID to <code class="docutils literal notranslate"><span class="pre">NonConst</span></code></p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
<p>(At this point we could also include some arithmetic identities, for instance
|
||||
optimizing multiplication by 0, but we’ll keep the specification simple.)
|
||||
Next, you will have to implement the constant folding optimization itself,
|
||||
which just traverses the blocks of the CFG and replaces operands whose values
|
||||
we have computed with the appropriate constants. The structure of the code is
|
||||
very similar to that in the previous section. You will have to fill in:</p>
|
||||
<blockquote>
|
||||
<div><ol class="arabic simple">
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Constprop.insn_flow</span></code> with the rules defined above</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Constprop.Fact.combine</span></code> with the combine operation for the analysis</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Constprop.cp_block</span></code> (inside the <code class="docutils literal notranslate"><span class="pre">run</span></code> function) with the code needed
|
||||
to perform the constant propagation transformation</p></li>
|
||||
</ol>
|
||||
</div></blockquote>
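<p>The following sketch shows the shape of the flow function for the rules
above, again with simplified stand-in types (the real code works over
<code class="docutils literal notranslate"><span class="pre">Ll.insn</span></code>, <code class="docutils literal notranslate"><span class="pre">Ll.operand</span></code>, and <code class="docutils literal notranslate"><span class="pre">SymConst.t</span></code>;
<code class="docutils literal notranslate"><span class="pre">icmp</span></code> is handled the same way as <code class="docutils literal notranslate"><span class="pre">binop</span></code>):</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>type operand = Lit of int64 | Id of string

type insn =
  | Binop of (int64 -> int64 -> int64) * operand * operand  (* icmp is analogous *)
  | Store
  | VoidCall
  | Other

type symconst = NonConst | Const of int64 | UndefConst

module M = Map.Make (String)

(* classify an operand against the incoming fact *)
let classify d = function
  | Lit n -> Const n
  | Id u -> (try M.find u d with Not_found -> UndefConst)

let insn_flow (uid, i) (d : symconst M.t) : symconst M.t =
  let v =
    match i with
    | Binop (f, a, b) ->
      (match classify d a, classify d b with
       | NonConst, _ | _, NonConst -> NonConst
       | UndefConst, _ | _, UndefConst -> UndefConst
       | Const x, Const y -> Const (f x y))
    | Store | VoidCall -> UndefConst      (* store, or a call of type Void *)
    | Other -> NonConst                   (* every other instruction       *)
  in
  M.add uid v d
</pre></div>
</div>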
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>Once you have implemented constant folding and dead-code elimination, the
|
||||
compiler’s <code class="docutils literal notranslate"><span class="pre">-O1</span></code> option will optimize your ll code by doing 2 iterations
|
||||
of (constant prop followed by dce). See <code class="docutils literal notranslate"><span class="pre">opt.ml</span></code>. The <code class="docutils literal notranslate"><span class="pre">-O1</span></code>
|
||||
optimizations are <em>not</em> used for testing <em>except</em> that they are <em>always</em>
|
||||
performed in the register-allocation quality tests – these optimizations
|
||||
improve register allocation (see below).</p>
|
||||
<p>This coupling means that if you have a faulty optimization pass, it might
|
||||
cause the quality of your register allocator to degrade. And it might make
|
||||
getting a high score harder.</p>
|
||||
</div>
|
||||
</section>
|
||||
<section id="task-iv-register-allocationn-optional">
|
||||
<h2><span class="section-number">1.6. </span>Task IV: Register Allocationn (Optional)<a class="headerlink" href="#task-iv-register-allocationn-optional" title="Link to this heading">¶</a></h2>
|
||||
<p>The backend implementation that we have given you provides two basic register
|
||||
allocation strategies:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p><strong>none</strong>: spills all uids to the stack;</p></li>
|
||||
<li><p><strong>greedy</strong>: uses registers and a greedy linear-scan algorithm.</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
<p>For this task, you will implement a <strong>better</strong> register allocation strategy
|
||||
that makes use of the liveness information that you compute in Task I. Most
|
||||
of the instructions for this part of the assignment are found in
|
||||
<code class="docutils literal notranslate"><span class="pre">backend.ml</span></code>, where we have modified the code generation strategy to be able
|
||||
to make use of liveness information. The task is to implement a single
|
||||
function <code class="docutils literal notranslate"><span class="pre">better_layout</span></code> that beats our example “greedy” register allocation
|
||||
strategy. We recommend familiarizing yourself with the way that the simple
|
||||
strategies work before attempting to write your own allocator.</p>
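<p>One possible shape for such a strategy, sketched here with self-contained
types (the real <code class="docutils literal notranslate"><span class="pre">better_layout</span></code> must produce the layout structure that
<code class="docutils literal notranslate"><span class="pre">backend.ml</span></code> expects and work with the actual x86 register set), is to
build an interference graph from the liveness results and greedily color it:</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>module S = Set.Make (String)
module M = Map.Make (String)

(* Two uids interfere when one is defined at an instruction where the other is
   live-out. [insns] pairs each defined uid with its live-out set. *)
let interference (insns : (string * S.t) list) : S.t M.t =
  let edge a b g =
    M.add a (S.add b (try M.find a g with Not_found -> S.empty)) g
  in
  List.fold_left
    (fun g (def, live) ->
      S.fold (fun u g -> if u = def then g else edge def u (edge u def g)) live g)
    M.empty insns

(* Greedy coloring: each uid gets the lowest color unused by its neighbors;
   a uid whose color is at least n_regs would be spilled to a stack slot. *)
let color (g : S.t M.t) ~(n_regs : int) : [ `Reg of int | `Spill ] M.t =
  M.fold
    (fun uid nbrs acc ->
      let used =
        S.fold
          (fun n used ->
            match M.find_opt n acc with Some (`Reg c) -> c :: used | _ -> used)
          nbrs []
      in
      let rec first c = if List.mem c used then first (c + 1) else c in
      let c = first 0 in
      M.add uid (if c < n_regs then `Reg c else `Spill) acc)
    g M.empty
</pre></div>
</div>
<p>Uids that never interfere with anything do not appear in this graph, and the
coloring order here is arbitrary; a real allocator would also assign registers
to those uids and pick a better visiting order (for example, by spill cost).</p>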
|
||||
<p>The compiler now also supports several additional command-line switches that
|
||||
can be used to select among different analysis and code generation options for
|
||||
testing purposes:</p>
|
||||
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>--print-regs prints the register usage statistics for x86 code
|
||||
--liveness {trivial|dataflow} use the specified liveness analysis
|
||||
--regalloc {none|greedy|better} use the specified register allocator
|
||||
</pre></div>
|
||||
</div>
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>The flags above <em>do not</em> imply the <code class="docutils literal notranslate"><span class="pre">-O1</span></code> flag (despite the fact that we
|
||||
always turn on optimization for testing purposes when running with
|
||||
<code class="docutils literal notranslate"><span class="pre">--test</span></code>). You should enable it explicitly.</p>
|
||||
</div>
|
||||
<p>For testing purposes, you can run the compiler with the <code class="docutils literal notranslate"><span class="pre">-v</span></code> verbose flag
|
||||
and/or use the <code class="docutils literal notranslate"><span class="pre">--print-regs</span></code> flag to get more information about how your
|
||||
algorithm is performing. It is also useful to sprinkle your own verbose
|
||||
output into the backend.</p>
|
||||
<p>The goal for this part of the homework is to create a strategy such that code
|
||||
generated with the <code class="docutils literal notranslate"><span class="pre">--regalloc</span> <span class="pre">better</span></code> <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">dataflow</span></code> flags is
|
||||
“better” than code generated using the simple settings, which are <code class="docutils literal notranslate"><span class="pre">--regalloc</span>
|
||||
<span class="pre">greedy</span></code> <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">dataflow</span></code>. See the discussion about how we compare
|
||||
register allocation strategies in <code class="docutils literal notranslate"><span class="pre">backend.ml</span></code>. The “quality” test cases
|
||||
report the results of these comparisons.</p>
|
||||
<p>Of course your register allocation strategy should produce correct code, so we
|
||||
still perform all of the correctness tests that we have used in previous
|
||||
versions of the compiler. Your allocation strategy should not break any of
|
||||
these tests – and you cannot earn points for the “quality” tests unless all
|
||||
of the correctness tests also pass.</p>
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>Since this task is optional, the quality test cases in <code class="docutils literal notranslate"><span class="pre">gradedtests.ml</span></code>
|
||||
are commented out. If you are doing this task, uncomment the additional
|
||||
tests in that file. (Look for the text “Uncomment the following code if
|
||||
you are doing the optional Task IV Register Allocation”.)</p>
|
||||
</div>
|
||||
</section>
|
||||
<section id="task-v-experimentation-validation-only-if-task-iv-completed">
|
||||
<h2><span class="section-number">1.7. </span>Task V: Experimentation / Validation (Only if Task Iv completed)<a class="headerlink" href="#task-v-experimentation-validation-only-if-task-iv-completed" title="Link to this heading">¶</a></h2>
|
||||
<p>Of course we want to understand how much of an impact your register allocation
|
||||
strategy has on actual execution time. For the final task, you will create a
|
||||
new Oat program that highlights the difference. There are two parts to this
|
||||
task.</p>
|
||||
<section id="create-a-test-case">
|
||||
<h3>Create a test case<a class="headerlink" href="#create-a-test-case" title="Link to this heading">¶</a></h3>
|
||||
<p>Post an Oat program to <a class="reference external" href="https://edstem.org/us/courses/40936/discussion/">Ed</a>. This program should exhibit significantly
|
||||
different performance when compiled using the “greedy” register allocation
|
||||
strategy vs. using your “better” register allocation strategy with dataflow
|
||||
information. See the file <code class="docutils literal notranslate"><span class="pre">hw4programs/regalloctest.oat</span></code> and
|
||||
<code class="docutils literal notranslate"><span class="pre">hw4programs/regalloctest2.oat</span></code> for uninspired examples of such a
|
||||
program. Yours should be more interesting.</p>
|
||||
</section>
|
||||
<section id="post-your-running-time">
|
||||
<h3>Post your running time<a class="headerlink" href="#post-your-running-time" title="Link to this heading">¶</a></h3>
|
||||
<p>Use the unix <code class="docutils literal notranslate"><span class="pre">time</span></code> command to test the performance of your
|
||||
register allocation algorithm. This should take the form of a simple table of
|
||||
timing information for several test cases, including the one you create and
|
||||
those mentioned below. You should test the performance in several
|
||||
configurations:</p>
|
||||
<blockquote>
|
||||
<div><ol class="arabic simple">
|
||||
<li><p>using the <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">trivial</span></code> <code class="docutils literal notranslate"><span class="pre">--regalloc</span> <span class="pre">none</span></code> flags (baseline)</p></li>
|
||||
<li><p>using the <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">dataflow</span></code> <code class="docutils literal notranslate"><span class="pre">--regalloc</span> <span class="pre">greedy</span></code> flags (greedy)</p></li>
|
||||
<li><p>using the <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">dataflow</span></code> <code class="docutils literal notranslate"><span class="pre">--regalloc</span> <span class="pre">better</span></code> flags (better)</p></li>
|
||||
<li><p>using the <code class="docutils literal notranslate"><span class="pre">--clang</span></code> flags (clang)</p></li>
|
||||
</ol>
|
||||
</div></blockquote>
|
||||
<p>And… all of the above plus the <code class="docutils literal notranslate"><span class="pre">-O1</span></code> flag.</p>
|
||||
<p>Test your compiler on at least these three programs:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">hw4programs/regalloctest.oat</span></code></p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">llprograms/matmul.ll</span></code></p></li>
|
||||
<li><p>your own test case</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
<p>Report the processor and OS version that you use to test. For best results,
|
||||
use a “lightly loaded” machine (close all other applications) and average the
|
||||
timing over several trial runs.</p>
|
||||
<p>The example below shows one interaction used to test the <code class="docutils literal notranslate"><span class="pre">matmul.ll</span></code> file in
|
||||
several configurations from the command line:</p>
|
||||
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>> ./oatc --liveness trivial --regalloc none llprograms/matmul.ll
|
||||
> time ./a.out
|
||||
|
||||
real 0m1.647s
|
||||
user 0m1.639s
|
||||
sys 0m0.002s
|
||||
|
||||
|
||||
> ./oatc --liveness dataflow --regalloc greedy llprograms/matmul.ll
|
||||
> time ./a.out
|
||||
|
||||
real 0m1.127s
|
||||
user 0m1.123s
|
||||
sys 0m0.002s
|
||||
|
||||
> ./oatc --liveness dataflow --regalloc better llprograms/matmul.ll
|
||||
> time ./a.out
|
||||
|
||||
real 0m0.500s
|
||||
user 0m0.496s
|
||||
sys 0m0.002s
|
||||
|
||||
> ./oatc --clang llprograms/matmul.ll
|
||||
> time ./a.out
|
||||
|
||||
real 0m0.061s
|
||||
user 0m0.053s
|
||||
sys 0m0.004s
|
||||
</pre></div>
|
||||
</div>
|
||||
<p>Don’t get too discouraged when clang beats your compiler’s performance by an
order of magnitude or more. It uses register promotion and many other optimizations
|
||||
to get high-quality code!</p>
|
||||
</section>
|
||||
</section>
|
||||
<section id="optional-task-leaderboard">
|
||||
<h2><span class="section-number">1.8. </span>Optional Task: Leaderboard!<a class="headerlink" href="#optional-task-leaderboard" title="Link to this heading">¶</a></h2>
|
||||
<p>As an optional and hopefully fun activity, we will run a leaderboard for efficient
|
||||
compilation. When you submit your homework, we will use it to compile a test suite.
|
||||
(You can choose what name will appear for you on the leaderboard; feel free to use
|
||||
your real name or a pseudonym.) We will compare the time that your compiled version
|
||||
takes to execute compared to a compilation using the Clang backend.</p>
|
||||
<p>You are welcome to implement additional optimizations by editing the file <code class="docutils literal notranslate"><span class="pre">opt.ml</span></code>.
|
||||
Note that your additional optimizations should run only if the <code class="docutils literal notranslate"><span class="pre">-O2</span></code> flag is passed
|
||||
(which will set <code class="docutils literal notranslate"><span class="pre">Opt.opt_level</span></code> to 2).</p>
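<p>A small, purely illustrative sketch of that gating (the pass names here are
placeholders, not functions provided by the homework code):</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Apply the -O1 passes always, and the extra passes only at -O2 or above. *)
let run_passes ~(opt_level : int)
    ~(o1_passes : ('p -> 'p) list) ~(extra_passes : ('p -> 'p) list) (prog : 'p) : 'p =
  let apply prog passes = List.fold_left (fun p f -> f p) prog passes in
  let prog = apply prog o1_passes in
  if opt_level >= 2 then apply prog extra_passes else prog
</pre></div>
</div>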
|
||||
<p>All of your additional optimizations should be implemented in the <code class="docutils literal notranslate"><span class="pre">opt.ml</span></code> file; we
|
||||
know this isn’t good software engineering practice, but it helps us simplify our
|
||||
code submission framework (sorry!).</p>
|
||||
<p>We will post on Ed a link to the leaderboard test suite, so you can access the latest
|
||||
version of the test suite.</p>
|
||||
<p>Info about leaderboard results: The leaderboard shows the execution time of your
|
||||
compiled version compared to the Clang-compiled version. Specifically, we compile
|
||||
a testcase with the command
|
||||
<code class="docutils literal notranslate"><span class="pre">./oatc</span> <span class="pre">-O2</span> <span class="pre">--liveness</span> <span class="pre">dataflow</span> <span class="pre">--regalloc</span> <span class="pre">better</span> <span class="pre">testfile</span> <span class="pre">runtime.c</span></code> and
|
||||
measure the execution time of the resulting executable. Let this time be
|
||||
<em>t_student</em>. We also compile the test case with the additional flag
|
||||
<code class="docutils literal notranslate"><span class="pre">--clang</span></code> and measure the execution time of the resulting executable. Let
|
||||
this time be <em>t_clang</em>. The leaderboard displays <em>t_student</em>
|
||||
divided by <em>t_clang</em> for each test case, and also the geometric mean
|
||||
of all the test cases. (The “version” column is the md5 sum of all the testcases.)</p>
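<p>For reference, the reported statistic is just the geometric mean of the
per-test ratios, e.g.:</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* geometric mean of the per-test ratios t_student /. t_clang *)
let geometric_mean (ratios : float list) : float =
  let n = float_of_int (List.length ratios) in
  exp (List.fold_left (fun acc r -> acc +. log r) 0.0 ratios /. n)

(* geometric_mean [1.8; 2.5; 0.9]  returns roughly 1.6 *)
</pre></div>
</div>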
|
||||
<p>Propose a test case to add to the leaderboard: If you implement an additional
|
||||
optimization and have developed a test case that your optimization does well on,
|
||||
you can post a description of your optimization and the test case on Ed, and we
|
||||
will consider the test case for inclusion in the test suite. Your test case must
|
||||
satisfy the following properties:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p>Does not require any command line arguments to run.</p></li>
|
||||
<li><p>Takes on the order of 1-3 seconds to execute.</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
</section>
|
||||
<section id="grading">
|
||||
<h2><span class="section-number">1.9. </span>Grading<a class="headerlink" href="#grading" title="Link to this heading">¶</a></h2>
|
||||
<p><strong>Projects that do not compile will receive no credit!</strong></p>
|
||||
<dl class="simple">
|
||||
<dt>Your grade for this project will be based on:</dt><dd><ul class="simple">
|
||||
<li><p>100 Points: the various automated tests that we provide.</p></li>
|
||||
</ul>
|
||||
</dd>
|
||||
</dl>
|
||||
<ul class="simple">
|
||||
<li><p>Bonus points and unlimited bragging rights: completing
|
||||
one or more of the optional tasks. Note that the register-allocator
|
||||
quality tests don’t run unless your allocator passes all the correctness tests.</p></li>
|
||||
</ul>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
|
||||
<div class="sphinxsidebarwrapper"><h3>Navigation</h3>
|
||||
<ul class="current">
|
||||
<li class="toctree-l1 current"><a class="current reference internal" href="#">1. HW6: Dataflow Analysis and Optimizations</a><ul>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#getting-started">1.1. Getting Started</a></li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#overview">1.2. Overview</a><ul>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#provided-code">Provided Code</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-i-dataflow-analysis">1.3. Task I: Dataflow Analysis</a><ul>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#the-algorithm">The Algorithm</a></li>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#getting-started-and-testing">Getting Started and Testing</a></li>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#now-implement-the-solver">Now implement the solver</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-ii-alias-analysis-and-dead-code-elimination">1.4. Task II: Alias Analysis and Dead Code Elimination</a><ul>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#alias-analysis">Alias Analysis</a></li>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#dead-code-elimination">Dead Code Elimination</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-iii-constant-propagation">1.5. Task III: Constant Propagation</a></li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-iv-register-allocationn-optional">1.6. Task IV: Register Allocationn (Optional)</a></li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-v-experimentation-validation-only-if-task-iv-completed">1.7. Task V: Experimentation / Validation (Only if Task Iv completed)</a><ul>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#create-a-test-case">Create a test case</a></li>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#post-your-running-time">Post your running time</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#optional-task-leaderboard">1.8. Optional Task: Leaderboard!</a></li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#grading">1.9. Grading</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<div class="clearer"></div>
|
||||
</div>
|
||||
<div class="footer">
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
26
hw6/hw4programs/binary_gcd.oat
Normal file
26
hw6/hw4programs/binary_gcd.oat
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
int binary_gcd (int x, int y) {
|
||||
if (x == y) { return x; }
|
||||
if (x == 0) { return y; }
|
||||
if (y == 0) { return x; }
|
||||
if ((~x [&] 1) == 1) {
|
||||
if ((y [&] 1) == 1) {
|
||||
return binary_gcd(x >> 1, y);
|
||||
}
|
||||
else {
|
||||
return binary_gcd(x >> 1, y >> 1) << 1;
|
||||
}
|
||||
}
|
||||
if ((~y [&] 1) == 1) {
|
||||
return binary_gcd(x, y >> 1);
|
||||
}
|
||||
if (x > y) {
|
||||
return binary_gcd((x - y) >> 1, y);
|
||||
}
|
||||
return binary_gcd((y - x) >> 1, x);
|
||||
}
|
||||
|
||||
int program (int argc, string[] argv) {
|
||||
var x = 21;
|
||||
var y = 15;
|
||||
return binary_gcd(x, y);
|
||||
}
|
||||
59
hw6/hw4programs/count_sort.oat
Normal file
59
hw6/hw4programs/count_sort.oat
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
|
||||
int min(int[] arr, int len) {
|
||||
var min = arr[0];
|
||||
for (var i = 0; i < len; i = i + 1;) {
|
||||
if (arr[i] < min) {
|
||||
min = arr[i];
|
||||
}
|
||||
}
|
||||
return min;
|
||||
}
|
||||
|
||||
int max(int[] arr, int len) {
|
||||
var max = arr[0];
|
||||
for (var i = 0; i < len; i = i + 1;) {
|
||||
if (arr[i] > max) {
|
||||
max = arr[i];
|
||||
}
|
||||
}
|
||||
return max;
|
||||
}
|
||||
|
||||
int[] count_sort(int[] arr, int len) {
|
||||
var min = min(arr, len);
|
||||
var max = max(arr, len);
|
||||
|
||||
var counts = new int[max - min + 1];
|
||||
|
||||
for (var i = 0; i < len; i = i + 1;) {
|
||||
counts[arr[i] - min] = counts[arr[i] - min] + 1;
|
||||
}
|
||||
|
||||
var i = min;
|
||||
var j = 0;
|
||||
|
||||
var out = new int[len];
|
||||
|
||||
while (i <= max) {
|
||||
|
||||
if (counts[i - min] > 0) {
|
||||
out[j] = i;
|
||||
counts[i - min] = counts[i - min] - 1;
|
||||
j = j + 1;
|
||||
} else {
|
||||
i = i + 1;
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var arr = new int[]{65, 70, 72, 90, 65, 65, 69, 89, 67};
|
||||
var len = 9;
|
||||
|
||||
print_string(string_of_array(arr));
|
||||
print_string("\n");
|
||||
var sorted = count_sort(arr, len);
|
||||
print_string(string_of_array(sorted));
|
||||
return 0;
|
||||
}
|
||||
|
|
@ -16,7 +16,7 @@ void proc2 ( ) {
|
|||
}
|
||||
|
||||
bool foo ( int x, int[] y ) {
|
||||
var s = bar (x, "CS153");
|
||||
var s = bar (x, "cis341");
|
||||
proc1 ();
|
||||
return true;
|
||||
}
|
||||
|
|
|
|||
26
hw6/hw4programs/fibo.oat
Normal file
26
hw6/hw4programs/fibo.oat
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
int fibR(int n) {
|
||||
if(n == 0) {return 0;}
|
||||
if(n == 1) {return 1;}
|
||||
return fibR(n - 1) + fibR(n-2);
|
||||
}
|
||||
|
||||
int fibI(int n) {
|
||||
var a = 0;
|
||||
var b = 1;
|
||||
if(n == 0) {return a;}
|
||||
if(n == 1) {return b;}
|
||||
while(n-2 > 0) {
|
||||
var old = b;
|
||||
b = b + a;
|
||||
a = old;
|
||||
n = n - 1;
|
||||
}
|
||||
return a + b;
|
||||
}
|
||||
|
||||
int program (int argc, string[] argv)
|
||||
{
|
||||
var val = 1;
|
||||
if(fibR(12) == 144 & fibI(12) == 144) {val = 0;}
|
||||
return val;
|
||||
}
|
||||
35
hw6/hw4programs/gnomesort.oat
Normal file
35
hw6/hw4programs/gnomesort.oat
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
|
||||
void gnomeSort(int[] a, int len) {
|
||||
var i = 1;
|
||||
var j = 2;
|
||||
|
||||
while(i < len) {
|
||||
if (a[i-1] <= a[i]) {
|
||||
i = j;
|
||||
j = j + 1;
|
||||
} else {
|
||||
var tmp = a[i-1];
|
||||
a[i-1] = a[i];
|
||||
a[i] = tmp;
|
||||
i = i - 1;
|
||||
|
||||
if (i == 0) {
|
||||
i = j;
|
||||
j = j + 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var arr = new int[]{ 5, 200, 1, 65, 30, 99, 2, 0 };
|
||||
var len = 8;
|
||||
|
||||
gnomeSort(arr, len);
|
||||
for(var i=0; i<8; i=i+1;) {
|
||||
print_int(arr[i]);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
53
hw6/hw4programs/heap.oat
Normal file
53
hw6/hw4programs/heap.oat
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
void min_heapify(int[] array, int i, int len) {
|
||||
var l = i * 2;
|
||||
var r = i + 1;
|
||||
var tmp = 0;
|
||||
var m = i;
|
||||
|
||||
if (l < len) {
|
||||
if (array[l] > array[m]) {
|
||||
m = l;
|
||||
}
|
||||
}
|
||||
|
||||
if (r < len) {
|
||||
if (array[r] > array[m]) {
|
||||
m = r;
|
||||
}
|
||||
}
|
||||
|
||||
if (m != i) {
|
||||
tmp = array[i];
|
||||
array[i] = array[m];
|
||||
array[m] = tmp;
|
||||
|
||||
min_heapify(array, m, len);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
void make_min_heap(int[] array, int len) {
|
||||
for (var i = len; i >= 1; i = i - 1;) {
|
||||
min_heapify(array, i, len);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var array = new int[]{ 0, 9, 1, 2, 8, 10, 7, 3, 6, 4, 5 };
|
||||
var end_result = new int[]{ 0, 1, 4, 2, 8, 5, 7, 3, 6, 9, 10 };
|
||||
|
||||
make_min_heap(array, 10);
|
||||
|
||||
var same = 0;
|
||||
|
||||
for (var i = 0; i < 11; i = i + 1;) {
|
||||
if (array[i] != end_result[i]) {
|
||||
same = 1;
|
||||
}
|
||||
}
|
||||
|
||||
return same;
|
||||
}
|
||||
38
hw6/hw4programs/insertion_sort.oat
Normal file
38
hw6/hw4programs/insertion_sort.oat
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
int[] insert(int[] partial, int len, int insertee) {
|
||||
var inserted = new int[len+1];
|
||||
for (var i=0; i < len+1; i=i+1;) { inserted[i] = -1; }
|
||||
var not_yet_inserted = true;
|
||||
if (insertee < partial[0]) {
|
||||
not_yet_inserted = false;
|
||||
inserted[0] = insertee;
|
||||
}
|
||||
for (var i = 0; i < len; i = i + 1;) {
|
||||
if (not_yet_inserted) {
|
||||
if (insertee > partial[i]) {
|
||||
not_yet_inserted = false;
|
||||
inserted[i+1] = insertee;
|
||||
inserted[i] = partial[i];
|
||||
} else {
|
||||
inserted[i] = partial[i];
|
||||
}
|
||||
} else {
|
||||
inserted[i+1] = partial[i];
|
||||
}
|
||||
}
|
||||
return inserted;
|
||||
}
|
||||
|
||||
int[] insort(int[] unsorted, int len) {
|
||||
var out = new int[]{0};
|
||||
out[0] = unsorted[0];
|
||||
for (var i = 1; i < len; i = i + 1;) {
|
||||
out = insert(out, i, unsorted[i]);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var array = new int[]{13, 42, 32, 3, 2, 6};
|
||||
var result = insort(array, 6);
|
||||
return result[5];
|
||||
}
|
||||
20
hw6/hw4programs/josh_joyce_test.oat
Normal file
20
hw6/hw4programs/josh_joyce_test.oat
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
global arr1 = new int[]{1,2,3,4};
|
||||
global arr2 = new int[]{1,2,3,5};
|
||||
|
||||
int arrcheck(int[] ar1, int[] ar2, int len){
|
||||
var val = 0;
|
||||
for(var i =0; i < len; i= i+1;){
|
||||
if (ar1[i] != ar2[i]) {
|
||||
val = 1;
|
||||
}
|
||||
}
|
||||
return val;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
|
||||
var val = 1;
|
||||
if(arrcheck(arr1, arr2, 4) == 1) {val = 0;}
|
||||
return val;
|
||||
|
||||
}
|
||||
68
hw6/hw4programs/kmp.oat
Normal file
68
hw6/hw4programs/kmp.oat
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
/* Paul Lou and Tanner Haldeman */
|
||||
/* References: Algorithms (Sedgewick), https://en.wikipedia.org/wiki/Knuth%E2%80%93Morris%E2%80%93Pratt_algorithm */
|
||||
|
||||
int[] construct_table(string w) {
|
||||
var length = length_of_string(w);
|
||||
var arr_of_w = array_of_string(w);
|
||||
var t = new int[length];
|
||||
var curr = 2;
|
||||
var next = 0;
|
||||
|
||||
t[0] = -1;
|
||||
t[1] = 0;
|
||||
while (curr < length) {
|
||||
if (arr_of_w[curr-1] == arr_of_w[next]) {
|
||||
t[curr] = next + 1;
|
||||
next = next + 1;
|
||||
curr = curr + 1;
|
||||
}
|
||||
else if (next > 0) {
|
||||
next = t[next];
|
||||
}
|
||||
else {
|
||||
t[curr] = 0;
|
||||
curr = curr + 1;
|
||||
}
|
||||
}
|
||||
|
||||
return t;
|
||||
}
|
||||
|
||||
|
||||
int kmp(string str, string w) {
|
||||
var str_idx = 0;
|
||||
var word_idx = 0;
|
||||
var word_length = length_of_string(w);
|
||||
var word_arr = array_of_string(w);
|
||||
var str_arr = array_of_string(str);
|
||||
var t = construct_table(w);
|
||||
|
||||
while (str_idx + word_idx < length_of_string(str)) {
|
||||
if (word_arr[word_idx] == str_arr[str_idx + word_idx]) {
|
||||
if (word_idx == word_length - 1) {
|
||||
return str_idx;
|
||||
}
|
||||
word_idx = word_idx + 1;
|
||||
}
|
||||
else {
|
||||
if (t[word_idx] > -1) {
|
||||
str_idx = str_idx + word_idx - t[word_idx];
|
||||
word_idx = t[word_idx];
|
||||
}
|
||||
else {
|
||||
str_idx = str_idx + 1;
|
||||
word_idx = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var str = "abcdabcdabcdcbab";
|
||||
var word = "dabcdc";
|
||||
|
||||
var ret = kmp(str, word);
|
||||
return ret;
|
||||
}
|
||||
48
hw6/hw4programs/lcs.oat
Normal file
48
hw6/hw4programs/lcs.oat
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
/*
|
||||
* CIS 341 Homework 4
|
||||
* Thomas Delacour & Max McCarthy
|
||||
*/
|
||||
|
||||
/**
|
||||
* Computes longest common subsequence of two strings a and b.
|
||||
*/
|
||||
global buf = new int[]{0};
|
||||
|
||||
string lcs(int i, int j, string a, string b) {
|
||||
if (i < 0 | j < 0) {
|
||||
return "";
|
||||
}
|
||||
|
||||
var a_chars = array_of_string(a);
|
||||
var b_chars = array_of_string(b);
|
||||
|
||||
var last_char_a = a_chars[i];
|
||||
var last_char_b = b_chars[j];
|
||||
|
||||
if (last_char_a == last_char_b) {
|
||||
var prev_lcs = lcs(i - 1, j - 1, a, b);
|
||||
buf[0] = a_chars[i];
|
||||
var next_char = string_of_array(buf);
|
||||
return string_cat(prev_lcs, next_char);
|
||||
}
|
||||
|
||||
var left_lcs = lcs(i, j - 1, a, b);
|
||||
var right_lcs = lcs(i - 1, j, a, b);
|
||||
|
||||
var left_len = length_of_string(left_lcs);
|
||||
var right_len = length_of_string(right_lcs);
|
||||
|
||||
if (left_len < right_len) {
|
||||
return right_lcs;
|
||||
} else {
|
||||
return left_lcs;
|
||||
}
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var tomato = "TOMATO";
|
||||
var orating = "ORATING";
|
||||
print_string(lcs(5, 6, tomato, orating));
|
||||
return 0;
|
||||
}
|
||||
|
||||
44
hw6/hw4programs/lfsr.oat
Normal file
44
hw6/hw4programs/lfsr.oat
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
global lfsr_iterations = 5;
|
||||
global lfsr_length = 4;
|
||||
global lfsr_init_values = new bool[]{true, false, true, false};
|
||||
|
||||
bool xor(bool x, bool y) {
|
||||
return (x & !y) | (!x & y);
|
||||
}
|
||||
|
||||
string string_of_bool(bool b) {
|
||||
if (b) { return "T"; }
|
||||
else { return "F"; }
|
||||
}
|
||||
|
||||
void print_lfsr(bool[] lfsr_register, int len) {
|
||||
for (var i = 0; i < len; i = i + 1;) {
|
||||
print_string(string_of_bool(lfsr_register[i]));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
/* Initialize the working register */
|
||||
var lfsr_register = new bool[lfsr_length];
|
||||
for (var i=0; i < lfsr_length; i=i+1;) {
|
||||
lfsr_register[i] = lfsr_init_values[i];
|
||||
}
|
||||
|
||||
/* Do the computations */
|
||||
for (var i = 0; i < lfsr_iterations; i = i + 1;) {
|
||||
var new_first =
|
||||
xor(lfsr_register[lfsr_length - 1], lfsr_register[lfsr_length - 2]);
|
||||
for (var j = lfsr_length - 1; j > 0; j = j - 1;) {
|
||||
lfsr_register[j] = lfsr_register[j - 1];
|
||||
}
|
||||
lfsr_register[0] = new_first;
|
||||
}
|
||||
|
||||
/* Print the initial and final bool arrays with a space separator */
|
||||
print_lfsr(lfsr_init_values, lfsr_length);
|
||||
print_string(" ");
|
||||
print_lfsr(lfsr_register, lfsr_length);
|
||||
|
||||
return 0;
|
||||
}
|
||||
62
hw6/hw4programs/life.oat
Normal file
62
hw6/hw4programs/life.oat
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
global len = 4;
|
||||
|
||||
int check(int[][] board, int i, int j, int count) {
|
||||
if ((i >= 0) & (j >= 0) & (i < len) & (j < len)) {
|
||||
return count + board[i][j];
|
||||
} else {
|
||||
return count;
|
||||
}
|
||||
}
|
||||
|
||||
int val_at(int[][] board, int i, int j) {
|
||||
var alive = board[i][j];
|
||||
var count = 0;
|
||||
count = check(board, i-1, j-1, count);
|
||||
count = check(board, i-1, j , count);
|
||||
count = check(board, i-1, j+1, count);
|
||||
|
||||
count = check(board, i , j-1, count);
|
||||
count = check(board, i , j+1, count);
|
||||
|
||||
count = check(board, i+1, j-1, count);
|
||||
count = check(board, i+1, j , count);
|
||||
count = check(board, i+1, j+1, count);
|
||||
|
||||
if (alive == 1) {
|
||||
if (count < 2) {
|
||||
return 0;
|
||||
} else if (count < 4) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
if (count == 3) {
|
||||
return 1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
int program (int argc, string[] argv) {
|
||||
var board = new int[][]{ new int[]{0, 0, 0, 0},
|
||||
new int[]{0, 1, 1, 1},
|
||||
new int[]{1, 1, 1, 0},
|
||||
new int[]{0, 0, 0, 0} };
|
||||
|
||||
var new_board = new int[][]{ new int[]{0, 0, 0, 0},
|
||||
new int[]{0, 0, 0, 0},
|
||||
new int[]{0, 0, 0, 0},
|
||||
new int[]{0, 0, 0, 0} };
|
||||
for (var i=0; i < 4; i=i+1;) {
|
||||
new_board[i] = new int[4];
|
||||
for (var j=0; j < 4; j=j+1;) { new_board[i][j] = val_at(board, i,j); }
|
||||
}
|
||||
|
||||
for (var i = 0; i < len; i = i+1;) {
|
||||
for (var j = 0; j < len; j = j+1;) {
|
||||
print_int(new_board[i][j]);
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
26
hw6/hw4programs/maxsubsequence.oat
Normal file
26
hw6/hw4programs/maxsubsequence.oat
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
int maxsum(int[] arr, int size) {
|
||||
var maxarr = new int[size];
|
||||
var maxs = 0;
|
||||
maxarr[0] = arr[0];
|
||||
for(var i = 0; i < size; i = i+1;){
|
||||
for(var j = 0; j < i; j=j+1;){
|
||||
if(arr[i] > arr[j] & maxarr[i] < maxarr[j] + arr[i]){
|
||||
maxarr[i] = maxarr[j] + arr[i];
|
||||
}
|
||||
}
|
||||
if(maxs < maxarr[i]){
|
||||
maxs = maxarr[i];
|
||||
}
|
||||
}
|
||||
return maxs;
|
||||
}
|
||||
|
||||
int program (int argc, string[] argv) {
|
||||
var array = new int[]{1,101,2,3,101,4,5};
|
||||
var max_ans = maxsum(array, 7);
|
||||
return max_ans;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
30
hw6/hw4programs/sieve.oat
Normal file
30
hw6/hw4programs/sieve.oat
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
int sieve(int n) {
|
||||
var arr = new bool[n];
|
||||
for (var i=0; i < n; i=i+1;) { arr[i] = true; }
|
||||
|
||||
arr[0] = false;
|
||||
arr[1] = false;
|
||||
|
||||
for(var i = 0; i < n; i=i+1;) {
|
||||
if(arr[i]){
|
||||
for(var j = i * 2; j < n; j=j+i;){
|
||||
arr[j] = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var count = 0;
|
||||
for(var i = 0; i < n; i=i+1;){
|
||||
if(arr[i]) {
|
||||
count = count + 1;
|
||||
}
|
||||
}
|
||||
|
||||
return count;
|
||||
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var n = 100;
|
||||
return sieve(n);
|
||||
}
|
||||
62
hw6/hw4programs/sp22_tests/bellmanford.oat
Normal file
62
hw6/hw4programs/sp22_tests/bellmanford.oat
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
/* take infinity to be int max value, assume shortest path is shorter than this */
|
||||
global infinity = 9223372036854775807;
|
||||
|
||||
/* takes number of vertices n (ints 0 to n -1), number of edges, edge list (u, v, w) of weighted directed graph, source vertex
|
||||
computes length of shortest path from source vertex to every vertex, distances invalid if negative cycles exist
|
||||
*/
|
||||
int[] bellman_ford(int n, int m, int[][] edges, int source) {
|
||||
var distance = new int[n];
|
||||
|
||||
for (var i = 0; i < n; i = i + 1;) {
|
||||
distance[i] = infinity;
|
||||
}
|
||||
distance[source] = 0;
|
||||
|
||||
for (var i = 0; i < n - 1; i = i + 1;) {
|
||||
for(var j = 0; j < m; j = j + 1;) {
|
||||
var edge = edges[j];
|
||||
var u = edge[0];
|
||||
var v = edge[1];
|
||||
var w = edge[2];
|
||||
if (distance[u] + w < distance[v]) {
|
||||
distance[v] = distance[u] + w;
|
||||
}
|
||||
}
|
||||
}
|
||||
/* check negative cycles */
|
||||
for(var j = 0; j < m; j = j + 1;) {
|
||||
var edge = edges[j];
|
||||
var u = edge[0];
|
||||
var v = edge[1];
|
||||
var w = edge[2];
|
||||
if (distance[u] + w < distance[v]) {
|
||||
print_string("Negative cycle detected! Distances are invalid\n");
|
||||
}
|
||||
}
|
||||
return distance;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var n = 6;
|
||||
var m = 9;
|
||||
var edges = new int[][]{ new int[]{0, 1, 4},
|
||||
new int[]{0, 2, -3},
|
||||
new int[]{2, 1, -2},
|
||||
new int[]{2, 3, 10},
|
||||
new int[]{3, 1, 6},
|
||||
new int[]{2, 4, 1},
|
||||
new int[]{2, 5, -9},
|
||||
new int[]{3, 5, -1},
|
||||
new int[]{5, 4, 4} };
|
||||
|
||||
var source = 0;
|
||||
var dest = 5;
|
||||
var shortest_path = -12;
|
||||
|
||||
var distance = bellman_ford(n, m, edges, source);
|
||||
if (distance[dest] == shortest_path) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
54
hw6/hw4programs/sp22_tests/bucket_sort.oat
Normal file
54
hw6/hw4programs/sp22_tests/bucket_sort.oat
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
global arr = new int[]{394, 901, 617, 876, 714, 865, 234, 79, 100, 123};
|
||||
global arr_size = 10;
|
||||
|
||||
int max_in_arr(int[] arr, int size) {
|
||||
var max = 0;
|
||||
for (var i = 0; i < 10; i = i + 1;) {
|
||||
if (arr[i] > max) {
|
||||
max = arr[i];
|
||||
}
|
||||
}
|
||||
return max;
|
||||
}
|
||||
|
||||
void print_arr(int[] arr, int size) {
|
||||
print_string("[");
|
||||
for (var i = 0; i < 10; i = i + 1;) {
|
||||
var elt_string = string_of_int(arr[i]);
|
||||
print_string(elt_string);
|
||||
if (i != size - 1) {
|
||||
print_string(", ");
|
||||
}
|
||||
}
|
||||
print_string("]\n");
|
||||
return;
|
||||
}
|
||||
|
||||
void populate_buckets(int[] arr, int size, int[] buckets) {
|
||||
for (var i = 0; i < size; i = i + 1;) {
|
||||
buckets[arr[i]] = arr[i];
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
void copy_from_buckets(int[] buckets, int size, int[] arr) {
|
||||
var counter = 0;
|
||||
for (var i = 0; i < size; i = i + 1;) {
|
||||
if (buckets[i] != 0) {
|
||||
arr[counter] = buckets[i];
|
||||
counter = counter + 1;
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var buckets_size = max_in_arr(arr, arr_size) + 1;
|
||||
var buckets = new int[buckets_size];
|
||||
|
||||
populate_buckets(arr, arr_size, buckets);
|
||||
copy_from_buckets(buckets, buckets_size, arr);
|
||||
print_arr(arr, arr_size);
|
||||
|
||||
return 1;
|
||||
}
|
||||
31
hw6/hw4programs/sp22_tests/carfueling.oat
Normal file
31
hw6/hw4programs/sp22_tests/carfueling.oat
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
int program (int argc, string[] argv) {
|
||||
var gas = new int[]{1,2,3,4,5};
|
||||
var costs = new int[]{3,4,5,1,2};
|
||||
var size = 5;
|
||||
var index = can_complete(gas, costs, size);
|
||||
return index;
|
||||
}
|
||||
|
||||
int can_complete(int[] gas, int[] costs, int size) {
|
||||
var curr = 0;
|
||||
var total = 0;
|
||||
var diff = 0;
|
||||
var startIdx = 0;
|
||||
|
||||
for (var i = 0; i < size; i = i + 1;) {
|
||||
diff = gas[i] - costs[i];
|
||||
total = total + diff;
|
||||
curr = curr + diff;
|
||||
|
||||
if (curr < 0) {
|
||||
startIdx = i + 1;
|
||||
curr = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (total >= 0) {
|
||||
return startIdx;
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
51
hw6/hw4programs/sp22_tests/cla.oat
Normal file
51
hw6/hw4programs/sp22_tests/cla.oat
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
int program (int argc, string[] argv) {
|
||||
return cla(11, 15);
|
||||
}
|
||||
|
||||
int cla(int a, int b) {
|
||||
var prop0 = propagate(((a [&] 1) == 1), ((b [&] 1) == 1));
|
||||
var prop1 = propagate(((a [&] 2) == 2), ((b [&] 2) == 2));
|
||||
var prop2 = propagate(((a [&] 4) == 4), ((b [&] 4) == 4));
|
||||
var prop3 = propagate(((a [&] 8) == 8), ((b [&] 8) == 8));
|
||||
var gen0 = generate(((a [&] 1) == 1), ((b [&] 1) == 1));
|
||||
var gen1 = generate(((a [&] 2) == 2), ((b [&] 2) == 2));
|
||||
var gen2 = generate(((a [&] 4) == 4), ((b [&] 4) == 4));
|
||||
var gen3 = generate(((a [&] 8) == 8), ((b [&] 8) == 8));
|
||||
|
||||
var c1 = gen0;
|
||||
var c2 = gen1 | (gen0 & prop1);
|
||||
var c3 = gen2 | (gen0 & prop1 & prop2) | (gen1 & prop2);
|
||||
var c4 = gen3 | (gen0 & prop1 & prop2 & prop3) | (gen1 & prop2 & prop3) | (gen2 & prop3);
|
||||
|
||||
var carryValues = 0;
|
||||
if (c1) {
|
||||
carryValues = carryValues [|] 2;
|
||||
}
|
||||
if (c2) {
|
||||
carryValues = carryValues [|] 4;
|
||||
}
|
||||
if (c3) {
|
||||
carryValues = carryValues [|] 8;
|
||||
}
|
||||
if (c4) {
|
||||
carryValues = carryValues [|] 16;
|
||||
}
|
||||
var x = xor(a, b);
|
||||
|
||||
var r = xor(carryValues, x);
|
||||
return r;
|
||||
}
|
||||
|
||||
bool propagate (bool p1, bool p2)
|
||||
{
|
||||
return p1 | p2;
|
||||
}
|
||||
|
||||
bool generate (bool g1, bool g2)
|
||||
{
|
||||
return g1 & g2;
|
||||
}
|
||||
|
||||
int xor (int x, int y) {
|
||||
return ~(x [&] y) [&] (x [|] y);
|
||||
}
|
||||
38
hw6/hw4programs/sp22_tests/cocktailsort.oat
Normal file
38
hw6/hw4programs/sp22_tests/cocktailsort.oat
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
int[] cocktailsort(int[] a, int size) {
|
||||
var swapped = true;
|
||||
var start = 0;
|
||||
var end = size - 1;
|
||||
|
||||
while (swapped) {
|
||||
swapped = false;
|
||||
for (var i = start; i < end; i = i + 1;) {
|
||||
if (a[i] > a[i + 1]) {
|
||||
var temp = a[i];
|
||||
a[i] = a[i + 1];
|
||||
a[i + 1] = temp;
|
||||
swapped = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (swapped) {
|
||||
swapped = false;
|
||||
end = end - 1;
|
||||
for (var i = end - 1; i >= start; i = i - 1;) {
|
||||
if (a[i] > a[i + 1]) {
|
||||
var temp = a[i + 1];
|
||||
a[i + 1] = a[i];
|
||||
a[i] = temp;
|
||||
swapped = true;
|
||||
}
|
||||
}
|
||||
start = start + 1;
|
||||
}
|
||||
}
|
||||
return a;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var array = new int[]{13, 42, 32, 3, 2, 6};
|
||||
var result = cocktailsort(array, 6);
|
||||
return result[5];
|
||||
}
|
||||
27
hw6/hw4programs/sp22_tests/coinchange.oat
Normal file
27
hw6/hw4programs/sp22_tests/coinchange.oat
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
int change(int amount, int[] coins, int length) {
|
||||
|
||||
var dp = new int[amount + 1];
|
||||
dp[0] = 1;
|
||||
|
||||
for (var i = 0; i < length; i = i+1;) {
|
||||
var coinVal = coins[i];
|
||||
|
||||
for (var j = coinVal; j <= amount; j=j+1;) {
|
||||
dp[j] = dp[j] + dp[j - coinVal];
|
||||
}
|
||||
}
|
||||
return dp[amount];
|
||||
}
|
||||
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var amount = 11;
|
||||
var coins = new int[]{1, 5, 10, 25};
|
||||
|
||||
|
||||
var ret = change(amount, coins, 4);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
33
hw6/hw4programs/sp22_tests/collatz.oat
Normal file
33
hw6/hw4programs/sp22_tests/collatz.oat
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
bool isEven(int x) {
|
||||
if (divide(x, 2) * 2 == x) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
int divide (int a, int b) {
|
||||
if (b == 0) {
|
||||
return 0;
|
||||
}
|
||||
var result = 0;
|
||||
while (a > 0) {
|
||||
a = a - b;
|
||||
result = result + 1;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
int helper(int x, int count) {
|
||||
if (x == 1) {
|
||||
return count;
|
||||
} else if (isEven(x)) {
|
||||
return helper(divide(x, 2), count + 1);
|
||||
} else {
|
||||
return helper(3 * x + 1, count + 1);
|
||||
}
|
||||
}
|
||||
int collatz(int x) {
|
||||
return helper(x, 0);
|
||||
}
|
||||
int program(int argc, string[] argv) {
|
||||
return collatz(12);
|
||||
}
|
||||
69
hw6/hw4programs/sp22_tests/dijkstras.oat
Normal file
69
hw6/hw4programs/sp22_tests/dijkstras.oat
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
global intmax = 2147483645;
|
||||
global n = 9;
|
||||
|
||||
int minDistance(int[] dist, bool[] sptSet) {
|
||||
var min = intmax;
|
||||
var min_index = intmax;
|
||||
for (var v = 0; v < n; v = v + 1;) {
|
||||
if ((sptSet[v] == false) & (dist[v] <= min)) {
|
||||
min = dist[v];
|
||||
min_index = v;
|
||||
}
|
||||
}
|
||||
|
||||
return min_index;
|
||||
}
|
||||
|
||||
|
||||
int[] dijkstra(int[][] adjGraph, int src) {
|
||||
var dist = new int[n];
|
||||
|
||||
var sptSet = new bool[n];
|
||||
|
||||
for (var i = 0; i < n; i=i+1;) {
|
||||
dist[i] = intmax;
|
||||
sptSet[i] = false;
|
||||
}
|
||||
|
||||
dist[src] = 0;
|
||||
|
||||
for (var count = 0; count < n - 1; count = 1 + count;) {
|
||||
|
||||
var u = minDistance(dist, sptSet);
|
||||
|
||||
sptSet[u] = true;
|
||||
|
||||
for (var v = 0; v < n; v = v + 1;) {
|
||||
if (!sptSet[v] & adjGraph[u][v] != 0 & (dist[u] != intmax)
|
||||
& dist[u] + adjGraph[u][v] < dist[v]) {
|
||||
dist[v] = dist[u] + adjGraph[u][v];
|
||||
}
|
||||
}
|
||||
}
|
||||
return dist;
|
||||
}
|
||||
|
||||
|
||||
int program(int argc, string[] argv) {
|
||||
var graph = new int[][] { new int[] { 0, 4, 0, 0, 0, 0, 0, 8, 0 },
|
||||
new int[] { 4, 0, 8, 0, 0, 0, 0, 11, 0 },
|
||||
new int[] { 0, 8, 0, 7, 0, 4, 0, 0, 2 },
|
||||
new int[] { 0, 0, 7, 0, 9, 14, 0, 0, 0 },
|
||||
new int[] { 0, 0, 0, 9, 0, 10, 0, 0, 0 },
|
||||
new int[] { 0, 0, 4, 14, 10, 0, 2, 0, 0 },
|
||||
new int[] { 0, 0, 0, 0, 0, 2, 0, 1, 6 },
|
||||
new int[] { 8, 11, 0, 0, 0, 0, 1, 0, 7 },
|
||||
new int[] { 0, 0, 2, 0, 0, 0, 6, 7, 0 } };
|
||||
|
||||
var output = dijkstra(graph, 0);
|
||||
var dist = new int[] {0, 4, 12, 19, 21, 11, 9, 8, 14};
|
||||
var success = true;
|
||||
for (var i = 0; i< n; i = i+1;) {
|
||||
success = success & (output[i] == dist[i]);
|
||||
}
|
||||
if (success) {
|
||||
return 0;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||

72 hw6/hw4programs/sp22_tests/evil.oat Normal file
@@ -0,0 +1,72 @@
int pow (int a, int b) {
  var out = 1;
  while (b != 0) {
    out = out * a;
    b = b - 1;
  }
  return out;
}

int remainder (int a, int b) {
  if (b == 0) {
    print_string("Can't divide by 0");
    return -1;
  }

  while (a >= b) {
    a = a - b;
  }

  return a;
}

int divide (int a, int b) {
  if (b == 0) {
    print_string("Can't divide by 0");
    return -1;
  }

  var out = 0;
  while (a >= b) {
    a = a - b;
    out = out + 1;
  }

  return out;
}

int is_evil (int a) {
  if (a < 0) {
    return 0;
  }
  var cont = true;
  var y = 0;
  var num1 = 0;
  var num2 = 0;
  var r = 0;
  var remctr = 0;
  while (cont) {
    num1 = pow(2, y);
    r = remainder(a, num1);
    if (r == a) {
      cont = false;
    } else {
      y = y + 1;
    }
  }
  for (var i = 0; i < y; i = i + 1;) {
    num1 = pow(2, i);
    num2 = divide(a, num1);
    if (remainder(num2, 2) == 1) {
      remctr = remctr + 1;
    }
  }
  if (remainder(remctr, 2) == 0) {
    return 1;
  }
  return 0;
}

int program(int argc, string[] argv) {
  return is_evil(378);
}

67 hw6/hw4programs/sp22_tests/fizzbuzz.oat Normal file
@@ -0,0 +1,67 @@
/* naive string-to-int conversion */
int int_of_string(string s) {
  var result = 0;
  var chars = array_of_string(s);

  for (var i = 0; i < length_of_string(s); i = i + 1;) {
    result = result * 10;
    /* ascii('0') == 48 */
    result = result + chars[i] - 48;
  }

  return result;
}

/* returns true if a is a multiple of b. naive */
bool is_multiple_of(int a, int b) {
  for (var i = 0;; i = i + 1;) {
    if (b * i > a) {
      return false;
    }
    if (b * i == a) {
      return true;
    }
  }

  return false;
}

/*
  computes the general fizzbuzz of n.
  first argument should be the length of argv
  argv should have an unused element at index 0,
  and then contiguous pairs of strings of integers and their replacements.
  (e.g. pass "3 fizz 5 buzz" as command line arguments for classic fizzbuzz)
*/
string generalFizzBuzz(int argc, string[] argv, int n) {
  var s = "";

  for (var j = 1; j < argc; j = j + 2;) {
    var modulo_base = argv[j];

    if (is_multiple_of(n, int_of_string(modulo_base))) {
      s = string_cat(s, argv[j + 1]);
    }
  }

  if (length_of_string(s) == 0) {
    return string_of_int(n);
  } else {
    return s;
  }
}

int program (int argc, string[] argv) {
  for (var i = 1; i <= 100; i = i + 1;) {
    print_string(generalFizzBuzz(argc, argv, i));
    print_string("\n");
  }

  /* as far as i can tell, the testing suite trims the last character
     if it's a newline. so print an extra newline before the '0' from our
     return gets concatenated to our stdout
  */
  print_string("\n");

  return 0;
}

114 hw6/hw4programs/sp22_tests/hash_table.oat Normal file
@@ -0,0 +1,114 @@
global empty = 1;
global filled = 2;
global tlen = 0;
global metas = 1;
global keys = 2;
global values = 3;

int[][] create(int size) {
  var hash_table= new int[][]{
    new int[]{size},
    new int[size],
    new int[size],
    new int[size]
  };

  for (var i = 0; i < size; i = i + 1;) {
    hash_table[metas][i] = empty;
  }

  return hash_table;
}

int hash(int h) {
  return h;
}

int table_len(int[][] hash_table) {
  return hash_table[tlen][0];
}

int get_index(int[][] hash_table, int key) {
  var len = table_len(hash_table);
  var index = hash(key) [&] (len - 1);
  return index;
}

bool insert(int[][] hash_table, int key, int value) {
  var start_index = get_index(hash_table, key);
  var len = table_len(hash_table);
  var mask = len - 1;

  for (var i = 0; i < len; i = i + 1;) {
    var index = (start_index + i) [&] mask;

    if (hash_table[metas][index] == filled) {
      if (hash_table[keys][index] == key) {
        hash_table[values][index] = value;
        return true;
      }
    } else if (hash_table[metas][index] == empty) {
      hash_table[metas][index] = filled;
      hash_table[keys][index] = key;
      hash_table[values][index] = value;
      return true;
    }
  }

  return false;
}

int[] get(int[][] hash_table, int key) {
  var start_index = get_index(hash_table, key);
  var len = table_len(hash_table);
  var mask = len - 1;

  for (var i = 0; i < len; i = i + 1;) {
    var index = (start_index + i) [&] mask;
    if (hash_table[metas][index] == filled & hash_table[keys][index] == key) {
      return new int[]{hash_table[values][index], 1};
    } else if (hash_table[metas][index] == empty) {
      return new int[]{0, 0};
    }
  }

  return new int[]{0, 0};
}

int program (int argc, string[] argv) {
  var tbl = create(16);

  var sample_keys = new int[]{5, 7, 2, 23, 65, 878, 23, 56, 76, 12};
  var sample_values = new int[]{5, 7, 2, 23, 65, 878, 23, 56, 76, 12};
  for (var i = 0; i < 10; i = i + 1;) {
    var success = insert(tbl, sample_keys[i], sample_values[i]);
    if (!success) {
      print_string("failed to insert!\n");
      return 1;
    }
  }

  for (var i = 0; i < 10; i = i + 1;) {
    var result = get(tbl, sample_keys[i]);
    if (result[1] == 1) {
      print_int(sample_keys[i]);
      print_string(" => ");
      print_int(result[0]);
      print_string(", ");

      if (result[0] != sample_values[i]) {
        print_string("error! should have found value ");
        print_int(sample_values[i]);
        print_string("\n");
        return 1;
      }
    } else {
      print_string("entry ");
      print_int(sample_keys[i]);
      print_string(" not found\n");
      return 1;
    }
  }

  return 0;
}

58 hw6/hw4programs/sp22_tests/intpalindrome.oat Normal file
@@ -0,0 +1,58 @@
int divide (int a, int b) {
  if (b == 0) {
    print_string("Impossible!");
    return 0;
  }

  var result = 0;
  while (a >= b) {
    a = a - b;
    result = result + 1;
  }

  return result;
}

int remainder (int a, int b) {
  if (b == 0) {
    print_string("Impossible!");
    return 0;
  }

  while (a >= b) {
    a = a - b;
  }

  return a;
}

bool isPalindrome (int input) {
  var testInput = input;
  var reversed = 0;
  var remainder = 0;

  while (testInput != 0) {
    remainder = remainder(testInput, 10);
    reversed = reversed * 10 + remainder;
    testInput = divide(testInput, 10);
  }

  if (input == reversed) {
    return true;
  }
  return false;
}

int program (int argc, string[] argv) {
  var palindrome = 123454321;
  var nonPalindrome = 5589260144;

  var bool1 = isPalindrome(palindrome);
  var bool2 = isPalindrome(nonPalindrome);

  if (bool1 & !bool2) {
    print_string("Correct!");
  }

  return 0;
}

37 hw6/hw4programs/sp22_tests/islands.oat Normal file
@@ -0,0 +1,37 @@
global islands = new int[][] {
  new int[] {0, 1, 0, 0, 0, 1, 0},
  new int[] {1, 1, 0, 1, 1, 1, 0},
  new int[] {1, 0, 0, 0, 0, 0, 0},
  new int[] {0, 0, 1, 1, 1, 0, 1},
  new int[] {0, 0, 1, 1, 0, 0, 0},
  new int[] {0, 0, 0, 1, 0, 0, 0}
};

void dfs(int i, int j) {
  if (i >= 0 & i < 6 & j >= 0 & j < 7) {
    if (islands[i][j] != 0) {
      islands[i][j] = 0;
      dfs(i - 1, j);
      dfs(i + 1, j);
      dfs(i, j + 1);
      dfs(i, j - 1);
    }
  }
  return;
}

int program(int argc, string[] argv) {
  var num_islands = 0;

  for (var i = 0; i < 6; i = i + 1;) {
    for (var j = 0; j < 7; j = j + 1;) {
      if (islands[i][j] != 0) {
        num_islands = num_islands + 1;
        dfs(i, j);
      }
    }
  }

  return num_islands;
}

41 hw6/hw4programs/sp22_tests/knapsack.oat Normal file
@@ -0,0 +1,41 @@
int max(int a, int b) {
  if (a > b) {
    return a;
  }
  return b;
}

int knapsack(int[] v, int[] w, int n, int maxw)
{
  if (maxw < 0) {
    return -99999999;
  }
  else if (maxw == 0) {
    return 0;
  }
  else if (n < 0) {
    return 0;
  }

  var include = v[n] + knapsack(v, w, n - 1, maxw - w[n]);

  var exclude = knapsack(v, w, n - 1, maxw);

  return max(include, exclude);
}


int program(int argc, string[] argv)
{
  var v = new int[]{12, 1, 33, 4, 1, 2, 1, 59, 4};
  var w = new int[]{2, 10, 34, 9, 20, 26, 14, 5, 4};

  var maxw = 30;

  var n = 4;

  var res = knapsack(v, w, 8, maxw);

  return res;
}

41 hw6/hw4programs/sp22_tests/largest_cont_sum.oat Normal file
@@ -0,0 +1,41 @@
int largest_continuous_sum(int[] arr, int lo, int hi) {
  if (lo == hi) {
    return arr[lo];
  }
  var mid = (lo + hi) >> 1;
  var left_sum = largest_continuous_sum(arr, lo, mid);
  var right_sum = largest_continuous_sum(arr, mid + 1, hi);

  var sum = arr[mid + 1];
  var middle_right_sum = arr[mid + 1];
  for (var i = mid + 2; i <= hi; i = i + 1; ) {
    sum = sum + arr[i];
    if (sum > middle_right_sum) {
      middle_right_sum = sum;
    }
  }

  sum = arr[mid];
  var middle_left_sum = arr[mid];
  for (var i = mid - 1; i >= lo; i = i - 1; ) {
    sum = sum + arr[i];
    if (sum > middle_left_sum) {
      middle_left_sum = sum;
    }
  }

  var middle_sum = middle_left_sum + middle_right_sum;

  if (middle_sum > left_sum & middle_sum > right_sum) {
    return middle_sum;
  } else if (left_sum > middle_sum & left_sum > right_sum) {
    return left_sum;
  }

  return right_sum;
}

int program(int argc, string[] argv) {
  var arr = new int[]{2, -3, 100, 5, 20, -50};
  return largest_continuous_sum(arr, 0, 5);
}

22 hw6/hw4programs/sp22_tests/maxBuckets.oat Normal file
@@ -0,0 +1,22 @@
int maxBuckets(int[] sBucket, int[] eBucket, int numBuckets, int range) {
  var buckets = new int[range];
  for (var i=0; i<numBuckets; i=i+1;) {
    buckets[sBucket[i]] = buckets[sBucket[i]] + 1;
    buckets[eBucket[i]] = buckets[eBucket[i]] - 1;
  }
  var max = 0;
  var curr = 0;
  for (var i=0; i<range; i=i+1;) {
    curr = curr + buckets[i];
    if (curr > max) {
      max = curr;
    }
  }
  return max;
}

int program (int argc, string[] argv) {
  var s1 = new int[]{0, 1, 2, 3, 4};
  var e1 = new int[]{5, 6, 7, 8, 9};
  return maxBuckets(s1, e1, 5, 10);
}

58 hw6/hw4programs/sp22_tests/max_envelopes.oat Normal file
@@ -0,0 +1,58 @@
void bubble_sort(int[][] arr, int n) {
  for (var i = 0; i < n-1; i = i+1;) {
    for (var j = 0; j < n-i-1; j = j+1;) {
      if ((arr[j][0] > arr[j+1][0]) |
          ((arr[j][0] == arr[j+1][0]) & (arr[j][1] < arr[j+1][1])) ) {
        var temp = arr[j+1];
        arr[j+1] = arr[j];
        arr[j] = temp;
      }
    }
  }
  return;
}

int longest_increasing_subsequence(int[] arr, int n) {
  var dp = new int[n];
  var max_subsequence = 0;

  for (var i = 0; i < n; i = i+1;) {
    dp[i] = 1;
    var max_seq = 0;
    for (var j = 0; j < i; j = j+1;) {
      if (arr[i] > arr[j] & dp[j] > max_seq) {
        max_seq = dp[j];
      }
    }
    dp[i] = max_seq + 1;

    if (dp[i] > max_subsequence) {
      max_subsequence = dp[i];
    }
  }

  return max_subsequence;
}

int max_envelopes(int[][] envelopes, int n) {
  bubble_sort(envelopes, n);

  var heights = new int[n];
  for (var i = 0; i < n; i = i+1;) {
    heights[i] = envelopes[i][1];
  }

  return longest_increasing_subsequence(heights, n);
}

int program(int argc, string[] argv) {
  var envelopes = new int[][] {
    new int[] {5, 4},
    new int[] {6, 4},
    new int[] {6, 7},
    new int[] {2, 3}
  };
  var n = 4;
  var res = max_envelopes(envelopes, n);
  return res;
}

64 hw6/hw4programs/sp22_tests/maximalBlock.oat Normal file
@@ -0,0 +1,64 @@
int findLargestBlock(int[][][] mat, int i, int j, int k, int maxsize)
{
  if (i < 0 | j < 0 | k < 0) {
    return 0;
  }

  var d = new int[7];

  d[0] = findLargestBlock(mat, i, j, k - 1, maxsize);
  d[1] = findLargestBlock(mat, i, j - 1, k, maxsize);
  d[2] = findLargestBlock(mat, i-1, j - 1, k, maxsize);
  d[3] = findLargestBlock(mat, i, j - 1, k-1, maxsize);
  d[4] = findLargestBlock(mat, i-1, j - 1, k, maxsize);
  d[5] = findLargestBlock(mat, i-1, j , k-1, maxsize);
  d[6] = findLargestBlock(mat, i-1, j - 1, k-1, maxsize);

  var size = 0;
  if (mat[i][j][k] == 1) {
    var min = d[0];
    for (var i = 1; i < 7; i = i + 1;){
      if (min > d[i]) {
        min = d[i];
      }
    }
    size = 1 + min;
  }

  var max = 0;
  if (maxsize > size){
    max = maxsize;
  } else {
    max = size;
  }
  return max;
}

int[][][] getTensor(){
  return new int[][][]
    {new int[][]{
      new int[]{1, 0, 1},
      new int[]{1, 1, 1},
      new int[]{1, 1, 0}
    },
    new int[][]{
      new int[]{1, 1, 1},
      new int[]{1, 1, 1},
      new int[]{1, 1, 1}
    },
    new int[][]{
      new int[]{0, 1, 1},
      new int[]{0, 1, 1},
      new int[]{1, 1, 1}
    }
  };
}

int program()
{

  var mat = getTensor();

  print_string("The size of the largest cubical block of 1's is ");
  return findLargestBlock(mat, 3-1, 3-1, 3-1, 0);
}

54 hw6/hw4programs/sp22_tests/nqueens.oat Normal file
@@ -0,0 +1,54 @@
global n = 4;

int n_queens(int row, int state) {
  if (row >= n) {
    for (var i = 0; i < n; i = i + 1; ) {
      var cur = state >> 4 * i [&] 15;
      for (var j = 0; j < n; j = j + 1; ) {
        if (cur == j) {
          print_string("*");
        } else {
          print_string("-");
        }
      }
      print_string("\n");
    }
    print_string("\n");
    return 1;
  }

  var total = 0;
  for (var i = 0; i < n; i = i + 1; ) {
    var ok = true;
    for (var j = 1; j <= row; j = j + 1; ) {
      var cur = state >> 4 * (j - 1) [&] 15;
      if (cur == i | cur == i - j | cur == i + j) {
        ok = false;
        j = row;
      }
    }
    if (ok) {
      total = total + n_queens(row + 1, state << 4 [|] i);
    }
  }

  return total;
}

int atoi(string s) {
  var ret = 0;
  var arr = array_of_string(s);
  for (var i = 0; i < arr[-1]; i = i + 1; ) {
    ret = ret * 10 + arr[i] - 48;
  }
  return ret;
}

int program(int argc, string[] argv) {
  if (argc >= 2) {
    n = atoi(argv[1]);
  }

  print_int(n_queens(0, 0));
  return 0;
}

51 hw6/hw4programs/sp22_tests/pancake_sort.oat Normal file
@@ -0,0 +1,51 @@
void flip(int[] arr, int n) {
  var top = 0;
  var bottom = n - 1;

  while (top < bottom) {
    var top_val = arr[top];
    arr[top] = arr[bottom];
    arr[bottom] = top_val;
    top = top + 1;
    bottom = bottom - 1;
  }
  return;
}

int get_max_index(int[] arr, int n) {
  var max_index = 0;
  var max = 0;

  for (var i = 0; i < n; i = i + 1;) {
    if (arr[i] > max) {
      max = arr[i];
      max_index = i;
    }
  }
  return max_index;
}

void pancake_sort(int[] arr, int len) {
  var n = len;

  while (n > 1) {
    var max_index = get_max_index(arr, n);
    flip(arr, max_index + 1);
    flip(arr, n);
    n = n - 1;
  }
  return;
}


int program(int argc, string[] argv) {
  var arr = new int[]{111, 73, 18, 139, 64, 193, 212, 97, 4, 87, 46, 201, 27, 117, 82, 9, 258, 2, 65, 152};

  pancake_sort(arr, 20);

  for (var i = 0; i < 20; i = i + 1;) {
    print_int(arr[i]);
    print_string(" ");
  }
  return 0;
}

35 hw6/hw4programs/sp22_tests/pascal.oat Normal file
@@ -0,0 +1,35 @@
string string_of_int_array(int[] arr, int len) {
  var res = "[";
  for (var i = 0; i < len; i = i + 1;) {
    res = string_cat(res, " ");
    res = string_cat(res, string_of_int(arr[i]));
  }
  res = string_cat(res, " ]\n");
  return res;
}

string pascal_triangle(int num_rows) {
  var current_row = new int[] {1};

  var result = "";

  for (var current_len = 1; current_len <= num_rows; current_len = current_len + 1;) {
    result = string_cat(result, string_of_int_array(current_row, current_len));

    var new_row = new int[current_len + 1];

    new_row[0] = current_row[0];
    for (var j = 1; j < current_len; j = j + 1;) {
      new_row[j] = current_row[j - 1] + current_row[j];
    }
    new_row[current_len] = current_row[current_len - 1];
    current_row = new_row;
  }

  return result;
}

int program(int argc, string[] argv) {
  print_string(pascal_triangle(10));
  return 0;
}

22 hw6/hw4programs/sp22_tests/rotate.oat Normal file
@@ -0,0 +1,22 @@
void left_rotate_by_one(int[] arr, int size) {
  var temp = arr[0];
  for (var i = 0; i < size - 1; i = i + 1;) {
    arr[i] = arr[i + 1];
  }
  arr[size - 1] = temp;
  return;
}

void left_rotate(int[] arr, int size, int num) {
  for (var i = 0; i < num; i = i + 1;) {
    left_rotate_by_one(arr, size);
  }
  return;
}

int program(int argc, string[] args) {
  var arr = new int[]{1, 2, 3, 4, 5, 6, 7, 8, 9};
  left_rotate(arr, 9, 3);
  return arr[0];
}

34 hw6/hw4programs/sp22_tests/squaresort.oat Normal file
@@ -0,0 +1,34 @@
int[] sortedSquares(int[] input, int n) {
  var result = new int[n];
  var i = 0;
  var j = n - 1;
  for (var p = n - 1; p >= 0; p = p - 1;) {
    if (abs(input[i]) > abs(input[j])) {
      result[p] = input[i] * input[i];
      i = i + 1;
    } else {
      result[p] = input[j] * input[j];
      j = j - 1;
    }
  }
  return result;
}

int abs(int x) {
  return -x;
}

int program (int argc, string[] argv) {

  var ar = new int[]{ -8, -2 , 1, 3, 10};

  var out = sortedSquares(ar, 5);
  for(var i=0; i<5; i=i+1;)
  {
    print_int(out[i]);
    print_string(" ");
  }

  return 0;
}

69 hw6/hw4programs/toascii.oat Normal file
@@ -0,0 +1,69 @@
/*
  Takes a .gim graphics file;
  prints a simple ascii representation of the colors.
  Optimized for light-text-on-dark-background terminals
  (inverts the colors), and subsamples the height by .5
  to better maintain aspect ratio of square files.

  -- John Hewitt, CIS 341 2017sp
*/

int program (int argc, string[] argv) {
  var s = argv[1];
  var width = get_width(s);
  var height = get_height(s);
  var bytes = load_image(s);
  print_string(string_of_int(width));
  print_string("x");
  print_string(string_of_int(height));
  print_string("\n");
  var rowlen = 0;
  var row = new int[width];
  var off = 1;
  for (var i=0; i < width*height; i=i+1;) {
    /*print_string(string_of_int(i));
    print_string("x");
    print_string(string_of_int(rowlen));
    print_string("x");
    print_string(string_of_int(width));
    print_string("x");
    print_string(string_of_int(bytes[i]));
    print_string("\n");*/
    if (bytes[i] > 230) {
      row[rowlen] = 64; /* @ */
    } else if (bytes[i] > 204) {
      row[rowlen] = 37; /* % */
    } else if (bytes[i] > 179) {
      row[rowlen] = 35; /* # */
    } else if (bytes[i] > 153) {
      row[rowlen] = 42; /* * */
    } else if (bytes[i] > 128) {
      row[rowlen] = 43; /* + */
    } else if (bytes[i] > 102) {
      row[rowlen] = 61; /* = */
    } else if (bytes[i] > 77) {
      row[rowlen] = 58; /* : */
    } else if (bytes[i] > 51) {
      row[rowlen] = 45; /* - */
    } else if (bytes[i] > 26) {
      row[rowlen] = 46; /* . */
    } else {
      row[rowlen] = 32; /* */
    }
    if (rowlen == width-1) {
      var test = off [&] 1;
      if (test == 1) {
        print_string(string_of_array(row));
        print_string("\n");
        off = 0;

      } else {
        off = 1;
      }
      rowlen = 0;
    } else {
      rowlen = rowlen + 1;
    }
  }
  return 0;
}

54 hw6/hw4programs/toposort.oat Normal file
@@ -0,0 +1,54 @@
/* Sumit Shyamsukha (ssumit) and Robert Zajac (rzajac) */
/* CIS341 Spring 2017 */

/* 0 is white, 1 is gray, 2 is black */
global color = int[]{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
global startTimes = int[]{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
global finishTimes = int[]{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
global topoSort = int[]{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
global numVertices = 16;
global index = 15;

void dfs(int[][] adj) {

  for (var i = 0; i < numVertices; i=i+1;) {
    if (color[i] == 0) {
      dfsHelper(adj, i, 0);
    }
  }
  return;
}

void dfsHelper(int[][] adj, int s, int t) {
  color[s] = 1;
  startTimes[s] = t;

  var stringRep = string_of_array(adj[s]);
  var length = length_of_string(stringRep);

  for (var i = 0; i < length; i=i+1;) {
    var neighbor = adj[s][i];
    if (color[neighbor] == 0) {
      dfsHelper(adj, neighbor, t + 1);
    }
  }

  color[s] = 2;
  finishTimes[s] = t + 1;
  topoSort[index] = s;
  index = index - 1;

  return;
}

int program(int argc, string[] argv) {
  /* Graph taken from https://i.stack.imgur.com/zuLmn.png */
  var adjList = new int[][]{new int[]{7, 10, 13, 14}, new int[]{2, 9, 13}, new int[]{10, 12, 13, 14}, new int[]{6, 8, 9, 11}, new int[]{7}, new int[]{6, 7, 9, 10}, new int[]{15}, new int[]{14}, new int[]{15}, new int[]{11, 14}, new int[]{14}, new int[]{}, new int[]{}, new int[]{}, new int[]{}, new int[]{}};
  dfs(adjList);
  for (var i = 0; i < numVertices; i=i+1;) {
    print_int(topoSort[i]);
    print_string (" ");
  }
  print_string ("-");
  return 0;
}

60 hw6/hw5programs/apoth_composition.oat Normal file
@@ -0,0 +1,60 @@
struct Dog {
  string name;
  int food
}

struct TrainableDog {
  Dog dog;
  int intelligence;
  (TrainableDog, int) -> int train
}

struct Person {
  string name;
  bool[][] areaMap;
  ((Person, int, int) -> void) visit
}

struct DogOwner {
  Person person;
  int numDogs;
  Dog[] dogs;
  (DogOwner, int) -> void feedDogs
}

int train(TrainableDog tdog, int food) {
  tdog.intelligence = tdog.intelligence + 1;
  tdog.dog.food = tdog.dog.food + food;
  return tdog.dog.food;
}

void visit(Person p, int x, int y) { p.areaMap[x][y] = true; return; }

void feedDogs(DogOwner do, int numDogs) {
  for (var i = 0; i < numDogs; i = i + 1;) {
    do.dogs[i].food = do.dogs[i].food + 1;
  }
  return;
}

int program(int argc, string[] argv) {
  var dog = new Dog { name = "Holmes"; food = 10 };
  var tdog = new TrainableDog { dog = dog; intelligence = 0; train = train };
  var areaMap = new bool[][] { new bool[] {false, false}, new bool[] {false, true} };
  var p = new Person { name = "Guy"; areaMap = areaMap; visit = visit };
  var do = new DogOwner { feedDogs = feedDogs; numDogs = 1; person = p; dogs = new Dog[] { dog } };

  var newFood = tdog.train(tdog, 5);
  p.visit(p, 0, 0);
  do.feedDogs(do, 1);

  var numVisited = 0;
  for (var i = 0; i < 2; i = i + 1;) {
    for (var j = 0; j < 2; j = j + 1;) {
      if (p.areaMap[i][j]) {
        numVisited = numVisited + 1;
      }
    }
  }
  return dog.food + numVisited;
}

174 hw6/hw5programs/burowski_bfs.oat Normal file
@@ -0,0 +1,174 @@
global debug = false;

struct Node {
  string val;
  bool visited;
  Queue neighbors;
  bool hasNext
}

struct QNode {
  Node node;
  QNode next;
  bool qhasNext
}

struct Queue {
  QNode head;
  QNode tail;
  int size
}

Node getIthInQueue(int i, Queue q) {
  var tmp = q.head;
  if (i + 1 == q.size) {
    return q.tail.node;
  }
  while (tmp.qhasNext) {
    if (i == 0) {
      return tmp.node;
    }
    tmp = tmp.next;
    i = i - 1;
  }
  return newNode("ERROR");
}

Node newNode(string v) {
  return new Node {val=v; hasNext=false; visited=false; neighbors=newEmptyQueue()};
}

QNode newQNode(Node n) {
  return new QNode {node=n; next=QNode null; qhasNext=false};
}

void printNode(Node n) {
  print_string(n.val);
  return;
}

Queue newEmptyQueue() {
  return new Queue {head=QNode null; tail=QNode null; size=0};
}


bool isEmpty(Queue q) {
  return q.size == 0;
}

void printNeighbors(Node n) {
  printNeighborsDebug(n, debug);
  return;
}

void printNeighborsDebug(Node n, bool d) {
  if (!d) {
    return;
  }
  var s = n.neighbors.size;
  for (var i = 0; i < s; i = i + 1;) {
    var x = getIthInQueue(i, n.neighbors);
    printNode(x);
  }
  print_string("\n");
  return;
}

void enqueue(Queue q, Node n) {
  var node = newQNode(n);
  if (q.size == 0) {
    q.head = node;
  } else {
    q.tail.qhasNext = true;
    q.tail.next = node;
  }
  q.size = q.size + 1;
  node.qhasNext = false;
  q.tail = node;
  return;
}

Node dequeue(Queue q) {
  if (isEmpty(q)) {
    return newNode("");
  }
  var tmp = q.head;
  q.head = tmp.next;
  q.size = q.size - 1;
  return tmp.node;
}

void addNeighbor(Node tgt, Node toAdd) {
  enqueue(tgt.neighbors, toAdd);
  return;
}

void bfs(Node start) {
  var q = newEmptyQueue();
  start.visited = true;
  enqueue(q, start);
  while (!isEmpty(q)) {
    var curr = dequeue(q);
    printNode(curr);
    var s = curr.neighbors.size;
    for (var i = 0; i < s; i = i + 1;) {
      var n = getIthInQueue(i, curr.neighbors);
      if (!(n.visited)) {
        n.visited = true;
        enqueue(q, n);
      }
    }
  }
  return;
}


void print_debug(string msg) {
  if (!debug) {
    return;
  }
  print_string(msg);
  return;
}

int program (int argc, string[] argv) {

  var there = newNode("there ");
  var should = newNode("should ");
  var be = newNode("be ");
  var better = newNode("better ");
  var food = newNode("food ");
  var options = newNode("options ");
  var on = newNode("on ");
  var campus = newNode("campus");

  addNeighbor(there, should);
  addNeighbor(there, be);
  addNeighbor(there, better);

  addNeighbor(should, there);
  addNeighbor(should, food);

  addNeighbor(be, there);
  addNeighbor(be, better);


  addNeighbor(better, there);
  addNeighbor(better, be);
  addNeighbor(better, options);

  addNeighbor(food, should);
  addNeighbor(food, options);

  addNeighbor(options, food);
  addNeighbor(options, better);
  addNeighbor(options, on);
  addNeighbor(options, campus);

  addNeighbor(on, options);

  addNeighbor(campus, options);

  bfs(there);
  return 0;
}

77 hw6/hw5programs/chmartin_heapsort.oat Normal file
@@ -0,0 +1,77 @@
struct Heap {
  int[] values;
  int arr_length;
  int size;
  (Heap, int) -> void insert;
  (Heap) -> int extract_min;
  (Heap) -> int peek
}

void swim (Heap heap, int index) {
  while (index > 1 & (heap.values[index >> 1] > heap.values[index])) {
    var parent = heap.values[index >> 1];
    heap.values[index >> 1] = heap.values[index];
    heap.values[index] = parent;
    index = index >> 1;
  }
  return;
}

void sink (Heap heap, int index) {
  while (2 * index <= heap.size) {
    var j = 2 * index;
    if (j < heap.size & (heap.values[j] > heap.values[j + 1])) {
      j = j + 1;
    }
    if (heap.values[index] <= heap.values[j]) {
      index = heap.size;
    } else {
      var parent = heap.values[index];
      heap.values[index] = heap.values[j];
      heap.values[j] = parent;
      index = j;
    }
  }
  return;
}

void insert (Heap heap, int n) {
  heap.size = heap.size + 1;
  heap.values[heap.size] = n;
  swim(heap, heap.size);
  return;
}

int peek (Heap heap) {
  return heap.values[1];
}

int extract_min (Heap heap) {
  var min = heap.values[1];
  heap.values[1] = heap.values[heap.size];
  heap.values[heap.size] = min;
  heap.size = heap.size - 1;
  sink(heap, 1);
  return min;
}

int[] heapsort (int[] arr, int arr_len) {
  var heap = new Heap {values = new int[arr_len + 1]; arr_length = arr_len + 1; size = 0; insert = insert; extract_min = extract_min; peek = peek};
  for (var i = 0; i < arr_len; i = i + 1;) {
    heap.insert(heap, arr[i]);
  }
  for (var i = 0; i < arr_len; i = i + 1;) {
    arr[i] = heap.extract_min(heap);
  }
  return arr;
}

int program (int argc, string[] argv) {
  var arr1 = new int[]{11, -5, 0, 2, 7, 7, 3, -11};
  var sorted_arr = heapsort(arr1, 8);
  for (var i = 0; i < 8; i = i + 1;) {
    print_int(sorted_arr[i]);
    print_string(", ");
  }
  return 0;
}

142 hw6/hw5programs/conquest.oat Normal file
@@ -0,0 +1,142 @@
global meaning_of_life = 42;
global kesha_to_fling = true;
global professor = "Zdancewic!";
global global_arr = new int[]{1, 1, 2, 3, 5, 8, 13};
global null_arr = int[] null;
global ideal_341_midterm_score = new int[]{100};
global actual_341_midterm_score = new int[]{0};

int four () {
  var hakuna_matata = "Meaning of Life";
  var what_is_the = meaning_of_life;
  var what_rhymes_with_moore = meaning_of_life - global_arr[5] * global_arr[4] + global_arr[2];
  return 0 + what_rhymes_with_moore;
}

int[] asian_brother_of_foo_named_fui (string s, bool b, int i) {
  var fui = global_arr;
  return fui;
}

void dfs (int[][] arr, int[][] visited, int row, int col, int i, int j) {
  if (i - 1 >= 0) {
    if (visited[i - 1][j] != 1) {
      visited[i - 1][j] = 1;

      if (arr[i - 1][j] == 1) {
        dfs(arr, visited, row, col, i - 1, j);
      }
    }
  }

  if (i + 1 < row) {
    if (visited[i + 1][j] != 1) {
      visited[i + 1][j] = 1;

      if (arr[i + 1][j] == 1) {
        dfs(arr, visited, row, col, i + 1, j);
      }
    }
  }

  if (j - 1 >= 0) {
    if (visited[i][j - 1] != 1) {
      visited[i][j - 1] = 1;

      if (arr[i][j - 1] == 1) {
        dfs(arr, visited, row, col, i, j - 1);
      }
    }
  }

  if (j + 1 < col) {
    if (visited[i][j + 1] != 1) {
      visited[i][j + 1] = 1;

      if (arr[i][j + 1] == 1) {
        dfs(arr, visited, row, col, i, j + 1);
      }
    }
  }

  return;
}

int connected (int[][] arr, int row, int col) {
  var visited = new int[][row]{ i ->
    new int[col]
  };
  var counter = 0;

  for (var i = 0; i < row; i = i + 1;) {
    var j = 0;

    while (j < col) {
      if (visited[i][j] == 0) {
        visited[i][j] = 1;

        if (arr[i][j] == 1) {
          counter = counter + 1;

          dfs(arr, visited, row, col, i, j);
        }
      }

      j = j + 1;
    }
  }

  return counter;
}

int program (int argc, string[] argv) {
  var territory_a = new int[][]{new int[]{1, 0, 1, 0},
                                new int[]{1, 1, 0, 1},
                                new int[]{1, 0, 1, 1},
                                new int[]{0, 1, 1, 0}};
  var territory_b = new int[][]{new int[]{0, 0, 1, 0, 1},
                                new int[]{0, 1, 1, 0, 1},
                                new int[]{1, 1, 1, 1, 1}};
  var territory_c = new int[][]{new int[]{1, 0, 1},
                                new int[]{0, 1, 0},
                                new int[]{1, 0, 1}};

  var none_conquered = new int[][four()]{i ->
    new int[]{
      actual_341_midterm_score[0],
      actual_341_midterm_score[0]
    }
  };

  var all_conquered = new int[][6]{ i ->
    new int[]{1, 1, 1, 1, 1, 1}
  };

  var island = new int[][] {new int[]{}};
  var emptyland = new int[][]{
    new int[]{asian_brother_of_foo_named_fui(professor,kesha_to_fling,ideal_341_midterm_score[0])[1]}
  };

  all_conquered = all_conquered;
  var temp = island;
  island = emptyland;
  emptyland = temp;

  print_string("My name is Jeff...\n");

  var a = connected(territory_a, 4, 4);
  var b = connected(territory_b, 3, 5);
  var c = connected(territory_c, 3, 3);
  var none = connected(none_conquered, 4, 2);
  var all = connected(all_conquered, 6, 6);
  var i = connected(island, 1, 1);
  var e = connected(emptyland, 0, 0);

  if (a == 3 & b == 1 & c == 5 & none == 0 & all == 1 & i == 1 & e == 0) {
    print_string("Charizard is the BEST Pokemon ever!!!");
  }

  var sum = a + b + c + none + all + i + e;

  return sum;
}

119 hw6/hw5programs/davidcao_treefunctions.oat Normal file
@@ -0,0 +1,119 @@
struct Node {
  int val;
  bool hasleft;
  bool hasright;
  Node left;
  Node right
}

void treemap(Node t, (int) -> int f) {
  t.val = f(t.val);
  if (t.hasleft) {
    treemap(t.left, f);
  }
  if (t.hasright) {
    treemap(t.right, f);
  }
  return;
}

bool for_all(Node t, (int) -> bool pred) {
  var result = pred(t.val);
  if(t.hasleft & t.hasright) {
    return result & for_all(t.left, pred) & for_all(t.right, pred);
  } else if (t.hasleft) {
    return result & for_all(t.left, pred);
  } else if (t.hasright) {
    return result & for_all(t.right, pred);
  }

  return result;
}

bool xor(bool b1, bool b2) {
  return (b1 | b2) & !(b1 & b2);
}

bool tree_eq (Node t1, Node t2) {
  if (t1.val != t2.val) {
    return false;
  } else {
    var flag = true;
    if (t1.hasleft & t2.hasleft) {
      flag = flag & tree_eq(t1.left, t2.left);
    }
    if (t1.hasright & t2.hasright) {
      flag = flag & tree_eq(t1.right, t2.right);
    }
    if (xor(t1.hasleft, t2.hasleft)) {
      return false;
    }
    if (xor(t1.hasright, t2.hasright)) {
      return false;
    }
    return flag;
  }
}

int double(int i) {
  return i*2;
}

bool pred_lt_6 (int i) {
  return i < 6;
}

int program(int argc, string[] argv) {
  var a1 = new Node{val = 1; hasleft = false; hasright = false; left = Node null; right = Node null };
  var a2 = new Node{val = 2; hasleft = true; hasright = false; left = a1; right = Node null };
  var a3 = new Node{val = 3; hasleft = false; hasright = false; left = Node null; right = Node null };
  var a4 = new Node{val = 4; hasleft = true; hasright = true; left = a2; right = a3 };
  var a5 = new Node{val = 5; hasleft = false; hasright = false; left = Node null; right = Node null };
  var root = new Node{val = 0; hasleft = true; hasright = true; left = a5; right = a4 };

  var b1 = new Node{val = 1; hasleft = false; hasright = false; left = Node null; right = Node null };
  var b6 = new Node{val = 6; hasleft = false; hasright = false; left = Node null; right = Node null };
  var b2 = new Node{val = 2; hasleft = true; hasright = true; left = b1; right = b6 };
  var b3 = new Node{val = 3; hasleft = false; hasright = false; left = Node null; right = Node null };
  var b4 = new Node{val = 4; hasleft = true; hasright = true; left = b2; right = b3 };
  var b5 = new Node{val = 5; hasleft = false; hasright = false; left = Node null; right = Node null };
  var root2 = new Node{val = 0; hasleft = true; hasright = true; left = b5; right = b4 };

  var c1 = new Node{val = 2; hasleft = false; hasright = false; left = Node null; right = Node null };
  var c2 = new Node{val = 4; hasleft = true; hasright = false; left = c1; right = Node null };
  var c3 = new Node{val = 6; hasleft = false; hasright = false; left = Node null; right = Node null };
  var c4 = new Node{val = 8; hasleft = true; hasright = true; left = c2; right = c3 };
  var c5 = new Node{val = 10; hasleft = false; hasright = false; left = Node null; right = Node null };
  var root3 = new Node{val = 0; hasleft = true; hasright = true; left = c5; right = c4 };

  if (tree_eq(root,root)) {
    print_string("1");
  }

  if (tree_eq(root2,root2)) {
    print_string("2");
  }

  if (!tree_eq(root,root2)) {
    print_string("3");
  }

  if (!tree_eq(root2,root)) {
    print_string("4");
  }

  if (for_all(root, pred_lt_6)) {
    print_string("5");
  }

  treemap(root,double);
  if (tree_eq(root, root3)) {
    print_string("6");
  }

  if (!for_all(root, pred_lt_6)) {
    print_string("7");
  }

  return 0;
}

98 hw6/hw5programs/hta_map_reduce.oat Normal file
@@ -0,0 +1,98 @@
/* Simulates nodes which perform a map job */
int[] map ((int) -> int f, int[] src, int len) {
  var tgt = new int[len];
  for (var i = 0; i < len; i = i + 1;) { tgt[i] = f(src[i]); }
  return tgt;
}

struct MapJob {
  (int) -> int f;
  int[] arr;
  int arrlen
}

int[] mapNode(MapJob j) { return map(j.f, j.arr, j.arrlen); }

/* Simulates nodes which perform a reduce job */

int reduce((int, int) -> int f, int[] arr, int len, int base) {
  var acc = base;
  for (var i = 0; i < len; i = i + 1;) { acc = f(acc, arr[i]); }
  return acc;
}

struct ReduceJob {
  int[] arr;
  int arrlen;
  (int, int) -> int f;
  int base
}

int reduceNode(ReduceJob j) { return reduce(j.f, j.arr, j.arrlen, j.base); }

/* Generates an array of data, partitions it into map jobs and assigns
   them to virtual map nodes. When all data is mapped, reduces it into
   an answer and returns it */

int square(int elt) { return elt * elt; }

int sum(int acc, int elt) { return acc + elt; }

int sumOfSquares(int[] arr, int arrlen, int numPartitions, int partitionLen) {
  /* Partition array into almost equal subarrays */
  var src = new int[][numPartitions];
  for (var i = 0; i < numPartitions; i = i + 1;) {
    src[i] = new int[partitionLen];
  }
  var j = 0;
  var k = 0;
  for (var i = 0; i < arrlen; i = i + 1;) {
    src[j][k] = arr[i];
    if (j == numPartitions - 1) {
      j = 0;
      k = k + 1;
    } else { j = j + 1; }
  }
  /* Build map-jobs */
  var mapJobs = new MapJob[numPartitions];
  for (var i = 0; i < numPartitions; i = i + 1;) {
    mapJobs[i] = new MapJob {
      f = square;
      arr = src[i];
      arrlen = partitionLen
    };
  }
  /* Map! */
  var tgt = new int[][numPartitions];
  for (var i = 0; i < numPartitions; i = i + 1;) {
    tgt[i] = mapNode(mapJobs[i]);
  }

  /* Build reduce-jobs */
  var reduceJobs = new ReduceJob[numPartitions];
  for (var i = 0; i < numPartitions; i = i + 1;) {
    reduceJobs[i] = new ReduceJob {
      f = sum;
      arr = tgt[i];
      arrlen = partitionLen;
      base = 0
    };
  }
  /* Reduce! */
  var acc = 0;
  for (var i = 0; i < numPartitions; i = i + 1;) {
    acc = acc + reduceNode(reduceJobs[i]);
  }
  return acc;
}

int program(int argc, string[] argv) {
  /* Initialize array with first n positive ints */
  var n = 30;
  var arr = new int[n];
  for (var i = 0; i < n; i = i + 1;) { arr[i] = i + 1; }
  /* Need partitionLen * numPartitions >= arr.length */
  var numPartitions = 5;
  var partitionLen = 6;
  return sumOfSquares(arr, n, numPartitions, partitionLen);
}

120 hw6/hw5programs/jacbrad_kruskal.oat Normal file
@@ -0,0 +1,120 @@
struct Edge {
  int u;
  int v;
  int weight
}

int compare(Edge e1, Edge e2) {
  return e1.weight - e2.weight;
}

void sort((Edge, Edge) -> int cmp, int len, Edge[] list) {

  for(var i = 1; i < len; i = i + 1;){
    var j = i - 1;
    var toswap = list[i];
    while(j >= 0) {
      if(cmp(list[j], toswap) > 0) {
        var temp = list[j];
        list[j] = list[j+1];
        list[j+1] = temp;
        j = j - 1;
      }
      else {
        j = -1;
      }
    }
  }
  return;
}


int[] create_ufind(int len)
{
  var arr = new int[len];
  for(var i = 0; i < len; i = i + 1;)
  {
    arr[i] = i;
  }
  return arr;

}

void union(int[] comps, int u, int v)
{
  var cU = find(comps, u);
  var cV = find(comps, v);

  if(cU == cV)
  {
    return;
  }

  comps[cU] = cV;
  return;
}

int find(int[] comps, int u)
{
  var root = u;
  while(root != comps[root])
  {
    root = comps[root];
  }

  while(u != root)
  {
    var parent = find(comps, comps[u]);
    comps[u] = root;
    u = parent;

  }

  return root;

}

Edge[] findMST(Edge[] edges, int m, int n) {
  sort(compare, m, edges);
  var uf = create_ufind(n);
  var result = new Edge[n-1];
  var size = 0;
  var count = 0;

  while(size < n - 1){

    if(find(uf, edges[count].u) != find(uf, edges[count].v)){
      result[size] = edges[count];
      union(uf, edges[count].u, edges[count].v);
      size = size + 1;
    }

    count = count + 1;
  }
  return result;
}


int program (int argc, string[] argv) {
  var e1 = new Edge {u = 1; v = 2; weight = 3};
  var e2 = new Edge {u = 0; v = 1; weight = 20};
  var e3 = new Edge {u = 1; v = 4; weight = 1};
  var e4 = new Edge {u = 2; v = 4; weight = 5};
  var e5 = new Edge {u = 3; v = 4; weight = 6};
  var e6 = new Edge {u = 2; v = 3; weight = 4};

  var edges = new Edge[]{e1, e2, e3, e4, e5, e6};
  var mst = findMST(edges, 6, 5);

  for(var i = 0; i < 4; i = i + 1;){
    print_string("(");
    print_int(mst[i].u);
    print_string(",");
    print_int(mst[i].v);
    print_string(",");
    print_int(mst[i].weight);
    print_string(") ");
  }
  print_string("=");
  return 0;
}

81 hw6/hw5programs/johnhew_pagerank.oat Normal file
@@ -0,0 +1,81 @@
struct Node {
  int value;
  int new_value;
  bool[] incoming_edges;
  int outgoing_count
}

struct PageRank {
  int iterations;
  (int, int) -> int division_fun;
  (Node[], int) -> void iteration_fun;
  (Node[], int, int) -> void run_fun
}

int divide(int dividend, int divisor) {
  var quotient = 0;
  while (dividend > divisor) {
    quotient = quotient + 1;
    dividend = dividend - divisor;
  }
  /*print_int(quotient);
  print_string("quotient\n");*/
  return quotient;
}

void inter(Node[] nodes, int node_count) {
  for (var i = 0; i < node_count; i = i+1; ) {
    for (var j = 0; j < node_count; j = j+1; ) {
      if (nodes[i].incoming_edges[j]) {
        nodes[i].new_value = nodes[i].new_value + divide(nodes[j].value, nodes[j].outgoing_count); /* TODO: implement divide by number of outgoing edges. */
      }
    }
  }
  for(var i = 0; i < node_count; i = i+1;) {
    var half = nodes[i].new_value >> 1;
    var quarter = nodes[i].new_value >> 2;
    nodes[i].value = divide(250000,node_count) + half + quarter; /* This is 250000 (rather than 0.25) because of the lack of floating point support. */
    nodes[i].new_value = 0;
  }
  return;
}

void run(Node[] nodes, int node_count, int iterations) {
  for (var i = 0; i < 100; i = i + 1;) {
    inter(nodes, node_count);
  }
  return;
}

int program (int argc, string[] argv) {
  var node_count = 3;
  var nodes = new Node[node_count];
  var node_1_arr = new bool[] {false, false, true};
  var node_2_arr = new bool[] {false, false, false};
  var node_3_arr = new bool[] {true, true, false};
  nodes[0] = new Node {value = 333333; new_value = 0; incoming_edges = node_1_arr; outgoing_count = 1};
  nodes[1] = new Node {value = 333333; new_value = 0; incoming_edges = node_2_arr; outgoing_count = 1};
  nodes[2] = new Node {value = 333333; new_value = 0; incoming_edges = node_3_arr; outgoing_count = 1};

  var pr = new PageRank {iterations = 100; division_fun = divide; iteration_fun = inter; run_fun = run};
  pr.run_fun(nodes, 3, 100);

  /*for (var i = 0; i < 100; i = i + 1;) {
    inter(nodes, node_count);
  }*/
  /*print_string("Results\n");
  print_string("Node 0: ");
  print_int(nodes[0].value);
  print_string("\n");
  print_string("Node 1: ");
  print_int(nodes[1].value);
  print_string("\n");
  print_string("Node 2: ");
  print_int(nodes[2].value);
  print_string("\n");
  print_string("Sum: ");
  print_int(nodes[0].value + nodes[1].value + nodes[2].value);
  print_string("\n");*/
  print_int(nodes[2].value);
  return 0;
}

53 hw6/hw5programs/kyim_test.oat Normal file
@@ -0,0 +1,53 @@
global path1 = int[]{0, 0, 1};
global path2 = int[]{1, 1, 0};

struct Node {
  int val;
  Node left;
  Node right;
  (int, int) -> int fun
}

int sum_plus_one(int x, int y) {
  return x+y+1;
}

int two_x_plus_y(int x, int y) {
  return 2*x+y;
}

void make_children(Node root, int depth) {
  if (depth != 0) {
    var left_node = new Node{val=root.fun(root.val, 0); left=Node null; right=Node null; fun=root.fun};
    make_children(left_node, depth-1);
    var right_node = new Node{val=root.fun(root.val, 1); left=Node null; right=Node null; fun=root.fun};
    make_children(right_node, depth-1);
    root.left = left_node;
    root.right = right_node;
  }
  return;
}

int retrieve(Node root, int depth, int[] path) {
  var next = root.right;
  if (path[depth] == 0) {
    next = root.left;
  }
  if (depth == 0) {
    return next.val;
  } else {
    return retrieve(next, depth-1, path);
  }
}

int program(int argc, string[] argv) {
  var root1 = new Node{val=1; left=Node null; right=Node null; fun=sum_plus_one};
  var root2 = new Node{val=2; left=Node null; right=Node null; fun=two_x_plus_y};
  make_children(root1, 3);
  make_children(root2, 3);
  if (retrieve(root1, 2, path1) == 5 & retrieve(root2, 2, path2) == 19) {
    return 1;
  } else {
    return 0;
  }
}

61 hw6/hw5programs/leqiliu_traversal.oat Normal file
@@ -0,0 +1,61 @@
struct Tree {
  int val;
  Tree left;
  Tree right;
  (int) -> void f
}

void print(int val) {
  print_string(string_cat("->", string_of_int(val)));
  return;
}

void in_order_traverse(Tree tree) {
  tree.f(tree.val);
  if(tree.left.val != 0) {
    in_order_traverse(tree.left);
  }

  if(tree.right.val != 0) {
    in_order_traverse(tree.right);
  }
  return;
}

void pre_order_traverse(Tree tree) {
  if(tree.left.val != 0) {
    pre_order_traverse(tree.left);
  }

  tree.f(tree.val);

  if(tree.right.val != 0) {
    pre_order_traverse(tree.right);
  }
  return;
}

void post_order_traverse(Tree tree) {
  if(tree.left.val != 0) {
    post_order_traverse(tree.left);
  }

  if(tree.right.val != 0) {
    post_order_traverse(tree.right);
  }

  tree.f(tree.val);
  return;
}

int program(int argc, string[] argv) {
  var nullchild = new Tree { val = 0; left = Tree null; right = Tree null; f = print};
  var rightchild = new Tree { val = 1; left = nullchild; right = nullchild; f = print };
  var leftchild = new Tree { val = 1; left = nullchild; right = nullchild; f = print };
  var tree = new Tree { val = 2; left = leftchild; right = rightchild; f = print };

  pre_order_traverse(tree);
  in_order_traverse(tree);
  post_order_traverse(tree);
  return 0;
}

35 hw6/hw5programs/maale_odd_even.oat Normal file
@@ -0,0 +1,35 @@
int[] oddevensort (int[] input, int len) {
  var sorted = false;
  while(!sorted) {
    sorted = true;
    for(var i = 1; i < len - 1; i = i + 2;) {
      if(input[i] > input[i + 1]) {
        var temp = input[i];
        input[i] = input[i + 1];
        input[i + 1] = temp;
        sorted = false;
      }
    }

    for(var j = 0; j < len - 1; j = j + 2;) {
      if(input[j] > input[j+1]) {
        var temp = input[j];
        input[j] = input[j + 1];
        input[j + 1] = temp;
        sorted = false;
      }
    }
  }
  return input;
}

int program (int argc, string[] argv) {
  var arr = new int[]{ 5, 200, 1, 65, 30, 99, 2, 0 };
  var len = 8;
  var sorted = oddevensort(arr, len);
  for(var i=0; i<len; i=i+1;)
  {
    print_int(sorted[i]);
  }
  return 0;
}

49 hw6/hw5programs/minski_pythagorean.oat Normal file
@@ -0,0 +1,49 @@
int sqrt (int n) {
  if (n < 0) {
    return 0;
  }
  var s = 0;
  while (n > 0) {
    var d = s * s;
    if (d > n) {
      n = -1;
    } else {
      s = s + 1;
    }
  }
  return s - 1;
}

int side_squared(Point p1, Point p2) {
  var m1 = (p1.x - p2.x) * (p1.x - p2.x);
  var m2 = (p1.y - p2.y) * (p1.y - p2.y);
  if (m1 > m2) {
    return m1 - m2;
  } else {
    return m2 - m1;
  }
}

int pythagorean(Triangle t) {
  var s1 = side_squared(t.p1, t.p2);
  var s2 = side_squared(t.p2, t.p3);
  return sqrt(s1 + s2);
}

struct Triangle {
  Point p1;
  Point p2;
  Point p3
}

struct Point {
  int x;
  int y
}

global points = Point[] {Point { x = 0; y = 0 }, Point { x = 4; y = 0 }, Point { x = 4; y = 3 }};

int program(int argc, string[] argv) {
  var t = new Triangle { p1 = points[0]; p2 = points[1]; p3 = points[2] };
  return pythagorean(t);
}

32 hw6/hw5programs/olekg_fold_struct.oat Normal file
@@ -0,0 +1,32 @@
struct Node {
  int i;
  Node next;
  bool hasNext
}

int minus(int x, int y) { return x - y; }

int plus(int x, int y) { return x + y; }

int fold_str_int(Node n, (int, int) -> int f, int base) {
  if(n.hasNext) {
    var newBase = f(base, n.i);
    return fold_str_int(n.next, f, newBase);
  } else {
    return f(base, n.i);
  }
}


int program(int argc, string[] argv) {
  var n9 = new Node {i=9; next=Node null; hasNext=false};
  var n8 = new Node {i=8; next=n9; hasNext=true};
  var n7 = new Node {i=7; next=n8; hasNext=true};
  var n6 = new Node {i=6; next=n7; hasNext=true};
  var n5 = new Node {i=5; next=n6; hasNext=true};
  var n4 = new Node {i=4; next=n5; hasNext=true};
  var n3 = new Node {i=3; next=n4; hasNext=true};
  var n2 = new Node {i=2; next=n3; hasNext=true};
  var n1 = new Node {i=1; next=n2; hasNext=true};
  return fold_str_int(n1, plus, 0) - fold_str_int(n1, minus, 2);
}
hw6/hw5programs/poem.oat (Normal file, 52 lines)
@@ -0,0 +1,52 @@
/*De*/struct Ive {int entions}


global u=

true; Ive left(The? now, The[] out , The any,

int ent) { for(;;) { var iety=

new int[ent];

while(u){
you ("searched");
for (var iations = "untold";;) {
while(u) {
you ("strove");
for (var ious = "dimensions, not"; false;) {

if? (The i = now) {
var ily = i. ended;

} else {
you (saved_me);
}}}}}}

you("");
return left(out
[any.desired.needed.act],
/*with*/out,
any.desired.needed.one,


1 );} global

saved_me = "from the"; void

you(string me_a_lifeline) {
you("call to me");
for (var iability = "is forever"; true;) {}
you("and i"); return;
}

/*recon*/struct The {
int ended;
The desired;
The needed;
The one;

int act
}
hw6/hw5programs/sp22_tests/ben_kai_insertion.oat (Normal file, 69 lines)
@@ -0,0 +1,69 @@
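/* Insertion sort into a copy of the input array, validated by checksorted in program. */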
int[] sort(int[] arr, int len)
{
  var copy = new int[len];
  for (var i = 0; i < len; i = i + 1;) {
    copy[i] = arr[i];
  }

  for (var i = 1; i < len; i = i + 1;) {
    var key = copy[i];
    var j = (i - 1);

    var inwhile = j >= 0 & copy[j] > key;
    while (inwhile) {
      copy[j + 1] = copy[j];
      j = j - 1;
      if (j < 0) {
        inwhile = false;
      } else {
        if (copy[j] <= key) {
          inwhile = false;
        }
      }
    }

    copy[j + 1] = key;
  }

  return copy;
}

bool checksorted(int[] arr, int len) {
  if (len == 1) {
    return true;
  }
  for (var i = 0; i < (len - 1); i = i + 1;)
  {
    if (arr[i+1] < arr[i]) {
      return false;
    }
  }
  return true;
}

/* for debugging */
void print_arr(int[] sorted, int len)
{
  print_string("\nDescending sorted array[");
  for (var i = 0; i < len; i = i + 1;)
  {
    print_int(sorted[i]);
    if (i < len - 1) {
      print_string(",");
    }
  }
  print_string("]\n\n");
  return;
}

int program (int argc, string[] argv) {
  var arr = new int[]{ 12, 11, 13, 5, 6 };
  var len = length(arr);
  var sorted = sort(arr, len);

  if (checksorted(sorted, len)) {
    return 0;
  }

  return 1;
}
hw6/hw5programs/sp22_tests/binary_tree_tests.oat (Normal file, 76 lines)
@@ -0,0 +1,76 @@
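/* Maximum path sum in a binary tree, tracked with a Tuple of the best path so far and the best path ending at the current node. */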
struct Node {
  int val;
  Node? left;
  Node? right
}

struct Tuple {
  int max_sum_path;
  int max_path_term_node
}

Node newNode (int val) {
  return new Node {val = val; left = Node null; right = Node null};
}

int getMax(int a, int b) {
  if(a >= b) {
    return a;
  }
  return b;
}

Tuple findMaxSumHelper (Node? root, Tuple input_tuple) {
  if?(Node x = root) {
    var l_result = findMaxSumHelper(x.left, input_tuple).max_path_term_node;
    var r_result = findMaxSumHelper(x.right, input_tuple).max_path_term_node;

    var max_children = getMax(l_result, r_result);
    var curr_root_terminating_path = getMax(max_children + x.val, x.val);

    var max_no_parent = getMax(curr_root_terminating_path, x.val + l_result + r_result);

    var res = getMax(input_tuple.max_sum_path, max_no_parent);

    var ret_tuple = new Tuple {max_sum_path = res; max_path_term_node = curr_root_terminating_path};

    return ret_tuple;
  } else {
    var ret = new Tuple {max_sum_path = input_tuple.max_sum_path; max_path_term_node = 0};
    return ret;
  }
}


int findMaxSumDriver(Node root) {
  var input_tuple = new Tuple {max_sum_path = -99999; max_path_term_node = 0};
  var temp = findMaxSumHelper(root, input_tuple);
  return temp.max_sum_path;
}

int program(int argc, string[] argv) {
  var root = newNode(8);

  root.left = newNode(2);
  root.right = newNode(70);

  if?(Node r_l = root.left) {
    r_l.left = newNode(50);
    r_l.right = newNode(0);
  }

  if?(Node r_r = root.right) {
    r_r.right = newNode(15);

    if?(Node r_r_r = r_r.right) {
      r_r_r.right = newNode(4);
      r_r_r.left = newNode(-9);
    }
  }

  return findMaxSumDriver(root);
}
hw6/hw5programs/sp22_tests/books.oat (Normal file, 150 lines)
@@ -0,0 +1,150 @@
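/* Reductions and averages over a global Book array using function-typed reducers and selectors. */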
struct Book {
  string title;
  int critic_rating;
  int general_rating;
  int pages;
  string genre
}

global library = new Book[]{
  new Book{title = "The Hunger Games"; critic_rating = 7; general_rating = 9; pages = 374; genre = "science fiction"},
  new Book{title = "Dune"; critic_rating = 8; general_rating = 10; pages = 412; genre = "science fiction"},
  new Book{title = "Ender's Game"; critic_rating = 9; general_rating = 7; pages = 324; genre = "science fiction"},
  new Book{title = "A Game of Thrones Song of Ice and Fire"; critic_rating = 8; general_rating = 7; pages = 694; genre = "fantasy"},
  new Book{title = "Eragon"; critic_rating = 9; general_rating = 8; pages = 509; genre = "fantasy"},
  new Book{title = "The Diary of a Young Girl"; critic_rating = 10; general_rating = 8; pages = 318; genre = "non-fiction"}
};

Book? reduce_to_book(Book[] books, (Book, Book?) -> Book? reducer, Book? initial_value) {
  var ret = initial_value;
  for (var i = 0; i < length(books); i = i + 1;) {
    ret = reducer(books[i], ret);
  }
  return ret;
}

Book? best_critic_reducer(Book book, Book? acc) {
  if? (Book nonnull_acc = acc) {
    if (nonnull_acc.critic_rating < book.critic_rating) {
      return book;
    } else {
      return nonnull_acc;
    }
  } else {
    return book;
  }
}

Book? best_critic(Book[] books) {
  return reduce_to_book(library, best_critic_reducer, Book null);
}

Book? best_general_reducer(Book book, Book? acc) {
  if? (Book nonnull_acc = acc) {
    if (nonnull_acc.general_rating < book.general_rating) {
      return book;
    } else {
      return nonnull_acc;
    }
  } else {
    return book;
  }
}

Book? best_general(Book[] books) {
  return reduce_to_book(library, best_general_reducer, Book null);
}

Book? most_pages_reducer(Book book, Book? acc) {
  if? (Book nonnull_acc = acc) {
    if (nonnull_acc.pages < book.pages) {
      return book;
    } else {
      return nonnull_acc;
    }
  } else {
    return book;
  }
}

Book? most_pages(Book[] books) {
  return reduce_to_book(library, most_pages_reducer, Book null);
}

int divide(int num, int den) {
  var curr = 0;
  while (num >= den) {
    curr = curr + 1;
    num = num - den;
  }
  return curr;
}

int average(Book[] books, (Book) -> int select) {
  var curr_sum = 0;
  for (var i = 0; i < length(books); i = i + 1;) {
    curr_sum = curr_sum + select(books[i]);
  }
  return divide(curr_sum, length(books));
}

int select_crit_rating(Book b) {
  return b.critic_rating;
}

int select_gen_rating(Book b) {
  return b.general_rating;
}

int select_page(Book b) {
  return b.pages;
}

int average_crit(Book[] books) {
  return average(books, select_crit_rating);
}

int average_gen(Book[] books) {
  return average(books, select_gen_rating);
}

int average_pages(Book[] books) {
  return average(books, select_page);
}


int program (int argc, string[] argv) {
  var best_c = best_critic(library);
  var best_g = best_general(library);
  var most_p = most_pages(library);
  var avg_crit = average_crit(library);
  var avg_gen = average_gen(library);
  var avg_pages = average_pages(library);

  if? (Book nonnull_best_c = best_c)
  {
    print_string("The best book by critic reviews is ");
    print_string(nonnull_best_c.title);
  }

  if? (Book nonnull_best_g = best_g)
  {
    print_string(", the best book by general reviews is ");
    print_string(nonnull_best_g.title);
  }

  if? (Book nonnull_most_pages = most_p)
  {
    print_string(", the longest book is ");
    print_string(nonnull_most_pages.title);
  }

  print_string(", the average critic rating is ");
  print_int(avg_crit);
  print_string(", the average general rating is ");
  print_int(avg_gen);
  print_string(", the average page count is ");
  print_int(avg_pages);

  return 0;
}
hw6/hw5programs/sp22_tests/census.oat (Normal file, 72 lines)
@@ -0,0 +1,72 @@
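/* Finds the youngest and oldest Person in a global array via reducer functions, then prints their names. */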
struct Person {
  string name;
  int age;
  int height;
  int income
}

global people = new Person[]{
  new Person{name = "Alice"; age = 25; height = 160; income = 75000},
  new Person{name = "Bob"; age = 52; height = 192; income = 78000},
  new Person{name = "Carol"; age = 37; height = 156; income = 100000},
  new Person{name = "Dave"; age = 19; height = 200; income = 12000}
};

Person? reduce_to_person(Person[] people, (Person, Person?) -> Person? reducer, Person? initial_value) {
  var out = initial_value;
  for (var i = 0; i < length(people); i = i + 1;) {
    out = reducer(people[i], out);
  }
  return out;
}

Person? youngest_reducer(Person new_person, Person? acc) {
  if? (Person nonnull_acc = acc) {
    if (nonnull_acc.age > new_person.age) {
      return new_person;
    } else {
      return nonnull_acc;
    }
  } else {
    return new_person;
  }
}

Person? youngest(Person[] people) {
  return reduce_to_person(people, youngest_reducer, Person null);
}

Person? oldest_reducer(Person new_person, Person? acc) {
  if? (Person nonnull_acc = acc) {
    if (nonnull_acc.age < new_person.age) {
      return new_person;
    } else {
      return nonnull_acc;
    }
  } else {
    return new_person;
  }
}

Person? oldest(Person[] people) {
  return reduce_to_person(people, oldest_reducer, Person null);
}

int program (int argc, string[] argv) {
  var youngest = youngest(people);
  var oldest = oldest(people);

  if? (Person nonnull_youngest = youngest)
  {
    print_string("The youngest is ");
    print_string(nonnull_youngest.name);
  }

  if? (Person nonnull_oldest = oldest)
  {
    print_string(" and the oldest is ");
    print_string(nonnull_oldest.name);
  }

  return 0;
}
hw6/hw5programs/sp22_tests/cla_struct.oat (Normal file, 62 lines)
@@ -0,0 +1,62 @@
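/* Models a 4-bit carry-lookahead adder with propagate/generate flags and Oat's bitwise operators. */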
struct Adder {
  bool prop;
  bool gen
}


int program (int argc, string[] argv) {
  return cla(11, 20);
}


int cla(int a, int b) {
  var a0 = new Adder {prop = (propagate(((a [&] 1) == 1), ((b [&] 1) == 1)));
                      gen = generate(((a [&] 1) == 1), ((b [&] 1) == 1))};

  var a1 = new Adder {prop = propagate(((a [&] 2) == 2), ((b [&] 2) == 2));
                      gen = generate(((a [&] 2) == 2), ((b [&] 2) == 2)) };

  var a2 = new Adder {prop = propagate(((a [&] 4) == 4), ((b [&] 4) == 4));
                      gen = generate(((a [&] 4) == 4), ((b [&] 4) == 4)) };

  var a3 = new Adder {prop = propagate(((a [&] 8) == 8), ((b [&] 8) == 8));
                      gen = generate(((a [&] 8) == 8), ((b [&] 8) == 8))};

  var c1 = a0.gen;
  var c2 = a1.gen | (a0.gen & a1.prop);
  var c3 = a2.gen | (a0.gen & a1.prop & a2.prop) | (a1.gen & a2.prop);
  var c4 = a3.gen | (a0.gen & a1.prop & a2.prop & a3.prop) | (a1.gen & a2.prop & a3.prop) | (a2.gen & a3.prop);

  var carryValues = 0;
  if (c1) {
    carryValues = carryValues [|] 2;
  }
  if (c2) {
    carryValues = carryValues [|] 4;
  }
  if (c3) {
    carryValues = carryValues [|] 8;
  }
  if (c4) {
    carryValues = carryValues [|] 16;
  }
  var x = xor(a, b);

  var r = xor(carryValues, x);
  return r;
}

bool propagate (bool p1, bool p2)
{
  return p1 | p2;
}

bool generate (bool g1, bool g2)
{
  return g1 & g2;
}

int xor (int x, int y) {
  return ~(x [&] y) [&] (x [|] y);
}
hw6/hw5programs/sp22_tests/deque.oat (Normal file, 112 lines)
@@ -0,0 +1,112 @@
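/* Doubly-linked deque with enqueue, dequeue, and peek at both ends; program exercises the right end. */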
struct Deque {
  Node? head;
  Node? tail
}
struct Node {
  Node? prev;
  Node? next;
  int v
}


void enqueueLeft(Deque q, int k) {
  var new_node = new Node {prev = Node null; next = Node null; v = k};
  if?(Node h = q.head) {
    new_node.next = q.head;
    h.prev = new_node;
    q.head = new_node;
  } else {
    q.head = new_node;
    q.tail = new_node;
  }
  return;
}
void enqueueRight(Deque q, int k) {
  var new_node = new Node {prev = Node null; next = Node null; v = k};
  if?(Node t = q.tail) {
    new_node.prev = q.tail;
    t.next = new_node;
    q.tail = new_node;
  } else {
    q.head = new_node;
    q.tail = new_node;
  }
  return;
}
int dequeueRight(Deque q) {
  if?(Node t = q.tail) {
    q.tail = t.prev;
    if?(Node p = t.prev) {
      p.next = Node null;
    }
    return t.v;
  } else {
    return -1;
  }
  /* return -1; */
}
int dequeueLeft(Deque q) {
  if?(Node t = q.head) {
    q.head = t.next;
    if?(Node p = t.next) {
      p.prev = Node null;
    }
    return t.v;
  } else {
    return -1;
  }
  /* return -1; */
}
int peakLeft(Deque q) {
  if?(Node t = q.head) {
    return t.v;
  } else {
    return -1;
  }
}

int peakRight(Deque q) {
  if?(Node t = q.tail) {
    return t.v;
  } else {
    return -1;
  }
}

bool isEmpty(Deque q) {
  if?(Node t = q.tail) {
    return false;
  } else {
    return true;
  }
}

void clear(Deque q) {
  q.head = Node null;
  q.tail = Node null;
  return;
}

int program (int argc, string[] argv) {
  var q = new Deque { head = Node null; tail = Node null };
  enqueueLeft(q, 3);
  enqueueRight(q, 4);
  enqueueRight(q, 1);
  enqueueLeft(q, 2);
  enqueueRight(q, 6);
  print_int(peakRight(q));
  print_int(dequeueRight(q));
  print_int(peakRight(q));
  print_int(dequeueRight(q));
  print_bool(isEmpty(q));

  print_int(peakRight(q));
  print_int(dequeueRight(q));
  print_int(peakRight(q));
  print_int(dequeueRight(q));
  print_int(peakRight(q));
  print_int(dequeueRight(q));
  print_bool(isEmpty(q));
  return 0;
}
hw6/hw5programs/sp22_tests/dijkstras.oat (Normal file, 111 lines)
@@ -0,0 +1,111 @@
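/* Dijkstra's single-source shortest paths over an adjacency-list graph; -1 marks an unknown distance. */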
struct Graph {
  AdjList?[] adj_lists
}

struct AdjList {
  Edge[] edges
}

struct Edge {
  int neighbor;
  int weight
}

global e01 = new Edge { neighbor = 1; weight = 4 };
global e07 = new Edge { neighbor = 7; weight = 8 };

global e12 = new Edge { neighbor = 2; weight = 8 };
global e17 = new Edge { neighbor = 7; weight = 11 };

global e23 = new Edge { neighbor = 3; weight = 7 };
global e25 = new Edge { neighbor = 5; weight = 4 };
global e28 = new Edge { neighbor = 8; weight = 2 };

global e34 = new Edge { neighbor = 4; weight = 9 };

global e53 = new Edge { neighbor = 3; weight = 14 };
global e54 = new Edge { neighbor = 4; weight = 10 };

global e65 = new Edge { neighbor = 5; weight = 2 };
global e68 = new Edge { neighbor = 8; weight = 6 };

global e76 = new Edge { neighbor = 6; weight = 1 };
global e78 = new Edge { neighbor = 8; weight = 7 };

void print_int_array(int[] arr) {
  for (var i = 0; i < length(arr); i = i + 1;) {
    print_int(arr[i]);
    print_string(" ");
  }
  return;
}

bool is_queue_empty(bool[] queued) {
  for (var i = 0; i < length(queued); i = i + 1;) {
    if (queued[i]) {
      return false;
    }
  }
  return true;
}

int get_min_dist_node(bool[] queued, int[] dist) {
  var curr_min = -1;
  var curr_min_node = 0;
  for (var i = 0; i < length(queued); i = i + 1;) {
    if (queued[i]) {
      var curr_dist = dist[i];
      if (curr_dist != -1 & (curr_dist < curr_min | curr_min == -1)) {
        curr_min = curr_dist;
        curr_min_node = i;
      }
    }
  }
  return curr_min_node;
}

int[] dijkstras(Graph graph, int source) {
  var n = length(graph.adj_lists);
  var dist = new int[n]{id -> -1};
  var queued = new bool[n]{id -> true};

  dist[source] = 0;

  while (!is_queue_empty(queued)) {
    var curr = get_min_dist_node(queued, dist);
    queued[curr] = false;

    if?(AdjList a = graph.adj_lists[curr]) {
      var a_edges = a.edges;
      for (var i = 0; i < length(a_edges); i = i + 1;) {
        var curr_edge = a_edges[i];
        var curr_neighbor = curr_edge.neighbor;
        var alt_dist = dist[curr] + curr_edge.weight;
        if (dist[curr_neighbor] == -1 | alt_dist < dist[curr_neighbor]) {
          dist[curr_neighbor] = alt_dist;
        }
      }
    }
  }

  return dist;
}

int program (int argc, string[] argv) {
  var a0 = new AdjList { edges = new Edge[]{e01, e07} };
  var a1 = new AdjList { edges = new Edge[]{e12, e17} };
  var a2 = new AdjList { edges = new Edge[]{e23, e25, e28} };
  var a3 = new AdjList { edges = new Edge[]{e34} };
  var a4 = AdjList null;
  var a5 = new AdjList { edges = new Edge[]{e53, e54} };
  var a6 = new AdjList { edges = new Edge[]{e65, e68} };
  var a7 = new AdjList { edges = new Edge[]{e76, e78} };
  var a8 = AdjList null;

  var graph = new Graph { adj_lists = new AdjList?[]{a0, a1, a2, a3, a4, a5, a6, a7, a8} };

  var result = dijkstras(graph, 0);
  print_int_array(result);

  return 0;
}
hw6/hw5programs/sp22_tests/first_class_functions.oat (Normal file, 68 lines)
@@ -0,0 +1,68 @@
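/* map, filter, and fold_left over possibly-null int arrays, passed as function-typed arguments. */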
int[]? map ((int) -> int f, int[]? l) {
  if? (int[] a = l) {
    var size = length(a);
    var new_arr = new int[size] {i -> f(a[i])};
    return new_arr;
  } else {
    return int[] null;
  }
}

int[]? filter ((int) -> bool f, int[]? l) {
  if? (int[] a = l) {
    var size = length(a);
    var bool_arr = new bool[size] {i -> f(a[i])};
    var new_size = 0;
    for (var i = 0; i < size; i = i + 1;) {
      if (bool_arr[i]) {
        new_size = new_size + 1;
      }
    }
    var ind = 0;
    var new_arr = new int[new_size];
    for (var i = 0; i < size; i = i + 1;) {
      if (bool_arr[i]) {
        new_arr[ind] = a[i];
        ind = ind + 1;
      }
    }
    return new_arr;
  } else {
    return l;
  }
}

int fold_left ((int, int) -> int f, int acc, int[] l, int curr_indx) {
  var size = length(l);
  if (curr_indx == size) {
    return acc;
  }
  var new_acc = f(acc, l[curr_indx]);
  return fold_left(f, new_acc, l, curr_indx + 1);
}

int incr(int x) {
  return x + 1;
}

bool lt_five(int x) {
  return x < 5;
}

int sum(int acc, int v) {
  return acc + v;
}

int program(int argc, string[] argv) {
  var l = new int[] {1, 2, 3, 7, 8, 9};
  var mapped_l = map(incr, l);
  if? (int[] a = mapped_l) {
    print_int(a[0]);
  }
  var filtered_l = filter(lt_five, l);
  if? (int[] a = filtered_l) {
    print_int(length(a));
  }
  print_int(fold_left(sum, 0, l, 0));
  return 0;
}
hw6/hw5programs/sp22_tests/floyd_warshall.oat (Normal file, 108 lines)
@@ -0,0 +1,108 @@
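/* Floyd-Warshall all-pairs shortest paths; 999 stands in for infinity and the neg flag encodes negative edge weights. */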
struct Graph {
  AdjList?[] adj_lists
}

struct AdjList {
  Edge[] edges
}

struct Edge {
  int neighbor;
  int weight;
  bool neg
}

global e01 = new Edge { neighbor = 1; weight = 5; neg = true };
global e02 = new Edge { neighbor = 2; weight = 10; neg = false };
global e04 = new Edge { neighbor = 4; weight = 0; neg = false };
global e07 = new Edge { neighbor = 7; weight = 51; neg = false};

global e12 = new Edge { neighbor = 2; weight = 12; neg = false };

global e23 = new Edge { neighbor = 3; weight = 3; neg = true };

global e34 = new Edge { neighbor = 4; weight = 5; neg = false };
global e36 = new Edge { neighbor = 6; weight = 50; neg = false };

global e40 = new Edge {neighbor = 0; weight = 1; neg = true };
global e45 = new Edge { neighbor = 5; weight = 100; neg = false };

global e78 = new Edge { neighbor = 8; weight = 10; neg = false };
global e710 = new Edge { neighbor = 10; weight = 5; neg = true};

global e89 = new Edge { neighbor = 9; weight = 3; neg = false };

global e97 = new Edge { neighbor = 7; weight = 12; neg = false };
global e910 = new Edge { neighbor = 10; weight = 2; neg = true };

global e100 = new Edge {neighbor = 0; weight = 79; neg = false };

void print_int_array(int[][] arr) {
  for (var i = 0; i < length(arr); i = i + 1;) {
    for (var j = 0; j < length(arr[i]); j = j + 1;) {
      print_int(arr[i][j]);
      print_string(" ");
    }
  }
  return;
}

int[][] floyd_warshall(Graph graph) {
  var n = length(graph.adj_lists);
  var dist = new int[][n]{i -> new int[n]{j -> 999}};

  for (var i = 0; i < n; i = i+1;) {
    dist[i][i] = 0;
  }

  for (var i = 0; i < n; i = i + 1;) {
    if?(AdjList v = graph.adj_lists[i]) {
      var v_edges = v.edges;
      for (var j = 0; j < length(v_edges); j = j + 1;) {
        var nbr = v_edges[j].neighbor;
        var weight = v_edges[j].weight;
        var negative = v_edges[j].neg;
        if (negative) {
          weight = 0 - weight;
        }
        dist[i][nbr] = weight;
      }
    }
  }

  for (var k = 0; k < n; k = k + 1;) {
    for (var i = 0; i < n; i = i + 1;) {
      for (var j = 0; j < n; j = j + 1;) {
        if (dist[i][k] < 999 & (dist[k][j] < 999)) {
          if (dist[i][k] + dist[k][j] < dist[i][j]) {
            dist[i][j] = dist[i][k] + dist[k][j];
          }
        }
      }
    }
  }
  return dist;
}

int program (int argc, string[] argv) {
  var a0 = new AdjList { edges = new Edge[]{e01, e02, e04, e07} };
  var a1 = new AdjList { edges = new Edge[]{e12} };
  var a2 = new AdjList { edges = new Edge[]{e23} };
  var a3 = new AdjList { edges = new Edge[]{e34, e36} };
  var a4 = new AdjList { edges = new Edge[]{e45, e40} };
  var a5 = AdjList null;
  var a6 = AdjList null;
  var a7 = new AdjList { edges = new Edge[]{e78, e710} };
  var a8 = new AdjList { edges = new Edge[]{e89} };
  var a9 = new AdjList { edges = new Edge[]{e910, e97} };
  var a10 = new AdjList {edges = new Edge[]{e100} };

  var graph = new Graph { adj_lists = new AdjList?[]{a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10} };

  var result = floyd_warshall(graph)[0];
  for (var i = 0; i < 10; i = i + 1;) {
    print_int(result[i]);
    print_string(" ");
  }
  return 0;
}
hw6/hw5programs/sp22_tests/ford_fulkerson.oat (Normal file, 111 lines)
@@ -0,0 +1,111 @@
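/* Ford-Fulkerson max flow with BFS augmenting paths over a 10-node capacity matrix. */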
struct Color {
  int white;
  int gray;
  int black
}

global cenum = new Color {white = 0; gray = 1; black = 2};

global max = 9223372036854775807;

global n = 10;

int min(int x, int y) {
  if (x < y) {
    return x;
  } else {
    return y;
  }
}

global head = 0;
global tail = 0;

void enqueue(int[] queue, int[] color, int x) {
  queue[tail] = x;
  tail = tail + 1;
  color[x] = cenum.gray;
  return;
}

int dequeue(int[] queue, int[] color) {
  var x = queue[head];
  head = head + 1;
  color[x] = cenum.black;
  return x;
}

bool bfs(int[][] capacity, int[][] flow, int[] pred, int start, int target) {
  var color = new int[n]{i -> cenum.white};
  var q = new int[n + 2]{i -> -1};

  head = 0;
  tail = 0;
  enqueue(q, color, start);
  pred[start] = -1;
  while (head != tail) {
    var u = dequeue(q, color);
    for (var v = 0; v < n; v = v + 1;) {
      if (color[v] == cenum.white & capacity[u][v] - flow[u][v] > 0) {
        enqueue(q, color, v);
        pred[v] = u;
      }
    }
  }
  return color[target] == cenum.black;
}

int fordFulkerson(int[][] capacity, int source, int sink) {
  var max_flow = 0;
  var flow = new int[][n]{i -> new int[n]{j -> 0}};
  var pred = new int[n]{i -> -1};

  while (bfs(capacity, flow, pred, source, sink)) {
    var increment = max;
    for (var u = n - 1; pred[u] >= 0; u = pred[u];) {
      increment = min(increment, capacity[pred[u]][u] - flow[pred[u]][u]);
    }
    for (var u = n - 1; pred[u] >= 0; u = pred[u];) {
      var t1 = flow[pred[u]][u];
      flow[pred[u]][u] = t1 + increment;
      var t2 = flow[u][pred[u]];
      flow[u][pred[u]] = t2 - increment;
    }
    max_flow = max_flow + increment;
  }
  return max_flow;
}

int program(int argc, string[] argv) {
  var capacity = new int[][n]{i -> new int[n]{j -> 0}};

  capacity[0][1] = 1;
  capacity[0][2] = 2;
  capacity[0][3] = 4;
  capacity[0][7] = 3;

  capacity[1][4] = 5;
  capacity[1][5] = 3;

  capacity[2][4] = 3;

  capacity[3][5] = 5;

  capacity[4][6] = 5;

  capacity[5][8] = 12;

  capacity[6][5] = 4;

  capacity[7][9] = 2;

  capacity[8][9] = 15;

  var s = 0;
  var t = 9;
  print_string("Max Flow: ");
  print_int(fordFulkerson(capacity, s, t));
  print_string(" flow.");
  return 0;
}
hw6/hw5programs/sp22_tests/hashmap.oat (Normal file, 131 lines)
@@ -0,0 +1,131 @@
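/* Open-addressing string hash map with linear probing and a tombstone entry for deletions. */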
global p = 31;
global size = 32;
global deleted_sp = new HashEntry{key=""; value=""};
global hash_table = new HashEntry?[]{};

struct HashEntry {
  string key;
  string value
}

int wrap(int i) {
  return i [&] (size - 1);
}

int hash_function(string s) {
  var arr = array_of_string(s);
  var hash = 0;
  var poly = 1;
  for (var i = 0; i < length(arr); i = i + 1;) {
    hash = hash + arr[i] * poly;
    poly = poly * p;
  }
  return wrap(hash);
}

bool string_eq(string s1, string s2) {
  var len = length_of_string(s1);
  if (len != length_of_string(s2)) {
    return false;
  }
  var arr1 = array_of_string(s1);
  var arr2 = array_of_string(s2);
  for (var i = 0; i < len; i = i + 1;) {
    if (arr1[i] != arr2[i]) {
      return false;
    }
  }
  return true;
}

bool put(string key, string value) {
  var hash = hash_function(key);
  for (var probe = 0; probe < size; probe = probe + 1;) {
    var cur = wrap(hash + probe);
    if? (HashEntry sc = hash_table[cur]) {
      if (sc == deleted_sp | string_eq(sc.key, key)) {
        hash_table[cur] = new HashEntry{key=key; value=value};
        return true;
      }
    } else {
      hash_table[cur] = new HashEntry{key=key; value=value};
      return true;
    }
  }
  return false;
}

string? get(string key) {
  var hash = hash_function(key);
  for (var probe = 0; probe < size; probe = probe + 1;) {
    var cur = wrap(hash + probe);
    if? (HashEntry sc = hash_table[cur]) {
      if (sc != deleted_sp & string_eq(sc.key, key)) {
        return sc.value;
      }
    } else {
      return string null;
    }
  }
  return string null;
}

bool remove(string key) {
  var hash = hash_function(key);
  for (var probe = 0; probe < size; probe = probe + 1;) {
    var cur = wrap(hash + probe);
    if? (HashEntry sc = hash_table[cur]) {
      if (sc != deleted_sp & string_eq(sc.key, key)) {
        hash_table[cur] = deleted_sp;
        return true;
      }
    } else {
      return false;
    }
  }
  return false;
}

int program (int argc, string[] argv) {
  hash_table = new HashEntry?[size];
  if (!put("course", "cis341")) {
    return 1;
  }
  if (!put("student", "munyam & hanbang")) {
    return 1;
  }
  if?(string s = get("student")) {
    if (!string_eq(s, "munyam & hanbang")) {
      return 1;
    }
  } else {
    return 1;
  }
  if?(string s = get("course")) {
    if (!string_eq(s, "cis341")) {
      return 1;
    }
  } else {
    return 1;
  }
  if?(string s = get("prof")) {
    return 1;
  }
  if (remove("prof")) {
    return 1;
  }
  if (!remove("course")) {
    return 1;
  }
  if?(string s = get("course")) {
    return 1;
  }
  if?(string s = get("student")) {
    if (!string_eq(s, "munyam & hanbang")) {
      return 1;
    }
  } else {
    return 1;
  }
  return 0;
}
hw6/hw5programs/sp22_tests/inorder_successor.oat (Normal file, 92 lines)
@@ -0,0 +1,92 @@
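/* In-order successor in a binary tree with parent pointers, checked by four small tests. */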
struct Node {
  Node? left;
  Node? parent;
  int val;
  Node? right
}

global t1_root = new Node { left = Node null; parent = Node null; val = 1; right = Node null };
global t1 = t1_root;
global t1_ans = Node null;

Node first_node (Node t) {
  while (true) {
    if? (Node t_new = t.left) {
      t = t_new;
    } else {
      return t;
    }
  }
  return t;
}

Node? inorder_successor (Node t) {
  while (true) {
    if? (Node t_right = t.right) {
      return first_node(t_right);
    }
    else {
      if? (Node t_parent = t.parent) {
        if? (Node t_parent_left = t_parent.left) {
          if (t == t_parent_left) {
            return t_parent;
          }
        }
        t = t_parent;
      }
      else {
        return Node null;
      }
    }
  }
  return Node null;
}

bool nodes_equal(Node? node1, Node? node2) {
  return node1 == node2;
}

bool test_t1 () {
  return nodes_equal(inorder_successor(t1), t1_ans);
}

bool test_t2 () {
  var t_root = new Node { left = Node null; parent = Node null; val = 1; right = Node null };
  var t_l = new Node {left = Node null; parent = t_root; val = 2; right = Node null };
  t_root.left = t_l;
  var t_r = new Node { left = Node null; parent = Node null; val = 3; right = Node null };
  t_root.right = t_r;
  var t = t_root;
  var t_ans = t_r;
  return nodes_equal(inorder_successor(t), t_ans);
}

bool test_t3 () {
  var t_root = new Node { left = Node null; parent = Node null; val = 1; right = Node null };
  var t_l = new Node {left = Node null; parent = t_root; val = 2; right = Node null };
  t_root.left = t_l;
  var t_r = new Node { left = Node null; parent = Node null; val = 3; right = Node null };
  t_root.right = t_r;
  var t = t_l;
  var t_ans = t_root;
  return nodes_equal(inorder_successor(t), t_ans);
}

bool test_t4 () {
  var t_root = new Node { left = Node null; parent = Node null; val = 1; right = Node null };
  var t_l = new Node {left = Node null; parent = t_root; val = 2; right = Node null };
  t_root.left = t_l;
  var t_r = new Node { left = Node null; parent = Node null; val = 3; right = Node null };
  t_root.right = t_r;
  var t = t_r;
  var t_ans = Node null;
  return nodes_equal(inorder_successor(t), t_ans);
}

int program (int argc, string[] argv) {
  if (!test_t1()) { return 1; }
  if (!test_t2()) { return 2; }
  if (!test_t3()) { return 3; }
  if (!test_t4()) { return 4; }
  return 0;
}
hw6/hw5programs/sp22_tests/kNN.oat (Normal file, 164 lines)
@@ -0,0 +1,164 @@
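/* k-nearest-neighbour lists per point: sorts pairwise distances, then stores the closest k points in a linked list. */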
struct List {
  Node? head;
  int size
}

struct Node {
  Node? next;
  Point p;
  int index
}

struct Point {
  int x;
  int y
}


void insert(List l, Point p0) {
  if?(Node head = l.head) {
    var add = new Node {next = head; p = p0; index = l.size};
    var cur = Node null;
    for (var i = 0; i <= l.size; i = i+1;){
      if (i == 0){
        cur = head;
      }
      else {
        if?(Node x = cur){
          if (i < l.size) {
            cur = x.next;
          } else {
            x.next = add;
          }
        }
      }
    }
    l.size = l.size + 1;
  } else {
    var add = new Node {next = Node null; p = p0; index = 0};
    l.head = add;
    l.size = 1;
  }
  return;
}

Node? get (List l, int index){
  var cur = l.head;
  if (index >= l.size){
    print_string ("Accessing an index of the list out of bounds!");
    return (Node null);
  }
  for (var i = 1; i <= index; i = i+1;){
    if?(Node x = cur){
      cur = x.next;
    }
  }
  return cur;
}

List get_nearest_k_points (int[] arr, Point[] pts_list, int k)
{
  var n = length(arr);
  /*for (var i = 0; i < k; i = i+1;){
    print_int (arr[i]);
    print_string (" ");
    print_point (pts_list[i]);
    print_string ("\n");
  }*/

  for (var i = 0; i < n - 1; i = i + 1;){
    for (var j = 0; j < n - i - 1; j = j + 1;){
      if (arr[j] > arr[j + 1]) {
        var temp1 = arr[j];
        arr[j] = arr[j + 1];
        arr[j + 1] = temp1;

        var temp2 = pts_list[j];
        pts_list[j] = pts_list[j+1];
        pts_list[j+1] = temp2;
      }
    }
  }

  var nearest_k = new List {head = Node null; size = 0};
  for (var i = 0; i < k; i = i+1;){
    /*print_string ("Inserted ");
    print_int (arr[i]);
    print_string (" ");
    print_point (pts_list[i]);
    print_string ("\n");*/
    insert(nearest_k, pts_list[i]);
  }

  return nearest_k;
}


int euclidean_distance_metric (Point p1, Point p2) {
  var dist = (p1.x - p2.x) * (p1.x - p2.x) + (p1.y - p2.y) * (p1.y - p2.y);
  return dist;
}


List get_k_nearest (Point p, (Point, Point) -> int distance_metric, Point[] pts_list, int k) {
  var n = length(pts_list);
  var dist_from_p_to_others = new int[n];
  for (var i = 0; i < n; i = i + 1;){
    dist_from_p_to_others[i] = distance_metric(p, pts_list[i]);
    /*print_point(pts_list[i]);
    print_int(dist_from_p_to_others[i]);
    print_string(" ");*/
  }
  var new_pts_list = new Point[n]{x -> pts_list[x]};
  return get_nearest_k_points(dist_from_p_to_others, new_pts_list, k);
}

void print_point (Point p){
  print_string ("(");
  print_int(p.x);
  print_string (", ");
  print_int(p.y);
  print_string (")");
  return;
}

int program(int argc, string[] argv) {
  var k = 2;
  var pts_list = new Point[]{new Point{x = 4; y = 5}, new Point{x = 2; y = 10}, new Point{x = 5; y = 10}, new Point{x = 2; y = -1}};
  var num_of_points = length(pts_list);

  var knn_graph = new List[num_of_points]{x -> new List{head = Node null; size = 0}};
  for (var i = 0; i < num_of_points; i = i + 1;){
    knn_graph[i] = get_k_nearest(pts_list[i], euclidean_distance_metric, pts_list, k+1);
  }

  for (var i = 0; i < num_of_points; i = i+1;){
    print_string ("Nearest k = ");
    print_int (k);
    print_string(" points to ");
    print_point (pts_list[i]);
    print_string (": ");
    for (var j = 1; j < knn_graph[i].size; j = j+1;){
      if?(Node node = get (knn_graph[i], j)){
        print_point (node.p);
      }
    }

    print_string ("\n");
  }
  return 0;
}
hw6/hw5programs/sp22_tests/kmeans.oat (Normal file, 61 lines)
@@ -0,0 +1,61 @@
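/* k-means-style assignment: training points are matched to the nearest of a fixed set of cluster centres, then a test point is classified. */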
struct Point {
  int x;
  int y
}

struct TrainPoint {
  Point p;
  int cluster_num
}

Point[] train (int iters, int num_clusters, int num_train_pts, Point[] training_pts) {
  var clusters = new Point[num_clusters]{i -> new Point {x=i;y=i}};
  var train_pts = new TrainPoint[num_train_pts]{i -> new TrainPoint {p=training_pts[i]; cluster_num=i}};

  var x = 1;

  for (var iter = 0; iter < iters; iter = iter + 1;) {
    for (var p = 0; p < num_train_pts; p = p + 1;) {
      var min_dist = -1;
      var min_dist_cluster = -1;

      for (var c = 0; c < num_clusters; c = c + 1;) {
        var dist = (clusters[c].x - train_pts[p].p.x) * (clusters[c].x - train_pts[p].p.x) + (clusters[c].y - train_pts[p].p.y) * (clusters[c].y- train_pts[p].p.y);
        if ((min_dist == -1) | (dist < min_dist)) {
          min_dist = dist;
          min_dist_cluster = c;
        }
      }
      train_pts[p].cluster_num = min_dist_cluster;
    }
  }

  return clusters;
}

int predict_cluster_num (Point p, Point[] clusters, int num_clusters) {
  var min_dist = -1;
  var min_dist_cluster = -1;

  for (var c = 0; c < num_clusters; c = c + 1;) {
    var dist = (clusters[c].x - p.x) * (clusters[c].x - p.x) + (clusters[c].y - p.y) * (clusters[c].y- p.y);
    if ((min_dist == -1) | (dist < min_dist)) {
      min_dist = dist;
      min_dist_cluster = c;
    }
  }

  return min_dist_cluster;
}

int program(int argc, string[] args) {
  var num_clusters = 10;
  var num_pts = num_clusters * 2;
  var iters = 10;

  var train_pts = new Point[num_pts]{i -> new Point {x=-i;y=i * i * i}};
  var clusters = train (iters, num_clusters, num_pts, train_pts);

  var test_point = new Point{x=0;y=0};
  return predict_cluster_num(test_point, clusters, num_clusters);
}
Some files were not shown because too many files have changed in this diff.