Updated hw6 to a newer version
Signed-off-by: jmug <u.g.a.mariano@gmail.com>
parent 9224001a22
commit 0c04936ccf
356 changed files with 8408 additions and 4725 deletions
9  hw6/.gitignore  (vendored, Normal file)
@@ -0,0 +1,9 @@
.vscode
_build
bin/main.exe
bin/printanalysis.exe
oatc
printanalysis
ocamlbin
*~
output
48  hw6/Makefile
@@ -1,39 +1,31 @@
INCLUDES= util,x86,ll
SUBMIT := solver.ml alias.ml backend.ml dce.ml constprop.ml team.txt
SUBMIT := $(shell cat submit_zip_contents.txt)
HWNAME := hw6
TIMESTAMP := $(shell /bin/date "+%Y-%m-%d-%H-%M-%S")
ZIPNAME := $(HWNAME)-submit-$(TIMESTAMP).zip

HWNAME := hw06
ZIPNAME := $(HWNAME)-submit.zip
.PHONY: all oatc test clean zip

# different compilation cmd for OCaml >= 4.08.1
# otherwise package `num` won't be correctly located
OCAMLNEW := $(shell expr `ocaml --version | sed -e 's/^.* //g' -e 's/\.\([0-9][0-9]\)/\1/g' -e 's/\.\([0-9]\)/0\1/g' -e 's/^[0-9]\{3,4\}$$/&00/'` \>= 40800)
ifeq "$(OCAMLNEW)" "1"
LIBS = unix,str
PKGS = -package num
else
LIBS = unix,str,nums
PKGS =
endif
all: oatc

all: main.native
oatc:
dune build bin/main.exe
@cp bin/main.exe oatc

.PHONY: test
test: main.native
./main.native --test
printanalysis:
dune build bin/printanalysis.exe
@cp bin/printanalysis.exe printanalysis

.PHONY: main.native
main.native: $(SUBMIT) ast.ml astlib.ml backend.ml driver.ml main.ml runtime.c
ocamlbuild -Is $(INCLUDES) $(PKGS) -libs $(LIBS) main.native -use-menhir -yaccflag --explain

.PHONY: printanalysis.native
printanalysis.native: $(SUBMIT) ast.ml astlib.ml backend.ml driver.ml main.ml runtime.c
ocamlbuild -Is $(INCLUDES) $(PKGS) -libs $(LIBS) printanalysis.native -use-menhir -yaccflag --explain
test: oatc
./oatc --test

utop:
utop

zip: $(SUBMIT)
zip '$(ZIPNAME)' $(SUBMIT)

.PHONY: clean
clean:
ocamlbuild -clean
rm -rf output a.out
dune clean
rm -rf oatc ocamlbin bin/main.exe printanalysis bin/printanalysis.exe

#

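For reference, the `OCAMLNEW` check in the Makefile above flattens the `ocaml --version` output into a comparable integer (4.08.1 becomes 40801) and tests it against 40800. A minimal OCaml sketch of the same idea, assuming only the standard library (`version_as_int` is an illustrative name, not something the build actually uses):

```ocaml
(* Same idea as the sed/expr pipeline above: flatten "major.minor.patch"
   into an integer so versions can be compared numerically. *)
let version_as_int (v : string) : int =
  match String.split_on_char '.' v with
  | [] -> 0
  | major :: rest ->
    let minor = match rest with m :: _ -> int_of_string m | [] -> 0 in
    let patch = match rest with _ :: p :: _ -> int_of_string p | _ -> 0 in
    int_of_string major * 10000 + minor * 100 + patch

let () =
  assert (version_as_int "4.08.1" = 40801);   (* >= 40800: use -package num *)
  assert (version_as_int "4.14"   = 41400);
  assert (version_as_int "4.05.0" < 40800)    (* older: link the nums library *)
```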
69  hw6/README
@@ -1,69 +0,0 @@
Using main.native for testing:

* To run the automated test harness do:
- on OS X: ./main.native --test
- on Linux: ./main.native -linux --test

* To compile ll files using the Compiler Design backend:
./main.native path/to/foo.ll

- creates output/foo.s backend assembly code
- creates output/foo.o assembled object file
- creates a.out linked executable

NOTE: by default the .s and .o files are created in
a directory called output, and the filenames are
chosen so that multiple runs of the compiler will
not overwrite previous outputs. foo.ll will be
compiled first to foo.s then foo_1.s, foo_2.s, etc.


* To compile ll files using the clang backend:
./main.native --clang path/to/foo.ll

* Useful flags:

--print-ll
echoes the ll program to the terminal

--print-x86
echoes the resulting .s file to the terminal

--simulate-x86
runs the resulting .s file through the reference
x86 simulator and outputs the result to the console

--execute-x86
runs the resulting a.out file natively
(applies to either the Compiler Design backend or clang-compiled code)

-v
generates verbose output, showing which commands are used
for linking, etc.

-op <dirname>
change the output path [DEFAULT=output]

-o
change the generated executable's name [DEFAULT=a.out]

-S
stop after generating .s files

-c
stop after generating .o files

-h or --help
display the list of options

* Example uses:

Run the test case /programs/factrect.ll using the Compiler Design backend:

./main.native --execute-x86 programs/factrect.ll
--------------------------------------------------------------- Executing: a.out
* a.out returned 120


Run the test
74  hw6/README.md  (Normal file)
@@ -0,0 +1,74 @@
# HW6: Dataflow Analysis and Optimization

The [instructions for this homework](doc/hw6-opt.html) are in the `doc` directory.

Quick Start:

1. open the folder in VSCode
2. start an OCaml sandbox terminal
3. run `make test` from the command line
4. open `bin/solver.ml`


Using ``oatc``
--------------

``oatc`` acts like the clang compiler. Given several .oat, .ll, .c, and .o
files, it will compile the .oat and .ll files to .s files (using the CS153
frontend and backend) and then combine the results with the .c and .o files to
produce an executable named a.out. You can also compile the .ll files using
clang instead of the CS153 backend, which can be useful for testing
purposes.


* To run the automated test harness do:

./oatc --test

* To compile oat files using the CS153 backend:

./oatc path/to/foo.oat

- creates output/foo.ll frontend ll code
- creates output/foo.s backend assembly code
- creates output/foo.o assembled object file
- creates a.out linked executable

NOTE: by default the .s and .o files are created in
a directory called output, and the filenames are
chosen so that multiple runs of the compiler will
not overwrite previous outputs. foo.ll will be
compiled first to foo.s then foo_1.s, foo_2.s, etc.

* To compile oat files using the clang backend:

./oatc --clang path/to/foo.oat

* Useful flags:

| Flag | Description |
|---------------------------------|---------------------------------------------------------------------------------------------------|
| --regalloc {none,greedy,better} | use the specified register allocator |
| --liveness {trivial,dataflow} | use the specified liveness analysis |
| --print-regs | prints the register usage statistics for x86 code |
| --print-oat | pretty prints the Oat abstract syntax to the terminal |
| --print-ll | echoes the ll program to the terminal |
| --print-x86 | echoes the resulting .s file to the terminal |
| --interpret-ll | runs the ll file through the reference interpreter and outputs the results to the console |
| --execute-x86 | runs the resulting a.out file natively (applies to either the 153 backend or clang-compiled code) |
| --clang | compiles to assembly using clang, not the CS153 backend |
| -v | generates verbose output, showing which commands are used for linking, etc. |
| -op ``<dirname>`` | change the output path [DEFAULT=output] |
| -o | change the generated executable's name [DEFAULT=a.out] |
| -S | stop after generating .s files |
| -c | stop after generating .o files |
| -h or --help | display the list of options |


* Example uses:

Run the test case hw4programs/fact.oat using the CS153 backend:

./oatc --execute-x86 hw4programs/fact.oat bin/runtime.c
--------------------------------------------------------------- Executing: a.out
* a.out returned 0
BIN  hw6/a.out  (Executable file)
Binary file not shown.
@@ -10,7 +10,7 @@ module SymPtr =
| Unique (* uid is the unique name for a pointer *)
| UndefAlias (* uid is not in scope or not a pointer *)

let compare : t -> t -> int = Pervasives.compare
let compare : t -> t -> int = Stdlib.compare

let to_string = function
| MayAlias -> "MayAlias"
@@ -29,12 +29,40 @@ type fact = SymPtr.t UidM.t
- After an alloca, the defined UID is the unique name for a stack slot
- A pointer returned by a load, call, bitcast, or GEP may be aliased
- A pointer passed as an argument to a call, bitcast, GEP, or store
may be aliased
(as the value being stored) may be aliased
- Other instructions do not define pointers

*)

let insn_flow ((u,i):uid * insn) (d:fact) : fact =
failwith "Alias.insn_flow unimplemented"
(* define values *)
let unique : SymPtr.t = Unique in
let may_alias : SymPtr.t = MayAlias in
let undef_alias : SymPtr.t = UndefAlias in

match i with
| Alloca _ -> UidM.add u unique d
| Load (ty, _) ->
let is_ty_ptr_namedt = match ty with | Ptr t ->
let r = begin match t with | Ptr t -> true | _ -> false end in r | _ -> false in
if is_ty_ptr_namedt == true then
UidM.add u may_alias d
else d
| Store (_, op, _) ->
(* update ptr arg *)
let is_op_uid = match op with | Const _ -> true | _ -> false in
if is_op_uid == true then d else
let op_uid = match op with | Id i -> i | Gid i -> i | _ -> failwith "Store error should be caught above" in
if UidM.mem op_uid d == false then d else
UidM.update (fun _ -> may_alias) op_uid d
| Call (_, op, _) | Bitcast (_, op, _) | Gep (_, op, _) ->
(* update ptr arg *)
let op_uid = match op with | Id i -> i | Gid i -> i | _ -> failwith "Call is supposed to be a uid" in
if UidM.mem op_uid d == true then
(* update ptr returned *)
let d1 = UidM.update (fun _ -> may_alias) op_uid d in UidM.add u may_alias d1
else UidM.add u may_alias d
| Binop _ | Icmp _ -> d


(* The flow function across terminators is trivial: they never change alias info *)
@@ -62,14 +90,39 @@ module Fact =

(* TASK: complete the "combine" operation for alias analysis.

The alias analysis should take the join over predecessors to compute the
The alias analysis should take the meet over predecessors to compute the
flow into a node. You may find the UidM.merge function useful.

It may be useful to define a helper function that knows how to take the
join of two SymPtr.t facts.
meet of two SymPtr.t facts.
*)
let combine (ds:fact list) : fact =
failwith "Alias.Fact.combine not implemented"
let lattice (m1:SymPtr.t) (m2:SymPtr.t) : SymPtr.t =
match m1, m2 with
| MayAlias, _ -> MayAlias
| _, MayAlias -> MayAlias
| Unique, Unique -> Unique
| Unique, UndefAlias -> Unique
| UndefAlias, Unique -> Unique
| UndefAlias, UndefAlias -> UndefAlias

let combine (ds : fact list) : fact =
(* used LLM to understand how the UidM.t merge function could be useful through made-up examples, and what the inputs 'a option meant *)

(* PART 2: look at the facts, if we have non-None facts, we can merge them based on the lattice *)
let look_at_facts _ a_opt b_opt =
match a_opt, b_opt with
| Some a, Some b -> Some (lattice a b)
| Some a, None -> Some a
| None, Some b -> Some b
| _, _ -> failwith "look_at_facts: incorrect opts" in

(* PART 1: create combine function that looks at the facts *)
let rec combine_function (fl : fact list) (acc : SymPtr.t UidM.t) : SymPtr.t UidM.t =
match fl with
| [] -> acc
| hd :: tl -> let result = UidM.merge look_at_facts acc hd in combine_function tl result in

combine_function ds UidM.empty
end

(* instantiate the general framework ---------------------------------------- *)
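The `combine` above relies on `UidM.merge`, whose callback receives `'a option` arguments depending on which input map binds the uid. A minimal, self-contained sketch of that behavior, using the standard `Map.Make (String)` as a stand-in for `UidM` and plain strings for the lattice values (all names here are illustrative):

```ocaml
(* How a merge-based "meet over predecessors" behaves, in miniature. *)
module Facts = Map.Make (String)

let meet a b = if a = "MayAlias" || b = "MayAlias" then "MayAlias" else "Unique"

let combine2 d1 d2 =
  Facts.merge
    (fun _uid a_opt b_opt ->
       match a_opt, b_opt with
       | Some a, Some b -> Some (meet a b)  (* both predecessors bind the uid *)
       | Some a, None   -> Some a           (* only one side binds it *)
       | None,   Some b -> Some b
       | None,   None   -> None)
    d1 d2

let () =
  let d1 = Facts.(empty |> add "%p" "Unique") in
  let d2 = Facts.(empty |> add "%p" "MayAlias" |> add "%q" "Unique") in
  let d  = combine2 d1 d2 in
  assert (Facts.find "%p" d = "MayAlias");
  assert (Facts.find "%q" d = "Unique")
```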
@ -1,3 +1,4 @@
|
|||
module Range = Util.Range
|
||||
|
||||
type 'a node = { elt : 'a; loc : Range.t }
|
||||
|
||||
|
|
@ -2,7 +2,7 @@
|
|||
open Ll
|
||||
open Llutil
|
||||
open X86
|
||||
|
||||
module Platform = Util.Platform
|
||||
|
||||
(* allocated llvmlite function bodies --------------------------------------- *)
|
||||
|
||||
|
|
@ -139,6 +139,9 @@ module AsGraph (D:AS_GRAPH_PARAMS) :
|
|||
(* For testing purposes, we would like to be able to access the underlying
|
||||
map of dataflow facts *)
|
||||
val dfa : t -> D.t LblM.t
|
||||
|
||||
val to_string : (tid * ty) list -> t -> string
|
||||
val printer : (tid * ty) list -> Format.formatter -> t -> unit
|
||||
end =
|
||||
struct
|
||||
module NodeS = LblS
|
||||
|
|
@ -166,7 +169,7 @@ module AsGraph (D:AS_GRAPH_PARAMS) :
|
|||
let bound_lbl = "__bound"
|
||||
|
||||
(* The only way to create a flow graph is to provide an initial labeling *)
|
||||
let of_cfg init flow_in cfg =
|
||||
let of_cfg init flow_in cfg =
|
||||
let dfa = cfg.blocks
|
||||
|> LblM.mapi (fun l _ -> init l)
|
||||
|> LblM.add bound_lbl flow_in
|
||||
|
|
@ -328,10 +331,10 @@ module AsGraph (D:AS_GRAPH_PARAMS) :
|
|||
(* Printing functions *)
|
||||
(* printing functions ------------------------------------------------------- *)
|
||||
|
||||
let annot_insn g l (u,i:uid*insn) =
|
||||
let annot_insn tdecls g l (u,i:uid*insn) =
|
||||
Printf.sprintf " IN : %s\n %s\n OUT: %s"
|
||||
(D.to_string (uid_in g l u))
|
||||
(Llutil.string_of_named_insn (u,i))
|
||||
(Llutil.string_of_named_insn tdecls (u,i))
|
||||
(D.to_string (uid_out g l u))
|
||||
|
||||
let annot_terminator g l (u,t:uid*terminator) =
|
||||
|
|
@ -340,23 +343,29 @@ module AsGraph (D:AS_GRAPH_PARAMS) :
|
|||
(Llutil.string_of_terminator t)
|
||||
(D.to_string (uid_out g l u))
|
||||
|
||||
let to_string_annot (annot:lbl -> string) (g:t) : string =
|
||||
let to_string_annot (tdecls:(tid * ty) list) (annot:lbl -> string) (g:t) : string =
|
||||
LblM.to_string
|
||||
(fun l block ->
|
||||
Printf.sprintf "%s\n%s\n%s\n\n"
|
||||
(annot l)
|
||||
(Llutil.mapcat "\n" (annot_insn g l) (block.insns))
|
||||
(Llutil.mapcat "\n" (annot_insn tdecls g l) (block.insns))
|
||||
(annot_terminator g l (block.term))
|
||||
) (g.cfg.blocks)
|
||||
|
||||
let printer_annot (annot:lbl -> string) (f:Format.formatter) (g:t) : unit =
|
||||
Format.pp_print_string f (to_string_annot annot g)
|
||||
let printer_annot (tdecls:(tid * ty) list) (annot:lbl -> string) (f:Format.formatter) (g:t) : unit =
|
||||
Format.pp_print_string f (to_string_annot tdecls annot g)
|
||||
|
||||
let to_string g =
|
||||
to_string_annot (fun l -> D.to_string (out g l)) g
|
||||
let to_string (tdecls:(tid * ty) list) g =
|
||||
to_string_annot tdecls (fun l -> D.to_string (out g l)) g
|
||||
(* let to_string g = to_string_annot [] (fun l -> D.to_string (out g l)) g*)
|
||||
|
||||
let printer (tdecls:(tid * ty) list) f g =
|
||||
printer_annot tdecls (fun l -> D.to_string (out g l)) f g
|
||||
|
||||
(*
|
||||
let printer f g =
|
||||
printer_annot (fun l -> D.to_string (out g l)) f g
|
||||
printer_annot [] (fun l -> D.to_string (out g l)) f g
|
||||
*)
|
||||
end
|
||||
|
||||
(* exported type *)
|
||||
64  hw6/bin/cinterop.c  (Normal file)
@@ -0,0 +1,64 @@
#include <stdbool.h>
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <stdio.h>
|
||||
|
||||
void ll_puts(int8_t *s) {
|
||||
puts((char *)s);
|
||||
}
|
||||
|
||||
int8_t* ll_strcat(int8_t* s1, int8_t* s2) {
|
||||
int l1 = strlen((char*)s1);
|
||||
int l2 = strlen((char*)s2);
|
||||
char* buf = (char*)calloc(l1 + l2 + 1, sizeof(char));
|
||||
strncpy(buf, (char*)s1, l1);
|
||||
strncpy(buf + l1, (char*)s2, l2+1);
|
||||
return (int8_t*) buf;
|
||||
}
|
||||
|
||||
int64_t ll_callback(int64_t (*fun)(int64_t, int64_t)) {
|
||||
int64_t x = 19L;
|
||||
return fun(x, x);
|
||||
}
|
||||
|
||||
int8_t* ll_ltoa(int64_t i) {
|
||||
// Safety: INT64_MIN is -9223372036854775808, which has 20 characters when
|
||||
// represented as a string. After including the null terminator, we need to
|
||||
// allocate a buffer of size 21.
|
||||
char* buf = (char*)calloc(21, sizeof(char));
|
||||
int t = 0;
|
||||
if (i == 0) {
|
||||
buf[t++] = '0';
|
||||
return (int8_t*) buf;
|
||||
}
|
||||
|
||||
bool negative = i < 0;
|
||||
if (!negative) {
|
||||
// Normalize to negative number to avoid overflow
|
||||
i = -i;
|
||||
}
|
||||
|
||||
// Generate the digits in reverse order, from [0..t)
|
||||
while (i < 0) {
|
||||
char last_digit = '0' + -(i % 10);
|
||||
buf[t++] = last_digit;
|
||||
i /= 10;
|
||||
}
|
||||
if (negative) {
|
||||
buf[t++] = '-';
|
||||
}
|
||||
|
||||
// Reverse the buffer
|
||||
for (int j = 0, r = t - 1; j < r; j++, r--) {
|
||||
char temp = buf[j];
|
||||
buf[j] = buf[r];
|
||||
buf[r] = temp;
|
||||
}
|
||||
|
||||
return (int8_t*) buf;
|
||||
}
|
||||
|
||||
void *ll_malloc(int64_t n, int64_t size) {
|
||||
return calloc(n, size);
|
||||
}
|
||||
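The `ll_ltoa` definition above sizes its buffer for INT64_MIN and normalizes the value into the non-positive range so that negating it never overflows. The same trick expressed in OCaml, as a hedged standalone sketch (the function name is illustrative; the C definition above stays authoritative):

```ocaml
(* Sketch of the ll_ltoa trick: stay in the non-positive range so that
   Int64.min_int (which has no positive counterpart) never overflows. *)
let int64_to_string (i : int64) : string =
  if i = 0L then "0"
  else
    let negative = Int64.compare i 0L < 0 in
    let i = if negative then i else Int64.neg i in   (* normalize to i <= 0 *)
    let rec digits i acc =
      if i = 0L then acc
      else
        (* Int64.rem i 10L is in [-9, 0]; negate it to get the digit *)
        let d = Char.chr (Char.code '0' - Int64.to_int (Int64.rem i 10L)) in
        digits (Int64.div i 10L) (String.make 1 d :: acc)
    in
    let body = String.concat "" (digits i []) in
    if negative then "-" ^ body else body

let () =
  assert (int64_to_string 120L = "120");
  assert (int64_to_string (-7L) = "-7");
  assert (int64_to_string Int64.min_int = "-9223372036854775808")
```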
219  hw6/bin/constprop.ml  (Normal file)
@@ -0,0 +1,219 @@
open Ll
|
||||
open Datastructures
|
||||
|
||||
(* The lattice of symbolic constants ---------------------------------------- *)
|
||||
module SymConst =
|
||||
struct
|
||||
type t = NonConst (* Uid may take on multiple values at runtime *)
|
||||
| Const of int64 (* Uid will always evaluate to const i64 or i1 *)
|
||||
| UndefConst (* Uid is not defined at the point *)
|
||||
|
||||
let compare (a:t) (b:t) =
|
||||
match a, b with
|
||||
| (Const i, Const j) -> Int64.compare i j
|
||||
| (NonConst, NonConst) | (UndefConst, UndefConst) -> 0
|
||||
| (NonConst, _) | (_, UndefConst) -> 1
|
||||
| (UndefConst, _) | (_, NonConst) -> -1
|
||||
|
||||
let to_string : t -> string = function
|
||||
| NonConst -> "NonConst"
|
||||
| Const i -> Printf.sprintf "Const (%LdL)" i
|
||||
| UndefConst -> "UndefConst"
|
||||
|
||||
|
||||
end
|
||||
|
||||
(* The analysis computes, at each program point, which UIDs in scope will evaluate
|
||||
to integer constants *)
|
||||
type fact = SymConst.t UidM.t
|
||||
|
||||
|
||||
|
||||
(* flow function across Ll instructions ------------------------------------- *)
|
||||
(* - Uid of a binop or icmp with const arguments is constant-out
|
||||
- Uid of a binop or icmp with an UndefConst argument is UndefConst-out
|
||||
- Uid of a binop or icmp with an NonConst argument is NonConst-out
|
||||
- Uid of stores and void calls are UndefConst-out
|
||||
- Uid of all other instructions are NonConst-out
|
||||
*)
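Concretely, these rules fold a binop/icmp only when both abstract operands are constants; a NonConst operand forces NonConst, and otherwise an UndefConst operand forces UndefConst. A toy sketch of that ordering (illustrative `symconst` type, not the `SymConst` module in this file; the real logic is in `meet_facts`/`insn_flow` below):

```ocaml
(* Toy reading of the constant-propagation rules for a binop's output. *)
type symconst = NonConst | Const of int64 | UndefConst

let binop_out a b =
  match a, b with
  | NonConst, _ | _, NonConst -> NonConst        (* a multi-valued operand poisons the result *)
  | UndefConst, _ | _, UndefConst -> UndefConst  (* otherwise an undefined operand wins *)
  | Const x, Const y -> Const (Int64.add x y)    (* both constant: fold (here: add) *)

let () =
  assert (binop_out (Const 4L) (Const 5L) = Const 9L);
  assert (binop_out (Const 4L) NonConst   = NonConst);
  assert (binop_out UndefConst (Const 5L) = UndefConst)
```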
|
||||
|
||||
let compute_const_bop (bop:bop) (i1:int64) (i2:int64) : int64=
|
||||
match bop with
|
||||
| Add -> Int64.add i1 i2
|
||||
| Sub -> Int64.sub i1 i2
|
||||
| Mul -> Int64.mul i1 i2
|
||||
| And -> Int64.logand i1 i2
|
||||
| Or -> Int64.logor i1 i2
|
||||
| Xor -> Int64.logxor i1 i2
|
||||
| Shl -> Int64.shift_left i1 (Int64.to_int i2)
|
||||
| Ashr -> Int64.shift_right i1 (Int64.to_int i2)
|
||||
| Lshr -> Int64.shift_right_logical i1 (Int64.to_int i2)
|
||||
|
||||
let compute_const_cnd (cnd:cnd) (i1:int64) (i2:int64) =
|
||||
let result = match cnd with
|
||||
| Eq -> i1 == i2
|
||||
| Ne -> i1 != i2
|
||||
| Slt -> i1 < i2
|
||||
| Sle -> i1 <= i2
|
||||
| Sgt -> i1 > i2
|
||||
| Sge -> i1 >= i2
|
||||
in if result then 1L else 0L
|
||||
|
||||
let meet_facts (c1:SymConst.t) (c2:SymConst.t) (bop:bop option) (cnd:cnd option): SymConst.t =
|
||||
(* NonConst <= Const c <= UndefConst *)
|
||||
match c1, c2 with
|
||||
| NonConst, _ -> NonConst
|
||||
| _, NonConst -> NonConst
|
||||
| Const a, Const b ->
|
||||
begin match bop, cnd with
|
||||
| Some c, _ -> Const (compute_const_bop c a b)
|
||||
| _, Some c -> Const (compute_const_cnd c a b)
|
||||
| _ -> failwith "meet_facts self-error: did not supply a bop or a cnd" end
|
||||
| Const a, UndefConst -> Const a
|
||||
| UndefConst, Const b -> Const b
|
||||
| UndefConst, UndefConst -> UndefConst
|
||||
|
||||
let op_symconst (op:operand) (i:insn) (d:fact): SymConst.t =
|
||||
match op with
|
||||
| Const c -> Const c
|
||||
| Null -> NonConst
|
||||
| Id i | Gid i -> begin match UidM.find_opt i d with
|
||||
| Some c -> c | None -> UndefConst end
|
||||
|
||||
let insn_flow (u,i:uid * insn) (d:fact) : fact =
|
||||
let nonconst : SymConst.t = NonConst in
|
||||
let undefconst : SymConst.t = UndefConst in
|
||||
|
||||
match i with
|
||||
| Binop (bop, _, op1, op2) ->
|
||||
let op_symconst1 = op_symconst op1 i d in
|
||||
let op_symconst2 = op_symconst op2 i d in
|
||||
let symconst = meet_facts op_symconst1 op_symconst2 (Some bop) None in
|
||||
UidM.add u symconst d
|
||||
| Icmp (cnd, _, op1, op2) ->
|
||||
let op_symconst1 = op_symconst op1 i d in
|
||||
let op_symconst2 = op_symconst op2 i d in
|
||||
let symconst = meet_facts op_symconst1 op_symconst2 None (Some cnd) in
|
||||
UidM.add u symconst d
|
||||
| Store (_, _, _) | Call (Void, _, _) -> UidM.add u undefconst d
|
||||
| _ -> UidM.add u nonconst d
|
||||
|
||||
(* The flow function across terminators is trivial: they never change const info *)
|
||||
let terminator_flow (t:terminator) (d:fact) : fact = d
|
||||
|
||||
(* module for instantiating the generic framework --------------------------- *)
|
||||
module Fact =
|
||||
struct
|
||||
type t = fact
|
||||
let forwards = true
|
||||
|
||||
let insn_flow = insn_flow
|
||||
let terminator_flow = terminator_flow
|
||||
|
||||
let normalize : fact -> fact =
|
||||
UidM.filter (fun _ v -> v != SymConst.UndefConst)
|
||||
|
||||
let compare (d:fact) (e:fact) : int =
|
||||
UidM.compare SymConst.compare (normalize d) (normalize e)
|
||||
|
||||
let to_string : fact -> string =
|
||||
UidM.to_string (fun _ v -> SymConst.to_string v)
|
||||
|
||||
|
||||
(* The constprop analysis should take the meet over predecessors to compute the
|
||||
flow into a node. You may find the UidM.merge function useful *)
|
||||
|
||||
let combine (ds:fact list) : fact =
|
||||
(* merge function to call meet facts *)
|
||||
let merge_function _ a_opt b_opt =
|
||||
match a_opt, b_opt with
|
||||
| Some a, Some b -> if a == b then Some b else None
|
||||
| Some a, None -> Some a
|
||||
| None, Some b -> Some b
|
||||
| _, _ -> failwith "" in
|
||||
|
||||
(* combine function to call merge function *)
|
||||
let rec combine_function (fl : fact list) (acc : SymConst.t UidM.t) : SymConst.t UidM.t =
|
||||
match fl with
|
||||
| [] -> acc
|
||||
| hd :: tl -> let result = UidM.merge merge_function acc hd in combine_function tl result in
|
||||
combine_function ds UidM.empty
|
||||
end
|
||||
|
||||
(* instantiate the general framework ---------------------------------------- *)
|
||||
module Graph = Cfg.AsGraph (Fact)
|
||||
module Solver = Solver.Make (Fact) (Graph)
|
||||
|
||||
(* expose a top-level analysis operation ------------------------------------ *)
|
||||
let analyze (g:Cfg.t) : Graph.t =
|
||||
(* the analysis starts with every node set to bottom (the map of every uid
|
||||
in the function to UndefConst *)
|
||||
let init l = UidM.empty in
|
||||
|
||||
(* the flow into the entry node should indicate that any parameter to the
|
||||
function is not a constant *)
|
||||
let cp_in = List.fold_right
|
||||
(fun (u,_) -> UidM.add u SymConst.NonConst)
|
||||
g.Cfg.args UidM.empty
|
||||
in
|
||||
let fg = Graph.of_cfg init cp_in g in
|
||||
Solver.solve fg
|
||||
|
||||
|
||||
(* run constant propagation on a cfg given analysis results ----------------- *)
|
||||
(* HINT: your cp_block implementation will probably rely on several helper
|
||||
functions. *)
|
||||
let run (cg:Graph.t) (cfg:Cfg.t) : Cfg.t =
|
||||
let open SymConst in
|
||||
|
||||
let cp_block (l:Ll.lbl) (cfg:Cfg.t) : Cfg.t =
|
||||
let b = Cfg.block cfg l in
|
||||
let cb = Graph.uid_out cg l in
|
||||
|
||||
let rec check_operand (op:operand) (insn:insn) =
|
||||
let op1_new = match op with
|
||||
| Id i | Gid i ->
|
||||
let fact = cb i in
|
||||
let symconst : SymConst.t = op_symconst op insn fact in
|
||||
let r = begin match symconst with
|
||||
| Const c -> Some c
|
||||
| _ -> None end in r
|
||||
| _ -> None in op1_new in
|
||||
|
||||
let rec iterate_instructions (uid_insn_list : (uid * insn) list) (new_uid_insn_list : (uid * insn) list) =
|
||||
match uid_insn_list with
|
||||
| [] -> new_uid_insn_list
|
||||
| hd :: tl ->
|
||||
let uid, insn = hd in
|
||||
(* we want to see if the value is a var = constant *)
|
||||
(* if this is the case, we'll want to check every other instruction and "propagate it" in there *)
|
||||
let new_uid_insn = match insn with
|
||||
| Binop (bop, ty, op1, op2) ->
|
||||
let check_op1 = check_operand op1 insn in
|
||||
let check_op2 = check_operand op2 insn in
|
||||
let new_op1 : operand = match check_op1 with | Some c -> Const c | _ -> op1 in
|
||||
let new_op2 : operand = match check_op2 with | Some c -> Const c | _ -> op2 in
|
||||
(uid, Binop (bop, ty, new_op1, new_op2))
|
||||
| _ -> failwith "nye"
|
||||
in iterate_instructions tl (new_uid_insn_list @ [new_uid_insn]) in
|
||||
|
||||
(* WE ALSO NEED TO DO THE TERMINATOR INSTRUCTION, SAME IDEA :) *)
|
||||
|
||||
|
||||
let new_uid_insns = iterate_instructions b.insns [] in
|
||||
let new_block = { insns = new_uid_insns; term = b.term } in
|
||||
|
||||
let remove_old_block = LblM.remove l cfg.blocks in
|
||||
let new_block_same_lbl = LblM.add l new_block cfg.blocks in
|
||||
|
||||
let new_cfg : Cfg.cfg = {
|
||||
blocks = new_block_same_lbl;
|
||||
preds = cfg.preds;
|
||||
ret_ty = cfg.ret_ty;
|
||||
args = cfg.args;
|
||||
} in
|
||||
|
||||
new_cfg
|
||||
in
|
||||
|
||||
LblS.fold cp_block (Cfg.nodes cfg) cfg
|
||||
|
|
@@ -21,10 +21,27 @@ open Datastructures

Hint: Consider using List.filter
*)
let dce_block (lb:uid -> Liveness.Fact.t)
(ab:uid -> Alias.fact)
(b:Ll.block) : Ll.block =
failwith "Dce.dce_block unimplemented"


let dce_block (lb:uid -> Liveness.Fact.t) (ab:uid -> Alias.fact) (b:Ll.block) : Ll.block =
(* check by each instruction *)
let is_not_dead (uid : uid) (insn: insn) : bool =
match insn with
| Call _ -> true
| Store (_, _, ptr) ->
(* not dead if live or mayalias *)
(* if we're storing into a *)
let ptr_uid = match ptr with | Id i -> i | Gid i -> i | _ -> failwith "Store must be an id" in
let ptr_live = UidS.mem ptr_uid (lb uid) in
let ptr_alias = UidM.find_opt ptr_uid (ab uid) in (* <= issue: ab ptr_uid returns "Not_Found"*)
let ptr_alias = match ptr_alias with | Some alias -> (alias == MayAlias) | None -> false in
ptr_live || ptr_alias

| _ -> if UidS.mem uid (lb uid) then true else false

in let result = List.filter (fun (uid, insn) -> is_not_dead uid insn) b.insns in
let new_block : Ll.block = {insns = result; term = b.term} in new_block


let run (lg:Liveness.Graph.t) (ag:Alias.Graph.t) (cfg:Cfg.t) : Cfg.t =

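The inline note above about `Not_found` is the usual reason to reach for `find_opt`: the exception-raising lookup must be guarded, while the option-returning one can be matched directly. A tiny standalone reminder (using `Map.Make (String)` as a stand-in for `UidM`):

```ocaml
module M = Map.Make (String)

let facts = M.(empty |> add "%p" "MayAlias")

(* M.find raises Not_found for a missing key and has to be wrapped ... *)
let lookup_exn k = try Some (M.find k facts) with Not_found -> None

(* ... while M.find_opt already returns an option. *)
let lookup_opt k = M.find_opt k facts

let () =
  assert (lookup_exn "%q" = None);
  assert (lookup_opt "%q" = None);
  assert (lookup_opt "%p" = Some "MayAlias")
```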
@@ -1,4 +1,6 @@
open Printf
module Platform = Util.Platform
module Range = Util.Range
open Platform

(* configuration flags ------------------------------------------------------ *)
@@ -154,7 +156,7 @@ let process_files files =
if (List.length files) > 0 then begin
List.iter process_file files;
( if !assemble && !link then
Platform.link (List.rev !link_files@["runtime.c"]) !executable_filename );
Platform.link (List.rev !link_files) !executable_filename );
( if !assemble && !link && !execute_x86 then
let ret = run_executable "" !executable_filename in
print_banner @@ Printf.sprintf "Executing: %s" !executable_filename;
44  hw6/bin/dune  (Normal file)
@@ -0,0 +1,44 @@
(library
|
||||
(name oat)
|
||||
(modules driver backend frontend lexer parser ast astlib typechecker tctxt datastructures liveness cfg solver registers opt alias dce constprop escape)
|
||||
(libraries str num util x86 ll))
|
||||
|
||||
(ocamllex lexer)
|
||||
(menhir (modules parser))
|
||||
|
||||
(env
|
||||
(dev
|
||||
(flags
|
||||
(:standard -g -w "+a-4-7-9-26-27-29-30-32..42-44-45-48-50-60-66..70")
|
||||
)))
|
||||
|
||||
(executable
|
||||
(public_name main)
|
||||
(name main)
|
||||
(modules main)
|
||||
(promote (until-clean))
|
||||
(libraries
|
||||
; OCaml standard libraries
|
||||
; project libraries
|
||||
str
|
||||
num
|
||||
util
|
||||
x86
|
||||
ll
|
||||
studenttests
|
||||
gradedtests))
|
||||
|
||||
(executable
|
||||
(public_name printanalysis)
|
||||
(name printanalysis)
|
||||
(modules printanalysis)
|
||||
(promote (until-clean))
|
||||
(libraries
|
||||
oat
|
||||
; OCaml standard libraries
|
||||
; project libraries
|
||||
str
|
||||
num
|
||||
util
|
||||
x86
|
||||
ll))
|
||||
73  hw6/bin/escape.ml  (Normal file)
@@ -0,0 +1,73 @@
open Ll
|
||||
open Datastructures
|
||||
|
||||
(* escape analysis ---------------------------------------------------------- *)
|
||||
|
||||
(* Instantiates the generic dataflow analysis framework with the
|
||||
lattice for escape analysis.
|
||||
- the lattice elements are sets of LL uids that must have pointer type
|
||||
- the flow functions propagate escaping pointers toward their definitions
|
||||
*)
|
||||
|
||||
let is_ptr_arg (ty, o) : operand option =
|
||||
match ty with
|
||||
| Ptr t -> Some o
|
||||
| _ -> None
|
||||
|
||||
let uids_of_ops : operand list -> UidS.t =
|
||||
List.fold_left (fun s o -> match o with Id u -> UidS.add u s | _ -> s)
|
||||
UidS.empty
|
||||
|
||||
|
||||
(* escaping operands of a terminator --------------------------------------------- *)
|
||||
let term_escapes : terminator -> UidS.t = function
|
||||
| Ret (Ptr _, Some Id x) -> UidS.singleton x
|
||||
| Ret (_, _)
|
||||
| Br _
|
||||
| Cbr (_,_,_) -> UidS.empty
|
||||
|
||||
let insn_escapes (out:UidS.t) (u:uid) (i:insn) : UidS.t =
|
||||
let conditional_escape u x : UidS.t =
|
||||
if UidS.mem u out then UidS.singleton x else UidS.empty
|
||||
in
|
||||
match i with
|
||||
| Store (Ptr _, Id p, _) -> UidS.singleton p
|
||||
| Bitcast(_,Id x,_) -> conditional_escape u x
|
||||
| Gep(_,Id x,_) -> conditional_escape u x
|
||||
| Call(_,_,args) -> List.filter_map is_ptr_arg args |> uids_of_ops
|
||||
| _ -> UidS.empty
|
||||
|
||||
|
||||
(* (In our representation, there is one flow function for instructions
|
||||
and another for terminators. *)
|
||||
let insn_flow (u,i:uid * insn) (out:UidS.t) : UidS.t =
|
||||
out |> UidS.union (insn_escapes out u i)
|
||||
|
||||
let terminator_flow (t:terminator) (out:UidS.t) : UidS.t =
|
||||
out |> UidS.union (term_escapes t)
|
||||
|
||||
module Fact =
|
||||
struct
|
||||
let forwards = false
|
||||
let insn_flow = insn_flow
|
||||
let terminator_flow = terminator_flow
|
||||
|
||||
(* the lattice ---------------------------------------------------------- *)
|
||||
type t = UidS.t
|
||||
let combine ds = List.fold_left UidS.union UidS.empty ds
|
||||
let equal = UidS.equal
|
||||
|
||||
let compare = UidS.compare
|
||||
let to_string = UidS.to_string
|
||||
end
|
||||
|
||||
(* instantiate the general framework ---------------------------------------- *)
|
||||
module Graph = Cfg.AsGraph (Fact)
|
||||
module Solver = Solver.Make (Fact) (Graph)
|
||||
|
||||
(* expose a top-level analysis operation ------------------------------------ *)
|
||||
let analyze (cfg:Cfg.cfg) : Graph.t =
|
||||
let init l = UidS.empty in
|
||||
let live_out = UidS.empty in
|
||||
let g = Graph.of_cfg init live_out cfg in
|
||||
Solver.solve g
|
||||
|
|
@ -191,14 +191,6 @@ let oat_alloc_array ct (t:Ast.ty) (size:Ll.operand) : Ll.ty * operand * stream =
|
|||
; ans_id, Bitcast(arr_ty, Id arr_id, ans_ty) ]
|
||||
|
||||
|
||||
(* Allocates an oat structure on the
|
||||
heap and returns a target operand with the appropriate reference.
|
||||
|
||||
- generate a call to 'oat_malloc' and use bitcast to convert the
|
||||
resulting pointer to the right type
|
||||
|
||||
- make sure to calculate the correct amount of space to allocate!
|
||||
*)
|
||||
let oat_alloc_struct ct (id:Ast.id) : Ll.ty * operand * stream =
|
||||
let ret_id, arr_id = gensym "struct", gensym "raw_struct" in
|
||||
let ans_ty = cmp_ty ct (TRef (RStruct id)) in
|
||||
|
|
@ -207,7 +199,6 @@ let oat_alloc_struct ct (id:Ast.id) : Ll.ty * operand * stream =
|
|||
[ arr_id, Call(arr_ty, Gid "oat_malloc", [I64, Const (size_oat_struct (TypeCtxt.lookup id ct))])
|
||||
; ret_id, Bitcast(arr_ty, Id arr_id, ans_ty) ]
|
||||
|
||||
|
||||
let str_arr_ty s = Array(1 + String.length s, I8)
|
||||
let i1_op_of_bool b = Ll.Const (if b then 1L else 0L)
|
||||
let i64_op_of_int i = Ll.Const (Int64.of_int i)
|
||||
|
|
@ -291,7 +282,6 @@ let rec cmp_exp (tc : TypeCtxt.t) (c:Ctxt.t) (exp:Ast.exp node) : Ll.ty * Ll.ope
|
|||
| _ -> failwith "broken invariant: identifier not a pointer"
|
||||
end
|
||||
|
||||
(* compiles the length(e) expression. *)
|
||||
| Ast.Length e ->
|
||||
let arr_ty, arr_op, arr_code = cmp_exp tc c e in
|
||||
let _ = match arr_ty with
|
||||
|
|
@ -354,11 +344,6 @@ let rec cmp_exp (tc : TypeCtxt.t) (c:Ctxt.t) (exp:Ast.exp node) : Ll.ty * Ll.ope
|
|||
[I (gensym "store", Store (arr_ty, arr_op, Id ptr_id))] >@
|
||||
assign_code
|
||||
|
||||
(* For each field component of the struct
|
||||
- use the TypeCtxt operations to compute getelementptr indices
|
||||
- compile the initializer expression
|
||||
- store the resulting value into the structure
|
||||
*)
|
||||
| Ast.CStruct (id, l) ->
|
||||
let struct_ty, struct_op, alloc_code = oat_alloc_struct tc id in
|
||||
let add_elt s (fid, fexp) =
|
||||
|
|
@ -471,16 +456,6 @@ and cmp_stmt (tc : TypeCtxt.t) (c:Ctxt.t) (rt:Ll.ty) (stmt:Ast.stmt node) : Ctxt
|
|||
>:: L le >@ else_code >:: T(Br lm)
|
||||
>:: L lm
|
||||
|
||||
(* the 'if?' checked null downcast statement.
|
||||
- check whether the value computed by exp is null, if so jump to
|
||||
the 'null' block, otherwise take the 'notnull' block
|
||||
|
||||
- the identifier id is in scope in the 'notnull' block and so
|
||||
needs to be allocated (and added to the context)
|
||||
|
||||
- as in the if-then-else construct, you should jump to the common
|
||||
merge label after either block
|
||||
*)
|
||||
| Ast.Cast (typ, id, exp, notnull, null) ->
|
||||
let translated_typ = cmp_ty tc (TRef typ) in
|
||||
let guard_op, guard_code = cmp_exp_as tc c exp translated_typ in
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
{
|
||||
open Lexing
|
||||
open Parser
|
||||
module Range = Util.Range
|
||||
open Range
|
||||
|
||||
exception Lexer_error of Range.t * string
|
||||
|
|
@ -95,4 +95,4 @@ let get_liveness (f : Ll.fdecl) : liveness =
|
|||
(Cfg.nodes cfg)
|
||||
(fun u -> ((* print_endline u; *) raise Not_found)) in
|
||||
{ live_in = make_fn Graph.block_in Graph.uid_in;
|
||||
live_out = make_fn Graph.block_out Graph.uid_out}
|
||||
live_out = make_fn Graph.block_out Graph.uid_out}
|
||||
|
|
@@ -1,10 +1,13 @@
open Ll
open Arg
open Assert
open Driver
open Util.Assert
open Oat.Driver
module Platform = Util.Platform
module Opt = Oat.Opt
module Backend = Oat.Backend

exception Ran_tests
let suite = ref (Gradedtests.graded_tests)
let suite = ref (Studenttests.provided_tests @ Gradedtests.graded_tests)

let execute_tests () =
Platform.configure_os ();
@@ -21,14 +24,15 @@ let args =
; ("-c", Clear link, "stop after generating .o files; do not generate executables")
; ("--print-ll", Set print_ll_flag, "prints the program's LL code (after lowering to clang code if --clang-malloc is set)")
; ("--print-x86", Set print_x86_flag, "prints the program's assembly code")
; ("--clang", Set clang, "compiles to assembly using clang, not the Compiler Design backend (implies --clang-malloc)")
; ("--clang", Set clang, "compiles to assembly using clang, not the 153 backend (implies --clang-malloc)")
; ("--execute-x86", Set execute_x86, "run the resulting executable file")
; ("-v", Set Platform.verbose, "enables more verbose compilation output")
; ("-O1", Set Opt.do_opt, "enable optimization")
; ("-O1", Unit (fun _ -> Opt.opt_level := 1), "enable optimization")
; ("-O2", Unit (fun _ -> Opt.opt_level := 2), "enable additional optimization")
; ("--regalloc", Symbol (["none"; "greedy"; "better"], Backend.set_regalloc), " use the specified register allocator")
; ("--liveness", Symbol (["trivial"; "dataflow"], Backend.set_liveness), " use the specified liveness analysis")
; ("--print-regs", Set print_regs_flag, "prints the register usage statistics for x86 code")
]
]

let files = ref []

@@ -39,7 +43,7 @@ let _ =
Platform.create_output_dir ();
try
Arg.parse args (fun filename -> files := filename :: !files)
"Compiler Design main test harness\n\
"CS153 main test harness\n\
USAGE: ./main.native [options] <files>\n\
see README for details about using the compiler";
Platform.configure_os ();
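The option table above uses the standard `Arg` spec constructors: `Set` flips a bool ref, `Unit` runs a callback, and `Symbol` restricts the argument to a fixed set of strings. A minimal standalone sketch of the same constructors (toy flags, not the compiler's real option table):

```ocaml
(* Minimal Arg-parsing sketch mirroring the spec constructors used above. *)
let verbose   = ref false
let opt_level = ref 0
let regalloc  = ref "none"

let speclist =
  [ "-v",  Arg.Set verbose, " enable verbose output"
  ; "-O1", Arg.Unit (fun () -> opt_level := 1), " enable optimization"
  ; "-O2", Arg.Unit (fun () -> opt_level := 2), " enable additional optimization"
  ; "--regalloc",
    Arg.Symbol (["none"; "greedy"; "better"], fun s -> regalloc := s),
    " use the specified register allocator"
  ]

let () =
  let files = ref [] in
  Arg.parse speclist (fun f -> files := f :: !files) "usage: demo [options] <files>";
  Printf.printf "verbose=%b opt_level=%d regalloc=%s files=%d\n"
    !verbose !opt_level !regalloc (List.length !files)
```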
@@ -1,5 +1,16 @@
(** Optimizer *)
open Ll
module Platform = Util.Platform

(*
This file drives the optimization for the compilation. For the leaderboard,
you may optionally implement additional optimizations by editing this file.

NOTE: your additional optimizations should run only if !opt_level = 2.

That is, your additional optimizations should be enabled only when the
flag -O2 is passed to main.native.
*)

(* dead code elimination ---------------------------------------------------- *)
let dce (g:Cfg.t) : Cfg.t =
@@ -25,12 +36,15 @@ let opt_fdecl (gid,fdecl:Ll.gid * Ll.fdecl) : Ll.gid * Ll.fdecl =
let g = pass 2 (Cfg.of_ast fdecl) in
gid, Cfg.to_ast g

(* flag for the main compiler driver *)
let do_opt = ref false
(* optimization level, set by the main compiler driver *)
let opt_level = ref 0

(* optimize each fdecl in the program *)
let optimize (p:Ll.prog) : Ll.prog =
if !do_opt
if !opt_level = 2 then
(* OPTIONAL TASK: implement additional optimizations *)
failwith "No -O2 optimizations implemented! This is an optional task."
else if !opt_level = 1
then begin
Platform.verb @@ Printf.sprintf "..optimizing";
{ p with Ll.fdecls = List.map opt_fdecl p.Ll.fdecls }
@ -1,3 +1,4 @@
|
|||
open Oat
|
||||
open Ll
|
||||
open Datastructures
|
||||
|
||||
|
|
@ -5,15 +6,19 @@ let do_live = ref false
|
|||
let do_cp = ref false
|
||||
let do_alias = ref false
|
||||
|
||||
let print_live args (cfg:Cfg.t) : string =
|
||||
Liveness.Graph.to_string @@ Liveness.analyze cfg
|
||||
let do_escape = ref false
|
||||
|
||||
let print_cp args (cfg:Cfg.t) : string =
|
||||
Constprop.Graph.to_string @@ Constprop.analyze cfg
|
||||
let print_live tdecls (cfg:Cfg.t) : string =
|
||||
Liveness.Graph.to_string tdecls @@ Liveness.analyze cfg
|
||||
|
||||
let print_alias args (cfg:Cfg.t) : string =
|
||||
Alias.Graph.to_string @@ Alias.analyze cfg
|
||||
let print_cp tdecls (cfg:Cfg.t) : string =
|
||||
Constprop.Graph.to_string tdecls @@ Constprop.analyze cfg
|
||||
|
||||
let print_alias tdecls (cfg:Cfg.t) : string =
|
||||
Alias.Graph.to_string tdecls @@ Alias.analyze cfg
|
||||
|
||||
let print_escape tdecls (cfg:Cfg.t) : string =
|
||||
Escape.Graph.to_string tdecls @@ Escape.analyze cfg
|
||||
|
||||
let files = ref []
|
||||
|
||||
|
|
@ -21,6 +26,7 @@ let args = let open Arg in
|
|||
[ "-live", Set do_live, "print liveness"
|
||||
; "-cp", Set do_cp, "print constant prop"
|
||||
; "-alias", Set do_alias, "print alias"
|
||||
; "-escape", Set do_escape, "print escape"
|
||||
]
|
||||
|
||||
|
||||
|
|
@ -34,7 +40,7 @@ let do_file fname print_fn =
|
|||
Printf.printf "define %s @%s(%s) {\n%s\n}\n"
|
||||
(Llutil.sot t) g
|
||||
(String.concat ", " @@ List.map string_of_arg List.(combine ts f.f_param))
|
||||
(print_fn (List.combine ts f.f_param) (Cfg.of_ast f))
|
||||
(print_fn ll_prog.tdecls (Cfg.of_ast f))
|
||||
|
||||
let opt_file opt fname =
|
||||
let opt_fdecl (gid,fdecl) =
|
||||
|
|
@ -56,7 +62,7 @@ let () =
|
|||
(if !do_live then List.iter (fun f -> do_file f print_live) !files);
|
||||
(if !do_cp then List.iter (fun f -> do_file f print_cp) !files);
|
||||
(if !do_alias then List.iter (fun f -> do_file f print_alias) !files);
|
||||
|
||||
(if !do_escape then List.iter (fun f -> do_file f print_escape) !files);
|
||||
end
|
||||
|
||||
|
||||
|
|
@@ -12,7 +12,7 @@ int64_t* oat_malloc(int64_t size) {

int64_t* oat_alloc_array (int64_t size) {
assert (size >= 0);
int64_t *arr = (int64_t*)malloc(sizeof(int64_t) * (size+1));
int64_t *arr = (int64_t*)calloc(size+1, sizeof(int64_t));
arr[0] = size;
return arr;
}
@@ -24,10 +24,11 @@ open Datastructures
module type DFA_GRAPH =
sig
module NodeS : SetS

(* type of nodes in this graph *)
type node = NodeS.elt

(* dataflow facts associated with the out-edges of the nodes in
this graph *)
(* dataflow facts associated with the out-edges of the nodes in the graph *)
type fact

(* the abstract type of dataflow graphs *)
@@ -47,13 +48,15 @@ module type DFA_GRAPH =
val add_fact : node -> fact -> t -> t

(* printing *)
(*
val to_string : t -> string
val printer : Format.formatter -> t -> unit
*)
end

(* abstract dataflow lattice signature -------------------------------------- *)
(* The general algorithm works over a generic lattice of abstract "facts".
- facts can be combined (this is the 'join' operation)
- facts can be combined (this is the 'meet' operation)
- facts can be compared *)
module type FACT =
sig
@@ -85,9 +88,35 @@ module type FACT =
TASK: complete the [solve] function, which implements the above algorithm.
*)
module Make (Fact : FACT) (Graph : DFA_GRAPH with type fact := Fact.t) =
(* I used ChatGPT here to help me understand functors and find some helper functions, like "choose, remove, union, and add" *)
struct

let solve (g:Graph.t) : Graph.t =
failwith "TODO HW6: Solver.solve unimplemented"
let worklist = Graph.nodes g in

let rec solve_helper g worklist =
if Graph.NodeS.is_empty worklist then g else

(* choose a node from the worklist *)
let current_node = Graph.NodeS.choose worklist in

(* find the node's predecessors and combine their flow facts *)
let preds : Graph.NodeS.t = Graph.preds g current_node in
let pred_fact = Graph.NodeS.fold
(fun pred acc -> Fact.combine [Graph.out g pred; acc])
preds (Fact.combine []) in

(* apply the flow function to the combined input to find the new output *)
let out_fact = Graph.flow g current_node pred_fact in

(* if the output has changed, update the graph and add the node's successors to the worklist *)
let is_zero = Fact.compare out_fact (Graph.out g current_node) in
let new_worklist = Graph.NodeS.remove current_node worklist in
if is_zero != 0 then let succs = Graph.succs g current_node in
solve_helper (Graph.add_fact current_node out_fact g) (Graph.NodeS.union succs new_worklist)
else (* it has not changed *)
solve_helper g new_worklist
in

let new_g = solve_helper g worklist in new_g
end

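The `Make (Fact) (Graph)` construct above is an OCaml functor: a module parameterized by the `FACT` and `DFA_GRAPH` modules it is applied to. For readers new to the construct, a minimal self-contained sketch of the same shape (made-up `VAL`/`Doubler` names, unrelated to the course modules):

```ocaml
(* A tiny functor example: module-level parameterization, analogous in
   shape to Solver.Make (Fact) (Graph). *)
module type VAL = sig
  type t
  val zero : t
  val add : t -> t -> t
end

module Doubler (V : VAL) = struct
  (* uses only what the VAL signature promises *)
  let double (x : V.t) : V.t = V.add x x
end

module IntVal = struct
  type t = int
  let zero = 0
  let add = ( + )
end

module IntDoubler = Doubler (IntVal)

let () = assert (IntDoubler.double 21 = 42)
```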
234  hw6/bin/typechecker.ml  (Normal file)
@@ -0,0 +1,234 @@
open Ast
|
||||
open Astlib
|
||||
open Tctxt
|
||||
|
||||
(* This file is from HW5. You are welcome to replace it
|
||||
with your own solution from HW5 or ask the course staff for
|
||||
our version of the file. You do not need to submit this file.
|
||||
|
||||
*)
|
||||
|
||||
(* Error Reporting ---------------------------------------------------------- *)
|
||||
(* NOTE: Use type_error to report error messages for ill-typed programs. *)
|
||||
|
||||
exception TypeError of string
|
||||
|
||||
let type_error (l : 'a node) (err : string) =
|
||||
let (_, (s, e), _) = l.loc in
|
||||
raise (TypeError (Printf.sprintf "[%d, %d] %s" s e err))
|
||||
|
||||
let unimpl = "; replace typechecker.ml with your own solution from HW5 or contact course staff for a reference solution"
|
||||
|
||||
(* initial context: G0 ------------------------------------------------------ *)
|
||||
(* The Oat types of the Oat built-in functions *)
|
||||
let builtins =
|
||||
[ "array_of_string", ([TRef RString], RetVal (TRef(RArray TInt)))
|
||||
; "string_of_array", ([TRef(RArray TInt)], RetVal (TRef RString))
|
||||
; "length_of_string", ([TRef RString], RetVal TInt)
|
||||
; "string_of_int", ([TInt], RetVal (TRef RString))
|
||||
; "string_cat", ([TRef RString; TRef RString], RetVal (TRef RString))
|
||||
; "print_string", ([TRef RString], RetVoid)
|
||||
; "print_int", ([TInt], RetVoid)
|
||||
; "print_bool", ([TBool], RetVoid)
|
||||
]
|
||||
|
||||
(* binary operation types --------------------------------------------------- *)
|
||||
let typ_of_binop : Ast.binop -> Ast.ty * Ast.ty * Ast.ty = function
|
||||
| Add | Mul | Sub | Shl | Shr | Sar | IAnd | IOr -> (TInt, TInt, TInt)
|
||||
| Lt | Lte | Gt | Gte -> (TInt, TInt, TBool)
|
||||
| And | Or -> (TBool, TBool, TBool)
|
||||
| Eq | Neq -> failwith "typ_of_binop called on polymorphic == or !="
|
||||
|
||||
(* unary operation types ---------------------------------------------------- *)
|
||||
let typ_of_unop : Ast.unop -> Ast.ty * Ast.ty = function
|
||||
| Neg | Bitnot -> (TInt, TInt)
|
||||
| Lognot -> (TBool, TBool)
|
||||
|
||||
(* subtyping ---------------------------------------------------------------- *)
|
||||
(* Decides whether H |- t1 <: t2
|
||||
- assumes that H contains the declarations of all the possible struct types
|
||||
|
||||
- you will want to introduce additional (possibly mutually recursive)
|
||||
helper functions to implement the different judgments of the subtyping
|
||||
relation. We have included a template for subtype_ref to get you started.
|
||||
(Don't forget about OCaml's 'and' keyword.)
|
||||
*)
|
||||
let rec subtype (c : Tctxt.t) (t1 : Ast.ty) (t2 : Ast.ty) : bool =
|
||||
failwith ("todo: subtype"^unimpl)
|
||||
|
||||
(* Decides whether H |-r ref1 <: ref2 *)
|
||||
and subtype_ref (c : Tctxt.t) (t1 : Ast.rty) (t2 : Ast.rty) : bool =
|
||||
failwith ("todo: subtype_ref"^unimpl)
|
||||
|
||||
|
||||
(* well-formed types -------------------------------------------------------- *)
|
||||
(* Implement a (set of) functions that check that types are well formed according
|
||||
to the H |- t and related inference rules
|
||||
|
||||
- the function should succeed by returning () if the type is well-formed
|
||||
according to the rules
|
||||
|
||||
- the function should fail using the "type_error" helper function if the
|
||||
type is not well formed
|
||||
|
||||
- l is just an ast node that provides source location information for
|
||||
generating error messages (it's only needed for the type_error generation)
|
||||
|
||||
- tc contains the structure definition context
|
||||
*)
|
||||
let rec typecheck_ty (l : 'a Ast.node) (tc : Tctxt.t) (t : Ast.ty) : unit =
|
||||
failwith ("todo: implement typecheck_ty"^unimpl)
|
||||
|
||||
|
||||
(* A helper function to determine whether a type allows the null value *)
|
||||
let is_nullable_ty (t : Ast.ty) : bool =
|
||||
match t with
|
||||
| TNullRef _ -> true
|
||||
| _ -> false
|
||||
|
||||
(* typechecking expressions ------------------------------------------------- *)
|
||||
(* Typechecks an expression in the typing context c, returns the type of the
|
||||
expression. This function should implement the inference rules given in the
|
||||
oat.pdf specification. There, they are written:
|
||||
|
||||
H; G; L |- exp : t
|
||||
|
||||
See tctxt.ml for the implementation of the context c, which represents the
|
||||
four typing contexts: H - for structure definitions G - for global
|
||||
identifiers L - for local identifiers
|
||||
|
||||
Returns the (most precise) type for the expression, if it is type correct
|
||||
according to the inference rules.
|
||||
|
||||
Uses the type_error function to indicate a (useful!) error message if the
|
||||
expression is not type correct. The exact wording of the error message is
|
||||
not important, but the fact that the error is raised, is important. (Our
|
||||
tests also do not check the location information associated with the error.)
|
||||
|
||||
Notes: - Structure values permit the programmer to write the fields in any
|
||||
order (compared with the structure definition). This means that, given the
|
||||
declaration struct T { a:int; b:int; c:int } The expression new T {b=3; c=4;
|
||||
a=1} is well typed. (You should sort the fields to compare them.)
|
||||
|
||||
*)
|
||||
let rec typecheck_exp (c : Tctxt.t) (e : Ast.exp node) : Ast.ty =
|
||||
failwith ("todo: implement typecheck_exp"^unimpl)
|
||||
|
||||
(* statements --------------------------------------------------------------- *)
|
||||
|
||||
(* Typecheck a statement
|
||||
This function should implement the statement typechecking rules from oat.pdf.
|
||||
|
||||
Inputs:
|
||||
- tc: the type context
|
||||
- s: the statement node
|
||||
- to_ret: the desired return type (from the function declaration)
|
||||
|
||||
Returns:
|
||||
- the new type context (which includes newly declared variables in scope
|
||||
after this statement)
|
||||
|
||||
- A boolean indicating the return behavior of a statement:
|
||||
false: might not return
|
||||
true: definitely returns
|
||||
|
||||
in the branching statements, the return behavior of the branching
|
||||
statement is the conjunction of the return behavior of the two
|
||||
branches: both branches must definitely return in order for
|
||||
the whole statement to definitely return.
|
||||
|
||||
Intuitively: if one of the two branches of a conditional does not
|
||||
contain a return statement, then the entire conditional statement might
|
||||
not return.
|
||||
|
||||
looping constructs never definitely return
|
||||
|
||||
Uses the type_error function to indicate a (useful!) error message if the
|
||||
statement is not type correct. The exact wording of the error message is
|
||||
not important, but the fact that the error is raised, is important. (Our
|
||||
tests also do not check the location information associated with the error.)
|
||||
|
||||
- You will probably find it convenient to add a helper function that implements the
|
||||
block typecheck rules.
|
||||
*)
|
||||
let rec typecheck_stmt (tc : Tctxt.t) (s:Ast.stmt node) (to_ret:ret_ty) : Tctxt.t * bool =
|
||||
failwith ("todo: implement typecheck_stmt"^unimpl)
|
||||
|
||||
|
||||
(* struct type declarations ------------------------------------------------- *)
|
||||
(* Here is an example of how to implement the TYP_TDECLOK rule, which is
|
||||
needed elsewhere in the type system.
|
||||
*)
|
||||
|
||||
(* Helper function to look for duplicate field names *)
|
||||
let rec check_dups (fs : field list) =
|
||||
match fs with
|
||||
| [] -> false
|
||||
| h :: t -> (List.exists (fun x -> x.fieldName = h.fieldName) t) || check_dups t
|
||||
|
||||
let typecheck_tdecl (tc : Tctxt.t) (id : id) (fs : field list) (l : 'a Ast.node) : unit =
|
||||
if check_dups fs
|
||||
then type_error l ("Repeated fields in " ^ id)
|
||||
else List.iter (fun f -> typecheck_ty l tc f.ftyp) fs
|
||||
|
||||
(* function declarations ---------------------------------------------------- *)
|
||||
(* typecheck a function declaration
|
||||
- ensures formal parameters are distinct
|
||||
- extends the local context with the types of the formal parameters to the
|
||||
function
|
||||
- typechecks the body of the function (passing in the expected return type
|
||||
- checks that the function actually returns
|
||||
*)
|
||||
let typecheck_fdecl (tc : Tctxt.t) (f : Ast.fdecl) (l : 'a Ast.node) : unit =
|
||||
failwith ("todo: typecheck_fdecl"^unimpl)
|
||||
|
||||
(* creating the typechecking context ----------------------------------------- *)
|
||||
|
||||
(* The following functions correspond to the
|
||||
judgments that create the global typechecking context.
|
||||
|
||||
create_struct_ctxt: - adds all the struct types to the struct 'S'
|
||||
context (checking to see that there are no duplicate fields)
|
||||
|
||||
H |-s prog ==> H'
|
||||
|
||||
|
||||
create_function_ctxt: - adds the function identifiers and their
|
||||
types to the 'G' context (ensuring that there are no redeclared
|
||||
function identifiers)
|
||||
|
||||
H ; G1 |-f prog ==> G2
|
||||
|
||||
|
||||
create_global_ctxt: - typechecks the global initializers and adds
|
||||
their identifiers to the 'G' global context
|
||||
|
||||
H ; G1 |-g prog ==> G2
|
||||
|
||||
|
||||
NOTE: global initializers may mention function identifiers as
|
||||
constants, but can mention only other global values that were declared earlier
|
||||
*)
|
||||
|
||||
let create_struct_ctxt (p:Ast.prog) : Tctxt.t =
|
||||
failwith ("todo: create_struct_ctxt"^unimpl)
|
||||
|
||||
let create_function_ctxt (tc:Tctxt.t) (p:Ast.prog) : Tctxt.t =
|
||||
failwith ("todo: create_function_ctxt"^unimpl)
|
||||
|
||||
let create_global_ctxt (tc:Tctxt.t) (p:Ast.prog) : Tctxt.t =
|
||||
failwith ("todo: create_global_ctxt"^unimpl)
|
||||
|
||||
|
||||
(* This function implements the |- prog and the H ; G |- prog
|
||||
rules of the oat.pdf specification.
|
||||
*)
|
||||
let typecheck_program (p:Ast.prog) : unit =
|
||||
let sc = create_struct_ctxt p in
|
||||
let fc = create_function_ctxt sc p in
|
||||
let tc = create_global_ctxt fc p in
|
||||
List.iter (fun p ->
|
||||
match p with
|
||||
| Gfdecl ({elt=f} as l) -> typecheck_fdecl tc f l
|
||||
| Gtdecl ({elt=(id, fs)} as l) -> typecheck_tdecl tc id fs l
|
||||
| _ -> ()) p
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
#include <inttypes.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <stdio.h>
|
||||
|
||||
void ll_puts(int8_t *s) {
|
||||
puts((char *)s);
|
||||
}
|
||||
|
||||
int8_t* ll_strcat(int8_t* s1, int8_t* s2) {
|
||||
int l1 = strlen((char*)s1);
|
||||
int l2 = strlen((char*)s2);
|
||||
char* buf = (char*)calloc(l1 + l2 + 1, sizeof(char));
|
||||
strncpy(buf, (char*)s1, l1);
|
||||
strncpy(buf + l1, (char*)s2, l2+1);
|
||||
return (int8_t*) buf;
|
||||
}
|
||||
|
||||
int64_t ll_callback(int64_t (*fun)(int64_t, int64_t)) {
|
||||
int64_t x = 19L;
|
||||
return fun(x, x);
|
||||
}
|
||||
|
||||
int8_t* ll_ltoa(int64_t i) {
|
||||
char* buf = (char*)calloc(20, sizeof(char));
|
||||
snprintf((char *)buf, 20, "%ld", (long)i);
|
||||
return (int8_t *)buf;
|
||||
}
|
||||
|
||||
void *ll_malloc(int64_t n, int64_t size) {
|
||||
return calloc(n, size);
|
||||
}
|
||||
102  hw6/constprop.ml
@@ -1,102 +0,0 @@
open Ll
|
||||
open Datastructures
|
||||
|
||||
(* The lattice of symbolic constants ---------------------------------------- *)
|
||||
module SymConst =
|
||||
struct
|
||||
type t = NonConst (* Uid may take on multiple values at runtime *)
|
||||
| Const of int64 (* Uid will always evaluate to const i64 or i1 *)
|
||||
| UndefConst (* Uid is not defined at the point *)
|
||||
|
||||
let compare s t =
|
||||
match (s, t) with
|
||||
| (Const i, Const j) -> Int64.compare i j
|
||||
| (NonConst, NonConst) | (UndefConst, UndefConst) -> 0
|
||||
| (NonConst, _) | (_, UndefConst) -> 1
|
||||
| (UndefConst, _) | (_, NonConst) -> -1
|
||||
|
||||
let to_string : t -> string = function
|
||||
| NonConst -> "NonConst"
|
||||
| Const i -> Printf.sprintf "Const (%LdL)" i
|
||||
| UndefConst -> "UndefConst"
|
||||
|
||||
|
||||
end
|
||||
|
||||
(* The analysis computes, at each program point, which UIDs in scope will evaluate
|
||||
to integer constants *)
|
||||
type fact = SymConst.t UidM.t
|
||||
|
||||
|
||||
|
||||
(* flow function across Ll instructions ------------------------------------- *)
(* - Uid of a binop or icmp with const arguments is constant-out
   - Uid of a binop or icmp with an UndefConst argument is UndefConst-out
   - Uid of a binop or icmp with a NonConst argument is NonConst-out
   - Uids of stores and void calls are UndefConst-out
   - Uids of all other instructions are NonConst-out
   (one possible shape is sketched below)
*)
let insn_flow (u,i:uid * insn) (d:fact) : fact =
  failwith "Constprop.insn_flow unimplemented"

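A minimal sketch of how insn_flow could realize the rules above. It assumes the usual Ll constructors (Binop, Icmp, Store, Call with a Void return type, operands Const / Id / ...) and delegates the evaluation of binops and comparisons to helpers eval_binop / eval_icmp, which are not shown and whose names are made up here:

(* Lattice value of an operand under the incoming fact d (sketch). *)
let operand_value (d:fact) : Ll.operand -> SymConst.t = function
  | Const i -> SymConst.Const i
  | Id u    -> (try UidM.find u d with Not_found -> SymConst.UndefConst)
  | _       -> SymConst.NonConst

let insn_flow_sketch ((u,i):uid * insn) (d:fact) : fact =
  let out = match i with
    | Binop (b, _, o1, o2) ->
      (match operand_value d o1, operand_value d o2 with
       | SymConst.Const x, SymConst.Const y -> SymConst.Const (eval_binop b x y)
       | SymConst.UndefConst, _ | _, SymConst.UndefConst -> SymConst.UndefConst
       | _ -> SymConst.NonConst)
    | Icmp (c, _, o1, o2) ->
      (match operand_value d o1, operand_value d o2 with
       | SymConst.Const x, SymConst.Const y -> SymConst.Const (eval_icmp c x y)
       | SymConst.UndefConst, _ | _, SymConst.UndefConst -> SymConst.UndefConst
       | _ -> SymConst.NonConst)
    | Store _ | Call (Void, _, _) -> SymConst.UndefConst
    | _ -> SymConst.NonConst
  in
  UidM.add u out d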
(* The flow function across terminators is trivial: they never change const info *)
let terminator_flow (t:terminator) (d:fact) : fact = d

(* module for instantiating the generic framework --------------------------- *)
module Fact =
  struct
    type t = fact
    let forwards = true

    let insn_flow = insn_flow
    let terminator_flow = terminator_flow

    let normalize : fact -> fact =
      UidM.filter (fun _ v -> v != SymConst.UndefConst)

    let compare (d:fact) (e:fact) : int =
      UidM.compare SymConst.compare (normalize d) (normalize e)

    let to_string : fact -> string =
      UidM.to_string (fun _ v -> SymConst.to_string v)

    (* The constprop analysis should take the meet over predecessors to compute the
       flow into a node.  You may find the UidM.merge function useful; one possible
       shape is sketched after this module. *)
    let combine (ds:fact list) : fact =
      failwith "Constprop.Fact.combine unimplemented"
  end

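A sketch of the meet, written outside the module purely for illustration. It treats UndefConst (or an absent binding) as the identity of the meet and collapses disagreeing constants to NonConst; whether this exact treatment of UndefConst matches what the test suite expects is something to verify, not a given.

let combine_sketch (ds:fact list) : fact =
  let meet a b = match a, b with
    | SymConst.UndefConst, v | v, SymConst.UndefConst -> v
    | SymConst.Const x, SymConst.Const y when Int64.equal x y -> SymConst.Const x
    | _ -> SymConst.NonConst
  in
  List.fold_left
    (UidM.merge (fun _ a b ->
         match a, b with
         | None, v | v, None -> v
         | Some x, Some y -> Some (meet x y)))
    UidM.empty ds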
(* instantiate the general framework ---------------------------------------- *)
module Graph = Cfg.AsGraph (Fact)
module Solver = Solver.Make (Fact) (Graph)

(* expose a top-level analysis operation ------------------------------------ *)
let analyze (g:Cfg.t) : Graph.t =
  (* the analysis starts with every node set to bottom (the map of every uid
     in the function to UndefConst) *)
  let init l = UidM.empty in

  (* the flow into the entry node should indicate that any parameter to the
     function is not a constant *)
  let cp_in = List.fold_right
    (fun (u,_) -> UidM.add u SymConst.NonConst)
    g.Cfg.args UidM.empty
  in
  let fg = Graph.of_cfg init cp_in g in
  Solver.solve fg


(* run constant propagation on a cfg given analysis results ----------------- *)
(* HINT: your cp_block implementation will probably rely on several helper
   functions; one candidate is sketched below. *)
let run (cg:Graph.t) (cfg:Cfg.t) : Cfg.t =
  let open SymConst in

  let cp_block (l:Ll.lbl) (cfg:Cfg.t) : Cfg.t =
    let b = Cfg.block cfg l in
    let cb = Graph.uid_out cg l in
    failwith "Constprop.cp_block unimplemented"
  in
  LblS.fold cp_block (Cfg.nodes cfg) cfg
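One helper that cp_block implementations tend to need, sketched under the assumption that Graph.uid_out cg l (bound to cb above) gives, for each uid in the block, the fact holding after that instruction:

(* Rewrite one operand using a fact: a uid known to be Const i becomes the
   literal constant; anything else is left alone. *)
let subst_operand (facts:fact) (o:Ll.operand) : Ll.operand =
  match o with
  | Id u ->
    (match (try UidM.find u facts with Not_found -> SymConst.NonConst) with
     | SymConst.Const i -> Ll.Const i
     | _ -> o)
  | _ -> o

cp_block would then map a substitution like this over the operands of every instruction and the terminator of block b, using cb at the corresponding program point, and rebuild the block in the returned cfg.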
703 hw6/doc/_static/alabaster.css vendored Normal file
@ -0,0 +1,703 @@
|
|||
@import url("basic.css");
|
||||
|
||||
/* -- page layout ----------------------------------------------------------- */
|
||||
|
||||
body {
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
font-size: 17px;
|
||||
background-color: #fff;
|
||||
color: #000;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
|
||||
div.document {
|
||||
width: 85%;
|
||||
margin: 30px auto 0 auto;
|
||||
}
|
||||
|
||||
div.documentwrapper {
|
||||
float: left;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
div.bodywrapper {
|
||||
margin: 0 0 0 220px;
|
||||
}
|
||||
|
||||
div.sphinxsidebar {
|
||||
width: 220px;
|
||||
font-size: 14px;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
hr {
|
||||
border: 1px solid #B1B4B6;
|
||||
}
|
||||
|
||||
div.body {
|
||||
background-color: #fff;
|
||||
color: #3E4349;
|
||||
padding: 0 30px 0 30px;
|
||||
}
|
||||
|
||||
div.body > .section {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
div.footer {
|
||||
width: 85%;
|
||||
margin: 20px auto 30px auto;
|
||||
font-size: 14px;
|
||||
color: #888;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
div.footer a {
|
||||
color: #888;
|
||||
}
|
||||
|
||||
p.caption {
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
}
|
||||
|
||||
|
||||
div.relations {
|
||||
display: none;
|
||||
}
|
||||
|
||||
|
||||
div.sphinxsidebar a {
|
||||
color: #444;
|
||||
text-decoration: none;
|
||||
border-bottom: 1px dotted #999;
|
||||
}
|
||||
|
||||
div.sphinxsidebar a:hover {
|
||||
border-bottom: 1px solid #999;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper {
|
||||
padding: 18px 10px;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper p.logo {
|
||||
padding: 0;
|
||||
margin: -10px 0 0 0px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper h1.logo {
|
||||
margin-top: -10px;
|
||||
text-align: center;
|
||||
margin-bottom: 5px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper h1.logo-name {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
div.sphinxsidebarwrapper p.blurb {
|
||||
margin-top: 0;
|
||||
font-style: normal;
|
||||
}
|
||||
|
||||
div.sphinxsidebar h3,
|
||||
div.sphinxsidebar h4 {
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
color: #444;
|
||||
font-size: 24px;
|
||||
font-weight: normal;
|
||||
margin: 0 0 5px 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
div.sphinxsidebar h4 {
|
||||
font-size: 20px;
|
||||
}
|
||||
|
||||
div.sphinxsidebar h3 a {
|
||||
color: #444;
|
||||
}
|
||||
|
||||
div.sphinxsidebar p.logo a,
|
||||
div.sphinxsidebar h3 a,
|
||||
div.sphinxsidebar p.logo a:hover,
|
||||
div.sphinxsidebar h3 a:hover {
|
||||
border: none;
|
||||
}
|
||||
|
||||
div.sphinxsidebar p {
|
||||
color: #555;
|
||||
margin: 10px 0;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul {
|
||||
margin: 10px 0;
|
||||
padding: 0;
|
||||
color: #000;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul li.toctree-l1 > a {
|
||||
font-size: 120%;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul li.toctree-l2 > a {
|
||||
font-size: 110%;
|
||||
}
|
||||
|
||||
div.sphinxsidebar input {
|
||||
border: 1px solid #CCC;
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
div.sphinxsidebar hr {
|
||||
border: none;
|
||||
height: 1px;
|
||||
color: #AAA;
|
||||
background: #AAA;
|
||||
|
||||
text-align: left;
|
||||
margin-left: 0;
|
||||
width: 50%;
|
||||
}
|
||||
|
||||
div.sphinxsidebar .badge {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
div.sphinxsidebar .badge:hover {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
/* To address an issue with donation coming after search */
|
||||
div.sphinxsidebar h3.donation {
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
/* -- body styles ----------------------------------------------------------- */
|
||||
|
||||
a {
|
||||
color: #004B6B;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: #6D4100;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
div.body h1,
|
||||
div.body h2,
|
||||
div.body h3,
|
||||
div.body h4,
|
||||
div.body h5,
|
||||
div.body h6 {
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
font-weight: normal;
|
||||
margin: 30px 0px 10px 0px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; }
|
||||
div.body h2 { font-size: 180%; }
|
||||
div.body h3 { font-size: 150%; }
|
||||
div.body h4 { font-size: 130%; }
|
||||
div.body h5 { font-size: 100%; }
|
||||
div.body h6 { font-size: 100%; }
|
||||
|
||||
a.headerlink {
|
||||
color: #DDD;
|
||||
padding: 0 4px;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a.headerlink:hover {
|
||||
color: #444;
|
||||
background: #EAEAEA;
|
||||
}
|
||||
|
||||
div.body p, div.body dd, div.body li {
|
||||
line-height: 1.4em;
|
||||
}
|
||||
|
||||
div.admonition {
|
||||
margin: 20px 0px;
|
||||
padding: 10px 30px;
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.admonition tt.xref, div.admonition code.xref, div.admonition a tt {
|
||||
background-color: #FBFBFB;
|
||||
border-bottom: 1px solid #fafafa;
|
||||
}
|
||||
|
||||
div.admonition p.admonition-title {
|
||||
font-family: "Lato Extended","Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
font-weight: normal;
|
||||
font-size: 24px;
|
||||
margin: 0 0 10px 0;
|
||||
padding: 0;
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
div.admonition p.last {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.highlight {
|
||||
background-color: #fff;
|
||||
}
|
||||
|
||||
dt:target, .highlight {
|
||||
background: #FAF3E8;
|
||||
}
|
||||
|
||||
div.warning {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
}
|
||||
|
||||
div.danger {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
-moz-box-shadow: 2px 2px 4px #D52C2C;
|
||||
-webkit-box-shadow: 2px 2px 4px #D52C2C;
|
||||
box-shadow: 2px 2px 4px #D52C2C;
|
||||
}
|
||||
|
||||
div.error {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
-moz-box-shadow: 2px 2px 4px #D52C2C;
|
||||
-webkit-box-shadow: 2px 2px 4px #D52C2C;
|
||||
box-shadow: 2px 2px 4px #D52C2C;
|
||||
}
|
||||
|
||||
div.caution {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
}
|
||||
|
||||
div.attention {
|
||||
background-color: #FCC;
|
||||
border: 1px solid #FAA;
|
||||
}
|
||||
|
||||
div.important {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.note {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.tip {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.hint {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.seealso {
|
||||
background-color: #EEE;
|
||||
border: 1px solid #CCC;
|
||||
}
|
||||
|
||||
div.topic {
|
||||
background-color: #EEE;
|
||||
}
|
||||
|
||||
p.admonition-title {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
p.admonition-title:after {
|
||||
content: ":";
|
||||
}
|
||||
|
||||
pre, tt, code {
|
||||
font-family: monospace,serif;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
.hll {
|
||||
background-color: #FFC;
|
||||
margin: 0 -12px;
|
||||
padding: 0 12px;
|
||||
display: block;
|
||||
}
|
||||
|
||||
img.screenshot {
|
||||
}
|
||||
|
||||
tt.descname, tt.descclassname, code.descname, code.descclassname {
|
||||
font-size: 0.95em;
|
||||
}
|
||||
|
||||
tt.descname, code.descname {
|
||||
padding-right: 0.08em;
|
||||
}
|
||||
|
||||
img.screenshot {
|
||||
-moz-box-shadow: 2px 2px 4px #EEE;
|
||||
-webkit-box-shadow: 2px 2px 4px #EEE;
|
||||
box-shadow: 2px 2px 4px #EEE;
|
||||
}
|
||||
|
||||
table.docutils {
|
||||
border: 1px solid #888;
|
||||
-moz-box-shadow: 2px 2px 4px #EEE;
|
||||
-webkit-box-shadow: 2px 2px 4px #EEE;
|
||||
box-shadow: 2px 2px 4px #EEE;
|
||||
}
|
||||
|
||||
table.docutils td, table.docutils th {
|
||||
border: 1px solid #888;
|
||||
padding: 0.25em 0.7em;
|
||||
}
|
||||
|
||||
table.field-list, table.footnote {
|
||||
border: none;
|
||||
-moz-box-shadow: none;
|
||||
-webkit-box-shadow: none;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
table.footnote {
|
||||
margin: 15px 0;
|
||||
width: 100%;
|
||||
border: 1px solid #EEE;
|
||||
background: #FDFDFD;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
table.footnote + table.footnote {
|
||||
margin-top: -15px;
|
||||
border-top: none;
|
||||
}
|
||||
|
||||
table.field-list th {
|
||||
padding: 0 0.8em 0 0;
|
||||
}
|
||||
|
||||
table.field-list td {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
table.field-list p {
|
||||
margin-bottom: 0.8em;
|
||||
}
|
||||
|
||||
/* Cloned from
|
||||
* https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68
|
||||
*/
|
||||
.field-name {
|
||||
-moz-hyphens: manual;
|
||||
-ms-hyphens: manual;
|
||||
-webkit-hyphens: manual;
|
||||
hyphens: manual;
|
||||
}
|
||||
|
||||
table.footnote td.label {
|
||||
width: .1px;
|
||||
padding: 0.3em 0 0.3em 0.5em;
|
||||
}
|
||||
|
||||
table.footnote td {
|
||||
padding: 0.3em 0.5em;
|
||||
}
|
||||
|
||||
dl {
|
||||
margin-left: 0;
|
||||
margin-right: 0;
|
||||
margin-top: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
dl dd {
|
||||
margin-left: 30px;
|
||||
}
|
||||
|
||||
blockquote {
|
||||
margin: 0 0 0 30px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
ul, ol {
|
||||
/* Matches the 30px from the narrow-screen "li > ul" selector below */
|
||||
margin: 10px 0 10px 30px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
pre {
|
||||
background: #EEE;
|
||||
padding: 7px 30px;
|
||||
margin: 15px 0px;
|
||||
line-height: 1.3em;
|
||||
}
|
||||
|
||||
div.viewcode-block:target {
|
||||
background: #ffd;
|
||||
}
|
||||
|
||||
dl pre, blockquote pre, li pre {
|
||||
margin-left: 0;
|
||||
padding-left: 30px;
|
||||
}
|
||||
|
||||
tt, code {
|
||||
background-color: #ecf0f3;
|
||||
color: #222;
|
||||
/* padding: 1px 2px; */
|
||||
}
|
||||
|
||||
tt.xref, code.xref, a tt {
|
||||
background-color: #FBFBFB;
|
||||
border-bottom: 1px solid #fff;
|
||||
}
|
||||
|
||||
a.reference {
|
||||
text-decoration: none;
|
||||
border-bottom: 1px dotted #004B6B;
|
||||
}
|
||||
|
||||
/* Don't put an underline on images */
|
||||
a.image-reference, a.image-reference:hover {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
a.reference:hover {
|
||||
border-bottom: 1px solid #6D4100;
|
||||
}
|
||||
|
||||
a.footnote-reference {
|
||||
text-decoration: none;
|
||||
font-size: 0.7em;
|
||||
vertical-align: top;
|
||||
border-bottom: 1px dotted #004B6B;
|
||||
}
|
||||
|
||||
a.footnote-reference:hover {
|
||||
border-bottom: 1px solid #6D4100;
|
||||
}
|
||||
|
||||
a:hover tt, a:hover code {
|
||||
background: #EEE;
|
||||
}
|
||||
|
||||
|
||||
@media screen and (max-width: 870px) {
|
||||
|
||||
div.sphinxsidebar {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.document {
|
||||
width: 100%;
|
||||
|
||||
}
|
||||
|
||||
div.documentwrapper {
|
||||
margin-left: 0;
|
||||
margin-top: 0;
|
||||
margin-right: 0;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.bodywrapper {
|
||||
margin-top: 0;
|
||||
margin-right: 0;
|
||||
margin-bottom: 0;
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
ul {
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
li > ul {
|
||||
/* Matches the 30px from the "ul, ol" selector above */
|
||||
margin-left: 30px;
|
||||
}
|
||||
|
||||
.document {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.footer {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.bodywrapper {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.footer {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.github {
|
||||
display: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
@media screen and (max-width: 875px) {
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 20px 30px;
|
||||
}
|
||||
|
||||
div.documentwrapper {
|
||||
float: none;
|
||||
background: #fff;
|
||||
}
|
||||
|
||||
div.sphinxsidebar {
|
||||
display: block;
|
||||
float: none;
|
||||
width: 102.5%;
|
||||
margin: 50px -30px -20px -30px;
|
||||
padding: 10px 20px;
|
||||
background: #333;
|
||||
color: #FFF;
|
||||
}
|
||||
|
||||
div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p,
|
||||
div.sphinxsidebar h3 a {
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
div.sphinxsidebar a {
|
||||
color: #AAA;
|
||||
}
|
||||
|
||||
div.sphinxsidebar p.logo {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.document {
|
||||
width: 100%;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
div.footer {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.bodywrapper {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
div.body {
|
||||
min-height: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.rtd_doc_footer {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.document {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.footer {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.footer {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.github {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* misc. */
|
||||
|
||||
.revsys-inline {
|
||||
display: none!important;
|
||||
}
|
||||
|
||||
/* Make nested-list/multi-paragraph items look better in Releases changelog
|
||||
* pages. Without this, docutils' magical list fuckery causes inconsistent
|
||||
* formatting between different release sub-lists.
|
||||
*/
|
||||
div#changelog > div.section > ul > li > p:only-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
/* Hide fugly table cell borders in ..bibliography:: directive output */
|
||||
table.docutils.citation, table.docutils.citation td, table.docutils.citation th {
|
||||
border: none;
|
||||
/* Below needed in some edge cases; if not applied, bottom shadows appear */
|
||||
-moz-box-shadow: none;
|
||||
-webkit-box-shadow: none;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
|
||||
/* relbar */
|
||||
|
||||
.related {
|
||||
line-height: 30px;
|
||||
width: 100%;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.related.top {
|
||||
border-bottom: 1px solid #EEE;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.related.bottom {
|
||||
border-top: 1px solid #EEE;
|
||||
}
|
||||
|
||||
.related ul {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.related li {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
nav#rellinks {
|
||||
float: right;
|
||||
}
|
||||
|
||||
nav#rellinks li+li:before {
|
||||
content: "|";
|
||||
}
|
||||
|
||||
nav#breadcrumbs li+li:before {
|
||||
content: "\00BB";
|
||||
}
|
||||
|
||||
/* Hide certain items when printing */
|
||||
@media print {
|
||||
div.related {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
925 hw6/doc/_static/basic.css vendored Normal file
@ -0,0 +1,925 @@
|
|||
/*
|
||||
* basic.css
|
||||
* ~~~~~~~~~
|
||||
*
|
||||
* Sphinx stylesheet -- basic theme.
|
||||
*
|
||||
* :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
|
||||
* :license: BSD, see LICENSE for details.
|
||||
*
|
||||
*/
|
||||
|
||||
/* -- main layout ----------------------------------------------------------- */
|
||||
|
||||
div.clearer {
|
||||
clear: both;
|
||||
}
|
||||
|
||||
div.section::after {
|
||||
display: block;
|
||||
content: '';
|
||||
clear: left;
|
||||
}
|
||||
|
||||
/* -- relbar ---------------------------------------------------------------- */
|
||||
|
||||
div.related {
|
||||
width: 100%;
|
||||
font-size: 90%;
|
||||
}
|
||||
|
||||
div.related h3 {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.related ul {
|
||||
margin: 0;
|
||||
padding: 0 0 0 10px;
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
div.related li {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
div.related li.right {
|
||||
float: right;
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
/* -- sidebar --------------------------------------------------------------- */
|
||||
|
||||
div.sphinxsidebarwrapper {
|
||||
padding: 10px 5px 0 10px;
|
||||
}
|
||||
|
||||
div.sphinxsidebar {
|
||||
float: left;
|
||||
width: 230px;
|
||||
margin-left: -100%;
|
||||
font-size: 90%;
|
||||
word-wrap: break-word;
|
||||
overflow-wrap : break-word;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul {
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul ul,
|
||||
div.sphinxsidebar ul.want-points {
|
||||
margin-left: 20px;
|
||||
list-style: square;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul ul {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.sphinxsidebar form {
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
div.sphinxsidebar input {
|
||||
border: 1px solid #98dbcc;
|
||||
font-family: sans-serif;
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
div.sphinxsidebar #searchbox form.search {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
div.sphinxsidebar #searchbox input[type="text"] {
|
||||
float: left;
|
||||
width: 80%;
|
||||
padding: 0.25em;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
div.sphinxsidebar #searchbox input[type="submit"] {
|
||||
float: left;
|
||||
width: 20%;
|
||||
border-left: none;
|
||||
padding: 0.25em;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
|
||||
img {
|
||||
border: 0;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
/* -- search page ----------------------------------------------------------- */
|
||||
|
||||
ul.search {
|
||||
margin: 10px 0 0 20px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
ul.search li {
|
||||
padding: 5px 0 5px 20px;
|
||||
background-image: url(file.png);
|
||||
background-repeat: no-repeat;
|
||||
background-position: 0 7px;
|
||||
}
|
||||
|
||||
ul.search li a {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
ul.search li p.context {
|
||||
color: #888;
|
||||
margin: 2px 0 0 30px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
ul.keywordmatches li.goodmatch a {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
/* -- index page ------------------------------------------------------------ */
|
||||
|
||||
table.contentstable {
|
||||
width: 90%;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
table.contentstable p.biglink {
|
||||
line-height: 150%;
|
||||
}
|
||||
|
||||
a.biglink {
|
||||
font-size: 1.3em;
|
||||
}
|
||||
|
||||
span.linkdescr {
|
||||
font-style: italic;
|
||||
padding-top: 5px;
|
||||
font-size: 90%;
|
||||
}
|
||||
|
||||
/* -- general index --------------------------------------------------------- */
|
||||
|
||||
table.indextable {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
table.indextable td {
|
||||
text-align: left;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
table.indextable ul {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
list-style-type: none;
|
||||
}
|
||||
|
||||
table.indextable > tbody > tr > td > ul {
|
||||
padding-left: 0em;
|
||||
}
|
||||
|
||||
table.indextable tr.pcap {
|
||||
height: 10px;
|
||||
}
|
||||
|
||||
table.indextable tr.cap {
|
||||
margin-top: 10px;
|
||||
background-color: #f2f2f2;
|
||||
}
|
||||
|
||||
img.toggler {
|
||||
margin-right: 3px;
|
||||
margin-top: 3px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
div.modindex-jumpbox {
|
||||
border-top: 1px solid #ddd;
|
||||
border-bottom: 1px solid #ddd;
|
||||
margin: 1em 0 1em 0;
|
||||
padding: 0.4em;
|
||||
}
|
||||
|
||||
div.genindex-jumpbox {
|
||||
border-top: 1px solid #ddd;
|
||||
border-bottom: 1px solid #ddd;
|
||||
margin: 1em 0 1em 0;
|
||||
padding: 0.4em;
|
||||
}
|
||||
|
||||
/* -- domain module index --------------------------------------------------- */
|
||||
|
||||
table.modindextable td {
|
||||
padding: 2px;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
/* -- general body styles --------------------------------------------------- */
|
||||
|
||||
div.body {
|
||||
min-width: 360px;
|
||||
max-width: auto;
|
||||
}
|
||||
|
||||
div.body p, div.body dd, div.body li, div.body blockquote {
|
||||
-moz-hyphens: auto;
|
||||
-ms-hyphens: auto;
|
||||
-webkit-hyphens: auto;
|
||||
hyphens: auto;
|
||||
}
|
||||
|
||||
a.headerlink {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
a:visited {
|
||||
color: #551A8B;
|
||||
}
|
||||
|
||||
h1:hover > a.headerlink,
|
||||
h2:hover > a.headerlink,
|
||||
h3:hover > a.headerlink,
|
||||
h4:hover > a.headerlink,
|
||||
h5:hover > a.headerlink,
|
||||
h6:hover > a.headerlink,
|
||||
dt:hover > a.headerlink,
|
||||
caption:hover > a.headerlink,
|
||||
p.caption:hover > a.headerlink,
|
||||
div.code-block-caption:hover > a.headerlink {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
div.body p.caption {
|
||||
text-align: inherit;
|
||||
}
|
||||
|
||||
div.body td {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.first {
|
||||
margin-top: 0 !important;
|
||||
}
|
||||
|
||||
p.rubric {
|
||||
margin-top: 30px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
img.align-left, figure.align-left, .figure.align-left, object.align-left {
|
||||
clear: left;
|
||||
float: left;
|
||||
margin-right: 1em;
|
||||
}
|
||||
|
||||
img.align-right, figure.align-right, .figure.align-right, object.align-right {
|
||||
clear: right;
|
||||
float: right;
|
||||
margin-left: 1em;
|
||||
}
|
||||
|
||||
img.align-center, figure.align-center, .figure.align-center, object.align-center {
|
||||
display: block;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
img.align-default, figure.align-default, .figure.align-default {
|
||||
display: block;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
.align-left {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.align-center {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.align-default {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.align-right {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
/* -- sidebars -------------------------------------------------------------- */
|
||||
|
||||
div.sidebar,
|
||||
aside.sidebar {
|
||||
margin: 0 0 0.5em 1em;
|
||||
border: 1px solid #ddb;
|
||||
padding: 7px;
|
||||
background-color: #ffe;
|
||||
width: 40%;
|
||||
float: right;
|
||||
clear: right;
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
p.sidebar-title {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
nav.contents,
|
||||
aside.topic,
|
||||
div.admonition, div.topic, blockquote {
|
||||
clear: left;
|
||||
}
|
||||
|
||||
/* -- topics ---------------------------------------------------------------- */
|
||||
|
||||
nav.contents,
|
||||
aside.topic,
|
||||
div.topic {
|
||||
border: 1px solid #ccc;
|
||||
padding: 7px;
|
||||
margin: 10px 0 10px 0;
|
||||
}
|
||||
|
||||
p.topic-title {
|
||||
font-size: 1.1em;
|
||||
font-weight: bold;
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
/* -- admonitions ----------------------------------------------------------- */
|
||||
|
||||
div.admonition {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
padding: 7px;
|
||||
}
|
||||
|
||||
div.admonition dt {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
p.admonition-title {
|
||||
margin: 0px 10px 5px 0px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
div.body p.centered {
|
||||
text-align: center;
|
||||
margin-top: 25px;
|
||||
}
|
||||
|
||||
/* -- content of sidebars/topics/admonitions -------------------------------- */
|
||||
|
||||
div.sidebar > :last-child,
|
||||
aside.sidebar > :last-child,
|
||||
nav.contents > :last-child,
|
||||
aside.topic > :last-child,
|
||||
div.topic > :last-child,
|
||||
div.admonition > :last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
div.sidebar::after,
|
||||
aside.sidebar::after,
|
||||
nav.contents::after,
|
||||
aside.topic::after,
|
||||
div.topic::after,
|
||||
div.admonition::after,
|
||||
blockquote::after {
|
||||
display: block;
|
||||
content: '';
|
||||
clear: both;
|
||||
}
|
||||
|
||||
/* -- tables ---------------------------------------------------------------- */
|
||||
|
||||
table.docutils {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
border: 0;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
table.align-center {
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
table.align-default {
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
table caption span.caption-number {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
table caption span.caption-text {
|
||||
}
|
||||
|
||||
table.docutils td, table.docutils th {
|
||||
padding: 1px 8px 1px 5px;
|
||||
border-top: 0;
|
||||
border-left: 0;
|
||||
border-right: 0;
|
||||
border-bottom: 1px solid #aaa;
|
||||
}
|
||||
|
||||
th {
|
||||
text-align: left;
|
||||
padding-right: 5px;
|
||||
}
|
||||
|
||||
table.citation {
|
||||
border-left: solid 1px gray;
|
||||
margin-left: 1px;
|
||||
}
|
||||
|
||||
table.citation td {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
th > :first-child,
|
||||
td > :first-child {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
th > :last-child,
|
||||
td > :last-child {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
/* -- figures --------------------------------------------------------------- */
|
||||
|
||||
div.figure, figure {
|
||||
margin: 0.5em;
|
||||
padding: 0.5em;
|
||||
}
|
||||
|
||||
div.figure p.caption, figcaption {
|
||||
padding: 0.3em;
|
||||
}
|
||||
|
||||
div.figure p.caption span.caption-number,
|
||||
figcaption span.caption-number {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
div.figure p.caption span.caption-text,
|
||||
figcaption span.caption-text {
|
||||
}
|
||||
|
||||
/* -- field list styles ----------------------------------------------------- */
|
||||
|
||||
table.field-list td, table.field-list th {
|
||||
border: 0 !important;
|
||||
}
|
||||
|
||||
.field-list ul {
|
||||
margin: 0;
|
||||
padding-left: 1em;
|
||||
}
|
||||
|
||||
.field-list p {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.field-name {
|
||||
-moz-hyphens: manual;
|
||||
-ms-hyphens: manual;
|
||||
-webkit-hyphens: manual;
|
||||
hyphens: manual;
|
||||
}
|
||||
|
||||
/* -- hlist styles ---------------------------------------------------------- */
|
||||
|
||||
table.hlist {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
table.hlist td {
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
/* -- object description styles --------------------------------------------- */
|
||||
|
||||
.sig {
|
||||
font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
|
||||
}
|
||||
|
||||
.sig-name, code.descname {
|
||||
background-color: transparent;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.sig-name {
|
||||
font-size: 1.1em;
|
||||
}
|
||||
|
||||
code.descname {
|
||||
font-size: 1.2em;
|
||||
}
|
||||
|
||||
.sig-prename, code.descclassname {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.optional {
|
||||
font-size: 1.3em;
|
||||
}
|
||||
|
||||
.sig-paren {
|
||||
font-size: larger;
|
||||
}
|
||||
|
||||
.sig-param.n {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* C++ specific styling */
|
||||
|
||||
.sig-inline.c-texpr,
|
||||
.sig-inline.cpp-texpr {
|
||||
font-family: unset;
|
||||
}
|
||||
|
||||
.sig.c .k, .sig.c .kt,
|
||||
.sig.cpp .k, .sig.cpp .kt {
|
||||
color: #0033B3;
|
||||
}
|
||||
|
||||
.sig.c .m,
|
||||
.sig.cpp .m {
|
||||
color: #1750EB;
|
||||
}
|
||||
|
||||
.sig.c .s, .sig.c .sc,
|
||||
.sig.cpp .s, .sig.cpp .sc {
|
||||
color: #067D17;
|
||||
}
|
||||
|
||||
|
||||
/* -- other body styles ----------------------------------------------------- */
|
||||
|
||||
ol.arabic {
|
||||
list-style: decimal;
|
||||
}
|
||||
|
||||
ol.loweralpha {
|
||||
list-style: lower-alpha;
|
||||
}
|
||||
|
||||
ol.upperalpha {
|
||||
list-style: upper-alpha;
|
||||
}
|
||||
|
||||
ol.lowerroman {
|
||||
list-style: lower-roman;
|
||||
}
|
||||
|
||||
ol.upperroman {
|
||||
list-style: upper-roman;
|
||||
}
|
||||
|
||||
:not(li) > ol > li:first-child > :first-child,
|
||||
:not(li) > ul > li:first-child > :first-child {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
:not(li) > ol > li:last-child > :last-child,
|
||||
:not(li) > ul > li:last-child > :last-child {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
ol.simple ol p,
|
||||
ol.simple ul p,
|
||||
ul.simple ol p,
|
||||
ul.simple ul p {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
ol.simple > li:not(:first-child) > p,
|
||||
ul.simple > li:not(:first-child) > p {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
ol.simple p,
|
||||
ul.simple p {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
aside.footnote > span,
|
||||
div.citation > span {
|
||||
float: left;
|
||||
}
|
||||
aside.footnote > span:last-of-type,
|
||||
div.citation > span:last-of-type {
|
||||
padding-right: 0.5em;
|
||||
}
|
||||
aside.footnote > p {
|
||||
margin-left: 2em;
|
||||
}
|
||||
div.citation > p {
|
||||
margin-left: 4em;
|
||||
}
|
||||
aside.footnote > p:last-of-type,
|
||||
div.citation > p:last-of-type {
|
||||
margin-bottom: 0em;
|
||||
}
|
||||
aside.footnote > p:last-of-type:after,
|
||||
div.citation > p:last-of-type:after {
|
||||
content: "";
|
||||
clear: both;
|
||||
}
|
||||
|
||||
dl.field-list {
|
||||
display: grid;
|
||||
grid-template-columns: fit-content(30%) auto;
|
||||
}
|
||||
|
||||
dl.field-list > dt {
|
||||
font-weight: bold;
|
||||
word-break: break-word;
|
||||
padding-left: 0.5em;
|
||||
padding-right: 5px;
|
||||
}
|
||||
|
||||
dl.field-list > dd {
|
||||
padding-left: 0.5em;
|
||||
margin-top: 0em;
|
||||
margin-left: 0em;
|
||||
margin-bottom: 0em;
|
||||
}
|
||||
|
||||
dl {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
dd > :first-child {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
dd ul, dd table {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
dd {
|
||||
margin-top: 3px;
|
||||
margin-bottom: 10px;
|
||||
margin-left: 30px;
|
||||
}
|
||||
|
||||
.sig dd {
|
||||
margin-top: 0px;
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
.sig dl {
|
||||
margin-top: 0px;
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
dl > dd:last-child,
|
||||
dl > dd:last-child > :last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
dt:target, span.highlighted {
|
||||
background-color: #fbe54e;
|
||||
}
|
||||
|
||||
rect.highlighted {
|
||||
fill: #fbe54e;
|
||||
}
|
||||
|
||||
dl.glossary dt {
|
||||
font-weight: bold;
|
||||
font-size: 1.1em;
|
||||
}
|
||||
|
||||
.versionmodified {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.system-message {
|
||||
background-color: #fda;
|
||||
padding: 5px;
|
||||
border: 3px solid red;
|
||||
}
|
||||
|
||||
.footnote:target {
|
||||
background-color: #ffa;
|
||||
}
|
||||
|
||||
.line-block {
|
||||
display: block;
|
||||
margin-top: 1em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.line-block .line-block {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
margin-left: 1.5em;
|
||||
}
|
||||
|
||||
.guilabel, .menuselection {
|
||||
font-family: sans-serif;
|
||||
}
|
||||
|
||||
.accelerator {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.classifier {
|
||||
font-style: oblique;
|
||||
}
|
||||
|
||||
.classifier:before {
|
||||
font-style: normal;
|
||||
margin: 0 0.5em;
|
||||
content: ":";
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
abbr, acronym {
|
||||
border-bottom: dotted 1px;
|
||||
cursor: help;
|
||||
}
|
||||
|
||||
.translated {
|
||||
background-color: rgba(207, 255, 207, 0.2)
|
||||
}
|
||||
|
||||
.untranslated {
|
||||
background-color: rgba(255, 207, 207, 0.2)
|
||||
}
|
||||
|
||||
/* -- code displays --------------------------------------------------------- */
|
||||
|
||||
pre {
|
||||
overflow: auto;
|
||||
overflow-y: hidden; /* fixes display issues on Chrome browsers */
|
||||
}
|
||||
|
||||
pre, div[class*="highlight-"] {
|
||||
clear: both;
|
||||
}
|
||||
|
||||
span.pre {
|
||||
-moz-hyphens: none;
|
||||
-ms-hyphens: none;
|
||||
-webkit-hyphens: none;
|
||||
hyphens: none;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
div[class*="highlight-"] {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
td.linenos pre {
|
||||
border: 0;
|
||||
background-color: transparent;
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
table.highlighttable {
|
||||
display: block;
|
||||
}
|
||||
|
||||
table.highlighttable tbody {
|
||||
display: block;
|
||||
}
|
||||
|
||||
table.highlighttable tr {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
table.highlighttable td {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
table.highlighttable td.linenos {
|
||||
padding-right: 0.5em;
|
||||
}
|
||||
|
||||
table.highlighttable td.code {
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.highlight .hll {
|
||||
display: block;
|
||||
}
|
||||
|
||||
div.highlight pre,
|
||||
table.highlighttable pre {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
div.code-block-caption + div {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
div.code-block-caption {
|
||||
margin-top: 1em;
|
||||
padding: 2px 5px;
|
||||
font-size: small;
|
||||
}
|
||||
|
||||
div.code-block-caption code {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
table.highlighttable td.linenos,
|
||||
span.linenos,
|
||||
div.highlight span.gp { /* gp: Generic.Prompt */
|
||||
user-select: none;
|
||||
-webkit-user-select: text; /* Safari fallback only */
|
||||
-webkit-user-select: none; /* Chrome/Safari */
|
||||
-moz-user-select: none; /* Firefox */
|
||||
-ms-user-select: none; /* IE10+ */
|
||||
}
|
||||
|
||||
div.code-block-caption span.caption-number {
|
||||
padding: 0.1em 0.3em;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
div.code-block-caption span.caption-text {
|
||||
}
|
||||
|
||||
div.literal-block-wrapper {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
code.xref, a code {
|
||||
background-color: transparent;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.viewcode-link {
|
||||
float: right;
|
||||
}
|
||||
|
||||
.viewcode-back {
|
||||
float: right;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
|
||||
div.viewcode-block:target {
|
||||
margin: -1px -10px;
|
||||
padding: 0 10px;
|
||||
}
|
||||
|
||||
/* -- math display ---------------------------------------------------------- */
|
||||
|
||||
img.math {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
div.body div.math p {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
span.eqno {
|
||||
float: right;
|
||||
}
|
||||
|
||||
span.eqno a.headerlink {
|
||||
position: absolute;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
div.math:hover a.headerlink {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
/* -- printout stylesheet --------------------------------------------------- */
|
||||
|
||||
@media print {
|
||||
div.document,
|
||||
div.documentwrapper,
|
||||
div.bodywrapper {
|
||||
margin: 0 !important;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
div.sphinxsidebar,
|
||||
div.related,
|
||||
div.footer,
|
||||
#top-link {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
56 hw6/doc/_static/cs153-handout.css vendored Normal file
@ -0,0 +1,56 @@
|
|||
/* CS153 Style sheet for handouts */
|
||||
|
||||
h1, h2, h3, h4, h5, h6 {
|
||||
font-size: 1.5em;
|
||||
line-height: 1.5;
|
||||
font-weight: normal;
|
||||
}
|
||||
h1 {
|
||||
font-size: 2em;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 1.8em;
|
||||
}
|
||||
|
||||
tt,code,pre,.literal {
|
||||
font-family:monospace,serif;
|
||||
}
|
||||
|
||||
pre {
|
||||
background-color: #f7f7f7;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 4px;
|
||||
padding: 10px;
|
||||
white-space: pre-wrap; /* Wrap long lines */
|
||||
font-size: 14px;
|
||||
line-height: 1.4;
|
||||
color: #333;
|
||||
overflow: auto;
|
||||
page-break-inside:avoid
|
||||
|
||||
}
|
||||
|
||||
/* Add a bit of syntax highlighting for code */
|
||||
pre code {
|
||||
display: block;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-size: 14px;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
/* Style code blocks within pre tags */
|
||||
pre code {
|
||||
background-color: #f7f7f7;
|
||||
border: none;
|
||||
border-radius: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.docutils.literal {
|
||||
color: #e74c3c;
|
||||
white-space: normal;
|
||||
border: 1px solid #e1e4e5;
|
||||
}
|
||||
|
||||
105 hw6/doc/_static/custom.css vendored Normal file
@ -0,0 +1,105 @@
|
|||
.wy-side-nav-search, .wy-nav-top {
|
||||
background: #6ba339;
|
||||
}
|
||||
|
||||
.wy-nav-content {
|
||||
max-width: none;
|
||||
}
|
||||
|
||||
.wy-table-responsive table td {
|
||||
white-space: normal;
|
||||
}
|
||||
|
||||
/********************************************/
|
||||
/* LECTURE TABLE STYLES */
|
||||
/********************************************/
|
||||
|
||||
:root {
|
||||
--Color--border : #FFFFFF; /* border color */
|
||||
--Color--link : #DE3A0D; /* hyperlinks */ /* red */
|
||||
|
||||
--Color--white : #FFFFFF;
|
||||
|
||||
--Color--background : #FFFFFF;
|
||||
--Color--text : #000000;
|
||||
--Color--header : #000000;
|
||||
|
||||
--Color--menu-bg : #FFFFFF;
|
||||
--Color--menu-fg : #000000;
|
||||
--Color--menu-fg-hover : #FFFFFF;
|
||||
--Color--menu-bg-hover : #DE3A0D;
|
||||
|
||||
--Color--section-bg : #F7F7F7;
|
||||
--Color--section-fg : #121519;
|
||||
|
||||
--Color--week1 : #FFFFFF;
|
||||
--Color--week2 : #F7F7F7;
|
||||
--Color--dimweek1 : #CCCCCC;
|
||||
--Color--dimweek2 : #BBBBBB;
|
||||
--Color--dimhw : #AAAAAA;
|
||||
|
||||
--Color--note-bg : #FCD63A;
|
||||
--Color--note-border : #FCBF3A;
|
||||
--Color--notice-fg : #DE3A0D;
|
||||
|
||||
--Color--code-fg : #000000;
|
||||
--Color--code-bg : #FEFEFB;
|
||||
--Color--code-border : #E1EDB9;
|
||||
--Color--code-error : #A40000;
|
||||
|
||||
--Color--hdr-bg : #666666;
|
||||
--Color--hdr-fg : #000000;
|
||||
}
|
||||
|
||||
|
||||
.bright {
|
||||
color: var(--Color--white);
|
||||
}
|
||||
|
||||
td.date {
|
||||
color: #050505;
|
||||
}
|
||||
|
||||
div.hdr {
|
||||
color: var(--Color--white);
|
||||
}
|
||||
|
||||
|
||||
.week1 {
|
||||
background-color: var(--Color--week1);/*week1*/
|
||||
}
|
||||
|
||||
.week2 {
|
||||
background-color: var(--Color--week2);;/*week2*/
|
||||
}
|
||||
|
||||
tr.week2.elide > td.topic {
|
||||
background-color: var(--Color--week2);
|
||||
color: var(--Color--dimweek2);
|
||||
}
|
||||
|
||||
tr.week1.elide > td.topic {
|
||||
background-color: var(--Color--week1);
|
||||
color: var(--Color--dimweek1);
|
||||
}
|
||||
|
||||
tr.elide > td.slides > br {
|
||||
display:none;
|
||||
}
|
||||
|
||||
tr.elide > td.handout > br {
|
||||
display:none;
|
||||
}
|
||||
|
||||
.tr.elide.hw > .td > a {
|
||||
display:none;
|
||||
}
|
||||
|
||||
tr.elide > td.slides > a {
|
||||
display:none;
|
||||
}
|
||||
|
||||
tr.elide > td.handout > a {
|
||||
display:none;
|
||||
}
|
||||
|
||||
156 hw6/doc/_static/doctools.js vendored Normal file
@ -0,0 +1,156 @@
|
|||
/*
|
||||
* doctools.js
|
||||
* ~~~~~~~~~~~
|
||||
*
|
||||
* Base JavaScript utilities for all Sphinx HTML documentation.
|
||||
*
|
||||
* :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
|
||||
* :license: BSD, see LICENSE for details.
|
||||
*
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([
|
||||
"TEXTAREA",
|
||||
"INPUT",
|
||||
"SELECT",
|
||||
"BUTTON",
|
||||
]);
|
||||
|
||||
const _ready = (callback) => {
|
||||
if (document.readyState !== "loading") {
|
||||
callback();
|
||||
} else {
|
||||
document.addEventListener("DOMContentLoaded", callback);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Small JavaScript module for the documentation.
|
||||
*/
|
||||
const Documentation = {
|
||||
init: () => {
|
||||
Documentation.initDomainIndexTable();
|
||||
Documentation.initOnKeyListeners();
|
||||
},
|
||||
|
||||
/**
|
||||
* i18n support
|
||||
*/
|
||||
TRANSLATIONS: {},
|
||||
PLURAL_EXPR: (n) => (n === 1 ? 0 : 1),
|
||||
LOCALE: "unknown",
|
||||
|
||||
// gettext and ngettext don't access this so that the functions
|
||||
// can safely bound to a different name (_ = Documentation.gettext)
|
||||
gettext: (string) => {
|
||||
const translated = Documentation.TRANSLATIONS[string];
|
||||
switch (typeof translated) {
|
||||
case "undefined":
|
||||
return string; // no translation
|
||||
case "string":
|
||||
return translated; // translation exists
|
||||
default:
|
||||
return translated[0]; // (singular, plural) translation tuple exists
|
||||
}
|
||||
},
|
||||
|
||||
ngettext: (singular, plural, n) => {
|
||||
const translated = Documentation.TRANSLATIONS[singular];
|
||||
if (typeof translated !== "undefined")
|
||||
return translated[Documentation.PLURAL_EXPR(n)];
|
||||
return n === 1 ? singular : plural;
|
||||
},
|
||||
|
||||
addTranslations: (catalog) => {
|
||||
Object.assign(Documentation.TRANSLATIONS, catalog.messages);
|
||||
Documentation.PLURAL_EXPR = new Function(
|
||||
"n",
|
||||
`return (${catalog.plural_expr})`
|
||||
);
|
||||
Documentation.LOCALE = catalog.locale;
|
||||
},
|
||||
|
||||
/**
|
||||
* helper function to focus on search bar
|
||||
*/
|
||||
focusSearchBar: () => {
|
||||
document.querySelectorAll("input[name=q]")[0]?.focus();
|
||||
},
|
||||
|
||||
/**
|
||||
* Initialise the domain index toggle buttons
|
||||
*/
|
||||
initDomainIndexTable: () => {
|
||||
const toggler = (el) => {
|
||||
const idNumber = el.id.substr(7);
|
||||
const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`);
|
||||
if (el.src.substr(-9) === "minus.png") {
|
||||
el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`;
|
||||
toggledRows.forEach((el) => (el.style.display = "none"));
|
||||
} else {
|
||||
el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`;
|
||||
toggledRows.forEach((el) => (el.style.display = ""));
|
||||
}
|
||||
};
|
||||
|
||||
const togglerElements = document.querySelectorAll("img.toggler");
|
||||
togglerElements.forEach((el) =>
|
||||
el.addEventListener("click", (event) => toggler(event.currentTarget))
|
||||
);
|
||||
togglerElements.forEach((el) => (el.style.display = ""));
|
||||
if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler);
|
||||
},
|
||||
|
||||
initOnKeyListeners: () => {
|
||||
// only install a listener if it is really needed
|
||||
if (
|
||||
!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS &&
|
||||
!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
|
||||
)
|
||||
return;
|
||||
|
||||
document.addEventListener("keydown", (event) => {
|
||||
// bail for input elements
|
||||
if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
|
||||
// bail with special keys
|
||||
if (event.altKey || event.ctrlKey || event.metaKey) return;
|
||||
|
||||
if (!event.shiftKey) {
|
||||
switch (event.key) {
|
||||
case "ArrowLeft":
|
||||
if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
|
||||
|
||||
const prevLink = document.querySelector('link[rel="prev"]');
|
||||
if (prevLink && prevLink.href) {
|
||||
window.location.href = prevLink.href;
|
||||
event.preventDefault();
|
||||
}
|
||||
break;
|
||||
case "ArrowRight":
|
||||
if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
|
||||
|
||||
const nextLink = document.querySelector('link[rel="next"]');
|
||||
if (nextLink && nextLink.href) {
|
||||
window.location.href = nextLink.href;
|
||||
event.preventDefault();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// some keyboard layouts may need Shift to get /
|
||||
switch (event.key) {
|
||||
case "/":
|
||||
if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break;
|
||||
Documentation.focusSearchBar();
|
||||
event.preventDefault();
|
||||
}
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
// quick alias for translations
|
||||
const _ = Documentation.gettext;
|
||||
|
||||
_ready(Documentation.init);
|
||||
13 hw6/doc/_static/documentation_options.js vendored Normal file
@ -0,0 +1,13 @@
|
|||
const DOCUMENTATION_OPTIONS = {
|
||||
VERSION: '',
|
||||
LANGUAGE: 'en',
|
||||
COLLAPSE_INDEX: false,
|
||||
BUILDER: 'html',
|
||||
FILE_SUFFIX: '.html',
|
||||
LINK_SUFFIX: '.html',
|
||||
HAS_SOURCE: false,
|
||||
SOURCELINK_SUFFIX: '.txt',
|
||||
NAVIGATION_WITH_KEYS: false,
|
||||
SHOW_SEARCH_SUMMARY: true,
|
||||
ENABLE_SEARCH_SHORTCUTS: true,
|
||||
};
|
||||
BIN hw6/doc/_static/file.png vendored Normal file (binary, 286 B)
199 hw6/doc/_static/language_data.js vendored Normal file
@ -0,0 +1,199 @@
|
|||
/*
|
||||
* language_data.js
|
||||
* ~~~~~~~~~~~~~~~~
|
||||
*
|
||||
* This script contains the language-specific data used by searchtools.js,
|
||||
* namely the list of stopwords, stemmer, scorer and splitter.
|
||||
*
|
||||
* :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
|
||||
* :license: BSD, see LICENSE for details.
|
||||
*
|
||||
*/
|
||||
|
||||
var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"];
|
||||
|
||||
|
||||
/* Non-minified version is copied as a separate JS file, is available */
|
||||
|
||||
/**
|
||||
* Porter Stemmer
|
||||
*/
|
||||
var Stemmer = function() {
|
||||
|
||||
var step2list = {
|
||||
ational: 'ate',
|
||||
tional: 'tion',
|
||||
enci: 'ence',
|
||||
anci: 'ance',
|
||||
izer: 'ize',
|
||||
bli: 'ble',
|
||||
alli: 'al',
|
||||
entli: 'ent',
|
||||
eli: 'e',
|
||||
ousli: 'ous',
|
||||
ization: 'ize',
|
||||
ation: 'ate',
|
||||
ator: 'ate',
|
||||
alism: 'al',
|
||||
iveness: 'ive',
|
||||
fulness: 'ful',
|
||||
ousness: 'ous',
|
||||
aliti: 'al',
|
||||
iviti: 'ive',
|
||||
biliti: 'ble',
|
||||
logi: 'log'
|
||||
};
|
||||
|
||||
var step3list = {
|
||||
icate: 'ic',
|
||||
ative: '',
|
||||
alize: 'al',
|
||||
iciti: 'ic',
|
||||
ical: 'ic',
|
||||
ful: '',
|
||||
ness: ''
|
||||
};
|
||||
|
||||
var c = "[^aeiou]"; // consonant
|
||||
var v = "[aeiouy]"; // vowel
|
||||
var C = c + "[^aeiouy]*"; // consonant sequence
|
||||
var V = v + "[aeiou]*"; // vowel sequence
|
||||
|
||||
var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
|
||||
var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
|
||||
var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
|
||||
var s_v = "^(" + C + ")?" + v; // vowel in stem
|
||||
|
||||
this.stemWord = function (w) {
|
||||
var stem;
|
||||
var suffix;
|
||||
var firstch;
|
||||
var origword = w;
|
||||
|
||||
if (w.length < 3)
|
||||
return w;
|
||||
|
||||
var re;
|
||||
var re2;
|
||||
var re3;
|
||||
var re4;
|
||||
|
||||
firstch = w.substr(0,1);
|
||||
if (firstch == "y")
|
||||
w = firstch.toUpperCase() + w.substr(1);
|
||||
|
||||
// Step 1a
|
||||
re = /^(.+?)(ss|i)es$/;
|
||||
re2 = /^(.+?)([^s])s$/;
|
||||
|
||||
if (re.test(w))
|
||||
w = w.replace(re,"$1$2");
|
||||
else if (re2.test(w))
|
||||
w = w.replace(re2,"$1$2");
|
||||
|
||||
// Step 1b
|
||||
re = /^(.+?)eed$/;
|
||||
re2 = /^(.+?)(ed|ing)$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
re = new RegExp(mgr0);
|
||||
if (re.test(fp[1])) {
|
||||
re = /.$/;
|
||||
w = w.replace(re,"");
|
||||
}
|
||||
}
|
||||
else if (re2.test(w)) {
|
||||
var fp = re2.exec(w);
|
||||
stem = fp[1];
|
||||
re2 = new RegExp(s_v);
|
||||
if (re2.test(stem)) {
|
||||
w = stem;
|
||||
re2 = /(at|bl|iz)$/;
|
||||
re3 = new RegExp("([^aeiouylsz])\\1$");
|
||||
re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
|
||||
if (re2.test(w))
|
||||
w = w + "e";
|
||||
else if (re3.test(w)) {
|
||||
re = /.$/;
|
||||
w = w.replace(re,"");
|
||||
}
|
||||
else if (re4.test(w))
|
||||
w = w + "e";
|
||||
}
|
||||
}
|
||||
|
||||
// Step 1c
|
||||
re = /^(.+?)y$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
re = new RegExp(s_v);
|
||||
if (re.test(stem))
|
||||
w = stem + "i";
|
||||
}
|
||||
|
||||
// Step 2
|
||||
re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
suffix = fp[2];
|
||||
re = new RegExp(mgr0);
|
||||
if (re.test(stem))
|
||||
w = stem + step2list[suffix];
|
||||
}
|
||||
|
||||
// Step 3
|
||||
re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
suffix = fp[2];
|
||||
re = new RegExp(mgr0);
|
||||
if (re.test(stem))
|
||||
w = stem + step3list[suffix];
|
||||
}
|
||||
|
||||
// Step 4
|
||||
re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
|
||||
re2 = /^(.+?)(s|t)(ion)$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
re = new RegExp(mgr1);
|
||||
if (re.test(stem))
|
||||
w = stem;
|
||||
}
|
||||
else if (re2.test(w)) {
|
||||
var fp = re2.exec(w);
|
||||
stem = fp[1] + fp[2];
|
||||
re2 = new RegExp(mgr1);
|
||||
if (re2.test(stem))
|
||||
w = stem;
|
||||
}
|
||||
|
||||
// Step 5
|
||||
re = /^(.+?)e$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
re = new RegExp(mgr1);
|
||||
re2 = new RegExp(meq1);
|
||||
re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
|
||||
if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
|
||||
w = stem;
|
||||
}
|
||||
re = /ll$/;
|
||||
re2 = new RegExp(mgr1);
|
||||
if (re.test(w) && re2.test(w)) {
|
||||
re = /.$/;
|
||||
w = w.replace(re,"");
|
||||
}
|
||||
|
||||
// and turn initial Y back to y
|
||||
if (firstch == "y")
|
||||
w = firstch.toLowerCase() + w.substr(1);
|
||||
return w;
|
||||
}
|
||||
}
|
||||
|
||||
BIN hw6/doc/_static/minus.png vendored Normal file (binary, 90 B)
BIN hw6/doc/_static/plus.png vendored Normal file (binary, 90 B)
84 hw6/doc/_static/pygments.css vendored Normal file
@ -0,0 +1,84 @@
|
|||
pre { line-height: 125%; }
|
||||
td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
|
||||
span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
|
||||
td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
|
||||
span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
|
||||
.highlight .hll { background-color: #ffffcc }
|
||||
.highlight { background: #f8f8f8; }
|
||||
.highlight .c { color: #8f5902; font-style: italic } /* Comment */
|
||||
.highlight .err { color: #a40000; border: 1px solid #ef2929 } /* Error */
|
||||
.highlight .g { color: #000000 } /* Generic */
|
||||
.highlight .k { color: #004461; font-weight: bold } /* Keyword */
|
||||
.highlight .l { color: #000000 } /* Literal */
|
||||
.highlight .n { color: #000000 } /* Name */
|
||||
.highlight .o { color: #582800 } /* Operator */
|
||||
.highlight .x { color: #000000 } /* Other */
|
||||
.highlight .p { color: #000000; font-weight: bold } /* Punctuation */
|
||||
.highlight .ch { color: #8f5902; font-style: italic } /* Comment.Hashbang */
|
||||
.highlight .cm { color: #8f5902; font-style: italic } /* Comment.Multiline */
|
||||
.highlight .cp { color: #8f5902 } /* Comment.Preproc */
|
||||
.highlight .cpf { color: #8f5902; font-style: italic } /* Comment.PreprocFile */
|
||||
.highlight .c1 { color: #8f5902; font-style: italic } /* Comment.Single */
|
||||
.highlight .cs { color: #8f5902; font-style: italic } /* Comment.Special */
|
||||
.highlight .gd { color: #a40000 } /* Generic.Deleted */
|
||||
.highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */
|
||||
.highlight .ges { color: #000000 } /* Generic.EmphStrong */
|
||||
.highlight .gr { color: #ef2929 } /* Generic.Error */
|
||||
.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
|
||||
.highlight .gi { color: #00A000 } /* Generic.Inserted */
|
||||
.highlight .go { color: #888888 } /* Generic.Output */
|
||||
.highlight .gp { color: #745334 } /* Generic.Prompt */
|
||||
.highlight .gs { color: #000000; font-weight: bold } /* Generic.Strong */
|
||||
.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
|
||||
.highlight .gt { color: #a40000; font-weight: bold } /* Generic.Traceback */
|
||||
.highlight .kc { color: #004461; font-weight: bold } /* Keyword.Constant */
|
||||
.highlight .kd { color: #004461; font-weight: bold } /* Keyword.Declaration */
|
||||
.highlight .kn { color: #004461; font-weight: bold } /* Keyword.Namespace */
|
||||
.highlight .kp { color: #004461; font-weight: bold } /* Keyword.Pseudo */
|
||||
.highlight .kr { color: #004461; font-weight: bold } /* Keyword.Reserved */
|
||||
.highlight .kt { color: #004461; font-weight: bold } /* Keyword.Type */
|
||||
.highlight .ld { color: #000000 } /* Literal.Date */
|
||||
.highlight .m { color: #990000 } /* Literal.Number */
|
||||
.highlight .s { color: #4e9a06 } /* Literal.String */
|
||||
.highlight .na { color: #c4a000 } /* Name.Attribute */
|
||||
.highlight .nb { color: #004461 } /* Name.Builtin */
|
||||
.highlight .nc { color: #000000 } /* Name.Class */
|
||||
.highlight .no { color: #000000 } /* Name.Constant */
|
||||
.highlight .nd { color: #888888 } /* Name.Decorator */
|
||||
.highlight .ni { color: #ce5c00 } /* Name.Entity */
|
||||
.highlight .ne { color: #cc0000; font-weight: bold } /* Name.Exception */
|
||||
.highlight .nf { color: #000000 } /* Name.Function */
|
||||
.highlight .nl { color: #f57900 } /* Name.Label */
|
||||
.highlight .nn { color: #000000 } /* Name.Namespace */
|
||||
.highlight .nx { color: #000000 } /* Name.Other */
|
||||
.highlight .py { color: #000000 } /* Name.Property */
|
||||
.highlight .nt { color: #004461; font-weight: bold } /* Name.Tag */
|
||||
.highlight .nv { color: #000000 } /* Name.Variable */
|
||||
.highlight .ow { color: #004461; font-weight: bold } /* Operator.Word */
|
||||
.highlight .pm { color: #000000; font-weight: bold } /* Punctuation.Marker */
|
||||
.highlight .w { color: #f8f8f8; text-decoration: underline } /* Text.Whitespace */
|
||||
.highlight .mb { color: #990000 } /* Literal.Number.Bin */
|
||||
.highlight .mf { color: #990000 } /* Literal.Number.Float */
|
||||
.highlight .mh { color: #990000 } /* Literal.Number.Hex */
|
||||
.highlight .mi { color: #990000 } /* Literal.Number.Integer */
|
||||
.highlight .mo { color: #990000 } /* Literal.Number.Oct */
|
||||
.highlight .sa { color: #4e9a06 } /* Literal.String.Affix */
|
||||
.highlight .sb { color: #4e9a06 } /* Literal.String.Backtick */
|
||||
.highlight .sc { color: #4e9a06 } /* Literal.String.Char */
|
||||
.highlight .dl { color: #4e9a06 } /* Literal.String.Delimiter */
|
||||
.highlight .sd { color: #8f5902; font-style: italic } /* Literal.String.Doc */
|
||||
.highlight .s2 { color: #4e9a06 } /* Literal.String.Double */
|
||||
.highlight .se { color: #4e9a06 } /* Literal.String.Escape */
|
||||
.highlight .sh { color: #4e9a06 } /* Literal.String.Heredoc */
|
||||
.highlight .si { color: #4e9a06 } /* Literal.String.Interpol */
|
||||
.highlight .sx { color: #4e9a06 } /* Literal.String.Other */
|
||||
.highlight .sr { color: #4e9a06 } /* Literal.String.Regex */
|
||||
.highlight .s1 { color: #4e9a06 } /* Literal.String.Single */
|
||||
.highlight .ss { color: #4e9a06 } /* Literal.String.Symbol */
|
||||
.highlight .bp { color: #3465a4 } /* Name.Builtin.Pseudo */
|
||||
.highlight .fm { color: #000000 } /* Name.Function.Magic */
|
||||
.highlight .vc { color: #000000 } /* Name.Variable.Class */
|
||||
.highlight .vg { color: #000000 } /* Name.Variable.Global */
|
||||
.highlight .vi { color: #000000 } /* Name.Variable.Instance */
|
||||
.highlight .vm { color: #000000 } /* Name.Variable.Magic */
|
||||
.highlight .il { color: #990000 } /* Literal.Number.Integer.Long */
|
||||
574
hw6/doc/_static/searchtools.js
vendored
Normal file
|
|
@ -0,0 +1,574 @@
|
|||
/*
|
||||
* searchtools.js
|
||||
* ~~~~~~~~~~~~~~~~
|
||||
*
|
||||
* Sphinx JavaScript utilities for the full-text search.
|
||||
*
|
||||
* :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
|
||||
* :license: BSD, see LICENSE for details.
|
||||
*
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* Simple result scoring code.
|
||||
*/
|
||||
if (typeof Scorer === "undefined") {
|
||||
var Scorer = {
|
||||
// Implement the following function to further tweak the score for each result
|
||||
// The function takes a result array [docname, title, anchor, descr, score, filename]
|
||||
// and returns the new score.
|
||||
/*
|
||||
score: result => {
|
||||
const [docname, title, anchor, descr, score, filename] = result
|
||||
return score
|
||||
},
|
||||
*/
|
||||
|
||||
// query matches the full name of an object
|
||||
objNameMatch: 11,
|
||||
// or matches in the last dotted part of the object name
|
||||
objPartialMatch: 6,
|
||||
// Additive scores depending on the priority of the object
|
||||
objPrio: {
|
||||
0: 15, // used to be importantResults
|
||||
1: 5, // used to be objectResults
|
||||
2: -5, // used to be unimportantResults
|
||||
},
|
||||
// Used when the priority is not in the mapping.
|
||||
objPrioDefault: 0,
|
||||
|
||||
// query found in title
|
||||
title: 15,
|
||||
partialTitle: 7,
|
||||
// query found in terms
|
||||
term: 5,
|
||||
partialTerm: 2,
|
||||
};
|
||||
}
|
||||
|
||||
const _removeChildren = (element) => {
|
||||
while (element && element.lastChild) element.removeChild(element.lastChild);
|
||||
};
|
||||
|
||||
/**
|
||||
* See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
|
||||
*/
|
||||
const _escapeRegExp = (string) =>
|
||||
string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||
|
||||
const _displayItem = (item, searchTerms, highlightTerms) => {
|
||||
const docBuilder = DOCUMENTATION_OPTIONS.BUILDER;
|
||||
const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX;
|
||||
const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX;
|
||||
const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY;
|
||||
const contentRoot = document.documentElement.dataset.content_root;
|
||||
|
||||
const [docName, title, anchor, descr, score, _filename] = item;
|
||||
|
||||
let listItem = document.createElement("li");
|
||||
let requestUrl;
|
||||
let linkUrl;
|
||||
if (docBuilder === "dirhtml") {
|
||||
// dirhtml builder
|
||||
let dirname = docName + "/";
|
||||
if (dirname.match(/\/index\/$/))
|
||||
dirname = dirname.substring(0, dirname.length - 6);
|
||||
else if (dirname === "index/") dirname = "";
|
||||
requestUrl = contentRoot + dirname;
|
||||
linkUrl = requestUrl;
|
||||
} else {
|
||||
// normal html builders
|
||||
requestUrl = contentRoot + docName + docFileSuffix;
|
||||
linkUrl = docName + docLinkSuffix;
|
||||
}
|
||||
let linkEl = listItem.appendChild(document.createElement("a"));
|
||||
linkEl.href = linkUrl + anchor;
|
||||
linkEl.dataset.score = score;
|
||||
linkEl.innerHTML = title;
|
||||
if (descr) {
|
||||
listItem.appendChild(document.createElement("span")).innerHTML =
|
||||
" (" + descr + ")";
|
||||
// highlight search terms in the description
|
||||
if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
|
||||
highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
|
||||
}
|
||||
else if (showSearchSummary)
|
||||
fetch(requestUrl)
|
||||
.then((responseData) => responseData.text())
|
||||
.then((data) => {
|
||||
if (data)
|
||||
listItem.appendChild(
|
||||
Search.makeSearchSummary(data, searchTerms)
|
||||
);
|
||||
// highlight search terms in the summary
|
||||
if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
|
||||
highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
|
||||
});
|
||||
Search.output.appendChild(listItem);
|
||||
};
|
||||
const _finishSearch = (resultCount) => {
|
||||
Search.stopPulse();
|
||||
Search.title.innerText = _("Search Results");
|
||||
if (!resultCount)
|
||||
Search.status.innerText = Documentation.gettext(
|
||||
"Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
|
||||
);
|
||||
else
|
||||
Search.status.innerText = _(
|
||||
`Search finished, found ${resultCount} page(s) matching the search query.`
|
||||
);
|
||||
};
|
||||
const _displayNextItem = (
|
||||
results,
|
||||
resultCount,
|
||||
searchTerms,
|
||||
highlightTerms,
|
||||
) => {
|
||||
// results left, load the summary and display it
|
||||
// this is intended to be dynamic (don't sub resultsCount)
|
||||
if (results.length) {
|
||||
_displayItem(results.pop(), searchTerms, highlightTerms);
|
||||
setTimeout(
|
||||
() => _displayNextItem(results, resultCount, searchTerms, highlightTerms),
|
||||
5
|
||||
);
|
||||
}
|
||||
// search finished, update title and status message
|
||||
else _finishSearch(resultCount);
|
||||
};
|
||||
|
||||
/**
|
||||
* Default splitQuery function. Can be overridden in ``sphinx.search`` with a
|
||||
* custom function per language.
|
||||
*
|
||||
* The regular expression works by splitting the string on consecutive characters
|
||||
* that are not Unicode letters, numbers, underscores, or emoji characters.
|
||||
* This is the same as ``\W+`` in Python, preserving the surrogate pair area.
|
||||
*/
|
||||
if (typeof splitQuery === "undefined") {
|
||||
var splitQuery = (query) => query
|
||||
.split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu)
|
||||
.filter(term => term) // remove remaining empty strings
|
||||
}
|
||||
|
||||
/**
|
||||
* Search Module
|
||||
*/
|
||||
const Search = {
|
||||
_index: null,
|
||||
_queued_query: null,
|
||||
_pulse_status: -1,
|
||||
|
||||
htmlToText: (htmlString) => {
|
||||
const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html');
|
||||
htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() });
|
||||
const docContent = htmlElement.querySelector('[role="main"]');
|
||||
    if (docContent !== null) return docContent.textContent;
|
||||
console.warn(
|
||||
"Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template."
|
||||
);
|
||||
return "";
|
||||
},
|
||||
|
||||
init: () => {
|
||||
const query = new URLSearchParams(window.location.search).get("q");
|
||||
document
|
||||
.querySelectorAll('input[name="q"]')
|
||||
.forEach((el) => (el.value = query));
|
||||
if (query) Search.performSearch(query);
|
||||
},
|
||||
|
||||
loadIndex: (url) =>
|
||||
(document.body.appendChild(document.createElement("script")).src = url),
|
||||
|
||||
setIndex: (index) => {
|
||||
Search._index = index;
|
||||
if (Search._queued_query !== null) {
|
||||
const query = Search._queued_query;
|
||||
Search._queued_query = null;
|
||||
Search.query(query);
|
||||
}
|
||||
},
|
||||
|
||||
hasIndex: () => Search._index !== null,
|
||||
|
||||
deferQuery: (query) => (Search._queued_query = query),
|
||||
|
||||
stopPulse: () => (Search._pulse_status = -1),
|
||||
|
||||
startPulse: () => {
|
||||
if (Search._pulse_status >= 0) return;
|
||||
|
||||
const pulse = () => {
|
||||
Search._pulse_status = (Search._pulse_status + 1) % 4;
|
||||
Search.dots.innerText = ".".repeat(Search._pulse_status);
|
||||
if (Search._pulse_status >= 0) window.setTimeout(pulse, 500);
|
||||
};
|
||||
pulse();
|
||||
},
|
||||
|
||||
/**
|
||||
* perform a search for something (or wait until index is loaded)
|
||||
*/
|
||||
performSearch: (query) => {
|
||||
// create the required interface elements
|
||||
const searchText = document.createElement("h2");
|
||||
searchText.textContent = _("Searching");
|
||||
const searchSummary = document.createElement("p");
|
||||
searchSummary.classList.add("search-summary");
|
||||
searchSummary.innerText = "";
|
||||
const searchList = document.createElement("ul");
|
||||
searchList.classList.add("search");
|
||||
|
||||
const out = document.getElementById("search-results");
|
||||
Search.title = out.appendChild(searchText);
|
||||
Search.dots = Search.title.appendChild(document.createElement("span"));
|
||||
Search.status = out.appendChild(searchSummary);
|
||||
Search.output = out.appendChild(searchList);
|
||||
|
||||
const searchProgress = document.getElementById("search-progress");
|
||||
// Some themes don't use the search progress node
|
||||
if (searchProgress) {
|
||||
searchProgress.innerText = _("Preparing search...");
|
||||
}
|
||||
Search.startPulse();
|
||||
|
||||
// index already loaded, the browser was quick!
|
||||
if (Search.hasIndex()) Search.query(query);
|
||||
else Search.deferQuery(query);
|
||||
},
|
||||
|
||||
/**
|
||||
* execute search (requires search index to be loaded)
|
||||
*/
|
||||
query: (query) => {
|
||||
const filenames = Search._index.filenames;
|
||||
const docNames = Search._index.docnames;
|
||||
const titles = Search._index.titles;
|
||||
const allTitles = Search._index.alltitles;
|
||||
const indexEntries = Search._index.indexentries;
|
||||
|
||||
// stem the search terms and add them to the correct list
|
||||
const stemmer = new Stemmer();
|
||||
const searchTerms = new Set();
|
||||
const excludedTerms = new Set();
|
||||
const highlightTerms = new Set();
|
||||
const objectTerms = new Set(splitQuery(query.toLowerCase().trim()));
|
||||
splitQuery(query.trim()).forEach((queryTerm) => {
|
||||
const queryTermLower = queryTerm.toLowerCase();
|
||||
|
||||
// maybe skip this "word"
|
||||
// stopwords array is from language_data.js
|
||||
if (
|
||||
stopwords.indexOf(queryTermLower) !== -1 ||
|
||||
queryTerm.match(/^\d+$/)
|
||||
)
|
||||
return;
|
||||
|
||||
// stem the word
|
||||
let word = stemmer.stemWord(queryTermLower);
|
||||
// select the correct list
|
||||
if (word[0] === "-") excludedTerms.add(word.substr(1));
|
||||
else {
|
||||
searchTerms.add(word);
|
||||
highlightTerms.add(queryTermLower);
|
||||
}
|
||||
});
|
||||
|
||||
if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js
|
||||
localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" "))
|
||||
}
|
||||
|
||||
// console.debug("SEARCH: searching for:");
|
||||
// console.info("required: ", [...searchTerms]);
|
||||
// console.info("excluded: ", [...excludedTerms]);
|
||||
|
||||
// array of [docname, title, anchor, descr, score, filename]
|
||||
let results = [];
|
||||
_removeChildren(document.getElementById("search-progress"));
|
||||
|
||||
const queryLower = query.toLowerCase();
|
||||
for (const [title, foundTitles] of Object.entries(allTitles)) {
|
||||
if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) {
|
||||
for (const [file, id] of foundTitles) {
|
||||
let score = Math.round(100 * queryLower.length / title.length)
|
||||
results.push([
|
||||
docNames[file],
|
||||
titles[file] !== title ? `${titles[file]} > ${title}` : title,
|
||||
id !== null ? "#" + id : "",
|
||||
null,
|
||||
score,
|
||||
filenames[file],
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// search for explicit entries in index directives
|
||||
for (const [entry, foundEntries] of Object.entries(indexEntries)) {
|
||||
if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) {
|
||||
for (const [file, id] of foundEntries) {
|
||||
let score = Math.round(100 * queryLower.length / entry.length)
|
||||
results.push([
|
||||
docNames[file],
|
||||
titles[file],
|
||||
id ? "#" + id : "",
|
||||
null,
|
||||
score,
|
||||
filenames[file],
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// lookup as object
|
||||
objectTerms.forEach((term) =>
|
||||
results.push(...Search.performObjectSearch(term, objectTerms))
|
||||
);
|
||||
|
||||
// lookup as search terms in fulltext
|
||||
results.push(...Search.performTermsSearch(searchTerms, excludedTerms));
|
||||
|
||||
// let the scorer override scores with a custom scoring function
|
||||
if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item)));
|
||||
|
||||
// now sort the results by score (in opposite order of appearance, since the
|
||||
// display function below uses pop() to retrieve items) and then
|
||||
// alphabetically
|
||||
results.sort((a, b) => {
|
||||
const leftScore = a[4];
|
||||
const rightScore = b[4];
|
||||
if (leftScore === rightScore) {
|
||||
// same score: sort alphabetically
|
||||
const leftTitle = a[1].toLowerCase();
|
||||
const rightTitle = b[1].toLowerCase();
|
||||
if (leftTitle === rightTitle) return 0;
|
||||
return leftTitle > rightTitle ? -1 : 1; // inverted is intentional
|
||||
}
|
||||
return leftScore > rightScore ? 1 : -1;
|
||||
});
|
||||
|
||||
// remove duplicate search results
|
||||
// note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept
|
||||
let seen = new Set();
|
||||
results = results.reverse().reduce((acc, result) => {
|
||||
let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(',');
|
||||
if (!seen.has(resultStr)) {
|
||||
acc.push(result);
|
||||
seen.add(resultStr);
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
results = results.reverse();
|
||||
|
||||
// for debugging
|
||||
//Search.lastresults = results.slice(); // a copy
|
||||
// console.info("search results:", Search.lastresults);
|
||||
|
||||
// print the results
|
||||
_displayNextItem(results, results.length, searchTerms, highlightTerms);
|
||||
},
|
||||
|
||||
/**
|
||||
* search for object names
|
||||
*/
|
||||
performObjectSearch: (object, objectTerms) => {
|
||||
const filenames = Search._index.filenames;
|
||||
const docNames = Search._index.docnames;
|
||||
const objects = Search._index.objects;
|
||||
const objNames = Search._index.objnames;
|
||||
const titles = Search._index.titles;
|
||||
|
||||
const results = [];
|
||||
|
||||
const objectSearchCallback = (prefix, match) => {
|
||||
const name = match[4]
|
||||
const fullname = (prefix ? prefix + "." : "") + name;
|
||||
const fullnameLower = fullname.toLowerCase();
|
||||
if (fullnameLower.indexOf(object) < 0) return;
|
||||
|
||||
let score = 0;
|
||||
const parts = fullnameLower.split(".");
|
||||
|
||||
// check for different match types: exact matches of full name or
|
||||
// "last name" (i.e. last dotted part)
|
||||
if (fullnameLower === object || parts.slice(-1)[0] === object)
|
||||
score += Scorer.objNameMatch;
|
||||
else if (parts.slice(-1)[0].indexOf(object) > -1)
|
||||
score += Scorer.objPartialMatch; // matches in last name
|
||||
|
||||
const objName = objNames[match[1]][2];
|
||||
const title = titles[match[0]];
|
||||
|
||||
// If more than one term searched for, we require other words to be
|
||||
// found in the name/title/description
|
||||
const otherTerms = new Set(objectTerms);
|
||||
otherTerms.delete(object);
|
||||
if (otherTerms.size > 0) {
|
||||
const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase();
|
||||
if (
|
||||
[...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0)
|
||||
)
|
||||
return;
|
||||
}
|
||||
|
||||
let anchor = match[3];
|
||||
if (anchor === "") anchor = fullname;
|
||||
else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname;
|
||||
|
||||
const descr = objName + _(", in ") + title;
|
||||
|
||||
// add custom score for some objects according to scorer
|
||||
if (Scorer.objPrio.hasOwnProperty(match[2]))
|
||||
score += Scorer.objPrio[match[2]];
|
||||
else score += Scorer.objPrioDefault;
|
||||
|
||||
results.push([
|
||||
docNames[match[0]],
|
||||
fullname,
|
||||
"#" + anchor,
|
||||
descr,
|
||||
score,
|
||||
filenames[match[0]],
|
||||
]);
|
||||
};
|
||||
Object.keys(objects).forEach((prefix) =>
|
||||
objects[prefix].forEach((array) =>
|
||||
objectSearchCallback(prefix, array)
|
||||
)
|
||||
);
|
||||
return results;
|
||||
},
|
||||
|
||||
/**
|
||||
* search for full-text terms in the index
|
||||
*/
|
||||
performTermsSearch: (searchTerms, excludedTerms) => {
|
||||
// prepare search
|
||||
const terms = Search._index.terms;
|
||||
const titleTerms = Search._index.titleterms;
|
||||
const filenames = Search._index.filenames;
|
||||
const docNames = Search._index.docnames;
|
||||
const titles = Search._index.titles;
|
||||
|
||||
const scoreMap = new Map();
|
||||
const fileMap = new Map();
|
||||
|
||||
// perform the search on the required terms
|
||||
searchTerms.forEach((word) => {
|
||||
const files = [];
|
||||
const arr = [
|
||||
{ files: terms[word], score: Scorer.term },
|
||||
{ files: titleTerms[word], score: Scorer.title },
|
||||
];
|
||||
// add support for partial matches
|
||||
if (word.length > 2) {
|
||||
const escapedWord = _escapeRegExp(word);
|
||||
Object.keys(terms).forEach((term) => {
|
||||
if (term.match(escapedWord) && !terms[word])
|
||||
arr.push({ files: terms[term], score: Scorer.partialTerm });
|
||||
});
|
||||
Object.keys(titleTerms).forEach((term) => {
|
||||
if (term.match(escapedWord) && !titleTerms[word])
|
||||
          arr.push({ files: titleTerms[term], score: Scorer.partialTitle });
|
||||
});
|
||||
}
|
||||
|
||||
// no match but word was a required one
|
||||
if (arr.every((record) => record.files === undefined)) return;
|
||||
|
||||
// found search word in contents
|
||||
arr.forEach((record) => {
|
||||
if (record.files === undefined) return;
|
||||
|
||||
let recordFiles = record.files;
|
||||
if (recordFiles.length === undefined) recordFiles = [recordFiles];
|
||||
files.push(...recordFiles);
|
||||
|
||||
// set score for the word in each file
|
||||
recordFiles.forEach((file) => {
|
||||
if (!scoreMap.has(file)) scoreMap.set(file, {});
|
||||
scoreMap.get(file)[word] = record.score;
|
||||
});
|
||||
});
|
||||
|
||||
// create the mapping
|
||||
files.forEach((file) => {
|
||||
if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1)
|
||||
fileMap.get(file).push(word);
|
||||
else fileMap.set(file, [word]);
|
||||
});
|
||||
});
|
||||
|
||||
// now check if the files don't contain excluded terms
|
||||
const results = [];
|
||||
for (const [file, wordList] of fileMap) {
|
||||
// check if all requirements are matched
|
||||
|
||||
// as search terms with length < 3 are discarded
|
||||
const filteredTermCount = [...searchTerms].filter(
|
||||
(term) => term.length > 2
|
||||
).length;
|
||||
if (
|
||||
wordList.length !== searchTerms.size &&
|
||||
wordList.length !== filteredTermCount
|
||||
)
|
||||
continue;
|
||||
|
||||
// ensure that none of the excluded terms is in the search result
|
||||
if (
|
||||
[...excludedTerms].some(
|
||||
(term) =>
|
||||
terms[term] === file ||
|
||||
titleTerms[term] === file ||
|
||||
(terms[term] || []).includes(file) ||
|
||||
(titleTerms[term] || []).includes(file)
|
||||
)
|
||||
)
|
||||
break;
|
||||
|
||||
// select one (max) score for the file.
|
||||
const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w]));
|
||||
// add result to the result list
|
||||
results.push([
|
||||
docNames[file],
|
||||
titles[file],
|
||||
"",
|
||||
null,
|
||||
score,
|
||||
filenames[file],
|
||||
]);
|
||||
}
|
||||
return results;
|
||||
},
|
||||
|
||||
/**
|
||||
* helper function to return a node containing the
|
||||
* search summary for a given text. keywords is a list
|
||||
* of stemmed words.
|
||||
*/
|
||||
makeSearchSummary: (htmlText, keywords) => {
|
||||
const text = Search.htmlToText(htmlText);
|
||||
if (text === "") return null;
|
||||
|
||||
const textLower = text.toLowerCase();
|
||||
const actualStartPosition = [...keywords]
|
||||
.map((k) => textLower.indexOf(k.toLowerCase()))
|
||||
.filter((i) => i > -1)
|
||||
.slice(-1)[0];
|
||||
const startWithContext = Math.max(actualStartPosition - 120, 0);
|
||||
|
||||
const top = startWithContext === 0 ? "" : "...";
|
||||
const tail = startWithContext + 240 < text.length ? "..." : "";
|
||||
|
||||
let summary = document.createElement("p");
|
||||
summary.classList.add("context");
|
||||
summary.textContent = top + text.substr(startWithContext, 240).trim() + tail;
|
||||
|
||||
return summary;
|
||||
},
|
||||
};
|
||||
|
||||
_ready(Search.init);
|
||||
154
hw6/doc/_static/sphinx_highlight.js
vendored
Normal file
|
|
@ -0,0 +1,154 @@
|
|||
/* Highlighting utilities for Sphinx HTML documentation. */
|
||||
"use strict";
|
||||
|
||||
const SPHINX_HIGHLIGHT_ENABLED = true
|
||||
|
||||
/**
|
||||
* highlight a given string on a node by wrapping it in
|
||||
* span elements with the given class name.
|
||||
*/
|
||||
const _highlight = (node, addItems, text, className) => {
|
||||
if (node.nodeType === Node.TEXT_NODE) {
|
||||
const val = node.nodeValue;
|
||||
const parent = node.parentNode;
|
||||
const pos = val.toLowerCase().indexOf(text);
|
||||
if (
|
||||
pos >= 0 &&
|
||||
!parent.classList.contains(className) &&
|
||||
!parent.classList.contains("nohighlight")
|
||||
) {
|
||||
let span;
|
||||
|
||||
const closestNode = parent.closest("body, svg, foreignObject");
|
||||
const isInSVG = closestNode && closestNode.matches("svg");
|
||||
if (isInSVG) {
|
||||
span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
|
||||
} else {
|
||||
span = document.createElement("span");
|
||||
span.classList.add(className);
|
||||
}
|
||||
|
||||
span.appendChild(document.createTextNode(val.substr(pos, text.length)));
|
||||
const rest = document.createTextNode(val.substr(pos + text.length));
|
||||
parent.insertBefore(
|
||||
span,
|
||||
parent.insertBefore(
|
||||
rest,
|
||||
node.nextSibling
|
||||
)
|
||||
);
|
||||
node.nodeValue = val.substr(0, pos);
|
||||
/* There may be more occurrences of search term in this node. So call this
|
||||
* function recursively on the remaining fragment.
|
||||
*/
|
||||
_highlight(rest, addItems, text, className);
|
||||
|
||||
if (isInSVG) {
|
||||
const rect = document.createElementNS(
|
||||
"http://www.w3.org/2000/svg",
|
||||
"rect"
|
||||
);
|
||||
const bbox = parent.getBBox();
|
||||
rect.x.baseVal.value = bbox.x;
|
||||
rect.y.baseVal.value = bbox.y;
|
||||
rect.width.baseVal.value = bbox.width;
|
||||
rect.height.baseVal.value = bbox.height;
|
||||
rect.setAttribute("class", className);
|
||||
addItems.push({ parent: parent, target: rect });
|
||||
}
|
||||
}
|
||||
} else if (node.matches && !node.matches("button, select, textarea")) {
|
||||
node.childNodes.forEach((el) => _highlight(el, addItems, text, className));
|
||||
}
|
||||
};
|
||||
const _highlightText = (thisNode, text, className) => {
|
||||
let addItems = [];
|
||||
_highlight(thisNode, addItems, text, className);
|
||||
addItems.forEach((obj) =>
|
||||
obj.parent.insertAdjacentElement("beforebegin", obj.target)
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Small JavaScript module for the documentation.
|
||||
*/
|
||||
const SphinxHighlight = {
|
||||
|
||||
/**
|
||||
* highlight the search words provided in localstorage in the text
|
||||
*/
|
||||
highlightSearchWords: () => {
|
||||
if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
|
||||
|
||||
// get and clear terms from localstorage
|
||||
const url = new URL(window.location);
|
||||
const highlight =
|
||||
localStorage.getItem("sphinx_highlight_terms")
|
||||
|| url.searchParams.get("highlight")
|
||||
|| "";
|
||||
localStorage.removeItem("sphinx_highlight_terms")
|
||||
url.searchParams.delete("highlight");
|
||||
window.history.replaceState({}, "", url);
|
||||
|
||||
// get individual terms from highlight string
|
||||
const terms = highlight.toLowerCase().split(/\s+/).filter(x => x);
|
||||
if (terms.length === 0) return; // nothing to do
|
||||
|
||||
// There should never be more than one element matching "div.body"
|
||||
const divBody = document.querySelectorAll("div.body");
|
||||
const body = divBody.length ? divBody[0] : document.querySelector("body");
|
||||
window.setTimeout(() => {
|
||||
terms.forEach((term) => _highlightText(body, term, "highlighted"));
|
||||
}, 10);
|
||||
|
||||
const searchBox = document.getElementById("searchbox");
|
||||
if (searchBox === null) return;
|
||||
searchBox.appendChild(
|
||||
document
|
||||
.createRange()
|
||||
.createContextualFragment(
|
||||
'<p class="highlight-link">' +
|
||||
'<a href="javascript:SphinxHighlight.hideSearchWords()">' +
|
||||
_("Hide Search Matches") +
|
||||
"</a></p>"
|
||||
)
|
||||
);
|
||||
},
|
||||
|
||||
/**
|
||||
* helper function to hide the search marks again
|
||||
*/
|
||||
hideSearchWords: () => {
|
||||
document
|
||||
.querySelectorAll("#searchbox .highlight-link")
|
||||
.forEach((el) => el.remove());
|
||||
document
|
||||
.querySelectorAll("span.highlighted")
|
||||
.forEach((el) => el.classList.remove("highlighted"));
|
||||
localStorage.removeItem("sphinx_highlight_terms")
|
||||
},
|
||||
|
||||
initEscapeListener: () => {
|
||||
// only install a listener if it is really needed
|
||||
if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return;
|
||||
|
||||
document.addEventListener("keydown", (event) => {
|
||||
// bail for input elements
|
||||
if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
|
||||
// bail with special keys
|
||||
if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return;
|
||||
if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) {
|
||||
SphinxHighlight.hideSearchWords();
|
||||
event.preventDefault();
|
||||
}
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
_ready(() => {
|
||||
/* Do not call highlightSearchWords() when we are on the search page.
|
||||
* It will highlight words from the *previous* search query.
|
||||
*/
|
||||
if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords();
|
||||
SphinxHighlight.initEscapeListener();
|
||||
});
|
||||
572
hw6/doc/hw6-opt.html
Normal file
|
|
@ -0,0 +1,572 @@
|
|||
<!DOCTYPE html>
|
||||
|
||||
<html lang="en" data-content_root="./">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.18.1: http://docutils.sourceforge.net/" />
|
||||
|
||||
<title>1. HW6: Dataflow Analysis and Optimizations — CS 153 2023</title>
|
||||
<link rel="stylesheet" type="text/css" href="_static/pygments.css?v=4f649999" />
|
||||
<link rel="stylesheet" type="text/css" href="_static/alabaster.css?v=a2fbdfc9" />
|
||||
<link rel="stylesheet" type="text/css" href="_static/custom.css?v=3dba9716" />
|
||||
<link rel="stylesheet" type="text/css" href="_static/cs153-handout.css?v=bc747a33" />
|
||||
<script src="_static/documentation_options.js?v=7f41d439"></script>
|
||||
<script src="_static/doctools.js?v=888ff710"></script>
|
||||
<script src="_static/sphinx_highlight.js?v=dc90522c"></script>
|
||||
<link rel="index" title="Index" href="genindex.html" />
|
||||
<link rel="search" title="Search" href="search.html" />
|
||||
<link rel="prev" title="<no title>" href="index.html" />
|
||||
|
||||
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
|
||||
|
||||
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
|
||||
|
||||
</head><body>
|
||||
|
||||
|
||||
<div class="document">
|
||||
<div class="documentwrapper">
|
||||
<div class="bodywrapper">
|
||||
|
||||
|
||||
<div class="body" role="main">
|
||||
|
||||
<section id="hw6-dataflow-analysis-and-optimizations">
|
||||
<span id="hw6-opt"></span><h1><span class="section-number">1. </span>HW6: Dataflow Analysis and Optimizations<a class="headerlink" href="#hw6-dataflow-analysis-and-optimizations" title="Link to this heading">¶</a></h1>
|
||||
<section id="getting-started">
|
||||
<h2><span class="section-number">1.1. </span>Getting Started<a class="headerlink" href="#getting-started" title="Link to this heading">¶</a></h2>
|
||||
<p>Many of the files in this project are taken from the earlier projects. The
|
||||
new files (only) and their uses are listed below. Those marked with <code class="docutils literal notranslate"><span class="pre">*</span></code> are
|
||||
the only ones you should need to modify while completing this assignment.</p>
|
||||
<table class="docutils align-default">
|
||||
<tbody>
|
||||
<tr class="row-odd"><td><p>bin/datastructures.ml</p></td>
|
||||
<td><p>set and map modules (enhanced with printing)</p></td>
|
||||
</tr>
|
||||
<tr class="row-even"><td><p>bin/cfg.ml</p></td>
|
||||
<td><p>“view” of LL control-flow graphs as dataflow graphs</p></td>
|
||||
</tr>
|
||||
<tr class="row-odd"><td><p>bin/analysis.ml</p></td>
|
||||
<td><p>helper functions for propagating dataflow facts</p></td>
|
||||
</tr>
|
||||
<tr class="row-even"><td><p>bin/solver.ml</p></td>
|
||||
<td><p><code class="docutils literal notranslate"><span class="pre">*</span></code> the general-purpose iterative dataflow analysis solver</p></td>
|
||||
</tr>
|
||||
<tr class="row-odd"><td><p>bin/alias.ml</p></td>
|
||||
<td><p><code class="docutils literal notranslate"><span class="pre">*</span></code> alias analysis</p></td>
|
||||
</tr>
|
||||
<tr class="row-even"><td><p>bin/dce.ml</p></td>
|
||||
<td><p><code class="docutils literal notranslate"><span class="pre">*</span></code> dead code elimination optimization</p></td>
|
||||
</tr>
|
||||
<tr class="row-odd"><td><p>bin/constprop.ml</p></td>
|
||||
<td><p><code class="docutils literal notranslate"><span class="pre">*</span></code> constant propagation analysis & optimization</p></td>
|
||||
</tr>
|
||||
<tr class="row-even"><td><p>bin/liveness.ml</p></td>
|
||||
<td><p>provided liveness analysis code</p></td>
|
||||
</tr>
|
||||
<tr class="row-odd"><td><p>bin/analysistests.ml</p></td>
|
||||
<td><p>test cases (for liveness, constprop, alias)</p></td>
|
||||
</tr>
|
||||
<tr class="row-even"><td><p>bin/opt.ml</p></td>
|
||||
<td><p><code class="docutils literal notranslate"><span class="pre">*</span></code> optimizer that runs dce and constprop (and more if you want)</p></td>
|
||||
</tr>
|
||||
<tr class="row-odd"><td><p>bin/backend.ml</p></td>
|
||||
<td><p><code class="docutils literal notranslate"><span class="pre">*</span></code> you will implement register allocation heuristics here</p></td>
|
||||
</tr>
|
||||
<tr class="row-even"><td><p>bin/registers.ml</p></td>
|
||||
<td><p>collects statistics about register usage</p></td>
|
||||
</tr>
|
||||
<tr class="row-odd"><td><p>bin/printanalysis.ml</p></td>
|
||||
<td><p>a standalone program to print the results of an analysis</p></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>You’ll need to have <a class="reference external" href="http://gallium.inria.fr/~fpottier/menhir/">menhir</a> and <a class="reference external" href="https://clang.llvm.org/">clang</a> installed on your system for this
|
||||
assignment. If you have any difficulty installing these tools, please
|
||||
post on <a class="reference external" href="https://edstem.org/us/courses/40936/discussion/">Ed</a> and/or contact the course staff.</p>
|
||||
</div>
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>As usual, running <code class="docutils literal notranslate"><span class="pre">oatc</span> <span class="pre">--test</span></code> will run the test suite. <code class="docutils literal notranslate"><span class="pre">oatc</span></code>
|
||||
also now supports several new flags having to do with optimizations.</p>
|
||||
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>-O1 : runs two iterations of (constprop followed by dce)
|
||||
--liveness {trivial|dataflow} : select which liveness analysis to use for register allocation
|
||||
--regalloc {none|greedy|better} : select which register allocator to use
|
||||
--print-regs : print a histogram of the registers used
|
||||
</pre></div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
<section id="overview">
|
||||
<h2><span class="section-number">1.2. </span>Overview<a class="headerlink" href="#overview" title="Link to this heading">¶</a></h2>
|
||||
<p>The Oat compiler we have developed so far produces very inefficient code,
|
||||
since it performs no optimizations at any stage of the compilation
|
||||
pipeline. In this project, you will implement several simple dataflow analyses
|
||||
and some optimizations at the level of our LLVMlite intermediate
|
||||
representation in order to improve code size and speed.</p>
|
||||
<section id="provided-code">
|
||||
<h3>Provided Code<a class="headerlink" href="#provided-code" title="Link to this heading">¶</a></h3>
|
||||
<p>The provided code makes extensive use of modules, module signatures, and
|
||||
functors. These aid in code reuse and abstraction. If you need a refresher on
|
||||
OCaml functors, we recommend reading through the <a class="reference external" href="https://dev.realworldocaml.org/functors.html">Functors Chapter</a> of Real World OCaml.</p>
|
||||
<p>In <code class="docutils literal notranslate"><span class="pre">datastructures.ml</span></code>, we provide you with a number of useful modules,
|
||||
module signatures, and functors for the assignment, including:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">OrdPrintT</span></code>: A module signature for a type that is both comparable and
|
||||
can be converted to a string for printing. This is used in conjunction with
|
||||
some of our other custom modules described below. Wrapper modules <code class="docutils literal notranslate"><span class="pre">Lbl</span></code>
|
||||
and <code class="docutils literal notranslate"><span class="pre">Uid</span></code> satisfying this signature are defined later in the file for the
|
||||
<code class="docutils literal notranslate"><span class="pre">Ll.lbl</span></code> and <code class="docutils literal notranslate"><span class="pre">Ll.uid</span></code> types.</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">SetS</span></code>: A module signature that extends OCaml’s
|
||||
built-in set to include string conversion and printing capabilities.</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">MakeSet</span></code>: A functor that creates an extended set (<code class="docutils literal notranslate"><span class="pre">SetS</span></code>) from a type
|
||||
that satisfies the <code class="docutils literal notranslate"><span class="pre">OrdPrintT</span></code> module signature. This is applied to the
|
||||
<code class="docutils literal notranslate"><span class="pre">Lbl</span></code> and <code class="docutils literal notranslate"><span class="pre">Uid</span></code> wrapper modules to create a label set module <code class="docutils literal notranslate"><span class="pre">LblS</span></code>
|
||||
and a UID set module <code class="docutils literal notranslate"><span class="pre">UidS</span></code>.</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">MapS</span></code>: A module signature that extends OCaml’s built-in maps to include
|
||||
string conversion and printing capabilities. Three additional helper
|
||||
functions are also included: <code class="docutils literal notranslate"><span class="pre">update</span></code> for updating the value associated
|
||||
with a particular key, <code class="docutils literal notranslate"><span class="pre">find_or</span></code> for performing a map look-up with a
|
||||
default value to be supplied when the key is not present, and <code class="docutils literal notranslate"><span class="pre">update_or</span></code>
|
||||
for updating the value associated with a key if it is present, or adding an
|
||||
entry with a default value if not.</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">MakeMap</span></code>: A functor that creates an extended map (<code class="docutils literal notranslate"><span class="pre">MapS</span></code>) from a type
|
||||
that satisfies the <code class="docutils literal notranslate"><span class="pre">OrdPrintT</span></code> module signature. This is applied to the
|
||||
<code class="docutils literal notranslate"><span class="pre">Lbl</span></code> and <code class="docutils literal notranslate"><span class="pre">Uid</span></code> wrapper modules to create a label map module <code class="docutils literal notranslate"><span class="pre">LblM</span></code>
|
||||
and a UID map module <code class="docutils literal notranslate"><span class="pre">UidM</span></code>. These map modules have fixed key types, but
|
||||
are polymorphic in the types of their values.</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
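<p>Once the functors have been applied, the resulting modules behave like the
standard library’s sets and maps, plus the printing helpers. The sketch below
uses only operations inherited from <code class="docutils literal notranslate"><span class="pre">Set.Make</span></code> and <code class="docutils literal notranslate"><span class="pre">Map.Make</span></code>; in real code you
would use <code class="docutils literal notranslate"><span class="pre">UidS</span></code> and <code class="docutils literal notranslate"><span class="pre">UidM</span></code> from <code class="docutils literal notranslate"><span class="pre">datastructures.ml</span></code> instead of the locally
defined <code class="docutils literal notranslate"><span class="pre">S</span></code> and <code class="docutils literal notranslate"><span class="pre">M</span></code>.</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Illustration only: S and M stand in for UidS and UidM. *)
module S = Set.Make(String)
module M = Map.Make(String)

let () =
  let live  = S.add "%x" (S.add "%y" S.empty) in   (* a set of uids       *)
  let facts = M.add "%x" 42 M.empty in             (* a map keyed by uids *)
  Printf.printf "%%x live? %b, fact: %d\n"
    (S.mem "%x" live)
    (match M.find_opt "%x" facts with Some v -> v | None -> 0)
</pre></div>
</div>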
|
||||
</section>
|
||||
</section>
|
||||
<section id="task-i-dataflow-analysis">
|
||||
<h2><span class="section-number">1.3. </span>Task I: Dataflow Analysis<a class="headerlink" href="#task-i-dataflow-analysis" title="Link to this heading">¶</a></h2>
|
||||
<p>Your first task is to implement a version of the worklist algorithm for
|
||||
solving dataflow flow equations presented in lecture. Since we plan to
|
||||
implement several analyses, we’d like to reuse as much code as possible
|
||||
between each one. In lecture, we saw that each analysis differs only in the
|
||||
choice of the lattice, the flow function, the direction of the analysis,
|
||||
and how to compute the meet of facts flowing into a node. We can take
|
||||
advantage of this by writing a generic solver as an OCaml functor and
|
||||
instantiating it with these parameters.</p>
|
||||
<section id="the-algorithm">
|
||||
<h3>The Algorithm<a class="headerlink" href="#the-algorithm" title="Link to this heading">¶</a></h3>
|
||||
<p>Assuming only that we have a directed graph where each node is labeled with a
|
||||
<em>dataflow fact</em> and a <em>flow function</em>, we can compute a fixpoint of the flow
|
||||
on the graph as follows:</p>
|
||||
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>let w = new set with all nodes
|
||||
repeat until w is empty
|
||||
let n = w.pop()
|
||||
old_out = out[n]
|
||||
let in = combine(preds[n])
|
||||
out[n] := flow[n](in)
|
||||
if (!equal old_out out[n]),
|
||||
for all m in succs[n], w.add(m)
|
||||
end
|
||||
</pre></div>
|
||||
</div>
|
||||
<p>Here <code class="docutils literal notranslate"><span class="pre">equal</span></code>, <code class="docutils literal notranslate"><span class="pre">combine</span></code> and <code class="docutils literal notranslate"><span class="pre">flow</span></code> are abstract operations that will be
|
||||
instantiated with lattice equality, the meet operation and the flow function
|
||||
(e.g., defined by the gen and kill sets of the analysis),
|
||||
respectively. Similarly, <code class="docutils literal notranslate"><span class="pre">preds</span></code> and <code class="docutils literal notranslate"><span class="pre">succs</span></code> are the graph predecessors
|
||||
and successors in the <em>flow graph</em>, and do not correspond to the control flow
|
||||
of the program. They can be instantiated appropriately to create a forwards or
|
||||
backwards analysis.</p>
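<p>For reference, the loop above can be phrased in OCaml roughly as follows.
This is a self-contained sketch over a toy graph (integer node ids, string-set
facts, and <code class="docutils literal notranslate"><span class="pre">preds</span></code>, <code class="docutils literal notranslate"><span class="pre">succs</span></code>, <code class="docutils literal notranslate"><span class="pre">flow</span></code>, and <code class="docutils literal notranslate"><span class="pre">combine</span></code> passed in as plain functions),
not the <code class="docutils literal notranslate"><span class="pre">DFA_GRAPH</span></code> and <code class="docutils literal notranslate"><span class="pre">FACT</span></code> interface that your <code class="docutils literal notranslate"><span class="pre">solve</span></code> must work against.</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Sketch only: a worklist fixpoint over a toy graph representation. *)
module S = Set.Make(String)

type node = int

let solve
    (nodes : node list)
    (preds : node -> node list)
    (succs : node -> node list)
    (flow  : node -> S.t -> S.t)
    (combine : S.t list -> S.t) : (node * S.t) list =
  let out = Hashtbl.create 16 in
  List.iter (fun n -> Hashtbl.replace out n S.empty) nodes;
  let get n = try Hashtbl.find out n with Not_found -> S.empty in
  let work = Queue.create () in
  List.iter (fun n -> Queue.add n work) nodes;
  while not (Queue.is_empty work) do
    let n = Queue.pop work in
    let old_out = get n in
    let in_fact = combine (List.map get (preds n)) in   (* meet over predecessors *)
    let new_out = flow n in_fact in
    Hashtbl.replace out n new_out;
    if not (S.equal old_out new_out) then               (* fact changed: requeue successors *)
      List.iter (fun m -> Queue.add m work) (succs n)
  done;
  List.map (fun n -> (n, get n)) nodes
</pre></div>
</div>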
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>Don’t try to use OCaml’s polymorphic equality operator (<code class="docutils literal notranslate"><span class="pre">=</span></code>) to compare
|
||||
<code class="docutils literal notranslate"><span class="pre">old_out</span></code> and <code class="docutils literal notranslate"><span class="pre">out[n]</span></code> – polymorphic equality compares the concrete representations of the facts, which for sets and maps need not coincide with semantic <em>structural
|
||||
equality</em>. Use the supplied <code class="docutils literal notranslate"><span class="pre">Fact.compare</span></code> instead.</p>
|
||||
</div>
|
||||
</section>
|
||||
<section id="getting-started-and-testing">
|
||||
<h3>Getting Started and Testing<a class="headerlink" href="#getting-started-and-testing" title="Link to this heading">¶</a></h3>
|
||||
<p>Be sure to review the comments in the <code class="docutils literal notranslate"><span class="pre">DFA_GRAPH</span></code> (<em>data flow analysis graph</em>)
|
||||
and <code class="docutils literal notranslate"><span class="pre">FACT</span></code> module signatures in <code class="docutils literal notranslate"><span class="pre">solver.ml</span></code>, which define the parameters of
|
||||
the solver. Make sure you understand what each declaration in the signature does
|
||||
– your solver will need to use each one (other than the printing functions)!
|
||||
It will also be helpful for you to understand the way that <code class="docutils literal notranslate"><span class="pre">cfg.ml</span></code> connects
|
||||
to the solver. Read the commentary there for more information.</p>
|
||||
</section>
|
||||
<section id="now-implement-the-solver">
|
||||
<h3>Now implement the solver<a class="headerlink" href="#now-implement-the-solver" title="Link to this heading">¶</a></h3>
|
||||
<p>Your first task is to fill in the <code class="docutils literal notranslate"><span class="pre">solve</span></code> function in the <code class="docutils literal notranslate"><span class="pre">Solver.Make</span></code>
|
||||
functor in <code class="docutils literal notranslate"><span class="pre">solver.ml</span></code>. The input to the function is a flow graph labeled
|
||||
with the initial facts. It should compute the fixpoint and return a graph with
|
||||
the corresponding labeling. You will find the set datatype from
|
||||
<code class="docutils literal notranslate"><span class="pre">datastructures.ml</span></code> useful for manipulating sets of nodes.</p>
|
||||
<p>To test your solver, we have provided a full implementation of a liveness
|
||||
analysis in <code class="docutils literal notranslate"><span class="pre">liveness.ml</span></code>. Once you’ve completed the solver, the liveness
|
||||
tests in the test suite should all be passing. These tests compare the output
|
||||
of your solver on a number of programs with pre-computed solutions in
|
||||
<code class="docutils literal notranslate"><span class="pre">analysistests.ml</span></code>. Each entry in this file describes the set of uids that
|
||||
are <strong>live-in</strong> at a label in a program from <code class="docutils literal notranslate"><span class="pre">./llprograms</span></code>. To debug,
|
||||
you can compare these with the output of the <code class="docutils literal notranslate"><span class="pre">Graph.to_string</span></code> function on
|
||||
the flow graphs you will be manipulating.</p>
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>The stand-alone program <code class="docutils literal notranslate"><span class="pre">printanalysis</span></code> can print out the results of a
|
||||
dataflow analysis for a given .ll program. You can build it by doing
|
||||
<code class="docutils literal notranslate"><span class="pre">make</span> <span class="pre">printanalysis</span></code>. It takes flags for each analysis (run with <code class="docutils literal notranslate"><span class="pre">--h</span></code>
|
||||
for a list).</p>
|
||||
</div>
|
||||
</section>
|
||||
</section>
|
||||
<section id="task-ii-alias-analysis-and-dead-code-elimination">
|
||||
<h2><span class="section-number">1.4. </span>Task II: Alias Analysis and Dead Code Elimination<a class="headerlink" href="#task-ii-alias-analysis-and-dead-code-elimination" title="Link to this heading">¶</a></h2>
|
||||
<p>The goal of this task is to implement a simple dead code elimination
|
||||
optimization that can also remove <code class="docutils literal notranslate"><span class="pre">store</span></code> instructions when we can prove
|
||||
that they have no effect on the result of the program. Though we already have
|
||||
a liveness analysis, it doesn’t give us enough information to eliminate
|
||||
<code class="docutils literal notranslate"><span class="pre">store</span></code> instructions: even if we know the UID of the destination pointer is
|
||||
dead after a store and is not used in a load in the rest of the program, we
|
||||
cannot remove a store instruction because of <em>aliasing</em>. The problem is that
|
||||
there may be different UIDs that name the same stack slot. There are a number
|
||||
of ways this can happen after a pointer is returned by <code class="docutils literal notranslate"><span class="pre">alloca</span></code>:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p>The pointer is used as an argument to a <code class="docutils literal notranslate"><span class="pre">getelementptr</span></code> or <code class="docutils literal notranslate"><span class="pre">bitcast</span></code> instruction</p></li>
|
||||
<li><p>The pointer is stored into memory and then later loaded</p></li>
|
||||
<li><p>The pointer is passed as an argument to a function, which can manipulate it
|
||||
in arbitrary ways</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
<p>Some pointers are never aliased. For example, the code generated by the Oat
|
||||
frontend for local variables never creates aliases because the Oat language
|
||||
itself doesn’t have an “address of” operator. We can find such uses of
|
||||
<code class="docutils literal notranslate"><span class="pre">alloca</span></code> by applying a simple alias analysis.</p>
|
||||
<section id="alias-analysis">
|
||||
<h3>Alias Analysis<a class="headerlink" href="#alias-analysis" title="Link to this heading">¶</a></h3>
|
||||
<p>We have provided some code to get you started in <code class="docutils literal notranslate"><span class="pre">alias.ml</span></code>. You will have
|
||||
to fill in the flow function and lattice operations. The type of lattice
|
||||
elements, <code class="docutils literal notranslate"><span class="pre">fact</span></code>, is a map from UIDs to <em>symbolic pointers</em> of type
|
||||
<code class="docutils literal notranslate"><span class="pre">SymPtr.t</span></code>. Your analysis should compute, at every program point, the set of
|
||||
UIDs of pointer type that are in scope and, additionally, whether that pointer
|
||||
is the unique name for a stack slot according to the rules above. See the
|
||||
comments in <code class="docutils literal notranslate"><span class="pre">alias.ml</span></code> for details.</p>
|
||||
<blockquote>
|
||||
<div><ol class="arabic simple">
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Alias.insn_flow</span></code>: the flow function over instructions</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Alias.fact.combine</span></code>: the combine function for alias facts</p></li>
|
||||
</ol>
|
||||
</div></blockquote>
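<p>To give a feel for the two functions above, here is a stripped-down sketch
of an alias flow. The constructor names and the three-case instruction type
are placeholders chosen for illustration; the real definitions live in
<code class="docutils literal notranslate"><span class="pre">alias.ml</span></code> and <code class="docutils literal notranslate"><span class="pre">ll.ml</span></code>.</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Sketch only: UniqueStack / MayAlias are illustrative stand-ins for SymPtr.t. *)
type symptr = UniqueStack | MayAlias

type insn =
  | Alloca of string               (* %uid = alloca ...                  *)
  | Gep of string * string         (* %uid = getelementptr ... from %src *)
  | CallWithArg of string          (* a call that receives pointer %src  *)

module M = Map.Make(String)

let insn_flow (i : insn) (d : symptr M.t) : symptr M.t =
  match i with
  | Alloca uid      -> M.add uid UniqueStack d                   (* fresh, unaliased slot   *)
  | Gep (uid, src)  -> M.add uid MayAlias (M.add src MayAlias d) (* pointer escapes via gep *)
  | CallWithArg src -> M.add src MayAlias d                      (* callee may do anything  *)
</pre></div>
</div>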
|
||||
</section>
|
||||
<section id="dead-code-elimination">
|
||||
<h3>Dead Code Elimination<a class="headerlink" href="#dead-code-elimination" title="Link to this heading">¶</a></h3>
|
||||
<p>Now we can use our liveness and alias analyses to implement a dead code
|
||||
elimination pass. We will simply compute the results of the analysis at each
|
||||
program point, then iterate over the blocks of the CFG removing any
|
||||
instructions that do not contribute to the output of the program.</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p>For all instructions except <code class="docutils literal notranslate"><span class="pre">store</span></code> and <code class="docutils literal notranslate"><span class="pre">call</span></code>, the instruction can
|
||||
be removed if the UID it defines is not live-out at the point of definition</p></li>
|
||||
<li><p>A <code class="docutils literal notranslate"><span class="pre">store</span></code> instruction can be removed if we know the UID of the destination
|
||||
pointer is not aliased and not live-out at the program point of the store</p></li>
|
||||
<li><p>A <code class="docutils literal notranslate"><span class="pre">call</span></code> instruction can never be removed</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
<p>Complete the dead-code elimination optimization in <code class="docutils literal notranslate"><span class="pre">dce.ml</span></code>, where you will
|
||||
only need to fill out the <code class="docutils literal notranslate"><span class="pre">dce_block</span></code> function that implements these rules.</p>
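<p>A minimal sketch of the per-instruction decision is shown below, with
placeholder types and with <code class="docutils literal notranslate"><span class="pre">live_out</span></code> and <code class="docutils literal notranslate"><span class="pre">unique_ptr</span></code> standing in for the
liveness and alias results; the real <code class="docutils literal notranslate"><span class="pre">dce_block</span></code> works over LLVMlite
instructions and the provided analysis lattices.</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Sketch only: the insn type and the two query functions are placeholders. *)
type insn =
  | Store of string      (* uid of the destination pointer *)
  | Call of string       (* uid the call defines           *)
  | Other of string      (* uid the instruction defines    *)

let keep_insn ~(live_out : string -> bool) ~(unique_ptr : string -> bool) (i : insn) : bool =
  match i with
  | Call _    -> true                                    (* calls are never removed             *)
  | Store dst -> live_out dst || not (unique_ptr dst)    (* keep if live or possibly aliased    *)
  | Other uid -> live_out uid                            (* keep only if its result is live-out *)

let dce_block live_out unique_ptr (b : insn list) : insn list =
  List.filter (keep_insn ~live_out ~unique_ptr) b
</pre></div>
</div>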
|
||||
</section>
|
||||
</section>
|
||||
<section id="task-iii-constant-propagation">
|
||||
<h2><span class="section-number">1.5. </span>Task III: Constant Propagation<a class="headerlink" href="#task-iii-constant-propagation" title="Link to this heading">¶</a></h2>
|
||||
<p>Programmers don’t often write dead code directly. However, dead code is often
|
||||
produced as a result of other optimizations that execute parts of the original
|
||||
program at compile time, for instance <em>constant propagation</em>. In this section
|
||||
you’ll implement a simple constant propagation analysis and constant folding
|
||||
optimization.</p>
|
||||
<p>Start by reading through <code class="docutils literal notranslate"><span class="pre">constprop.ml</span></code>. Constant propagation is similar
|
||||
to the alias analysis from the previous section. Dataflow facts will be maps
|
||||
from UIDs to the type <code class="docutils literal notranslate"><span class="pre">SymConst.t</span></code>, which corresponds to the lattice from
|
||||
the lecture slides. Your analysis will compute the set of UIDs in scope at
|
||||
each program point, and the integer value of any UID that is computed as a
|
||||
result of a series of <code class="docutils literal notranslate"><span class="pre">binop</span></code> and <code class="docutils literal notranslate"><span class="pre">icmp</span></code> instructions on constant
|
||||
operands. More specifically:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p>The flow out of any <code class="docutils literal notranslate"><span class="pre">binop</span></code> or <code class="docutils literal notranslate"><span class="pre">icmp</span></code> whose operands have been
|
||||
determined to be constants is the incoming flow with the defined UID set to
|
||||
<code class="docutils literal notranslate"><span class="pre">Const</span></code> with the expected constant value</p></li>
|
||||
<li><p>The flow out of any <code class="docutils literal notranslate"><span class="pre">binop</span></code> or <code class="docutils literal notranslate"><span class="pre">icmp</span></code> with a <code class="docutils literal notranslate"><span class="pre">NonConst</span></code> operand sets
|
||||
the defined UID to <code class="docutils literal notranslate"><span class="pre">NonConst</span></code></p></li>
|
||||
<li><p>Similarly, the flow out of any <code class="docutils literal notranslate"><span class="pre">binop</span></code> or <code class="docutils literal notranslate"><span class="pre">icmp</span></code> with a <code class="docutils literal notranslate"><span class="pre">UndefConst</span></code>
|
||||
operand sets the defined UID to <code class="docutils literal notranslate"><span class="pre">UndefConst</span></code></p></li>
|
||||
<li><p>A <code class="docutils literal notranslate"><span class="pre">store</span></code> or <code class="docutils literal notranslate"><span class="pre">call</span></code> of type <code class="docutils literal notranslate"><span class="pre">Void</span></code> sets the defined UID to
|
||||
<code class="docutils literal notranslate"><span class="pre">UndefConst</span></code></p></li>
|
||||
<li><p>All other instructions set the defined UID to <code class="docutils literal notranslate"><span class="pre">NonConst</span></code></p></li>
|
||||
</ul>
|
||||
</div></blockquote>
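<p>The first rule amounts to evaluating the operation at analysis time. A
sketch of that case, using the constructor names from the rules above but a
placeholder operator type (the real operators are defined in <code class="docutils literal notranslate"><span class="pre">ll.ml</span></code>):</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Sketch only: symconst and bop are local stand-ins for SymConst.t and the Ll operators. *)
type symconst = NonConst | Const of int64 | UndefConst
type bop = Add | Sub | Mul

let eval_bop (b : bop) (x : int64) (y : int64) : int64 =
  match b with
  | Add -> Int64.add x y
  | Sub -> Int64.sub x y
  | Mul -> Int64.mul x y

(* Fact for the uid defined by  %uid = binop b, op1, op2 *)
let binop_flow (b : bop) (op1 : symconst) (op2 : symconst) : symconst =
  match op1, op2 with
  | Const x, Const y -> Const (eval_bop b x y)      (* both constant: fold *)
  | NonConst, _ | _, NonConst -> NonConst
  | UndefConst, _ | _, UndefConst -> UndefConst
</pre></div>
</div>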
|
||||
<p>(At this point we could also include some arithmetic identities, for instance
|
||||
optimizing multiplication by 0, but we’ll keep the specification simple.)
|
||||
Next, you will have to implement the constant folding optimization itself,
|
||||
which just traverses the blocks of the CFG and replaces operands whose values
|
||||
we have computed with the appropriate constants. The structure of the code is
|
||||
very similar to that in the previous section. You will have to fill in:</p>
|
||||
<blockquote>
|
||||
<div><ol class="arabic simple">
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Constprop.insn_flow</span></code> with the rules defined above</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Constprop.Fact.combine</span></code> with the combine operation for the analysis</p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">Constprop.cp_block</span></code> (inside the <code class="docutils literal notranslate"><span class="pre">run</span></code> function) with the code needed
|
||||
to perform the constant propagation transformation</p></li>
|
||||
</ol>
|
||||
</div></blockquote>
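<p>For <code class="docutils literal notranslate"><span class="pre">Constprop.Fact.combine</span></code>, one common choice of meet on the per-UID
values is sketched below; this is only the pointwise part, and the real
combine must merge whole UID maps coming from all predecessors.</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Sketch only: symconst is a local stand-in for SymConst.t. *)
type symconst = NonConst | Const of int64 | UndefConst

let meet (a : symconst) (b : symconst) : symconst =
  match a, b with
  | UndefConst, x | x, UndefConst -> x                   (* no information yet    *)
  | Const x, Const y when Int64.equal x y -> Const x     (* agreeing constants    *)
  | Const _, Const _ -> NonConst                         (* conflicting constants *)
  | NonConst, _ | _, NonConst -> NonConst
</pre></div>
</div>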
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>Once you have implemented constant folding and dead-code elimination, the
|
||||
compiler’s <code class="docutils literal notranslate"><span class="pre">-O1</span></code> option will optimize your ll code by doing 2 iterations
|
||||
of (constant prop followed by dce). See <code class="docutils literal notranslate"><span class="pre">opt.ml</span></code>. The <code class="docutils literal notranslate"><span class="pre">-O1</span></code>
|
||||
optimizations are <em>not</em> used for testing <em>except</em> that they are <em>always</em>
|
||||
performed in the register-allocation quality tests – these optimizations
|
||||
improve register allocation (see below).</p>
|
||||
<p>This coupling means that a faulty optimization pass can degrade the quality of
your register allocation and, in turn, make it harder to earn a high score.</p>
|
||||
</div>
|
||||
</section>
|
||||
<section id="task-iv-register-allocationn-optional">
|
||||
<h2><span class="section-number">1.6. </span>Task IV: Register Allocationn (Optional)<a class="headerlink" href="#task-iv-register-allocationn-optional" title="Link to this heading">¶</a></h2>
|
||||
<p>The backend implementation that we have given you provides two basic register
|
||||
allocation strategies:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p><strong>none</strong>: spills all uids to the stack;</p></li>
|
||||
<li><p><strong>greedy</strong>: uses registers and a greedy linear-scan algorithm.</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
<p>For this task, you will implement a <strong>better</strong> register allocation strategy
|
||||
that makes use of the liveness information that you compute in Task I. Most
|
||||
of the instructions for this part of the assignment are found in
|
||||
<code class="docutils literal notranslate"><span class="pre">backend.ml</span></code>, where we have modified the code generation strategy to be able
|
||||
to make use of liveness information. The task is to implement a single
|
||||
function <code class="docutils literal notranslate"><span class="pre">better_layout</span></code> that beats our example “greedy” register allocation
|
||||
strategy. We recommend familiarizing yourself with the way that the simple
|
||||
strategies work before attempting to write your own allocator.</p>
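<p>As a rough illustration of how the liveness results can inform an allocator, the sketch
below counts, for each uid, how many program points it is live at and sorts the uids so
that the most frequently live ones can be considered for registers first. The set module
and the shape of the liveness data are assumptions made here for the example; the real
interface is the one exposed in <code class="docutils literal notranslate"><span class="pre">backend.ml</span></code>.</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Illustrative only: assumes uids are strings and that the analysis yields
   one live set per program point. *)
module S = Set.Make (String)

let rank_uids (live_sets : S.t list) : (string * int) list =
  let tbl = Hashtbl.create 17 in
  List.iter
    (fun s ->
       S.iter
         (fun u ->
            let n = try Hashtbl.find tbl u with Not_found -> 0 in
            Hashtbl.replace tbl u (n + 1))
         s)
    live_sets;
  (* Most frequently live uids first. *)
  Hashtbl.fold (fun u n acc -> (u, n) :: acc) tbl []
  |> List.sort (fun (_, a) (_, b) -> compare b a)
</pre></div>
</div>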
|
||||
<p>The compiler now also supports several additional command-line switches that
|
||||
can be used to select among different analysis and code generation options for
|
||||
testing purposes:</p>
|
||||
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>--print-regs prints the register usage statistics for x86 code
|
||||
--liveness {trivial|dataflow} use the specified liveness analysis
|
||||
--regalloc {none|greedy|better} use the specified register allocator
|
||||
</pre></div>
|
||||
</div>
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>The flags above <em>do not</em> imply the <code class="docutils literal notranslate"><span class="pre">-O1</span></code> flag (despite the fact that we
|
||||
always turn on optimization for testing purposes when running with
|
||||
<code class="docutils literal notranslate"><span class="pre">--test</span></code>). You should enable it explicitly.</p>
|
||||
</div>
|
||||
<p>For testing purposes, you can run the compiler with the <code class="docutils literal notranslate"><span class="pre">-v</span></code> verbose flag
|
||||
and/or use the <code class="docutils literal notranslate"><span class="pre">--print-regs</span></code> flag to get more information about how your
|
||||
algorithm is performing. It is also useful to sprinkle your own verbose
|
||||
output into the backend.</p>
|
||||
<p>The goal for this part of the homework is to create a strategy such that code
|
||||
generated with the <code class="docutils literal notranslate"><span class="pre">--regalloc</span> <span class="pre">better</span></code> <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">dataflow</span></code> flags is
|
||||
“better” than code generated using the simple settings, which are <code class="docutils literal notranslate"><span class="pre">--regalloc</span>
|
||||
<span class="pre">greedy</span></code> <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">dataflow</span></code>. See the discussion about how we compare
|
||||
register allocation strategies in <code class="docutils literal notranslate"><span class="pre">backend.ml</span></code>. The “quality” test cases
|
||||
report the results of these comparisons.</p>
|
||||
<p>Of course your register allocation strategy should produce correct code, so we
|
||||
still perform all of the correctness tests that we have used in previous
|
||||
versions of the compiler. Your allocation strategy should not break any of
|
||||
these tests – and you cannot earn points for the “quality” tests unless all
|
||||
of the correctness tests also pass.</p>
|
||||
<div class="admonition-note admonition">
|
||||
<p class="admonition-title">Note</p>
|
||||
<p>Since this task is optional, the quality test cases in <code class="docutils literal notranslate"><span class="pre">gradedtests.ml</span></code>
|
||||
are commented out. If you are doing this task, uncomment the additional
|
||||
tests in that file. (Look for the text “Uncomment the following code if
|
||||
you are doing the optional Task IV Register Allocation”.)</p>
|
||||
</div>
|
||||
</section>
|
||||
<section id="task-v-experimentation-validation-only-if-task-iv-completed">
|
||||
<h2><span class="section-number">1.7. </span>Task V: Experimentation / Validation (Only if Task Iv completed)<a class="headerlink" href="#task-v-experimentation-validation-only-if-task-iv-completed" title="Link to this heading">¶</a></h2>
|
||||
<p>Of course we want to understand how much of an impact your register allocation
|
||||
strategy has on actual execution time. For the final task, you will create a
|
||||
new Oat program that highlights the difference. There are two parts to this
|
||||
task.</p>
|
||||
<section id="create-a-test-case">
|
||||
<h3>Create a test case<a class="headerlink" href="#create-a-test-case" title="Link to this heading">¶</a></h3>
|
||||
<p>Post an Oat program to <a class="reference external" href="https://edstem.org/us/courses/40936/discussion/">Ed</a>. This program should exhibit significantly
|
||||
different performance when compiled using the “greedy” register allocation
|
||||
strategy vs. using your “better” register allocation strategy with dataflow
|
||||
information. See the files <code class="docutils literal notranslate"><span class="pre">hw4programs/regalloctest.oat</span></code> and
|
||||
<code class="docutils literal notranslate"><span class="pre">hw4programs/regalloctest2.oat</span></code> for uninspired examples of such a
|
||||
program. Yours should be more interesting.</p>
|
||||
</section>
|
||||
<section id="post-your-running-time">
|
||||
<h3>Post your running time<a class="headerlink" href="#post-your-running-time" title="Link to this heading">¶</a></h3>
|
||||
<p>Use the unix <code class="docutils literal notranslate"><span class="pre">time</span></code> command to test the performance of your
|
||||
register allocation algorithm. This should take the form of a simple table of
|
||||
timing information for several test cases, including the one you create and
|
||||
those mentioned below. You should test the performance in several
|
||||
configurations:</p>
|
||||
<blockquote>
|
||||
<div><ol class="arabic simple">
|
||||
<li><p>using the <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">trivial</span></code> <code class="docutils literal notranslate"><span class="pre">--regalloc</span> <span class="pre">none</span></code> flags (baseline)</p></li>
|
||||
<li><p>using the <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">dataflow</span></code> <code class="docutils literal notranslate"><span class="pre">--regalloc</span> <span class="pre">greedy</span></code> flags (greedy)</p></li>
|
||||
<li><p>using the <code class="docutils literal notranslate"><span class="pre">--liveness</span> <span class="pre">dataflow</span></code> <code class="docutils literal notranslate"><span class="pre">--regalloc</span> <span class="pre">better</span></code> flags (better)</p></li>
|
||||
<li><p>using the <code class="docutils literal notranslate"><span class="pre">--clang</span></code> flags (clang)</p></li>
|
||||
</ol>
|
||||
</div></blockquote>
|
||||
<p>And… all of the above plus the <code class="docutils literal notranslate"><span class="pre">-O1</span></code> flag.</p>
|
||||
<p>Test your compiler on at least these three programs:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">hw4programs/regalloctest.oat</span></code></p></li>
|
||||
<li><p><code class="docutils literal notranslate"><span class="pre">llprograms/matmul.ll</span></code></p></li>
|
||||
<li><p>your own test case</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
<p>Report the processor and OS version that you use to test. For best results,
|
||||
use a “lightly loaded” machine (close all other applications) and average the
|
||||
timing over several trial runs.</p>
|
||||
<p>The example below shows one interaction used to test the <code class="docutils literal notranslate"><span class="pre">matmul.ll</span></code> file in
|
||||
several configurations from the command line:</p>
|
||||
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>> ./oatc --liveness trivial --regalloc none llprograms/matmul.ll
|
||||
> time ./a.out
|
||||
|
||||
real 0m1.647s
|
||||
user 0m1.639s
|
||||
sys 0m0.002s
|
||||
|
||||
|
||||
> ./oatc --liveness dataflow --regalloc greedy llprograms/matmul.ll
|
||||
> time ./a.out
|
||||
|
||||
real 0m1.127s
|
||||
user 0m1.123s
|
||||
sys 0m0.002s
|
||||
|
||||
> ./oatc --liveness dataflow --regalloc better llprograms/matmul.ll
|
||||
> time ./a.out
|
||||
|
||||
real 0m0.500s
|
||||
user 0m0.496s
|
||||
sys 0m0.002s
|
||||
|
||||
> ./oatc --clang llprograms/matmul.ll
|
||||
> time ./a.out
|
||||
|
||||
real 0m0.061s
|
||||
user 0m0.053s
|
||||
sys 0m0.004s
|
||||
</pre></div>
|
||||
</div>
|
||||
<p>Don’t get too discouraged when clang beats your compiler’s performance by many
|
||||
orders of magnitude. It uses register promotion and many other optimizations
|
||||
to get high-quality code!</p>
|
||||
</section>
|
||||
</section>
|
||||
<section id="optional-task-leaderboard">
|
||||
<h2><span class="section-number">1.8. </span>Optional Task: Leaderboard!<a class="headerlink" href="#optional-task-leaderboard" title="Link to this heading">¶</a></h2>
|
||||
<p>As an optional and hopefully fun activity, we will run a leaderboard for efficient
|
||||
compilation. When you submit your homework, we will use it to compile a test suite.
|
||||
(You can choose what name will appear for you on the leaderboard; feel free to use
|
||||
your real name or a pseudonym.) We will compare the time that your compiled version
|
||||
takes to execute against a compilation using the Clang backend.</p>
|
||||
<p>You are welcome to implement additional optimizations by editing the file <code class="docutils literal notranslate"><span class="pre">opt.ml</span></code>.
|
||||
Note that your additional optimizations should run only if the <code class="docutils literal notranslate"><span class="pre">-O2</span></code> flag is passed
|
||||
(which will set <code class="docutils literal notranslate"><span class="pre">Opt.opt_level</span></code> to 2).</p>
|
||||
<p>All of your additional optimizations should be implemented in the <code class="docutils literal notranslate"><span class="pre">opt.ml</span></code> file; we
|
||||
know this isn’t good software engineering practice, but it helps us simplify our
|
||||
code submission framework; sorry.</p>
|
||||
<p>We will post on Ed a link to the leaderboard test suite, so you can access the latest
|
||||
version of the test suite.</p>
|
||||
<p>Info about leaderboard results: The leaderboard shows the execution time of your
|
||||
compiled version compared to the Clang-compiled version. Specifically, we compile
|
||||
a testcase with the command
|
||||
<code class="docutils literal notranslate"><span class="pre">./oatc</span> <span class="pre">-O2</span> <span class="pre">--liveness</span> <span class="pre">dataflow</span> <span class="pre">--regalloc</span> <span class="pre">better</span> <span class="pre">testfile</span> <span class="pre">runtime.c</span></code> and
|
||||
measure the execution time of the resulting executable. Let this time be
|
||||
<em>t_student</em>. We also compile the test case with the additional flag
|
||||
<code class="docutils literal notranslate"><span class="pre">--clang</span></code> and measure the execution time of the resulting executable. Let
|
||||
this time be <em>t_clang</em>. The leaderboard displays <em>t_student</em>
|
||||
divided by <em>t_clang</em> for each test case, and also the geometric mean
|
||||
of all the test cases. (The “version” column is the md5 sum of all the testcases.)</p>
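<p>For concreteness, the score computation amounts to the following (a sketch of the
arithmetic, not the grading script itself): each test contributes the ratio
<em>t_student</em> / <em>t_clang</em>, and the summary column is the geometric mean of
those ratios.</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>(* Sketch of the leaderboard arithmetic; assumes a non-empty list of ratios. *)
let ratio ~t_student ~t_clang = t_student /. t_clang

let geometric_mean (ratios : float list) : float =
  let n = float_of_int (List.length ratios) in
  exp (List.fold_left (fun acc r -> acc +. log r) 0.0 ratios /. n)

(* e.g. geometric_mean [1.8; 2.2; 0.9] is roughly 1.53 *)
</pre></div>
</div>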
|
||||
<p>Propose a test case to add to the leaderboard: If you implement an additional
|
||||
optimization and have developed a test case that your optimization does well on,
|
||||
you can post a description of your optimization and the test case on Ed, and we
|
||||
will consider the test case for inclusion in the test suite. Your test case must
|
||||
satisfy the following properties:</p>
|
||||
<blockquote>
|
||||
<div><ul class="simple">
|
||||
<li><p>Does not require any command line arguments to run.</p></li>
|
||||
<li><p>Takes on the order of 1-3 seconds to execute.</p></li>
|
||||
</ul>
|
||||
</div></blockquote>
|
||||
</section>
|
||||
<section id="grading">
|
||||
<h2><span class="section-number">1.9. </span>Grading<a class="headerlink" href="#grading" title="Link to this heading">¶</a></h2>
|
||||
<p><strong>Projects that do not compile will receive no credit!</strong></p>
|
||||
<dl class="simple">
|
||||
<dt>Your grade for this project will be based on:</dt><dd><ul class="simple">
|
||||
<li><p>100 Points: the various automated tests that we provide.</p></li>
|
||||
</ul>
|
||||
</dd>
|
||||
</dl>
|
||||
<ul class="simple">
|
||||
<li><p>Bonus points and unlimited bragging rights: completing
|
||||
one or more of the optional tasks. Note that the register-allocator
|
||||
quality tests don’t run unless your allocator passes all the correctness tests.</p></li>
|
||||
</ul>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
|
||||
<div class="sphinxsidebarwrapper"><h3>Navigation</h3>
|
||||
<ul class="current">
|
||||
<li class="toctree-l1 current"><a class="current reference internal" href="#">1. HW6: Dataflow Analysis and Optimizations</a><ul>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#getting-started">1.1. Getting Started</a></li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#overview">1.2. Overview</a><ul>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#provided-code">Provided Code</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-i-dataflow-analysis">1.3. Task I: Dataflow Analysis</a><ul>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#the-algorithm">The Algorithm</a></li>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#getting-started-and-testing">Getting Started and Testing</a></li>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#now-implement-the-solver">Now implement the solver</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-ii-alias-analysis-and-dead-code-elimination">1.4. Task II: Alias Analysis and Dead Code Elimination</a><ul>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#alias-analysis">Alias Analysis</a></li>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#dead-code-elimination">Dead Code Elimination</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-iii-constant-propagation">1.5. Task III: Constant Propagation</a></li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-iv-register-allocationn-optional">1.6. Task IV: Register Allocationn (Optional)</a></li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#task-v-experimentation-validation-only-if-task-iv-completed">1.7. Task V: Experimentation / Validation (Only if Task Iv completed)</a><ul>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#create-a-test-case">Create a test case</a></li>
|
||||
<li class="toctree-l3"><a class="reference internal" href="#post-your-running-time">Post your running time</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#optional-task-leaderboard">1.8. Optional Task: Leaderboard!</a></li>
|
||||
<li class="toctree-l2"><a class="reference internal" href="#grading">1.9. Grading</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<div class="clearer"></div>
|
||||
</div>
|
||||
<div class="footer">
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
32
hw6/dune
32
hw6/dune
|
|
@ -1,32 +0,0 @@
|
|||
(menhir
|
||||
(modules ll/llparser)
|
||||
(infer true)
|
||||
(merge_into llparser))
|
||||
|
||||
(menhir
|
||||
(modules parser)
|
||||
(infer true)
|
||||
(merge_into parser))
|
||||
|
||||
(rule
|
||||
(target lllexer.ml)
|
||||
(deps ll/lllexer.mll)
|
||||
(action (run ocamllex -o %{target} %{deps})))
|
||||
|
||||
(rule
|
||||
(target lexer.ml)
|
||||
(deps lexer.mll)
|
||||
(action (run ocamllex -o %{target} %{deps})))
|
||||
|
||||
|
||||
|
||||
(executable
|
||||
(name main)
|
||||
(modules (:standard \ ll-original frontend-break))
|
||||
(libraries unix str num))
|
||||
|
||||
(env
|
||||
(dev
|
||||
(flags (:standard -warn-error -A -no-strict-formats))))
|
||||
|
||||
(include_subdirs unqualified)
|
||||
|
|
@ -1,2 +1,3 @@
|
|||
(lang dune 2.9)
|
||||
(using menhir 2.0)
|
||||
(name hw6)
|
||||
(using menhir 2.1)
|
||||
|
|
|
|||
|
|
@ -1,688 +0,0 @@
|
|||
open Ll
|
||||
open Llutil
|
||||
open Ast
|
||||
|
||||
(* instruction streams ------------------------------------------------------ *)
|
||||
|
||||
(* As in the last project, we'll be working with a flattened representation
|
||||
of LLVMlite programs to make emitting code easier. This version
|
||||
additionally makes it possible to emit elements that will be gathered up and
|
||||
"hoisted" to specific parts of the constructed CFG
|
||||
- G of gid * Ll.gdecl: allows you to output global definitions in the middle
|
||||
of the instruction stream. You will find this useful for compiling string
|
||||
literals
|
||||
- E of uid * insn: allows you to emit an instruction that will be moved up
|
||||
to the entry block of the current function. This will be useful for
|
||||
compiling local variable declarations
|
||||
*)
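(* For example, the CStr case later in this file emits a hoisted global
   together with an ordinary instruction in a single stream:

     [] >:: G(gid, (str_typ, GString s))
        >:: I(uid, Gep(Ptr str_typ, Gid gid, [Const 0L; Const 0L]))
*)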
|
||||
|
||||
type elt =
|
||||
| L of Ll.lbl (* block labels *)
|
||||
| I of uid * Ll.insn (* instruction *)
|
||||
| T of Ll.terminator (* block terminators *)
|
||||
| G of gid * Ll.gdecl (* hoisted globals (usually strings) *)
|
||||
| E of uid * Ll.insn (* hoisted entry block instructions *)
|
||||
|
||||
type stream = elt list
|
||||
let ( >@ ) x y = y @ x
|
||||
let ( >:: ) x y = y :: x
|
||||
let lift : (uid * insn) list -> stream = List.rev_map (fun (x,i) -> I (x,i))
|
||||
|
||||
(* Build a CFG and collection of global variable definitions from a stream *)
|
||||
let cfg_of_stream (code:stream) : Ll.cfg * (Ll.gid * Ll.gdecl) list =
|
||||
let gs, einsns, insns, term_opt, blks = List.fold_left
|
||||
(fun (gs, einsns, insns, term_opt, blks) e ->
|
||||
match e with
|
||||
| L l ->
|
||||
begin match term_opt with
|
||||
| None ->
|
||||
if (List.length insns) = 0 then (gs, einsns, [], None, blks)
|
||||
else failwith @@ Printf.sprintf "build_cfg: block labeled %s has\
|
||||
no terminator" l
|
||||
| Some term ->
|
||||
(gs, einsns, [], None, (l, {insns; term})::blks)
|
||||
end
|
||||
| T t -> (gs, einsns, [], Some (Llutil.Parsing.gensym "tmn", t), blks)
|
||||
| I (uid,insn) -> (gs, einsns, (uid,insn)::insns, term_opt, blks)
|
||||
| G (gid,gdecl) -> ((gid,gdecl)::gs, einsns, insns, term_opt, blks)
|
||||
| E (uid,i) -> (gs, (uid, i)::einsns, insns, term_opt, blks)
|
||||
) ([], [], [], None, []) code
|
||||
in
|
||||
match term_opt with
|
||||
| None -> failwith "build_cfg: entry block has no terminator"
|
||||
| Some term ->
|
||||
let insns = einsns @ insns in
|
||||
({insns; term}, blks), gs
|
||||
|
||||
|
||||
(* compilation contexts ----------------------------------------------------- *)
|
||||
|
||||
(* To compile OAT variables, we maintain a mapping of source identifiers to the
|
||||
corresponding LLVMlite operands. Bindings are added for global OAT variables
|
||||
and local variables that are in scope. *)
|
||||
|
||||
module Ctxt = struct
|
||||
|
||||
type t = (Ast.id * (Ll.ty * Ll.operand)) list
|
||||
let empty = []
|
||||
|
||||
(* Add a binding to the context *)
|
||||
let add (c:t) (id:id) (bnd:Ll.ty * Ll.operand) : t = (id,bnd)::c
|
||||
|
||||
(* Lookup a binding in the context *)
|
||||
let lookup (id:Ast.id) (c:t) : Ll.ty * Ll.operand =
|
||||
List.assoc id c
|
||||
|
||||
end
|
||||
|
||||
(* Mapping of identifiers representing struct definitions to
|
||||
* the corresponding field-name-to-type map.
|
||||
|
||||
Note: You will need to use these operations when compiling structures.
|
||||
*)
|
||||
module TypeCtxt = struct
|
||||
type t = (Ast.id * Ast.field list) list
|
||||
let empty = []
|
||||
|
||||
let add c id bnd = (id, bnd) :: c
|
||||
let lookup id c = List.assoc id c
|
||||
let lookup_field st_name f_name (c : t) =
|
||||
let rec lookup_field_aux f_name l =
|
||||
match l with
|
||||
| [] -> failwith "TypeCtxt.lookup_field: Not_found"
|
||||
| h :: t -> if h.fieldName = f_name then h.ftyp else lookup_field_aux f_name t in
|
||||
lookup_field_aux f_name (List.assoc st_name c)
|
||||
|
||||
let rec index_of f l i =
|
||||
match l with
|
||||
| [] -> None
|
||||
| h :: t -> if h.fieldName = f then Some i else index_of f t (i + 1)
|
||||
|
||||
(* Return the index of a field in the struct. *)
|
||||
let index_of_field_opt st f (c : t) =
|
||||
index_of f (List.assoc st c) 0
|
||||
|
||||
let index_of_field st f c =
|
||||
match index_of_field_opt st f c with
|
||||
| None -> failwith "index_of_field: Not found"
|
||||
| Some x -> x
|
||||
|
||||
(* Return a pair of base type and index into struct *)
|
||||
let rec lookup_field_name f (c : t) =
|
||||
match c with
|
||||
| [] -> failwith "lookup_field_name: Not found"
|
||||
| (id, field) :: t ->
|
||||
match index_of f field 0 with
|
||||
| None -> lookup_field_name f t
|
||||
| Some x -> List.(nth field x).ftyp, Int64.of_int x
|
||||
end
|
||||
|
||||
(* compiling OAT types ------------------------------------------------------ *)
|
||||
|
||||
(* The mapping of source types onto LLVMlite is straightforward. Booleans and ints
|
||||
are represented as the corresponding integer types. OAT strings are
|
||||
pointers to bytes (I8). Arrays are the most interesting type: they are
|
||||
represented as pointers to structs where the first component is the number
|
||||
of elements in the following array.
|
||||
|
||||
NOTE: structure types are named, so they compile to their named form
|
||||
*)
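(* For example, the OAT type int[] (that is, TRef (RArray TInt)) compiles to
   Ptr (Struct [I64; Array(0, I64)]): a pointer to a length field followed by
   the array elements. *)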
|
||||
|
||||
let rec cmp_ty (ct : TypeCtxt.t) : Ast.ty -> Ll.ty = function
|
||||
| Ast.TBool -> I1
|
||||
| Ast.TInt -> I64
|
||||
| Ast.TRef r -> Ptr (cmp_rty ct r)
|
||||
| Ast.TNullRef r -> Ptr (cmp_rty ct r)
|
||||
|
||||
|
||||
and cmp_ret_ty ct : Ast.ret_ty -> Ll.ty = function
|
||||
| Ast.RetVoid -> Void
|
||||
| Ast.RetVal t -> cmp_ty ct t
|
||||
|
||||
and cmp_fty ct (ts, r) : Ll.fty =
|
||||
List.map (cmp_ty ct) ts, cmp_ret_ty ct r
|
||||
|
||||
and cmp_rty ct : Ast.rty -> Ll.ty = function
|
||||
| Ast.RString -> I8
|
||||
| Ast.RArray u -> Struct [I64; Array(0, cmp_ty ct u)]
|
||||
| Ast.RStruct r -> Namedt r
|
||||
| Ast.RFun (ts, t) ->
|
||||
let args, ret = cmp_fty ct (ts, t) in
|
||||
Fun (args, ret)
|
||||
|
||||
let typ_of_binop : Ast.binop -> Ast.ty * Ast.ty * Ast.ty = function
|
||||
| Add | Mul | Sub | Shl | Shr | Sar | IAnd | IOr -> (TInt, TInt, TInt)
|
||||
| Eq | Neq | Lt | Lte | Gt | Gte -> (TInt, TInt, TBool)
|
||||
| And | Or -> (TBool, TBool, TBool)
|
||||
|
||||
let typ_of_unop : Ast.unop -> Ast.ty * Ast.ty = function
|
||||
| Neg | Bitnot -> (TInt, TInt)
|
||||
| Lognot -> (TBool, TBool)
|
||||
|
||||
|
||||
(* Some useful helper functions *)
|
||||
|
||||
(* Generate a fresh temporary identifier. Since OAT identifiers cannot begin
|
||||
with an underscore, these should not clash with any source variables *)
|
||||
let gensym : string -> string =
|
||||
let c = ref 0 in
|
||||
fun (s:string) -> incr c; Printf.sprintf "_%s%d" s (!c)
|
||||
|
||||
(* Amount of space an Oat type takes when stored in the stack, in bytes.
|
||||
Note that since structured values are manipulated by reference, all
|
||||
Oat values take 8 bytes on the stack.
|
||||
*)
|
||||
let size_oat_ty (t : Ast.ty) = 8L
|
||||
|
||||
|
||||
(* Amount of space that needs to be allocated to store a structure *)
|
||||
let rec size_oat_struct (l : Ast.field list) =
|
||||
match l with
|
||||
| [] -> 0L
|
||||
| f :: t -> Int64.(add (size_oat_struct t) (size_oat_ty f.ftyp))
|
||||
|
||||
(* Generate code to allocate an array of source type TRef (RArray t) of the
|
||||
given size. Note "size" is an operand whose value can be computed at
|
||||
runtime *)
|
||||
let oat_alloc_array ct (t:Ast.ty) (size:Ll.operand) : Ll.ty * operand * stream =
|
||||
let ans_id, arr_id = gensym "array", gensym "raw_array" in
|
||||
let ans_ty = cmp_ty ct @@ TRef (RArray t) in
|
||||
let arr_ty = Ptr I64 in
|
||||
ans_ty, Id ans_id, lift
|
||||
[ arr_id, Call(arr_ty, Gid "oat_alloc_array", [I64, size])
|
||||
; ans_id, Bitcast(arr_ty, Id arr_id, ans_ty) ]
|
||||
|
||||
|
||||
(* Allocates an oat structure on the
|
||||
heap and returns a target operand with the appropriate reference.
|
||||
|
||||
- generate a call to 'oat_malloc' and use bitcast to convert the
|
||||
resulting pointer to the right type
|
||||
|
||||
- make sure to calculate the correct amount of space to allocate!
|
||||
*)
|
||||
let oat_alloc_struct ct (id:Ast.id) : Ll.ty * operand * stream =
|
||||
let ret_id, arr_id = gensym "struct", gensym "raw_struct" in
|
||||
let ans_ty = cmp_ty ct (TRef (RStruct id)) in
|
||||
let arr_ty = Ptr I64 in
|
||||
ans_ty, Id ret_id, lift
|
||||
[ arr_id, Call(arr_ty, Gid "oat_malloc", [I64, Const (size_oat_struct (TypeCtxt.lookup id ct))])
|
||||
; ret_id, Bitcast(arr_ty, Id arr_id, ans_ty) ]
|
||||
|
||||
|
||||
let str_arr_ty s = Array(1 + String.length s, I8)
|
||||
let i1_op_of_bool b = Ll.Const (if b then 1L else 0L)
|
||||
let i64_op_of_int i = Ll.Const (Int64.of_int i)
|
||||
|
||||
let cmp_binop t (b : Ast.binop) : Ll.operand -> Ll.operand -> Ll.insn =
|
||||
let ib b op1 op2 = Ll.Binop (b, t, op1, op2) in
|
||||
let ic c op1 op2 = Ll.Icmp (c, t, op1, op2) in
|
||||
match b with
|
||||
| Ast.Add -> ib Ll.Add
|
||||
| Ast.Mul -> ib Ll.Mul
|
||||
| Ast.Sub -> ib Ll.Sub
|
||||
| Ast.And -> ib Ll.And
|
||||
| Ast.IAnd -> ib Ll.And
|
||||
| Ast.IOr -> ib Ll.Or
|
||||
| Ast.Or -> ib Ll.Or
|
||||
| Ast.Shl -> ib Ll.Shl
|
||||
| Ast.Shr -> ib Ll.Lshr
|
||||
| Ast.Sar -> ib Ll.Ashr
|
||||
|
||||
| Ast.Eq -> ic Ll.Eq
|
||||
| Ast.Neq -> ic Ll.Ne
|
||||
| Ast.Lt -> ic Ll.Slt
|
||||
| Ast.Lte -> ic Ll.Sle
|
||||
| Ast.Gt -> ic Ll.Sgt
|
||||
| Ast.Gte -> ic Ll.Sge
|
||||
|
||||
(* Compiles an expression exp in context c, outputting the Ll operand that will
|
||||
receive the value of the expression, and the stream of instructions
|
||||
implementing the expression.
|
||||
*)
|
||||
let rec cmp_exp (tc : TypeCtxt.t) (c:Ctxt.t) (exp:Ast.exp node) : Ll.ty * Ll.operand * stream =
|
||||
match exp.elt with
|
||||
| Ast.CInt i -> I64, Const i, []
|
||||
| Ast.CNull r -> cmp_ty tc (TNullRef r), Null, []
|
||||
| Ast.CBool b -> I1, i1_op_of_bool b, []
|
||||
|
||||
| Ast.CStr s ->
|
||||
let gid = gensym "str_arr" in
|
||||
let str_typ = str_arr_ty s in
|
||||
let uid = gensym "str" in
|
||||
Ptr I8, Id uid, []
|
||||
>:: G(gid, (str_typ, GString s))
|
||||
>:: I(uid, Gep(Ptr str_typ, Gid gid, [Const 0L; Const 0L;]))
|
||||
|
||||
| Ast.Bop (bop, e1, e2) ->
|
||||
let t, _, ret_ty = typ_of_binop bop in
|
||||
let ll_t = cmp_ty tc t in
|
||||
let op1, code1 = cmp_exp_as tc c e1 ll_t in
|
||||
let op2, code2 = cmp_exp_as tc c e2 ll_t in
|
||||
let ans_id = gensym "bop" in
|
||||
cmp_ty tc ret_ty, Id ans_id, code1 >@ code2 >:: I(ans_id, cmp_binop ll_t bop op1 op2)
|
||||
|
||||
| Ast.Uop (uop, e) ->
|
||||
let t, ret_ty = typ_of_unop uop in
|
||||
let op, code = cmp_exp_as tc c e (cmp_ty tc t) in
|
||||
let ans_id = gensym "unop" in
|
||||
let cmp_uop op = function
|
||||
| Ast.Neg -> Binop (Sub, I64, i64_op_of_int 0, op)
|
||||
| Ast.Lognot -> Icmp (Eq, I1, op, i1_op_of_bool false)
|
||||
| Ast.Bitnot -> Binop (Xor, I64, op, i64_op_of_int (-1)) in
|
||||
cmp_ty tc ret_ty, Id ans_id, code >:: I (ans_id, cmp_uop op uop)
|
||||
|
||||
| Ast.Id id ->
|
||||
let t, op = Ctxt.lookup id c in
|
||||
begin match t with
|
||||
| Ptr (Fun _) -> t, op, []
|
||||
| Ptr t ->
|
||||
let ans_id = gensym id in
|
||||
t, Id ans_id, [I(ans_id, Load(Ptr t, op))]
|
||||
| _ -> failwith "broken invariant: identifier not a pointer"
|
||||
end
|
||||
|
||||
(* compiles the length(e) expression. *)
|
||||
| Ast.Length e ->
|
||||
let arr_ty, arr_op, arr_code = cmp_exp tc c e in
|
||||
let _ = match arr_ty with
|
||||
| Ptr (Struct [_; Array (_,t)]) -> t
|
||||
| _ -> failwith "Length: indexed into non pointer" in
|
||||
let ptr_id, tmp_id = gensym "index_ptr", gensym "tmp" in
|
||||
let ans_id = gensym "len" in
|
||||
I64, (Id ans_id),
|
||||
arr_code >@ lift
|
||||
[
|
||||
ptr_id, Gep(arr_ty, arr_op, [i64_op_of_int 0; i64_op_of_int 0])
|
||||
; ans_id, Load(Ptr I64, Id ptr_id)]
|
||||
|
||||
|
||||
| Ast.Index (e, i) ->
|
||||
let ans_ty, ptr_op, code = cmp_exp_lhs tc c exp in
|
||||
let ans_id = gensym "index" in
|
||||
ans_ty, Id ans_id, code >:: I(ans_id, Load(Ptr ans_ty, ptr_op))
|
||||
|
||||
| Ast.Call (f, es) ->
|
||||
cmp_call tc c f es
|
||||
|
||||
| Ast.CArr (elt_ty, cs) ->
|
||||
let size_op = Ll.Const (Int64.of_int @@ List.length cs) in
|
||||
let arr_ty, arr_op, alloc_code = oat_alloc_array tc elt_ty size_op in
|
||||
let ll_elt_ty = cmp_ty tc elt_ty in
|
||||
let add_elt s (i, elt) =
|
||||
let elt_op, elt_code = cmp_exp_as tc c elt ll_elt_ty in
|
||||
let ind = gensym "ind" in
|
||||
s >@ elt_code >@ lift
|
||||
[ ind, Gep(arr_ty, arr_op, [Const 0L; Const 1L; i64_op_of_int i ])
|
||||
; gensym "store", Store (ll_elt_ty, elt_op, Id ind) ]
|
||||
in
|
||||
let ind_code = List.(fold_left add_elt [] @@ mapi (fun i e -> i, e) cs) in
|
||||
arr_ty, arr_op, alloc_code >@ ind_code
|
||||
|
||||
(* - the initializer is a loop that uses id as the index
|
||||
- on each iteration of the loop, the code evaluates e2 and assigns it
|
||||
to the index stored in id.
|
||||
*)
|
||||
| Ast.NewArr (elt_ty, e1, id, e2) ->
|
||||
let ptr_id = gensym "ptr_" in
|
||||
let bound_id = gensym "bnd_" in
|
||||
let _, size_op, size_code = cmp_exp tc c e1 in
|
||||
let arr_ty, arr_op, alloc_code = oat_alloc_array tc elt_ty size_op in
|
||||
let for_loop = (no_loc @@ Ast.For ([(id, no_loc (CInt 0L))],
|
||||
Some (no_loc @@ Bop (Lt, no_loc @@ Id id, no_loc @@ Id bound_id)),
|
||||
Some (no_loc @@ Assn (no_loc @@ Id id, no_loc @@ Bop (Add, no_loc @@ Id id, no_loc @@ CInt 1L))),
|
||||
[no_loc @@ Assn (no_loc @@ Index (no_loc @@ Id ptr_id, no_loc @@ Id id), e2)])) in
|
||||
let new_context = Ctxt.add c ptr_id (Ptr arr_ty, Id ptr_id) in
|
||||
let new_context = Ctxt.add new_context bound_id (Ptr I64, Id bound_id) in
|
||||
let _, assign_code = cmp_stmt tc new_context arr_ty for_loop None None in
|
||||
arr_ty, arr_op,
|
||||
size_code >@
|
||||
alloc_code >@
|
||||
[I (bound_id, Alloca(I64))] >@
|
||||
[I (gensym "store", Store (I64, size_op, Id bound_id))] >@
|
||||
[I (ptr_id, Alloca(arr_ty))] >@
|
||||
[I (gensym "store", Store (arr_ty, arr_op, Id ptr_id))] >@
|
||||
assign_code
|
||||
|
||||
(* For each field component of the struct
|
||||
- use the TypeCtxt operations to compute getelementptr indices
|
||||
- compile the initializer expression
|
||||
- store the resulting value into the structure
|
||||
*)
|
||||
| Ast.CStruct (id, l) ->
|
||||
let struct_ty, struct_op, alloc_code = oat_alloc_struct tc id in
|
||||
let add_elt s (fid, fexp) =
|
||||
let field_type = cmp_ty tc @@ TypeCtxt.lookup_field id fid tc in
|
||||
let index = TypeCtxt.index_of_field id fid tc in
|
||||
let elt_op, elt_code = cmp_exp_as tc c fexp field_type in
|
||||
let ind = gensym "ind" in
|
||||
s >@ elt_code >@ lift
|
||||
[ ind, Gep(struct_ty, struct_op, [Const 0L; i64_op_of_int index])
|
||||
; gensym "store", Store(field_type, elt_op, Id ind) ]
|
||||
in
|
||||
let ind_code = List.fold_left add_elt [] l in
|
||||
struct_ty, struct_op, alloc_code >@ ind_code
|
||||
|
||||
| Ast.Proj (e, id) ->
|
||||
let ans_ty, ptr_op, code = cmp_exp_lhs tc c exp in
|
||||
let ans_id = gensym "proj" in
|
||||
ans_ty, Id ans_id, code >:: I(ans_id, Load(Ptr ans_ty, ptr_op))
|
||||
|
||||
|
||||
and cmp_exp_lhs (tc : TypeCtxt.t) (c:Ctxt.t) (e:exp node) : Ll.ty * Ll.operand * stream =
|
||||
match e.elt with
|
||||
| Ast.Id x ->
|
||||
let t, op = Ctxt.lookup x c in
|
||||
t, op, []
|
||||
|
||||
| Ast.Proj (e, i) ->
|
||||
let src_ty, src_op, src_code = cmp_exp tc c e in
|
||||
let ret_ty, ret_index = TypeCtxt.lookup_field_name i tc in
|
||||
let gep_id = gensym "index" in
|
||||
let ret_op = Gep(src_ty, src_op, [Const 0L; Const ret_index]) in
|
||||
cmp_ty tc ret_ty, Id gep_id, src_code >:: I (gep_id, ret_op)
|
||||
|
||||
|
||||
| Ast.Index (e, i) ->
|
||||
let arr_ty, arr_op, arr_code = cmp_exp tc c e in
|
||||
let _, ind_op, ind_code = cmp_exp tc c i in
|
||||
let ans_ty = match arr_ty with
|
||||
| Ptr (Struct [_; Array (_,t)]) -> t
|
||||
| _ -> failwith "Index: indexed into non pointer" in
|
||||
let ptr_id, tmp_id, call_id = gensym "index_ptr", gensym "tmp", gensym "call" in
|
||||
ans_ty, (Id ptr_id),
|
||||
arr_code >@ ind_code >@ lift
|
||||
[tmp_id, Bitcast(arr_ty, arr_op, Ptr I64)
|
||||
;call_id, Call (Void, Gid "oat_assert_array_length", [Ptr I64, Id tmp_id; I64, ind_op ])
|
||||
;ptr_id, Gep(arr_ty, arr_op, [i64_op_of_int 0; i64_op_of_int 1; ind_op]) ]
|
||||
|
||||
|
||||
|
||||
| _ -> failwith "invalid lhs expression"
|
||||
|
||||
and cmp_call (tc : TypeCtxt.t) (c:Ctxt.t) (exp:Ast.exp node) (es:Ast.exp node list) : Ll.ty * Ll.operand * stream =
|
||||
let (t, op, s) = cmp_exp tc c exp in
|
||||
let (ts, rt) =
|
||||
match t with
|
||||
| Ptr (Fun (l, r)) -> l, r
|
||||
| _ -> failwith "nonfunction passed to cmp_call" in
|
||||
let args, args_code = List.fold_right2
|
||||
(fun e t (args, code) ->
|
||||
let arg_op, arg_code = cmp_exp_as tc c e t in
|
||||
(t, arg_op)::args, arg_code @ code
|
||||
) es ts ([],[]) in
|
||||
let res_id = gensym "result" in
|
||||
rt, Id res_id, s >@ args_code >:: I(res_id, Call(rt, op, args))
|
||||
|
||||
and cmp_exp_as (tc : TypeCtxt.t) (c:Ctxt.t) (e:Ast.exp node) (t:Ll.ty) : Ll.operand * stream =
|
||||
let from_t, op, code = cmp_exp tc c e in
|
||||
if from_t = t then op, code
|
||||
else let res_id = gensym "cast" in
|
||||
Id res_id, code >:: I(res_id, Bitcast(from_t, op, t))
|
||||
|
||||
(* Compile a statement in context c with return typ rt. Return a new context,
|
||||
possibly extended with new local bindings, and the instruction stream
|
||||
implementing the statement.
|
||||
|
||||
Left-hand-sides of assignment statements must either be OAT identifiers,
|
||||
or an index into some arbitrary expression of array type. Otherwise, the
|
||||
program is not well-formed and your compiler may throw an error.
|
||||
*)
|
||||
and cmp_stmt (tc : TypeCtxt.t) (c:Ctxt.t) (rt:Ll.ty) (stmt:Ast.stmt node) (lo : Ll.lbl option) (ls : Ll.lbl option) : Ctxt.t * stream =
|
||||
|
||||
match stmt.elt with
|
||||
| Ast.Decl (id, init) ->
|
||||
let ll_ty, init_op, init_code = cmp_exp tc c init in
|
||||
let res_id = gensym id in
|
||||
let c' = Ctxt.add c id (Ptr ll_ty, Id res_id) in
|
||||
c', init_code
|
||||
>:: E(res_id, Alloca ll_ty)
|
||||
>:: I(gensym "store", Store (ll_ty, init_op, Id res_id))
|
||||
|
||||
| Ast.Assn (path ,e) ->
|
||||
let _, pop, path_code = cmp_exp_lhs tc c path in
|
||||
let ll_ty, eop, exp_code = cmp_exp tc c e in
|
||||
c, path_code >@ exp_code >:: I(gensym "store", (Store (ll_ty, eop, pop)))
|
||||
|
||||
| Ast.If (guard, st1, st2) ->
|
||||
let guard_ty, guard_op, guard_code = cmp_exp tc c guard in
|
||||
let then_code = cmp_block tc c rt st1 lo ls in
|
||||
let else_code = cmp_block tc c rt st2 lo ls in
|
||||
let lt, le, lm = gensym "then", gensym "else", gensym "merge" in
|
||||
c, guard_code
|
||||
>:: T(Cbr (guard_op, lt, le))
|
||||
>:: L lt >@ then_code >:: T(Br lm)
|
||||
>:: L le >@ else_code >:: T(Br lm)
|
||||
>:: L lm
|
||||
|
||||
(* the 'if?' checked null downcast statement.
|
||||
- check whether the value computed by exp is null, if so jump to
|
||||
the 'null' block, otherwise take the 'notnull' block
|
||||
|
||||
- the identifier id is in scope in the 'notnull' block and so
|
||||
needs to be allocated (and added to the context)
|
||||
|
||||
- as in the if-then-else construct, you should jump to the common
|
||||
merge label after either block
|
||||
*)
|
||||
| Ast.Cast (typ, id, exp, notnull, null) ->
|
||||
let translated_typ = cmp_ty tc (TRef typ) in
|
||||
let guard_op, guard_code = cmp_exp_as tc c exp translated_typ in
|
||||
let res_id = gensym id in
|
||||
let c' = Ctxt.add c id (Ptr translated_typ, Id res_id) in
|
||||
let null_code = cmp_block tc c rt null lo ls in
|
||||
let notnull_code = cmp_block tc c' rt notnull lo ls in
|
||||
let cast_id = gensym "cast" in
|
||||
let ln, lnn, lm = gensym "null", gensym "notnull", gensym "merge" in
|
||||
c, guard_code
|
||||
>:: I(cast_id, Icmp(Eq, translated_typ, guard_op, Null))
|
||||
>:: T(Cbr (Id cast_id, ln, lnn))
|
||||
>:: L lnn
|
||||
>:: E(res_id, Alloca translated_typ)
|
||||
>:: I(gensym "store", Store (translated_typ, guard_op, Id res_id))
|
||||
>@ notnull_code >:: T(Br lm)
|
||||
>:: L ln >@ null_code >:: T(Br lm)
|
||||
>:: L lm
|
||||
|
||||
| Ast.While (guard, body) ->
|
||||
let guard_ty, guard_op, guard_code = cmp_exp tc c guard in
|
||||
let lcond, lbody, lpost = gensym "cond", gensym "body", gensym "post" in
|
||||
let body_code = cmp_block tc c rt body (Some lpost) (Some lcond) in
|
||||
c, []
|
||||
>:: T (Br lcond)
|
||||
>:: L lcond >@ guard_code >:: T (Cbr (guard_op, lbody, lpost))
|
||||
>:: L lbody >@ body_code >:: T (Br lcond)
|
||||
>:: L lpost
|
||||
|
||||
| Ast.For (inits, guard, after, body) ->
|
||||
let guard = match guard with Some e -> e | None -> no_loc (CBool true) in
|
||||
let after = match after with Some s -> [s] | None -> [] in
|
||||
let body = body @ after in
|
||||
let ds = List.map (fun d -> no_loc (Decl d)) inits in
|
||||
let stream = cmp_block tc c rt (ds @ [no_loc @@ Ast.While (guard, body)]) None None in
|
||||
c, stream
|
||||
|
||||
| Ast.Ret None ->
|
||||
c, [T (Ret(Void, None))]
|
||||
|
||||
| Ast.Ret (Some e) ->
|
||||
let op, code = cmp_exp_as tc c e rt in
|
||||
c, code >:: T(Ret (rt, Some op))
|
||||
|
||||
| Ast.SCall (f, es) ->
|
||||
let _, op, code = cmp_call tc c f es in
|
||||
c, code
|
||||
|
||||
(* Compile a series of statements *)
|
||||
and cmp_block (tc : TypeCtxt.t) (c:Ctxt.t) (rt:Ll.ty) (stmts:Ast.block) (lo:Ll.lbl option) ls : stream =
|
||||
snd @@ List.fold_left (fun (c, code) s ->
|
||||
let c, stmt_code = cmp_stmt tc c rt s lo ls in
|
||||
c, code >@ stmt_code
|
||||
) (c,[]) stmts
|
||||
|
||||
|
||||
|
||||
(* Construct the structure context for compilation. We could reuse
|
||||
the H component from the Typechecker rather than recomputing this
|
||||
information here, but we do it this way to make the two parts of
|
||||
the project less interdependent. *)
|
||||
let get_struct_defns (p:Ast.prog) : TypeCtxt.t =
|
||||
List.fold_right (fun d ts ->
|
||||
match d with
|
||||
| Ast.Gtdecl { elt=(id, fs) } ->
|
||||
TypeCtxt.add ts id fs
|
||||
| _ -> ts) p TypeCtxt.empty
|
||||
|
||||
|
||||
(* Adds each function identifier to the context at an
|
||||
appropriately translated type.
|
||||
|
||||
NOTE: The Gid of a function is just its source name
|
||||
*)
|
||||
let cmp_function_ctxt (tc : TypeCtxt.t) (c:Ctxt.t) (p:Ast.prog) : Ctxt.t =
|
||||
List.fold_left (fun c -> function
|
||||
| Ast.Gfdecl { elt={ frtyp; fname; args } } ->
|
||||
let ft = TRef (RFun (List.map fst args, frtyp)) in
|
||||
Ctxt.add c fname (cmp_ty tc ft, Gid fname)
|
||||
| _ -> c
|
||||
) c p
|
||||
|
||||
(* Populate a context with bindings for global variables
|
||||
mapping OAT identifiers to LLVMlite gids and their types.
|
||||
|
||||
Only a small subset of OAT expressions can be used as global initializers
|
||||
in well-formed programs. (The constructors starting with C and Id's
|
||||
for global function values).
|
||||
*)
|
||||
let cmp_global_ctxt (tc : TypeCtxt.t) (c:Ctxt.t) (p:Ast.prog) : Ctxt.t =
|
||||
let gexp_ty c = function
|
||||
| Id id -> fst (Ctxt.lookup id c)
|
||||
| CStruct (t, cs) -> Ptr (Namedt t)
|
||||
| CNull r -> cmp_ty tc (TNullRef r)
|
||||
| CBool b -> I1
|
||||
| CInt i -> I64
|
||||
| CStr s -> Ptr (str_arr_ty s)
|
||||
| CArr (u, cs) -> Ptr (Struct [I64; Array(List.length cs, cmp_ty tc u)])
|
||||
| x -> failwith ( "bad global initializer: " ^ (Astlib.string_of_exp (no_loc x)))
|
||||
in
|
||||
List.fold_left (fun c -> function
|
||||
| Ast.Gvdecl { elt={ name; init } } ->
|
||||
Ctxt.add c name (Ptr (gexp_ty c init.elt), Gid name)
|
||||
| _ -> c) c p
|
||||
|
||||
|
||||
(* Compile a function declaration in global context c. Return the LLVMlite cfg
|
||||
and a list of global declarations containing the string literals appearing
|
||||
in the function.
|
||||
*)
|
||||
let cmp_fdecl (tc : TypeCtxt.t) (c:Ctxt.t) (f:Ast.fdecl node) : Ll.fdecl * (Ll.gid * Ll.gdecl) list =
|
||||
let {frtyp; args; body} = f.elt in
|
||||
let add_arg (s_typ, s_id) (c,code,args) =
|
||||
let ll_id = gensym s_id in
|
||||
let ll_ty = cmp_ty tc s_typ in
|
||||
let alloca_id = gensym s_id in
|
||||
let c = Ctxt.add c s_id (Ptr ll_ty, Ll.Id alloca_id)in
|
||||
c, []
|
||||
>:: E(alloca_id, Alloca ll_ty)
|
||||
>:: I(gensym "store", Store(ll_ty, Id ll_id, Id alloca_id))
|
||||
>@ code,
|
||||
(ll_ty, ll_id)::args
|
||||
in
|
||||
let c, args_code, args = List.fold_right add_arg args (c,[],[]) in
|
||||
let ll_rty = cmp_ret_ty tc frtyp in
|
||||
let block_code = cmp_block tc c ll_rty body None None in
|
||||
let argtys, f_param = List.split args in
|
||||
let f_ty = (argtys, ll_rty) in
|
||||
let f_cfg, globals = cfg_of_stream (args_code >@ block_code) in
|
||||
{f_ty; f_param; f_cfg}, globals
|
||||
|
||||
|
||||
|
||||
(* Compile a global initializer, returning the resulting LLVMlite global
|
||||
declaration, and a list of additional global declarations.
|
||||
*)
|
||||
let rec cmp_gexp c (tc : TypeCtxt.t) (e:Ast.exp node) : Ll.gdecl * (Ll.gid * Ll.gdecl) list =
|
||||
match e.elt with
|
||||
| CNull r -> (cmp_ty tc (TNullRef r), GNull), []
|
||||
| CBool b -> (I1, (if b then GInt 1L else GInt 0L)), []
|
||||
| CInt i -> (I64, GInt i), []
|
||||
| Id id -> ((fst @@ Ctxt.lookup id c), GGid id), []
|
||||
|
||||
| CStr s ->
|
||||
let gid = gensym "str" in
|
||||
let ll_ty = str_arr_ty s in
|
||||
(Ptr ll_ty, GGid gid), [gid, (ll_ty, GString s)]
|
||||
|
||||
| CArr (u, cs) ->
|
||||
let elts, gs = List.fold_right
|
||||
(fun cst (elts, gs) ->
|
||||
let gd, gs' = cmp_gexp c tc cst in
|
||||
gd::elts, gs' @ gs) cs ([], [])
|
||||
in
|
||||
let len = List.length cs in
|
||||
let ll_u = cmp_ty tc u in
|
||||
let gid = gensym "global_arr" in
|
||||
let arr_t = Struct [ I64; Array(len, ll_u) ] in
|
||||
let arr_i = GStruct [ I64, GInt (Int64.of_int len); Array(len, ll_u), GArray elts ] in
|
||||
(Ptr arr_t, GGid gid), (gid, (arr_t, arr_i))::gs
|
||||
|
||||
| CStruct (id, cs) ->
|
||||
let fields = TypeCtxt.lookup id tc in
|
||||
let elts, gs =
|
||||
List.fold_right
|
||||
(fun fs (elts, gs) ->
|
||||
let gd, gs' = cmp_gexp c tc (snd (List.find (fun (xid, xname) -> xid = fs.fieldName) cs)) in
|
||||
(gd :: elts, gs' @ gs)) fields ([], []) in
|
||||
let gid = gensym "global_struct" in
|
||||
(Ptr (Namedt id), GGid gid), (gid, (Namedt id, GStruct elts)) :: gs
|
||||
|
||||
| _ -> failwith "bad global initializer"
|
||||
|
||||
(* Oat internals function context ------------------------------------------- *)
|
||||
let internals =
|
||||
[ "oat_malloc", Ll.Fun ([I64], Ptr I64)
|
||||
; "oat_alloc_array", Ll.Fun ([I64], Ptr I64)
|
||||
; "oat_assert_not_null", Ll.Fun ([Ptr I8], Void)
|
||||
; "oat_assert_array_length", Ll.Fun ([Ptr I64; I64], Void)
|
||||
]
|
||||
|
||||
(* Oat builtin function context --------------------------------------------- *)
|
||||
let builtins = List.map
|
||||
(fun (fname, ftyp) ->
|
||||
let args, ret = cmp_fty TypeCtxt.empty ftyp in
|
||||
(fname, Ll.Fun (args, ret)))
|
||||
Typechecker.builtins
|
||||
|
||||
|
||||
let tctxt_to_tdecls c =
|
||||
List.map (fun (i, l) -> i, Struct (List.map (fun f -> cmp_ty c f.ftyp) l)) c
|
||||
|
||||
(* Compile a OAT program to LLVMlite *)
|
||||
let cmp_prog (p:Ast.prog) : Ll.prog =
|
||||
let tc = get_struct_defns p in
|
||||
(* add built-in functions to context *)
|
||||
let init_ctxt =
|
||||
List.fold_left (fun c (i, t) -> Ctxt.add c i (Ll.Ptr t, Gid i))
|
||||
Ctxt.empty builtins
|
||||
in
|
||||
let fc = cmp_function_ctxt tc init_ctxt p in
|
||||
|
||||
(* build global variable context *)
|
||||
let c = cmp_global_ctxt tc fc p in
|
||||
(* compile functions and global variables *)
|
||||
let fdecls, gdecls =
|
||||
List.fold_right (fun d (fs, gs) ->
|
||||
match d with
|
||||
| Ast.Gvdecl { elt=gd } ->
|
||||
let ll_gd, gs' = cmp_gexp c tc gd.init in
|
||||
(fs, (gd.name, ll_gd)::gs' @ gs)
|
||||
| Ast.Gfdecl fd ->
|
||||
let fdecl, gs' = cmp_fdecl tc c fd in
|
||||
(fd.elt.fname,fdecl)::fs, gs' @ gs
|
||||
| Ast.Gtdecl _ ->
|
||||
fs, gs
|
||||
) p ([], [])
|
||||
in
|
||||
(* gather external declarations *)
|
||||
let edecls = internals @ builtins in
|
||||
{ tdecls = tctxt_to_tdecls tc; gdecls; fdecls; edecls }
|
||||
13
hw6/hw4programs/abs.oat
Normal file
13
hw6/hw4programs/abs.oat
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
int abs(int x) {
|
||||
if (x < 0) {
|
||||
return -x;
|
||||
} else if (x > 0) {
|
||||
return x;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
int program (int argc, string[] argv) {
|
||||
print_int(abs(10) + abs(-10) + abs(0));
|
||||
return 0;
|
||||
}
|
||||
8
hw6/hw4programs/argassign.oat
Normal file
8
hw6/hw4programs/argassign.oat
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
int foo(int x) {
|
||||
x = x + 1;
|
||||
return x;
|
||||
}
|
||||
|
||||
int program (int argc, string[] argv) {
|
||||
return foo(17);
|
||||
}
|
||||
|
|
@ -7,8 +7,8 @@ int[] f(int[] x, int[] y, bool b) {
|
|||
}
|
||||
|
||||
int program (int argc, string[] argv) {
|
||||
var x = new int[3]{i -> 0};
|
||||
var y = new int[3]{i -> 0};
|
||||
var x = new int[3];
|
||||
var y = new int[3];
|
||||
f(x, y, true)[0] = 17;
|
||||
return x[0];
|
||||
}
|
||||
|
|
@ -51,7 +51,7 @@ bool binary_search (int[] input, int key, int min, int max) {
|
|||
}
|
||||
|
||||
int program (int argc, string[] argv) {
|
||||
var test_array = new int[100]{i->0};
|
||||
var test_array = new int[100];
|
||||
for (var i=0; i < 100; i=i+1;) { test_array[i] = 2 * i + 1; }
|
||||
var even = binary_search (test_array, 80, 0, 99);
|
||||
var odd = binary_search (test_array, 81, 0, 99);
|
||||
|
|
@ -20,7 +20,7 @@ void bubble_sort(int[] numbers, int array_size)
|
|||
}
|
||||
|
||||
int program (int argc, string[] argv) {
|
||||
var a = new int[8]{i -> 0};
|
||||
var a = new int[8];
|
||||
|
||||
a[0] = 121;
|
||||
a[1] = 125;
|
||||
|
|
@ -16,7 +16,7 @@ void proc2 ( ) {
|
|||
}
|
||||
|
||||
bool foo ( int x, int[] y ) {
|
||||
var s = bar (x, "compilerdesign");
|
||||
var s = bar (x, "CS153");
|
||||
proc1 ();
|
||||
return true;
|
||||
}
|
||||
14
hw6/hw4programs/easy_p5.oat
Normal file
14
hw6/hw4programs/easy_p5.oat
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
global i = 19;
|
||||
global b1 = true;
|
||||
global b2 = false;
|
||||
global str = "This is a string!";
|
||||
global arr1 = new int[]{0,1,2};
|
||||
global arr2 = new int[][]{ new int[]{10,11}, new int[]{20,21}, new int[]{30,31}};
|
||||
global arr3 = new string[]{"String1", "String2", "String3"};
|
||||
|
||||
global arr4 = new string[][]
|
||||
{
|
||||
new string[]{"String00","String01"},
|
||||
new string[]{"String10","String11"},
|
||||
new string[]{"String20","String21"}
|
||||
};
|
||||
|
|
@ -1,8 +1,7 @@
|
|||
global j = int[]{1,2,3,4};
|
||||
global j = new int[]{1,2,3,4};
|
||||
int[] f () {
|
||||
var a = new int[][]{1, 2};
|
||||
var i = new int[4];
|
||||
var arr1 = new int[3];
|
||||
var arr2 = new int[][3];
|
||||
return new int[2];
|
||||
}
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
int program (int argc, string[] argv) {
|
||||
if(6 != 5) {
|
||||
return ~(5 >> -6 << 9 >>> 10) * 2 - 100 + 6;
|
||||
return ~(5 >> --6 << 9 >>> 10) * 2 - 100 + 6;
|
||||
} else {
|
||||
return 2;
|
||||
}
|
||||
|
|
@ -1,52 +1,52 @@
|
|||
global float_len = 2;
|
||||
int[] determine_shift(int[] float)
|
||||
{
|
||||
var dec = float[1];
|
||||
var count = 0;
|
||||
while(dec > 0)
|
||||
{
|
||||
var temp = float[0];
|
||||
float[0] = temp << 1;
|
||||
dec = dec >>> 1;
|
||||
count = count + 1;
|
||||
}
|
||||
var list = new int[2];
|
||||
list[0] = float[0] + float[1];
|
||||
list[1] = count;
|
||||
return list;
|
||||
}
|
||||
|
||||
int[] multiply_floats(int[] f1, int[] f2)
|
||||
{
|
||||
var f1_shifted = determine_shift(f1);
|
||||
var f2_shifted = determine_shift(f2);
|
||||
var product = f1_shifted[0] * f2_shifted[0];
|
||||
var num_left_shifts = f1_shifted[1] + f2_shifted[1];
|
||||
var remainder = 0;
|
||||
for(var i = 0; i < num_left_shifts; i=i+1;)
|
||||
{
|
||||
var lsb = product [&] 1;
|
||||
var shifted_lsb = lsb << i;
|
||||
product = product >>> 1;
|
||||
remainder = remainder + shifted_lsb;
|
||||
}
|
||||
var ans = new int[2];
|
||||
ans[0] = product;
|
||||
ans[1] = remainder;
|
||||
return ans;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv)
|
||||
{
|
||||
var pi = new int[2];
|
||||
pi[0] = 3;
|
||||
pi[1] = 14159;
|
||||
var diameter = new int[2];
|
||||
diameter[0] = 20;
|
||||
diameter[1] = 17;
|
||||
var prod = multiply_floats(pi, diameter);
|
||||
print_int(prod[0]);
|
||||
print_string(".");
|
||||
print_int(prod[1]);
|
||||
return 0;
|
||||
global float_len = 2;
|
||||
int[] determine_shift(int[] float)
|
||||
{
|
||||
var dec = float[1];
|
||||
var count = 0;
|
||||
while(dec > 0)
|
||||
{
|
||||
var temp = float[0];
|
||||
float[0] = temp << 1;
|
||||
dec = dec >>> 1;
|
||||
count = count + 1;
|
||||
}
|
||||
var list = new int[2];
|
||||
list[0] = float[0] + float[1];
|
||||
list[1] = count;
|
||||
return list;
|
||||
}
|
||||
|
||||
int[] multiply_floats(int[] f1, int[] f2)
|
||||
{
|
||||
var f1_shifted = determine_shift(f1);
|
||||
var f2_shifted = determine_shift(f2);
|
||||
var product = f1_shifted[0] * f2_shifted[0];
|
||||
var num_left_shifts = f1_shifted[1] + f2_shifted[1];
|
||||
var remainder = 0;
|
||||
for(var i = 0; i < num_left_shifts; i=i+1;)
|
||||
{
|
||||
var lsb = product [&] 1;
|
||||
var shifted_lsb = lsb << i;
|
||||
product = product >>> 1;
|
||||
remainder = remainder + shifted_lsb;
|
||||
}
|
||||
var ans = new int[2];
|
||||
ans[0] = product;
|
||||
ans[1] = remainder;
|
||||
return ans;
|
||||
}
|
||||
|
||||
int program(int argc, string[] argv)
|
||||
{
|
||||
var pi = new int[2];
|
||||
pi[0] = 3;
|
||||
pi[1] = 14159;
|
||||
var diameter = new int[2];
|
||||
diameter[0] = 20;
|
||||
diameter[1] = 17;
|
||||
var prod = multiply_floats(pi, diameter);
|
||||
print_int(prod[0]);
|
||||
print_string(".");
|
||||
print_int(prod[1]);
|
||||
return 0;
|
||||
}
|
||||
8
hw6/hw4programs/globals7.oat
Normal file
8
hw6/hw4programs/globals7.oat
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
global arr = new int[] {};
|
||||
|
||||
int program (int argc, string[] argv) {
|
||||
var x = new int[3];
|
||||
arr = x;
|
||||
x[2] = 3;
|
||||
return arr[2];
|
||||
}
|
||||
|
|
@ -1,6 +1,9 @@
|
|||
string sub (string str, int start, int len) {
|
||||
var arr = array_of_string(str);
|
||||
var r = new int[len]{i -> arr[i+start]};
|
||||
var r = new int[len];
|
||||
for (var i = 0; i < len; i = i + 1;) {
|
||||
r[i] = arr[i+start];
|
||||
}
|
||||
return string_of_array (r);
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff