Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add coverage to Try Flow #7871

Open
wants to merge 6 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
139 changes: 108 additions & 31 deletions src/flow_dot_js.ml
Original file line number Diff line number Diff line change
Expand Up @@ -304,23 +304,23 @@ let mk_loc file line col =
}

(* [infer_type filename content line col] parses [content] as the file
   [filename], runs inference/merge on it, and returns the location of the
   expression at (line, col) together with either the pretty-printed type
   ([Ok]) or a short failure tag ([Error]).  Raises [Failure "parse error"]
   if the content does not parse.
   NOTE(review): the scraped diff below rendered BOTH the pre-change and
   post-change copies of this body (an indentation-only change upstream),
   so the statements appear twice; only one copy exists in the real file. *)
let infer_type filename content line col =
let filename = File_key.SourceFile filename in
let root = Path.dummy_path in
match parse_content filename content with
| Error _ -> failwith "parse error"
| Ok (ast, file_sig) ->
let file_sig = File_sig.abstractify_locs file_sig in
let cx, typed_ast = infer_and_merge ~root filename ast file_sig in
let file = Context.file cx in
let loc = mk_loc filename line col in Query_types.(
let result = type_at_pos_type ~full_cx:cx ~file ~file_sig ~expand_aliases:false
~omit_targ_defaults:false ~typed_ast loc in
match result with
| FailureNoMatch -> Loc.none, Error "No match"
| FailureUnparseable (loc, _, _) -> loc, Error "Unparseable"
| Success (loc, t) ->
loc, Ok (Ty_printer.string_of_t ~force_single_line:true t)
)
(* Second (post-change) copy from the diff rendering — identical logic. *)
let filename = File_key.SourceFile filename in
let root = Path.dummy_path in
match parse_content filename content with
| Error _ -> failwith "parse error"
| Ok (ast, file_sig) ->
let file_sig = File_sig.abstractify_locs file_sig in
let cx, typed_ast = infer_and_merge ~root filename ast file_sig in
let file = Context.file cx in
let loc = mk_loc filename line col in Query_types.(
let result = type_at_pos_type ~full_cx:cx ~file ~file_sig ~expand_aliases:false
~omit_targ_defaults:false ~typed_ast loc in
match result with
| FailureNoMatch -> Loc.none, Error "No match"
| FailureUnparseable (loc, _, _) -> loc, Error "Unparseable"
| Success (loc, t) ->
loc, Ok (Ty_printer.string_of_t ~force_single_line:true t)
)

let types_to_json types ~strip_root =
let open Hh_json in
Expand All @@ -336,21 +336,96 @@ let types_to_json types ~strip_root =
) in
JSON_Array types_json

(* [coverage_to_json ~strip_root ~trust types content] renders per-expression
   coverage results as an [Hh_json] object of shape
   { "expressions": { ..._count / ..._locs fields... } }.
   In trust mode the covered buckets are split into untainted/tainted;
   otherwise a single combined "covered" bucket is emitted.  [content] is the
   original source text, used to build the offset table for location JSON. *)
let coverage_to_json ~strip_root ~trust (types : (Loc.t * Coverage_response.expression_coverage) list) content =
  (* Tally how many expressions fall in each coverage kind; the last slot
     counts every expression regardless of kind. *)
  let count_kinds (u, t, e, n) (_loc, kind) =
    let open Coverage_response in
    match kind with
    | Untainted -> (u + 1, t, e, n + 1)
    | Tainted -> (u, t + 1, e, n + 1)
    | Empty -> (u, t, e + 1, n + 1)
    | Uncovered -> (u, t, e, n + 1)
  in
  (* Bucket locations by kind.  Empty locations are also reported as
     uncovered, mirroring the counting above (covered never includes them). *)
  let bucket_locs (u, t, e, unc) (loc, kind) =
    let open Coverage_response in
    match kind with
    | Untainted -> (loc :: u, t, e, unc)
    | Tainted -> (u, loc :: t, e, unc)
    | Empty -> (u, t, loc :: e, loc :: unc)
    | Uncovered -> (u, t, e, loc :: unc)
  in
  let untainted, tainted, empty, total =
    Core_list.fold_left ~f:count_kinds ~init:(0, 0, 0, 0) types
  in
  (* In trust mode, we only consider untainted locations covered.  In normal
     mode we consider both. *)
  let covered = if trust then untainted else untainted + tainted in
  let offset_table = Some (Offset_utils.make content) in
  (* The folds build the lists back-to-front; reverse to restore input order. *)
  let untainted_locs, tainted_locs, empty_locs, uncovered_locs =
    let u, t, e, unc =
      Core_list.fold_left ~f:bucket_locs ~init:([], [], [], []) types
    in
    (Core_list.rev u, Core_list.rev t, Core_list.rev e, Core_list.rev unc)
  in
  let open Hh_json in
  let open Reason in
  (* Shared renderer for a list of locations. *)
  let locs_json locs =
    JSON_Array (Core_list.map ~f:(json_of_loc ~strip_root ~offset_table) locs)
  in
  let covered_data =
    if trust then
      [
        "untainted_count", int_ untainted;
        "untainted_locs", locs_json untainted_locs;
        "tainted_count", int_ tainted;
        "tainted_locs", locs_json tainted_locs;
      ]
    else
      let covered_locs =
        Core_list.sort ~cmp:compare (untainted_locs @ tainted_locs)
      in
      [
        "covered_count", int_ covered;
        "covered_locs", locs_json covered_locs;
      ]
  in
  JSON_Object [
    "expressions", JSON_Object (covered_data @ [
      "uncovered_count", int_ (total - covered);
      "uncovered_locs", locs_json uncovered_locs;
      "empty_count", int_ empty;
      "empty_locs", locs_json empty_locs;
    ]);
  ]

(* [dump_types js_file js_content] parses and infers the given JS source and
   returns all inferred types as a JS value (via [js_of_json]).  Raises
   [Failure "parse error"] on a parse failure.
   NOTE(review): as with [infer_type], the scraped diff rendered both the
   pre- and post-change copies of this body; only one exists upstream. *)
let dump_types js_file js_content =
let filename = File_key.SourceFile (Js.to_string js_file) in
let root = Path.dummy_path in
let content = Js.to_string js_content in
match parse_content filename content with
| Error _ -> failwith "parse error"
| Ok (ast, file_sig) ->
let file_sig = File_sig.abstractify_locs file_sig in
let cx, typed_ast = infer_and_merge ~root filename ast file_sig in
let printer = Ty_printer.string_of_t in
let types = Query_types.dump_types ~printer cx file_sig typed_ast in
let strip_root = None in
let types_json = types_to_json types ~strip_root in

js_of_json types_json
(* Second (post-change) copy from the diff rendering — identical logic. *)
let filename = File_key.SourceFile (Js.to_string js_file) in
let root = Path.dummy_path in
let content = Js.to_string js_content in
match parse_content filename content with
| Error _ -> failwith "parse error"
| Ok (ast, file_sig) ->
let file_sig = File_sig.abstractify_locs file_sig in
let cx, typed_ast = infer_and_merge ~root filename ast file_sig in
let printer = Ty_printer.string_of_t in
let types = Query_types.dump_types ~printer cx file_sig typed_ast in
let strip_root = None in
let types_json = types_to_json types ~strip_root in

js_of_json types_json

(* [coverage js_file js_content] computes type-coverage data for the given
   JS source and returns it to the JS caller as a JSON-shaped value.  On a
   parse failure it raises a JS [Error] (via [Js.raise_js_error]) rather than
   an OCaml exception, so JS callers can catch it with an ordinary
   try/catch. *)
let coverage js_file js_content =
  let content = Js.to_string js_content in
  let filename = File_key.SourceFile (Js.to_string js_file) in
  let root = Path.dummy_path in
  match parse_content filename content with
  | Error _ ->
    (* Surface the failure to JS as an Error object, not an OCaml exn. *)
    let msg = Js.Unsafe.inject (Js.string "parse error") in
    Js.raise_js_error (Js.Unsafe.new_obj Js.error_constr [| msg |])
  | Ok (ast, file_sig) ->
    let file_sig = File_sig.abstractify_locs file_sig in
    let cx, typed_ast = infer_and_merge ~root filename ast file_sig in
    (* Trust checking stays off here, matching ~trust:false below. *)
    let types =
      Query_types.covered_types ~should_check:true ~check_trust:false cx typed_ast
    in
    let coverage_json =
      coverage_to_json types content ~trust:false ~strip_root:None
    in
    js_of_json coverage_json

let type_at_pos js_file js_content js_line js_col =
let filename = Js.to_string js_file in
Expand Down Expand Up @@ -381,6 +456,8 @@ let () = Js.Unsafe.set exports
"checkContent" (Js.wrap_callback check_content_js)
(* Expose dumpTypes on the JS "exports" object. *)
let () = Js.Unsafe.set exports
"dumpTypes" (Js.wrap_callback dump_types)
(* Expose the coverage entry point added by this change. *)
let () = Js.Unsafe.set exports
"coverage" (Js.wrap_callback coverage)
(* Report the js_of_ocaml compiler version to JS consumers. *)
let () = Js.Unsafe.set exports
"jsOfOcamlVersion" (Js.string Sys_js.js_of_ocaml_version)
let () = Js.Unsafe.set exports
Expand Down
125 changes: 119 additions & 6 deletions website/_assets/css/_try.scss
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,23 @@ html.site-fullscreen .content {
position: relative;
height: 100%;

/* Options toolbar overlaid on the editor pane (holds the checkbox labels
   added for the coverage toggle).  Anchored to the horizontal midpoint of
   its positioned ancestor via right: 50%. */
.options {
position: absolute;
right: 50%;
z-index: 3;

label {
cursor: pointer;
float: left;
font-size: 14px;
padding: 7px 15px;

/* Gap between the checkbox and its label text. */
input {
margin-right: 8px;
}
}
}

.code,
.results {
position: absolute;
Expand Down Expand Up @@ -163,17 +180,26 @@ html.site-fullscreen .content {
}

/* Bouncing-dots loading-spinner animation.
   NOTE(review): the scraped diff concatenated the old (compact) and new
   (expanded) formatting of these keyframes without +/- markers, so each
   @keyframes body below contains both copies and the braces do not pair
   up; only one well-formed copy exists in the real stylesheet. */
@-webkit-keyframes sk-bouncedelay {
0%, 80%, 100% { -webkit-transform: scale(0) }
40% { -webkit-transform: scale(1.0) }
0%,
80%,
100% {
-webkit-transform: scale(0);
}
40% {
-webkit-transform: scale(1);
}
}

@keyframes sk-bouncedelay {
0%, 80%, 100% {
0%,
80%,
100% {
-webkit-transform: scale(0);
transform: scale(0);
} 40% {
-webkit-transform: scale(1.0);
transform: scale(1.0);
}
40% {
-webkit-transform: scale(1);
transform: scale(1);
}
}

Expand Down Expand Up @@ -205,3 +231,90 @@ html.site-fullscreen .content {
width: 100%;
}
}

/* CodeMirror lint-addon styling: squiggly-underline images for marked text,
   gutter marker icons, and the hover tooltip.  The SVG/PNG images are
   inlined as data URIs so no extra asset requests are needed. */

/* Red squiggle under text marked as an error. */
.CodeMirror-lint-mark-error {
background-image: url("data:image/svg+xml,%3Csvg%20xmlns%3D'http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg'%20viewBox%3D'0%200%206%203'%20enable-background%3D'new%200%200%206%203'%20height%3D'3'%20width%3D'6'%3E%3Cg%20fill%3D'%23d60a0a'%3E%3Cpolygon%20points%3D'5.5%2C0%202.5%2C3%201.1%2C3%204.1%2C0'%2F%3E%3Cpolygon%20points%3D'4%2C0%206%2C2%206%2C0.6%205.4%2C0'%2F%3E%3Cpolygon%20points%3D'0%2C2%201%2C3%202.4%2C3%200%2C0.6'%2F%3E%3C%2Fg%3E%3C%2Fsvg%3E");
}

/* Squiggle under warnings (fill %23117711 is a green — NOTE(review):
   unusual choice for a warning color; confirm it is intentional). */
.CodeMirror-lint-mark-warning {
background-image: url("data:image/svg+xml,%3Csvg%20xmlns%3D'http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg'%20viewBox%3D'0%200%206%203'%20enable-background%3D'new%200%200%206%203'%20height%3D'3'%20width%3D'6'%3E%3Cg%20fill%3D'%23117711'%3E%3Cpolygon%20points%3D'5.5%2C0%202.5%2C3%201.1%2C3%204.1%2C0'%2F%3E%3Cpolygon%20points%3D'4%2C0%206%2C2%206%2C0.6%205.4%2C0'%2F%3E%3Cpolygon%20points%3D'0%2C2%201%2C3%202.4%2C3%200%2C0.6'%2F%3E%3C%2Fg%3E%3C%2Fsvg%3E");
}

/* Green squiggle under informational marks. */
.CodeMirror-lint-mark-info {
background-image: url("data:image/svg+xml,%3Csvg%20xmlns%3D'http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg'%20viewBox%3D'0%200%206%203'%20enable-background%3D'new%200%200%206%203'%20height%3D'3'%20width%3D'6'%3E%3Cg%20fill%3D'%23008000'%3E%3Cpolygon%20points%3D'5.5%2C0%202.5%2C3%201.1%2C3%204.1%2C0'%2F%3E%3Cpolygon%20points%3D'4%2C0%206%2C2%206%2C0.6%205.4%2C0'%2F%3E%3Cpolygon%20points%3D'0%2C2%201%2C3%202.4%2C3%200%2C0.6'%2F%3E%3C%2Fg%3E%3C%2Fsvg%3E");
}

/* The lint marker gutter. */
.CodeMirror-lint-markers {
width: 16px;
}

/* Hover tooltip showing the lint message.  Starts invisible (opacity: 0)
   and is faded in by the addon via the transition below. */
.CodeMirror-lint-tooltip {
background-color: #ffd;
border: 1px solid black;
border-radius: 4px 4px 4px 4px;
color: black;
font-family: monospace;
font-size: 10pt;
overflow: hidden;
padding: 2px 5px;
position: fixed;
/* pre is the fallback for engines without pre-wrap; keep this order. */
white-space: pre;
white-space: pre-wrap;
z-index: 100;
max-width: 600px;
opacity: 0;
transition: opacity 0.4s;
-moz-transition: opacity 0.4s;
-webkit-transition: opacity 0.4s;
-o-transition: opacity 0.4s;
-ms-transition: opacity 0.4s;
}

/* The squiggle images repeat horizontally along the bottom of the text. */
.CodeMirror-lint-mark-error,
.CodeMirror-lint-mark-warning,
.CodeMirror-lint-mark-info {
display: inline-block;
background-position: left bottom;
background-repeat: repeat-x;
}

/* 16x16 gutter icons. */
.CodeMirror-lint-marker-error,
.CodeMirror-lint-marker-warning {
background-position: center center;
background-repeat: no-repeat;
cursor: pointer;
display: inline-block;
height: 16px;
width: 16px;
vertical-align: middle;
position: relative;
}

/* Message rows in the tooltip; left padding reserves room for the icon. */
.CodeMirror-lint-message-error,
.CodeMirror-lint-message-warning {
padding-left: 18px;
background-position: top left;
background-repeat: no-repeat;
}

.CodeMirror-lint-marker-error,
.CodeMirror-lint-message-error {
background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAHlBMVEW7AAC7AACxAAC7AAC7AAAAAAC4AAC5AAD///+7AAAUdclpAAAABnRSTlMXnORSiwCK0ZKSAAAATUlEQVR42mWPOQ7AQAgDuQLx/z8csYRmPRIFIwRGnosRrpamvkKi0FTIiMASR3hhKW+hAN6/tIWhu9PDWiTGNEkTtIOucA5Oyr9ckPgAWm0GPBog6v4AAAAASUVORK5CYII=");
}

.CodeMirror-lint-marker-warning,
.CodeMirror-lint-message-warning {
background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAANlBMVEX/uwDvrwD/uwD/uwD/uwD/uwD/uwD/uwD/uwD6twD/uwAAAADurwD2tQD7uAD+ugAAAAD/uwDhmeTRAAAADHRSTlMJ8mN1EYcbmiixgACm7WbuAAAAVklEQVR42n3PUQqAIBBFUU1LLc3u/jdbOJoW1P08DA9Gba8+YWJ6gNJoNYIBzAA2chBth5kLmG9YUoG0NHAUwFXwO9LuBQL1giCQb8gC9Oro2vp5rncCIY8L8uEx5ZkAAAAASUVORK5CYII=");
}

/* Overlay shown in the gutter when one line has multiple lint messages. */
.CodeMirror-lint-marker-multiple {
background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAcAAAAHCAMAAADzjKfhAAAACVBMVEUAAAAAAAC/v7914kyHAAAAAXRSTlMAQObYZgAAACNJREFUeNo1ioEJAAAIwmz/H90iFFSGJgFMe3gaLZ0od+9/AQZ0ADosbYraAAAAAElFTkSuQmCC");
background-repeat: no-repeat;
background-position: right bottom;
width: 100%;
height: 100%;
}