package reason
Reason: Syntax & Toolchain for OCaml
Install
Dune Dependency
Authors
Maintainers
Sources
reason-3.8.0.tbz
sha256=9478724b01767320064ba950ddea24c5df0eefdcdd8ff1324ba9ccd646258979
sha512=c5dcec810a1981b78b19271a8a255f4087628514725d6818419b141be6346a62b28fe8d0f10855efe420ede2839bfe562aa5abf21f29d66250e5c0d02323d6d8
doc/src/reason/reason_toolchain_ocaml.ml.html
Source file reason_toolchain_ocaml.ml
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146
open Reason_toolchain_conf

(* The OCaml parser keeps doc strings in the comment list. To avoid
 * duplicating comments, we need to filter comments that appear as doc
 * strings in the AST out of the comment list. *)
let doc_comments_filter () =
  let open Ast_mapper in
  let open Parsetree in
  (* Locations of every docstring attribute encountered while mapping. *)
  let seen = Hashtbl.create 7 in
  let attribute mapper = function
    | { attr_name = { Location. txt = ("ocaml.doc" | "ocaml.text")};
        attr_payload =
          PStr [{ pstr_desc =
                    Pstr_eval
                      ({ pexp_desc =
                           Pexp_constant (Pconst_string(_text, _loc, None)) }
                      , _);
                  pstr_loc = loc }]} as attribute ->
      (* Workaround: OCaml 4.02.3 kept an initial '*' in docstrings.
       * For other versions, we have to put the '*' back. *)
      Hashtbl.add seen loc ();
      default_mapper.attribute mapper attribute
    | attribute -> default_mapper.attribute mapper attribute
  in
  let mapper = {default_mapper with attribute} in
  (* A comment survives the filter iff its location was not recorded as a
   * docstring while the mapper traversed the AST. *)
  let filter (_text, loc) = not (Hashtbl.mem seen loc) in
  (mapper, filter)

module Lexer_impl = struct
  type t = Lexing.lexbuf

  let init ?insert_completion_ident:_ lexbuf =
    Lexer.init ();
    lexbuf

  let token = Lexer.token

  (* Comments kept after docstring filtering; populated by
   * [filter_comments], read back by [get_comments]. *)
  let filtered_comments = ref []

  let filter_comments filter =
    filtered_comments := List.filter filter (Lexer.comments ())

  let get_comments _lexbuf _docstrings = !filtered_comments
end

module OCaml_parser = Parser
type token = OCaml_parser.token
type invalid_docstrings = unit

(* OCaml parser parses into compiler-libs version of Ast. Parsetrees are
 * converted to Reason version on the fly.
 *
 * [iter] walks the parsed result with the docstring-collecting mapper, and
 * [fn] performs the actual parse; afterwards the lexer's comment list is
 * filtered so docstrings are not duplicated as comments. *)
let parse_and_filter_doc_comments iter fn lexbuf =
  let it, filter = doc_comments_filter () in
  let result = fn lexbuf in
  ignore (iter it result);
  Lexer_impl.filter_comments filter;
  (result, ())

let implementation lexbuf =
  parse_and_filter_doc_comments
    (fun it -> it.Ast_mapper.structure it)
    (fun lexbuf ->
       From_current.copy_structure
         (Parser.implementation Lexer.token lexbuf))
    lexbuf

let core_type lexbuf =
  parse_and_filter_doc_comments
    (fun it -> it.Ast_mapper.typ it)
    (fun lexbuf ->
       From_current.copy_core_type
         (Parser.parse_core_type Lexer.token lexbuf))
    lexbuf

let interface lexbuf =
  parse_and_filter_doc_comments
    (fun it -> it.Ast_mapper.signature it)
    (fun lexbuf ->
       From_current.copy_signature
         (Parser.interface Lexer.token lexbuf))
    lexbuf

(* Directives carry no docstrings, so only [Ptop_def] is traversed. *)
let filter_toplevel_phrase it = function
  | Parsetree.Ptop_def str -> ignore (it.Ast_mapper.structure it str)
  | Parsetree.Ptop_dir _ -> ()

let toplevel_phrase lexbuf =
  parse_and_filter_doc_comments
    filter_toplevel_phrase
    (fun lexbuf ->
       From_current.copy_toplevel_phrase
         (Parser.toplevel_phrase Lexer.token lexbuf))
    lexbuf

let use_file lexbuf =
  parse_and_filter_doc_comments
    (* [filter_toplevel_phrase] is run purely for its side effect of
     * recording docstring locations, so iterate rather than build a
     * discarded [unit list] with [List.map]. *)
    (fun it result -> List.iter (filter_toplevel_phrase it) result)
    (fun lexbuf ->
       List.map From_current.copy_toplevel_phrase
         (Parser.use_file Lexer.token lexbuf))
    lexbuf

(* Skip tokens to the end of the phrase *)
(* TODO: consolidate these copy-paste skip/trys into something that works for
 * every syntax (also see [Reason_syntax_util]). *)
let rec skip_phrase lexbuf =
  try
    match Lexer.token lexbuf with
    | OCaml_parser.SEMISEMI
    | OCaml_parser.EOF -> ()
    | _ -> skip_phrase lexbuf
  with
  | Lexer.Error (Lexer.Unterminated_comment _, _)
  | Lexer.Error (Lexer.Unterminated_string, _)
  | Lexer.Error (Lexer.Unterminated_string_in_comment _, _)
  | Lexer.Error (Lexer.Illegal_character _, _) -> skip_phrase lexbuf

let maybe_skip_phrase lexbuf =
  if Parsing.is_current_lookahead OCaml_parser.SEMISEMI
     || Parsing.is_current_lookahead OCaml_parser.EOF
  then ()
  else skip_phrase lexbuf

(* Run [fn], and on a parse/lex error in toplevel input skip ahead to the
 * next phrase boundary before re-raising, so the toplevel can recover. *)
let safeguard_parsing lexbuf fn =
  try fn ()
  with
  | Lexer.Error (Lexer.Illegal_character _, _) as err
    when !Location.input_name = "//toplevel//" ->
    skip_phrase lexbuf;
    raise err
  | Syntaxerr.Error _ as err
    when !Location.input_name = "//toplevel//" ->
    maybe_skip_phrase lexbuf;
    raise err
  (* Escape error is raised as a general catchall when a syntax_error() is
     thrown in the parser. *)
  | Parsing.Parse_error | Syntaxerr.Escape_error ->
    let loc = Location.curr lexbuf in
    if !Location.input_name = "//toplevel//"
    then maybe_skip_phrase lexbuf;
    raise (Syntaxerr.Error (Syntaxerr.Other loc))

(* Unfortunately we drop the comments because there doesn't exist an ML
 * printer that formats comments *and* line wrapping! (yet) *)
let format_interface_with_comments (signature, _) formatter =
  Pprintast.signature formatter (To_current.copy_signature signature)

let format_implementation_with_comments (structure, _) formatter =
  let structure =
    Reason_syntax_util.(
      apply_mapper_to_structure
        structure
        (backport_letopt_mapper remove_stylistic_attrs_mapper))
  in
  Pprintast.structure formatter (To_current.copy_structure structure)

module Lexer = Lexer_impl
sectionYPositions = computeSectionYPositions($el), 10)"
x-init="setTimeout(() => sectionYPositions = computeSectionYPositions($el), 10)"
>