package reason
Reason: Syntax & Toolchain for OCaml
Install
Dune Dependency
Authors
Maintainers
Sources
reason-3.13.0.tbz
sha256=df2544606bc824a67082e2011826e7a1cde7af0ccb5ba457e937fe018c3ce7e4
sha512=ad6606defad2dcc2456b42e77eb77d5aee02ccda7ad32c313dcbb2c5d25953ff0c80719a3c6ef30b8443d8e6105b46aacdccc3607620c36ce8c8d0537574c21c
doc/src/reason/reason_toolchain_reason.ml.html
Source file reason_toolchain_reason.ml
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86
(* Connects the Reason lexer to the error-recovering Menhir parser.
 *
 * Pipeline from Reason source text to an OCaml AST:
 *   1. Build a lexbuf from the source text.
 *   2. Reason_lexer:
 *      a. OCamllex extracts one token from the character stream.
 *      b. Post-processing: comments are stored separately; ES6_FUN and
 *         completion-identifier tokens are inserted.
 *   3. Reason_parser (Menhir): a parser with explicit continuations which,
 *      given a new token, yields an AST (success), a new continuation
 *      (more tokens needed), or nothing (token invalid in current state).
 *   4. This module wires the lexer and parser together.
 *)
module P = Reason_recover_parser
module Lexer = Reason_lexer

type token = Reason_parser.token
type invalid_docstrings = Reason_lexer.invalid_docstrings

(* Feed tokens from [lexer] into parser [state] until the parse either
   succeeds or the recovering parser rejects a token outright. *)
let rec loop lexer state =
  let tok = Lexer.token lexer in
  match P.step state tok with
  | P.Success (ast, docstrings) -> ast, docstrings
  | P.Intermediate state' -> loop lexer state'
  | P.Error ->
    (* The recovering parser should always make progress, so this case is
       presumably unreachable; if hit, report a fatal syntax error at the
       offending token's location. *)
    let _, loc_start, loc_end = tok in
    let loc = { Location.loc_start; loc_end; loc_ghost = false } in
    Reason_errors.raise_fatal_error (Parsing_error "Syntax error") loc

(* Start a parse at the given Menhir incremental [entry], beginning at the
   lexer's current position. *)
let initial_run entry lexer =
  let start_pos = (Lexer.lexbuf lexer).Lexing.lex_curr_p in
  loop lexer (P.initial entry start_pos)

(* One driver per grammar entry point. *)
let implementation lexer =
  initial_run Reason_parser.Incremental.implementation lexer

let interface lexer =
  initial_run Reason_parser.Incremental.interface lexer

let core_type lexer =
  initial_run Reason_parser.Incremental.parse_core_type lexer

let toplevel_phrase lexer =
  initial_run Reason_parser.Incremental.toplevel_phrase lexer

let use_file lexer =
  initial_run Reason_parser.Incremental.use_file lexer

(* Skip tokens to the end of the phrase *)
let rec skip_phrase lexer =
  try
    match Lexer.token lexer with
    | (Reason_parser.SEMI | Reason_parser.EOF), _, _ -> ()
    | _ -> skip_phrase lexer
  with
  | Reason_errors.Reason_error
      ( Lexing_error
          ( Unterminated_comment _ | Unterminated_string
          | Unterminated_string_in_comment _ | Illegal_character _ )
      , _ ) ->
    (* These lexing errors do not end the phrase: keep skipping. *)
    skip_phrase lexer

(* Run [fn]; when parsing toplevel input, resynchronise the lexer to the
   next phrase before re-raising, so the REPL can carry on with whatever
   follows the bad input. *)
let safeguard_parsing lexbuf fn =
  try fn () with
  | Reason_errors.Reason_error _ as reason_exn
    when !Location.input_name = "//toplevel//" ->
    skip_phrase (Lexer.init lexbuf);
    raise reason_exn
  | Location.Error _ as other ->
    let loc = Location.curr lexbuf in
    if !Location.input_name = "//toplevel//" then begin
      skip_phrase (Lexer.init lexbuf);
      raise (Syntaxerr.Error (Syntaxerr.Other loc))
    end
    else raise other

(* Print a signature (with its comments) in Reason syntax on [formatter]. *)
let format_interface_with_comments (signature, comments) formatter =
  let printer = Reason_pprint_ast.createFormatter () in
  printer#signature comments formatter signature

(* Print a structure (with its comments) in Reason syntax on [formatter]. *)
let format_implementation_with_comments (implementation, comments) formatter =
  let printer = Reason_pprint_ast.createFormatter () in
  printer#structure comments formatter implementation
x-init="setTimeout(() => sectionYPositions = computeSectionYPositions($el), 10)"
>