package forester
A tool for tending mathematical forests
Install
Dune Dependency
Authors
Maintainers
Sources
5.0.tar.gz
md5=24f4aed96a8b8af33aba13fba66f1b37
sha512=d36b896aca11858bb4a00fc704c16cc27a1f197bdb3e479d6132fd70f70d67d7158096285cb0b6fb00db14417f0f822cc27fe65d82f0971e42378fd8271ce573
doc/src/forester.parser/Parse.ml.html
Source file Parse.ml
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164
(*
 * SPDX-FileCopyrightText: 2024 The Forester Project Contributors
 *
 * SPDX-License-Identifier: GPL-3.0-or-later
 *)

open Forester_prelude
open Forester_core
open Lexing

module I = Grammar.MenhirInterpreter

(** [buffer_lexer lexer] adapts [lexer], which produces a {e list} of tokens
    per invocation, into a lexer that yields exactly one token per call.
    Surplus tokens are stashed in an internal buffer and replayed on
    subsequent calls; when both the buffer and the underlying lexer come up
    empty, it keeps polling the lexer. *)
let buffer_lexer lexer =
  let buf = ref [] in
  let rec loop lexbuf =
    match !buf with
    | v :: vs ->
      buf := vs;
      v
    | [] ->
      begin match lexer lexbuf with
      | v :: vs ->
        (* [!buf] is [[]] here, so this just stores the leftovers. *)
        buf := vs @ !buf;
        v
      | [] -> loop lexbuf
      end
  in
  loop

(** One-token-at-a-time lexer that dispatches on the lexer's current mode
    (top of [Lexer.mode_stack]): main syntax, identifier states, or verbatim
    blocks. *)
let lexer =
  let@ lexbuf = buffer_lexer in
  match Stack.top @@ Lexer.mode_stack with
  | Main -> Lexer.token lexbuf
  | Ident_init -> Lexer.ident_init lexbuf
  | Ident_fragments -> Lexer.ident_fragments lexbuf
  | Verbatim (herald, buffer) -> Lexer.verbatim herald buffer lexbuf

(* Currently unused helper: extract the source span of a parser element. *)
let _get_range : I.element option -> (position * position) option =
  fun el ->
  match el with
  | Some (I.Element (_, _, start_pos, end_pos)) -> Some (start_pos, end_pos)
  | None -> None

(** [closed_by c o] is [true] iff closing-delimiter token [c] matches
    opening-delimiter token [o]. Note the argument order: it is designed for
    use as [open_delim |> closed_by token]. *)
let closed_by c o =
  match (o, c) with
  | (Grammar.LSQUARE, Grammar.RSQUARE)
  | (Grammar.LPAREN, Grammar.RPAREN)
  | (Grammar.LBRACE, Grammar.RBRACE)
  | (Grammar.HASH_LBRACE, Grammar.RBRACE)
  | (Grammar.HASH_HASH_LBRACE, Grammar.RBRACE) -> true
  | _ -> false

(** Recognize tokens that open a delimited region. *)
let is_opening_delim =
  function
  | Grammar.LSQUARE
  | Grammar.LPAREN
  | Grammar.LBRACE
  | Grammar.HASH_LBRACE
  | Grammar.HASH_HASH_LBRACE -> true
  | _ -> false

(** Recognize tokens that close a delimited region. *)
let is_closing_delim =
  function
  | Grammar.RSQUARE
  | Grammar.RPAREN
  | Grammar.RBRACE -> true
  | _ -> false

(** [parse ?stop_on_err lexbuf] drives the Menhir incremental parser over
    [lexbuf], tracking delimiter nesting in [delim_stack] so that error
    messages can (eventually) point at unclosed delimiters.

    Returns [Ok code] on acceptance, or [Error diagnostic] on a parse or
    lexical error; no Asai effects escape this function. *)
let parse : ?stop_on_err: bool -> lexbuf -> (Code.t, Reporter.diagnostic) Result.t =
  fun ?(stop_on_err = true) lexbuf ->
  let initial_checkpoint = Grammar.Incremental.main lexbuf.lex_curr_p in
  let delim_stack = Stack.create () in
  let rec run : _ I.checkpoint -> _ -> (Code.t, Reporter.diagnostic) Result.t =
    fun checkpoint supplier ->
      match checkpoint with
      | I.InputNeeded _env ->
        let token, _, _ = supplier () in
        let start_position = lexbuf.lex_start_p in
        let end_position = lexbuf.lex_curr_p in
        (* If the current token is an opening delimiter, save the token and
           its position on the stack.
           FIX: the conditional push is delimited with [begin]/[end] —
           a bare [then let ... in ...] would capture the rest of this
           branch (the closing-delimiter check and [I.offer]) inside the
           conditional, and the original text also carried a stray [;]
           that did not parse. *)
        if is_opening_delim token then begin
          let range = Range.of_lex_range (start_position, end_position) in
          Stack.push (token, range) delim_stack
        end;
        (* A matching closing delimiter discharges the most recent opener. *)
        if is_closing_delim token then begin
          match Stack.top_opt delim_stack with
          | Some (open_delim, _) ->
            if open_delim |> closed_by token then Stack.drop delim_stack
          | None -> ()
        end;
        let checkpoint = I.offer checkpoint (token, start_position, end_position) in
        run checkpoint supplier
      | I.Shifting ((_, _, _) : Code.t I.env * Code.t I.env * bool) ->
        let checkpoint = I.resume checkpoint ~strategy: `Simplified in
        run checkpoint supplier
      | I.AboutToReduce (_, _) ->
        let checkpoint = I.resume checkpoint ~strategy: `Simplified in
        run checkpoint supplier
      | I.HandlingError _ ->
        if not stop_on_err then
          (* TODO: Don't error out here *)
          Error (
            Asai.Diagnostic.of_text
              ~loc: (Range.of_lexbuf lexbuf)
              Error
              Reporter.Message.Parse_error
              (Asai.Diagnostic.text "")
          )
        else
          (* let range_of_last_unclosed = *)
          (*   Option.map snd @@ Stack.top_opt delim_stack *)
          (* in *)
          let loc = Range.of_lexbuf lexbuf in
          (* let extra_remarks = *)
          (*   if Option.is_some range_of_last_unclosed then *)
          (*     [ *)
          (*       Asai.Diagnostic.loctext *)
          (*         ?loc: range_of_last_unclosed *)
          (*         "This delimiter is never closed"; *)
          (*     ] *)
          (*   else [] *)
          (* in *)
          Error (
            Asai.Diagnostic.(
              of_loctext
                (* ~extra_remarks *)
                Error
                Forester_core.Reporter.Message.Parse_error
                (loctext ~loc
                   Format.(sprintf "syntax error, unexpected %S" (Lexing.lexeme lexbuf)))
            )
          )
      | I.Accepted code -> Ok code
      | I.Rejected -> assert false
  in
  let supplier = I.lexer_lexbuf_to_supplier lexer lexbuf in
  try run initial_checkpoint supplier with
  (* NOTE: This should be the only exception we ever need to catch here: The
     parser is driven manually, so we are responsible for creating the
     diagnostics. This means we should not use `fatalf`! This also means
     that we can safely use the returned diagnostic without worrying that
     there might be an unhandled Asai effect. *)
  | Lexer.SyntaxError lexeme ->
    let loc = Range.of_lexbuf lexbuf in
    Error (
      Asai.Diagnostic.(
        of_loctext
          Error
          Reporter.Message.Parse_error
          (loctext ~loc Format.(sprintf "syntax error, unexpected %S" lexeme))
      )
    )
sectionYPositions = computeSectionYPositions($el), 10)"
x-init="setTimeout(() => sectionYPositions = computeSectionYPositions($el), 10)"
>