package vscoq-language-server
VSCoq language server
Sources
vscoq-language-server-2.2.6.tar.gz
md5=f528c1760966ac10d48b5f1c5531411a
sha512=1f69538ae5f78854b34e3f1a9d408714843e899bb96d063c2bfac410339b6a13ee5f30d5e7b3cd2bbd673169bcfdb550153ba741092cdc3ee3a8ca6446cc2240
Source file document.ml
(**************************************************************************)
(*                                                                        *)
(*                                 VSCoq                                  *)
(*                                                                        *)
(*                   Copyright INRIA and contributors                     *)
(*       (see version control and README file for authors & dates)       *)
(*                                                                        *)
(**************************************************************************)
(*                                                                        *)
(*   This file is distributed under the terms of the MIT License.         *)
(*   See LICENSE file.                                                    *)
(*                                                                        *)
(**************************************************************************)

open Gramlib
open Types
open Lsp.Types
open Scheduler

let Log log = Log.mk_log "document"

module LM = Map.Make (Int)
module SM = Map.Make (Stateid)

type proof_block_type =
  | TheoremKind
  | DefinitionType
  | InductiveType
  | BeginSection
  | BeginModule
  | End
  | Other

type proof_step = {
  id: sentence_id;
  tactic: string;
  range: Range.t;
}

type outline_element = {
  id: sentence_id;
  name: string;
  type_: proof_block_type;
  statement: string;
  proof: proof_step list;
  range: Range.t
}

type outline = outline_element list

type parsed_ast = {
  ast: Synterp.vernac_control_entry;
  classification: Vernacextend.vernac_classification;
  tokens: Tok.t list
}

type comment = {
  start: int;
  stop: int;
  content: string;
}

type parsing_error = {
  start: int;
  stop: int;
  msg: Pp.t Loc.located;
  qf: Quickfix.t list option;
  str: string;
}

type sentence_state =
  | Error of parsing_error
  | Parsed of parsed_ast

type pre_sentence = {
  parsing_start : int;
  start : int;
  stop : int;
  synterp_state : Vernacstate.Synterp.t; (* synterp state after this sentence's synterp phase *)
  ast : sentence_state;
}

(* Example: *)
(* "  Check 3. " *)
(*   ^  ^       ^---- end *)
(*   |  |------------ start *)
(*   |---------------- parsing_start *)

type sentence = {
  parsing_start : int;
  start : int;
  stop : int;
  synterp_state : Vernacstate.Synterp.t; (* synterp state after this sentence's synterp phase *)
  scheduler_state_before : Scheduler.state;
  scheduler_state_after : Scheduler.state;
  ast : sentence_state;
  id : sentence_id;
}

type document = {
  sentences_by_id : sentence SM.t;
  sentences_by_end : sentence LM.t;
  parsing_errors_by_end : parsing_error LM.t;
  comments_by_end : comment LM.t;
  schedule : Scheduler.schedule;
  outline : outline;
  parsed_loc : int;
  raw_doc : RawDocument.t;
  init_synterp_state : Vernacstate.Synterp.t;
  cancel_handle: Sel.Event.cancellation_handle option;
}

type parse_state = {
  started: float;
  stop: int;
  top_id: sentence_id option;
  loc: Loc.t option;
  synterp_state : Vernacstate.Synterp.t;
  stream: (unit, char) Gramlib.Stream.t;
  raw: RawDocument.t;
  parsed: pre_sentence list;
  errors: parsing_error list;
  parsed_comments: comment list;
  previous_document: document;
}

type parsing_end_info = {
  unchanged_id: sentence_id option;
  invalid_ids: sentence_id_set;
  previous_document: document;
  parsed_document: document;
}

type event =
  | ParseEvent of parse_state
  | Invalidate of parse_state

let pp_event fmt = function
  | ParseEvent _ -> Format.fprintf fmt "ParseEvent _"
  | Invalidate _ -> Format.fprintf fmt "Invalidate _"

type events = event Sel.Event.t list

let create_parsing_event event =
  let priority = Some PriorityManager.parsing in
  Sel.now ?priority event

let range_of_sentence raw (sentence : sentence) =
  let start = RawDocument.position_of_loc raw sentence.start in
  let end_ = RawDocument.position_of_loc raw sentence.stop in
  Range.{ start; end_ }

let string_of_sentence raw (sentence: sentence) =
  let string = RawDocument.string_in_range raw sentence.start sentence.stop in
  string

let range_of_sentence_with_blank_space raw (sentence : sentence) =
  let start = RawDocument.position_of_loc raw sentence.parsing_start in
let end_ = RawDocument.position_of_loc raw sentence.stop in Range.{ start; end_ } let string_of_id document id = match SM.find_opt id document.sentences_by_id with | None -> CErrors.anomaly Pp.(str"Trying to get range of non-existing sentence " ++ Stateid.print id) | Some sentence -> string_of_sentence document.raw_doc sentence let range_of_id document id = match SM.find_opt id document.sentences_by_id with | None -> CErrors.anomaly Pp.(str"Trying to get range of non-existing sentence " ++ Stateid.print id) | Some sentence -> range_of_sentence document.raw_doc sentence let range_of_id_with_blank_space document id = match SM.find_opt id document.sentences_by_id with | None -> CErrors.anomaly Pp.(str"Trying to get range of non-existing sentence " ++ Stateid.print id) | Some sentence -> range_of_sentence_with_blank_space document.raw_doc sentence let push_proof_step_in_outline document id (outline : outline) = let range = range_of_id document id in let tactic = string_of_id document id in let proof_step = {id; tactic; range} in match outline with | [] -> outline | e :: l -> let proof = proof_step :: e.proof in {e with proof} :: l let record_outline document id (ast : Synterp.vernac_control_entry) classif (outline : outline) = let open Vernacextend in match classif with | VtProofStep _ | VtQed _ -> push_proof_step_in_outline document id outline | VtStartProof (_, names) -> let vernac_gen_expr = ast.v.expr in let type_ = match vernac_gen_expr with | VernacSynterp _ -> None | VernacSynPure pure -> match pure with | Vernacexpr.VernacStartTheoremProof _ -> Some TheoremKind | Vernacexpr.VernacDefinition _ -> Some DefinitionType | Vernacexpr.VernacFixpoint _ -> Some DefinitionType | Vernacexpr.VernacCoFixpoint _ -> Some DefinitionType | _ -> None in let str_names = List.map (fun n -> Names.Id.to_string n) names in begin match type_ with | None -> outline | Some type_ -> let range = range_of_id document id in let statement = string_of_id document id in let elements = List.map (fun name -> {id; type_; name; statement; range; proof=[]}) str_names in List.append elements outline end | VtSideff (names, _) -> let vernac_gen_expr = ast.v.expr in let type_, statement = match vernac_gen_expr with | VernacSynterp (Synterp.EVernacExtend _) when names <> [] -> Some Other, "external" | VernacSynterp (Synterp.EVernacBeginSection _) -> log (fun () -> Format.sprintf "BEGIN SECTION %s" (string_of_id document id)); Some BeginSection, "" | VernacSynterp (Synterp.EVernacDeclareModuleType _) -> log (fun () -> Format.sprintf "BEGIN MODULE %s" (string_of_id document id)); Some BeginModule, "" | VernacSynterp (Synterp.EVernacDefineModule _) -> log (fun () -> Format.sprintf "BEGIN MODULE %s" (string_of_id document id)); Some BeginModule, "" | VernacSynterp (Synterp.EVernacDeclareModule _) -> log (fun () -> Format.sprintf "BEGIN MODULE %s" (string_of_id document id)); Some BeginModule, "" | VernacSynterp (Synterp.EVernacEndSegment _) -> log (fun () -> Format.sprintf "END SEGMENT"); Some End, "" | VernacSynterp _ -> None, "" | VernacSynPure pure -> match pure with | Vernacexpr.VernacStartTheoremProof _ -> Some TheoremKind, string_of_id document id | Vernacexpr.VernacDefinition _ -> Some DefinitionType, string_of_id document id | Vernacexpr.VernacInductive _ -> Some InductiveType, string_of_id document id | Vernacexpr.VernacFixpoint _ -> Some DefinitionType, string_of_id document id | Vernacexpr.VernacCoFixpoint _ -> Some DefinitionType, string_of_id document id | _ -> None, "" in let str_names = List.map (fun n -> 
Names.Id.to_string n) names in begin match type_ with | None -> outline | Some type_ -> let range = range_of_id document id in let element = List.map (fun name -> {id; type_; name; statement; range; proof=[]}) str_names in List.append element outline end | _ -> outline let record_outline document {id; ast} outline = match ast with | Error _ -> outline | Parsed ast -> record_outline document id ast.ast ast.classification outline let compute_outline ({ sentences_by_end } as document) = LM.fold (fun _ s -> record_outline document s) sentences_by_end [] let schedule doc = doc.schedule let raw_document doc = doc.raw_doc let outline doc = doc.outline let parse_errors parsed = List.map snd (LM.bindings parsed.parsing_errors_by_end) let add_sentence parsed parsing_start start stop (ast: sentence_state) synterp_state scheduler_state_before = let id = Stateid.fresh () in let scheduler_state_after, schedule = match ast with | Error {msg} -> scheduler_state_before, Scheduler.schedule_errored_sentence id msg parsed.schedule | Parsed ast -> let ast' = (ast.ast, ast.classification, synterp_state) in Scheduler.schedule_sentence (id, ast') scheduler_state_before parsed.schedule in (* FIXME may invalidate scheduler_state_XXX for following sentences -> propagate? *) let sentence = { parsing_start; start; stop; ast; id; synterp_state; scheduler_state_before; scheduler_state_after } in let document = { parsed with sentences_by_end = LM.add stop sentence parsed.sentences_by_end; sentences_by_id = SM.add id sentence parsed.sentences_by_id; schedule; } in document, scheduler_state_after let remove_sentence parsed id = match SM.find_opt id parsed.sentences_by_id with | None -> parsed | Some sentence -> let sentences_by_id = SM.remove id parsed.sentences_by_id in let sentences_by_end = LM.remove sentence.stop parsed.sentences_by_end in let outline = List.filter (fun (e : outline_element) -> e.id != id) parsed.outline in (* TODO clean up the schedule and free cached states *) { parsed with sentences_by_id; sentences_by_end; outline } let sentences parsed = List.map snd @@ SM.bindings parsed.sentences_by_id type code_line = | Sentence of sentence | ParsingError of parsing_error | Comment of comment let start_of_code_line = function | Sentence { start = x } -> x | ParsingError { start = x } -> x | Comment { start = x } -> x let compare_code_line x y = let s1 = start_of_code_line x in let s2 = start_of_code_line y in s1 - s2 let code_lines_sorted_by_loc parsed = List.sort compare_code_line @@ List.concat [ (List.map (fun (_,x) -> Sentence x) @@ SM.bindings parsed.sentences_by_id) ; (List.map (fun (_,x) -> ParsingError x) @@ LM.bindings parsed.parsing_errors_by_end) ; [] (* todo comments *) ] let code_lines_by_end_sorted_by_loc parsed = List.sort compare_code_line @@ List.concat [ (List.map (fun (_,x) -> Sentence x) @@ LM.bindings parsed.sentences_by_end) ; (List.map (fun (_,x) -> ParsingError x) @@ LM.bindings parsed.parsing_errors_by_end) ; [] (* todo comments *) ] let sentences_sorted_by_loc parsed = List.sort (fun ({start = s1} : sentence) {start = s2} -> s1 - s2) @@ List.map snd @@ SM.bindings parsed.sentences_by_id let sentences_before parsed loc = let (before,ov,_after) = LM.split loc parsed.sentences_by_end in let before = Option.cata (fun v -> LM.add loc v before) before ov in List.map (fun (_id,s) -> s) @@ LM.bindings before let sentences_after parsed loc = let (_before,ov,after) = LM.split loc parsed.sentences_by_end in let after = Option.cata (fun v -> LM.add loc v after) after ov in List.map (fun (_id,s) -> 
s) @@ LM.bindings after let parsing_errors_before parsed loc = LM.filter (fun stop _v -> stop <= loc) parsed.parsing_errors_by_end let comments_before parsed loc = LM.filter (fun stop _v -> stop <= loc) parsed.comments_by_end let get_sentence parsed id = SM.find_opt id parsed.sentences_by_id let find_sentence parsed loc = match LM.find_first_opt (fun k -> loc <= k) parsed.sentences_by_end with | Some (_, sentence) when sentence.start <= loc -> Some sentence | _ -> None let find_sentence_before parsed loc = match LM.find_last_opt (fun k -> k <= loc) parsed.sentences_by_end with | Some (_, sentence) -> Some sentence | _ -> None let find_sentence_strictly_before parsed loc = match LM.find_last_opt (fun k -> k < loc) parsed.sentences_by_end with | Some (_, sentence) -> Some sentence | _ -> None let find_sentence_after parsed loc = match LM.find_first_opt (fun k -> loc <= k) parsed.sentences_by_end with | Some (_, sentence) -> Some sentence | _ -> None let find_next_qed parsed loc = let exception Found of sentence in let f k sentence = if loc <= k then match sentence.ast with | Error _ -> () | Parsed ast -> match ast.classification with | VtQed _ -> raise (Found sentence) | _ -> () in (* We can't use find_first since f isn't monotone *) match LM.iter f parsed.sentences_by_end with | () -> None | exception (Found n) -> Some n let get_first_sentence parsed = Option.map snd @@ LM.find_first_opt (fun _ -> true) parsed.sentences_by_end let get_last_sentence parsed = Option.map snd @@ LM.find_last_opt (fun _ -> true) parsed.sentences_by_end let state_after_sentence parsed = function | Some (stop, { synterp_state; scheduler_state_after }) -> (stop, synterp_state, scheduler_state_after) | None -> (-1, parsed.init_synterp_state, Scheduler.initial_state) let state_at_pos parsed pos = state_after_sentence parsed @@ LM.find_last_opt (fun stop -> stop <= pos) parsed.sentences_by_end let state_strictly_before parsed pos = state_after_sentence parsed @@ LM.find_last_opt (fun stop -> stop < pos) parsed.sentences_by_end let pos_at_end parsed = match LM.max_binding_opt parsed.sentences_by_end with | Some (stop, _) -> stop | None -> -1 let string_of_parsed_ast { tokens } = (* TODO implement printer for vernac_entry *) "[" ^ String.concat "--" (List.map (Tok.extract_string false) tokens) ^ "]" let string_of_parsed_ast = function | Error e -> "[errored sentence]: " ^ (e.str) | Parsed ast -> string_of_parsed_ast ast let patch_sentence parsed scheduler_state_before id ({ parsing_start; ast; start; stop; synterp_state } : pre_sentence) = let old_sentence = SM.find id parsed.sentences_by_id in log (fun () -> Format.sprintf "Patching sentence %s , %s" (Stateid.to_string id) (string_of_parsed_ast old_sentence.ast)); let scheduler_state_after, schedule = match ast with | Error {msg} -> scheduler_state_before, Scheduler.schedule_errored_sentence id msg parsed.schedule | Parsed ast -> let ast = (ast.ast, ast.classification, synterp_state) in Scheduler.schedule_sentence (id,ast) scheduler_state_before parsed.schedule in let new_sentence = { old_sentence with ast; parsing_start; start; stop; scheduler_state_before; scheduler_state_after } in let sentences_by_id = SM.add id new_sentence parsed.sentences_by_id in let sentences_by_end = match LM.find_opt old_sentence.stop parsed.sentences_by_end with | Some { id } when Stateid.equal id new_sentence.id -> LM.remove old_sentence.stop parsed.sentences_by_end | _ -> parsed.sentences_by_end in let sentences_by_end = LM.add new_sentence.stop new_sentence sentences_by_end in { parsed 
with sentences_by_end; sentences_by_id; schedule }, scheduler_state_after type diff = | Deleted of sentence_id list | Added of pre_sentence list | Equal of (sentence_id * pre_sentence) list let tok_equal t1 t2 = let open Tok in match t1, t2 with | KEYWORD s1, KEYWORD s2 -> CString.equal s1 s2 | IDENT s1, IDENT s2 -> CString.equal s1 s2 | FIELD s1, FIELD s2 -> CString.equal s1 s2 | NUMBER n1, NUMBER n2 -> NumTok.Unsigned.equal n1 n2 | STRING s1, STRING s2 -> CString.equal s1 s2 | LEFTQMARK, LEFTQMARK -> true | BULLET s1, BULLET s2 -> CString.equal s1 s2 | EOI, EOI -> true | QUOTATION(s1,t1), QUOTATION(s2,t2) -> CString.equal s1 s2 && CString.equal t1 t2 | _ -> false let same_errors (e1 : parsing_error) (e2 : parsing_error) = (String.compare e1.str e2.str = 0) && (e1.start = e2.start) && (e1.stop = e2.stop) let same_tokens (s1 : sentence) (s2 : pre_sentence) = match s1.ast, s2.ast with | Error e1, Error e2 -> same_errors e1 e2 | Parsed ast1, Parsed ast2 -> CList.equal tok_equal ast1.tokens ast2.tokens | _, _ -> false (* TODO improve diff strategy (insertions,etc) *) let rec diff old_sentences new_sentences = match old_sentences, new_sentences with | [], [] -> [] | [], new_sentences -> [Added new_sentences] | old_sentences, [] -> [Deleted (List.map (fun s -> s.id) old_sentences)] (* FIXME something special should be done when `Deleted` is applied to a parsing effect *) | old_sentence::old_sentences, new_sentence::new_sentences -> if same_tokens old_sentence new_sentence then Equal [(old_sentence.id,new_sentence)] :: diff old_sentences new_sentences else Deleted [old_sentence.id] :: Added [new_sentence] :: diff old_sentences new_sentences let string_of_diff_item doc = function | Deleted ids -> ids |> List.map (fun id -> Printf.sprintf "- (id: %d) %s" (Stateid.to_int id) (string_of_parsed_ast (Option.get (get_sentence doc id)).ast)) | Added sentences -> sentences |> List.map (fun (s : pre_sentence) -> Printf.sprintf "+ %s" (string_of_parsed_ast s.ast)) | Equal l -> l |> List.map (fun (id, (s : pre_sentence)) -> Printf.sprintf "= (id: %d) %s" (Stateid.to_int id) (string_of_parsed_ast s.ast)) let string_of_diff doc l = String.concat "\n" (List.flatten (List.map (string_of_diff_item doc) l)) [%%if coq = "8.18" || coq = "8.19" || coq = "8.20"] let get_keyword_state = Pcoq.get_keyword_state [%%else] let get_keyword_state = Procq.get_keyword_state [%%endif] let rec stream_tok n_tok acc str begin_line begin_char = let e = LStream.next (get_keyword_state ()) str in match e with | Tok.EOI -> List.rev acc | _ -> stream_tok (n_tok+1) (e::acc) str begin_line begin_char (* let parse_one_sentence stream ~st = let pa = Pcoq.Parsable.make stream in Vernacstate.Parser.parse st (Pvernac.main_entry (Some (Vernacinterp.get_default_proof_mode ()))) pa (* FIXME: handle proof mode correctly *) *) [%%if coq = "8.18" || coq = "8.19"] let parse_one_sentence ?loc stream ~st = Vernacstate.Synterp.unfreeze st; let entry = Pvernac.main_entry (Some (Synterp.get_default_proof_mode ())) in let pa = Pcoq.Parsable.make ?loc stream in let sentence = Pcoq.Entry.parse entry pa in (sentence, []) [%%elif coq = "8.20"] let parse_one_sentence ?loc stream ~st = Vernacstate.Synterp.unfreeze st; Flags.record_comments := true; let entry = Pvernac.main_entry (Some (Synterp.get_default_proof_mode ())) in let pa = Pcoq.Parsable.make ?loc stream in let sentence = Pcoq.Entry.parse entry pa in let comments = Pcoq.Parsable.comments pa in (sentence, comments) [%%else] let parse_one_sentence ?loc stream ~st = Vernacstate.Synterp.unfreeze st; 
Flags.record_comments := true; let entry = Pvernac.main_entry (Some (Synterp.get_default_proof_mode ())) in let pa = Procq.Parsable.make ?loc stream in let sentence = Procq.Entry.parse entry pa in let comments = Procq.Parsable.comments pa in (sentence, comments) [%%endif] let rec junk_sentence_end stream = match Stream.npeek () 2 stream with | ['.'; (' ' | '\t' | '\n' |'\r')] -> Stream.junk () stream | [] -> () | _ -> Stream.junk () stream; junk_sentence_end stream [%%if coq = "8.18"] exception E = Stream.Error [%%else] exception E = Grammar.Error [%%endif] [%%if coq = "8.18" || coq = "8.19"] let get_loc_from_info_or_exn e info = match e with | Synterp.UnmappedLibrary (_, qid) -> qid.loc | Synterp.NotFoundLibrary (_, qid) -> qid.loc | _ -> Loc.get_loc @@ info [%%else] let get_loc_from_info_or_exn _ info = Loc.get_loc info (* let get_qf_from_info info = Quickfix.get_qf info *) [%%endif] [%%if coq = "8.18" || coq = "8.19"] let get_entry ast = Synterp.synterp_control ast [%%else] let get_entry ast = let intern = Vernacinterp.fs_intern in Synterp.synterp_control ~intern ast [%%endif] let handle_parse_error start parsing_start msg qf ({stream; errors; parsed;} as parse_state) synterp_state = log (fun () -> "handling parse error at " ^ string_of_int start); let stop = Stream.count stream in let str = String.sub (RawDocument.text parse_state.raw) parsing_start (stop - parsing_start) in let parsing_error = { msg; start; stop; qf; str} in let sentence = { parsing_start; ast = Error parsing_error; start; stop; synterp_state } in let parsed = sentence :: parsed in let errors = parsing_error :: errors in let parse_state = {parse_state with errors; parsed} in (* TODO: we could count the \n between start and stop and increase Loc.line_nb *) create_parsing_event (ParseEvent parse_state) let handle_parse_more ({loc; synterp_state; stream; raw; parsed; parsed_comments} as parse_state) = let start = Stream.count stream in log (fun () -> "Start of parse is: " ^ (string_of_int start)); begin (* FIXME should we save lexer state? 
*) match parse_one_sentence ?loc stream ~st:synterp_state with | None, _ (* EOI *) -> create_parsing_event (Invalidate parse_state) | Some ast, comments -> let stop = Stream.count stream in let begin_line, begin_char, end_char = match ast.loc with | Some lc -> lc.line_nb, lc.bp, lc.ep | None -> assert false in let str = String.sub (RawDocument.text raw) begin_char (end_char - begin_char) in let sstr = Stream.of_string str in let lex = CLexer.Lexer.tok_func sstr in let tokens = stream_tok 0 [] lex begin_line begin_char in begin try log (fun () -> "Parsed: " ^ (Pp.string_of_ppcmds @@ Ppvernac.pr_vernac ast)); let entry = get_entry ast in let classification = Vernac_classifier.classify_vernac ast in let synterp_state = Vernacstate.Synterp.freeze () in let parsed_ast = Parsed { ast = entry; classification; tokens } in let sentence = { parsing_start = start; ast = parsed_ast; start = begin_char; stop; synterp_state } in let parsed = sentence :: parsed in let comments = List.map (fun ((start, stop), content) -> {start; stop; content}) comments in let parsed_comments = List.append comments parsed_comments in let loc = ast.loc in let parse_state = {parse_state with parsed_comments; parsed; loc; synterp_state} in create_parsing_event (ParseEvent parse_state) with exn -> let e, info = Exninfo.capture exn in let loc = get_loc_from_info_or_exn e info in let qf = Result.value ~default:[] @@ Quickfix.from_exception e in handle_parse_error start begin_char (loc, CErrors.iprint_no_report (e,info)) (Some qf) parse_state synterp_state end | exception (E _ as exn) -> let e, info = Exninfo.capture exn in let loc = get_loc_from_info_or_exn e info in let qf = Result.value ~default:[] @@ Quickfix.from_exception e in junk_sentence_end stream; handle_parse_error start start (loc, CErrors.iprint_no_report (e, info)) (Some qf) {parse_state with stream} synterp_state | exception (CLexer.Error.E _ as exn) -> (* May be more problematic to handle for the diff *) let e, info = Exninfo.capture exn in let loc = get_loc_from_info_or_exn e info in let qf = Result.value ~default:[] @@ Quickfix.from_exception exn in junk_sentence_end stream; handle_parse_error start start (loc,CErrors.iprint_no_report (e, info)) (Some qf) {parse_state with stream} synterp_state | exception exn -> let e, info = Exninfo.capture exn in let loc = Loc.get_loc @@ info in let qf = Result.value ~default:[] @@ Quickfix.from_exception exn in junk_sentence_end stream; handle_parse_error start start (loc, CErrors.iprint_no_report (e,info)) (Some qf) {parse_state with stream} synterp_state end let rec unchanged_id id = function | [] -> id | Equal s :: diffs -> let get_id_ignore_error id_option (id, (s: pre_sentence)) = match s.ast with | Error _ -> id_option | Parsed _ -> Some id in unchanged_id (List.fold_left get_id_ignore_error id s) diffs | (Added _ | Deleted _) :: _ -> id let invalidate top_edit top_id parsed_doc new_sentences = let rec add_new_or_patch parsed_doc scheduler_state = function | [] -> parsed_doc | Deleted _ :: diffs -> add_new_or_patch parsed_doc scheduler_state diffs | Equal s :: diffs -> let patch_sentence (parsed_doc,scheduler_state) (id,new_s) = patch_sentence parsed_doc scheduler_state id new_s in let parsed_doc, scheduler_state = List.fold_left patch_sentence (parsed_doc, scheduler_state) s in add_new_or_patch parsed_doc scheduler_state diffs | Added new_sentences :: diffs -> (* FIXME could have side effect on the following, unchanged sentences *) let add_sentence (parsed_doc,scheduler_state) ({ parsing_start; start; stop; ast; 
synterp_state } : pre_sentence) = add_sentence parsed_doc parsing_start start stop ast synterp_state scheduler_state in let parsed_doc, scheduler_state = List.fold_left add_sentence (parsed_doc,scheduler_state) new_sentences in add_new_or_patch parsed_doc scheduler_state diffs in let rec remove_old parsed_doc invalid_ids = function | [] -> parsed_doc, invalid_ids | Deleted ids :: diffs -> let invalid_ids = List.fold_left (fun ids id -> Stateid.Set.add id ids) invalid_ids ids in let parsed_doc = List.fold_left remove_sentence parsed_doc ids in (* FIXME update scheduler state, maybe invalidate after diff zone *) remove_old parsed_doc invalid_ids diffs | Equal _ :: diffs | Added _ :: diffs -> remove_old parsed_doc invalid_ids diffs in let (_,_synterp_state,scheduler_state) = state_at_pos parsed_doc top_edit in let sentence_strings = LM.bindings @@ LM.map (fun s -> string_of_parsed_ast s.ast) parsed_doc.sentences_by_end in let sentence_strings = List.map (fun s -> snd s) sentence_strings in let sentence_string = String.concat " " sentence_strings in let sentence_strings_id = SM.bindings @@ SM.map (fun s -> string_of_parsed_ast s.ast) parsed_doc.sentences_by_id in let sentence_strings_id = List.map (fun s -> snd s) sentence_strings_id in let sentence_string_id = String.concat " " sentence_strings_id in log (fun () -> Format.sprintf "Top edit: %i, Doc: %s, Doc by id: %s" top_edit sentence_string sentence_string_id); let old_sentences = sentences_after parsed_doc top_edit in let diff = diff old_sentences new_sentences in let parsed_doc, invalid_ids = remove_old parsed_doc Stateid.Set.empty diff in let parsed_doc = add_new_or_patch parsed_doc scheduler_state diff in let unchanged_id = unchanged_id top_id diff in log (fun () -> "diff:\n" ^ string_of_diff parsed_doc diff); unchanged_id, invalid_ids, parsed_doc (** Validate document when raw text has changed *) let validate_document ({ parsed_loc; raw_doc; cancel_handle } as document) = (* Cancel any previous parsing event *) Option.iter Sel.Event.cancel cancel_handle; (* We take the state strictly before parsed_loc to cover the case when the end of the sentence is editted *) let (stop, synterp_state, _scheduler_state) = state_strictly_before document parsed_loc in (* let top_id = find_sentence_strictly_before document parsed_loc with None -> Top | Some sentence -> Id sentence.id in *) let top_id = Option.map (fun sentence -> sentence.id) (find_sentence_strictly_before document parsed_loc) in let text = RawDocument.text raw_doc in let stream = Stream.of_string text in while Stream.count stream < stop do Stream.junk () stream done; log (fun () -> Format.sprintf "Parsing more from pos %i" stop); let started = Unix.gettimeofday () in let parsed_state = {stop; top_id;synterp_state; stream; raw=raw_doc; parsed=[]; errors=[]; parsed_comments=[]; loc=None; started; previous_document=document} in let priority = Some PriorityManager.parsing in let event = Sel.now ?priority (ParseEvent parsed_state) in let cancel_handle = Some (Sel.Event.get_cancellation_handle event) in {document with cancel_handle}, [event] let handle_invalidate {parsed; errors; parsed_comments; stop; top_id; started; previous_document} document = let end_ = Unix.gettimeofday ()in let time = end_ -. started in (* log (fun () -> Format.sprintf "Parsing phase ended in %5.3f" time); *) log (fun () -> Format.sprintf "Parsing phase ended in %5.3f\n%!" 
time); let new_sentences = List.rev parsed in let new_comments = List.rev parsed_comments in let new_errors = errors in log (fun () -> Format.sprintf "%i new sentences" (List.length new_sentences)); log (fun () -> Format.sprintf "%i new comments" (List.length new_comments)); let errors = parsing_errors_before document stop in let comments = comments_before document stop in let unchanged_id, invalid_ids, document = invalidate (stop+1) top_id document new_sentences in let parsing_errors_by_end = List.fold_left (fun acc (error : parsing_error) -> LM.add error.stop error acc) errors new_errors in let comments_by_end = List.fold_left (fun acc (comment : comment) -> LM.add comment.stop comment acc) comments new_comments in let parsed_loc = pos_at_end document in let outline = compute_outline document in let parsed_document = {document with parsed_loc; parsing_errors_by_end; comments_by_end; outline} in Some {parsed_document; unchanged_id; invalid_ids; previous_document} let handle_event document = function | ParseEvent state -> let event = handle_parse_more state in let cancel_handle = Some (Sel.Event.get_cancellation_handle event) in {document with cancel_handle}, [event], None | Invalidate state -> {document with cancel_handle=None}, [], handle_invalidate state document let create_document init_synterp_state text = let raw_doc = RawDocument.create text in { parsed_loc = -1; raw_doc; sentences_by_id = SM.empty; sentences_by_end = LM.empty; parsing_errors_by_end = LM.empty; comments_by_end = LM.empty; schedule = initial_schedule; outline = []; init_synterp_state; cancel_handle = None; } let apply_text_edit document edit = let raw_doc, start = RawDocument.apply_text_edit document.raw_doc edit in let parsed_loc = min document.parsed_loc start in { document with raw_doc; parsed_loc } let apply_text_edits document edits = let doc' = { document with raw_doc = document.raw_doc } in List.fold_left apply_text_edit doc' edits module Internal = struct let string_of_sentence sentence = Format.sprintf "[%s] %s (%i -> %i)" (Stateid.to_string sentence.id) (string_of_parsed_ast sentence.ast) sentence.start sentence.stop let string_of_error error = let (_, pp) = error.msg in Format.sprintf "[parsing error] [%s] (%i -> %i)" (Pp.string_of_ppcmds pp) error.start error.stop let string_of_item = function | Sentence sentence -> string_of_sentence sentence | Comment _ -> "(* comment *)" | ParsingError error -> string_of_error error end