(********************************************************************)
(*                                                                  *)
(*  The Why3 Verification Platform   /   The Why3 Development Team  *)
(*  Copyright 2010-2023 --  Inria - CNRS - Paris-Saclay University  *)
(*                                                                  *)
(*  This software is distributed under the terms of the GNU Lesser  *)
(*  General Public License version 2.1, with the special exception  *)
(*  on linking described in file LICENSE.                           *)
(*                                                                  *)
(********************************************************************)
{
  open Lexing
  open Mc_parser

  exception Lexing_error of string
  let () = Why3.Exn_printer.register (fun fmt exn -> match exn with
    | Lexing_error s -> Format.fprintf fmt "syntax error: %s" s
    | _ -> raise exn)
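  (* The lexer has three states: [Code] for regular C code, [OneLineSpec]
     for the rest of a "//@ ..." annotation (left at the end of the line),
     and [MultiLineSpec] for the inside of a "/*@ ... */" annotation. *)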
  type state = Code | OneLineSpec | MultiLineSpec

  let state = ref Code
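  (* Two keyword tables: [h1] holds the C keywords, recognized everywhere;
     [h2] holds the keywords of the specification language, recognized
     only inside annotations.  In state [Code], a word listed in [h2] is
     lexed as a plain identifier. *)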
  let id_or_kwd =
    let h1 = Hashtbl.create 32 in
    List.iter (fun (s, tok) -> Hashtbl.add h1 s tok)
      ["if", IF; "else", ELSE;
       "return", RETURN; "while", WHILE;
       "for", FOR; "break", BREAK;
       "void", VOID; "int", INT; "scanf", SCANF;
      ];
    let h2 = Hashtbl.create 32 in
    List.iter (fun (s, tok) -> Hashtbl.add h2 s tok)
      ["true", TRUE; "false", FALSE;
       "forall", FORALL; "exists", EXISTS; "then", THEN; "let", LET; "in", IN;
       "invariant", INVARIANT; "variant", VARIANT;
       "assert", ASSERT; "assume", ASSUME; "check", CHECK;
       "lemma", LEMMA; "axiom", AXIOM; "goal", GOAL;
       "requires", REQUIRES; "ensures", ENSURES;
       "label", LABEL; "function", FUNCTION; "predicate", PREDICATE;
      ];
    fun s ->
      try Hashtbl.find h1 s with Not_found ->
      if !state = Code then IDENT s else
      try Hashtbl.find h2 s with Not_found -> IDENT s
  let string_buffer = Buffer.create 1024

}
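(* Regular expression shorthands.  Note that [comment] only matches a "//"
   comment whose first character is not '@', so that "//@" annotations are
   not discarded as comments. *)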
let letter = ['a'-'z' 'A'-'Z']
let digit = ['0'-'9']
let ident = letter (letter | digit | '_')*
let integer = ['0'-'9']+
let space = ' ' | '\t'
let comment = "//" [^'@''\n'] [^'\n']*
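(* Main lexing rule: skips blanks and comments, turns "#include <...>"
   lines into INCLUDE tokens, and updates [state] when an annotation
   marker is encountered. *)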
rule next_token = parse
  | '\n'    { new_line lexbuf;
              if !state = OneLineSpec then state := Code;
              next_token lexbuf }
  | space+  { next_token lexbuf }
  | "//\n"  { new_line lexbuf; next_token lexbuf }
  | comment { next_token lexbuf }
  | '#' space* "include" space* '<' ([^ '>' '\n']* as file) '>' space* '\n'
            { new_line lexbuf; INCLUDE file }
  | "/*"    { comment lexbuf }
  | "*/"    { if !state <> MultiLineSpec
              then raise (Lexing_error "no comment to be closed");
              state := Code;
              next_token lexbuf }
  | "//@"   { state := OneLineSpec; next_token lexbuf }
  | "/*@"   { state := MultiLineSpec; next_token lexbuf }
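  (* inside a multi-line annotation, a leading '@' is mere decoration
     and is skipped; anywhere else it is an error *)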
  | "@"     { if !state <> MultiLineSpec
              then raise (Lexing_error "illegal character '@'");
              next_token lexbuf }
  | ident as id
            { id_or_kwd id }
  | "--"    { MINUSMINUS }
  | '"'     { STRING (string lexbuf) }
  | eof     { EOF }
  | _ as c  { raise (Lexing_error ("illegal character: " ^ String.make 1 c)) }
(* skip a comment, then resume next_token *)
and comment = parse
  | "*/"    { next_token lexbuf }
  | '\n'    { new_line lexbuf; comment lexbuf }
  | _       { comment lexbuf }
  | eof     { raise (Lexing_error "unterminated comment") }
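(* accumulate the characters of a string literal into [string_buffer];
   the contents are returned when the closing quote is reached *)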
and string = parse
  | '"'     { let s = Buffer.contents string_buffer in
              Buffer.reset string_buffer;
              s }
  | "\\n"   { Buffer.add_char string_buffer '\n';
              string lexbuf }
  | "\\\""  { Buffer.add_char string_buffer '"';
              string lexbuf }
  | _ as c  { Buffer.add_char string_buffer c;
              string lexbuf }
  | eof     { raise (Lexing_error "unterminated string") }
{

  let parse file c =
    let lb = Lexing.from_channel c in
    Why3.Loc.set_file file lb;
    Why3.Loc.with_location (Mc_parser.file next_token) lb

  (* Entries for transformations: similar to lexer.mll *)

  let build_parsing_function entry lb = Why3.Loc.with_location (entry next_token) lb

  let parse_term = build_parsing_function Mc_parser.term_eof

  let parse_term_list = build_parsing_function Mc_parser.term_comma_list_eof

  let parse_list_ident = build_parsing_function Mc_parser.ident_comma_list_eof

}