diff --git a/.gitignore b/.gitignore index acdc84ea0..b7ebf7ff4 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ analysis/tests/.bsb.lock analysis/_build analysis/tests/.merlin analysis/rescript-editor-analysis.exe + diff --git a/analysis/examples/example-project/src/syntax/sample-highlighting.res b/analysis/examples/example-project/src/syntax/sample-highlighting.res new file mode 100644 index 000000000..3d8930c31 --- /dev/null +++ b/analysis/examples/example-project/src/syntax/sample-highlighting.res @@ -0,0 +1,77 @@ +// Bindings +let numberBinding = 123 + +let someFunction = (param: int): int => { + let innerBinding = param + 2 + innerBinding +} + +// Types +type someRecord<'typeParameter> = { + someField: int, + someOtherField: string, + theParam: typeParameter, + another: bool, + to: string, +} + +type someEnum = + | SomeMember + | AnotherMember + | SomeMemberWithPayload(someRecord) + +type somePolyEnum = [ + | #someMember + | #AnotherMember + | #SomeMemberWithPayload(someRecord) + | #"fourth Member" +] + +// Destructuring +let destructuring = () => { + let someVar = (1, 2, 3) + let (one, two, three) = someVar + let someObj: someRecord = { + someField: 1, + someOtherField: "hello", + theParam: 2, + another: true, + to: "123", + } + let {someField, someOtherField, theParam} = someObj + + someField +} + +module SomeModule = { + type t = Some | Value | Here +} + +// Strings +let interpolated = `${numberBinding} ${"123"}` + +// JSX +module SomeComponent = { + @react.component + let make = ( + ~someProp: int, + ~otherProp: string, + ~thirdProp: SomeModule.t, + ~fourth: somePolyEnum=#"fourth member", + ) => { + React.null + } + + module Nested = { + @react.component + let make = (~children) => { + <> {children} + } + } +} + +let jsx = +
+ + {React.string("Nested")} +
diff --git a/analysis/examples/example-project/src/syntax/sample-highlighting.rs b/analysis/examples/example-project/src/syntax/sample-highlighting.rs new file mode 100644 index 000000000..7131ba600 --- /dev/null +++ b/analysis/examples/example-project/src/syntax/sample-highlighting.rs @@ -0,0 +1,31 @@ +// Bindings +fn some_function(param: usize) -> usize { + let innerBinding = param + 2; + innerBinding +} + +// Types +struct someRecord { + someField: usize, + someOtherField: String, + theParam: typeParameter, +} + +enum someEnum { + SomeMember, + AnotherMember, + SomeMemberWithPayload(someRecord), +} + +// Destructuring +fn destructuring() -> usize { + let someVar = (1, 2, 3); + let (one, two, three) = someVar; + let someObj = someRecord:: { + someField: 1, + someOtherField: String::new("HEllo"), + theParam: 2, + }; + + someObj.someField +} diff --git a/analysis/examples/example-project/src/syntax/sample-highlighting.tsx b/analysis/examples/example-project/src/syntax/sample-highlighting.tsx new file mode 100644 index 000000000..b646a4cfb --- /dev/null +++ b/analysis/examples/example-project/src/syntax/sample-highlighting.tsx @@ -0,0 +1,94 @@ +// Bindings +let numberBinding = 123; + +const SomeComp = { + Nested: () => null, +}; + +let someFunction = (param: number): number => { + let innerBinding = param + 2; + return innerBinding; +}; + +// Types +type someRecord = { + someField: number; + someOtherField: string; + theParam: typeParameter; + another: boolean; + to: string; +}; + +enum someEnum { + SomeMember, + AnotherMember, +} + +// Destructuring +let destructuring = () => { + let someVar = [1, 2, 3]; + let [one, two, three] = someVar; + let someObj: someRecord = { + someField: 1, + someOtherField: "hello", + theParam: 2, + another: true, + to: "123", + }; + let { someField, someOtherField, theParam } = someObj; + + return someField; +}; + +namespace SomeModule { + export enum t { + Some, + Value, + Here, + } +} + +// Decorators and classes +function someDecorator() { + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { + console.log("first(): called"); + }; +} + +class SomeClass { + @someDecorator() doStuff() { + return 123; + } +} + +// Strings +let interpolated = `${numberBinding} ${"123"}`; + +// JSX +interface Props { + someProp: number; + otherProp: string; + thirdProp: SomeModule.t; +} +const SomeComponent = ({ someProp, otherProp, thirdProp }: Props) => { + return null; +}; + +let jsx = ( +
+ + + {"Hello"} +
+); +function Property() { + throw new Error("Function not implemented."); +} diff --git a/analysis/src/Cli.ml b/analysis/src/Cli.ml index cc08302ea..21c128b63 100644 --- a/analysis/src/Cli.ml +++ b/analysis/src/Cli.ml @@ -70,6 +70,8 @@ let main () = ~col:(int_of_string col) | _ :: "dump" :: files -> Commands.dump files | [_; "documentSymbol"; path] -> Commands.documentSymbol ~path + | [_; "semanticTokens"; currentFile] -> + SemanticTokens.semanticTokens ~currentFile | [_; "hover"; path; line; col] -> Commands.hover ~path ~line:(int_of_string line) ~col:(int_of_string col) | [_; "references"; path; line; col] -> @@ -83,6 +85,6 @@ let main () = | _ -> prerr_endline help; exit 1 - ;; + main () diff --git a/analysis/src/Commands.ml b/analysis/src/Commands.ml index 74504e8ba..64b965347 100644 --- a/analysis/src/Commands.ml +++ b/analysis/src/Commands.ml @@ -314,7 +314,6 @@ let test ~path = print_endline ("Hover " ^ path ^ " " ^ string_of_int line ^ ":" ^ string_of_int col); - hover ~path ~line ~col | "ref" -> print_endline @@ -331,7 +330,6 @@ let test ~path = ("Rename " ^ path ^ " " ^ string_of_int line ^ ":" ^ string_of_int col ^ " " ^ newName) in - rename ~path ~line ~col ~newName | "com" -> print_endline @@ -349,6 +347,11 @@ let test ~path = close_out cout; completion ~path ~line ~col ~currentFile; Sys.remove currentFile + | "hig" -> + print_endline ("Highlight " ^ path); + SemanticTokens.command ~debug:true + ~emitter:(SemanticTokens.Token.createEmitter ()) + ~path | _ -> ()); print_newline ()) in diff --git a/analysis/src/SemanticTokens.ml b/analysis/src/SemanticTokens.ml new file mode 100644 index 000000000..f8293813a --- /dev/null +++ b/analysis/src/SemanticTokens.ml @@ -0,0 +1,447 @@ +(* + Generally speaking, semantic highlighting here takes care of categorizing identifiers, + since the kind of an identifier is highly context-specific and hard to catch with a grammar. + + The big exception is labels, whose location is not represented in the AST + E.g. function definition such as (~foo as _) =>, application (~foo=3) and prop
...>.
+  Labels are handled in the grammar, not here.
+  Punned labels such as (~foo) => are both labels and identifiers. They are overridden here.
+
+  There are 2 cases where the grammar and semantic highlighting work jointly.
+  The styles emitted in the grammar and here need to be kept in sync.
+  1) For jsx angled brackets, the grammar handles basic cases such as />
+     whose location is not in the AST. Instead, < and > are handled here.
+     Those would be difficult to disambiguate in a grammar.
+  2) Most operators are handled in the grammar, except < and >, which are handled here.
+     The reason is again that < and > would be difficult to disambiguate in a grammar.
+*)
+
+module Token = struct
+  (* This needs to stay synced with the same legend in `server.ts` *)
+  (* See https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_semanticTokens *)
+  type tokenType =
+    | Operator  (** < and > *)
+    | Variable  (** let x = *)
+    | Type  (** type t = *)
+    | JsxTag  (** the < and > in <div> *)
+    | Class  (** module M = *)
+    | EnumMember  (** variant A or poly variant #A *)
+    | Property  (** {x:...} *)
+    | JsxLowercase  (** div in <div>
*) + + type tokenModifiers = NoModifier + + let tokenTypeToString = function + | Operator -> "0" + | Variable -> "1" + | Type -> "2" + | JsxTag -> "3" + | Class -> "4" + | EnumMember -> "5" + | Property -> "6" + | JsxLowercase -> "7" + + let tokenTypeDebug = function + | Operator -> "Operator" + | Variable -> "Variable" + | Type -> "Type" + | JsxTag -> "JsxTag" + | Class -> "Class" + | EnumMember -> "EnumMember" + | Property -> "Property" + | JsxLowercase -> "JsxLowercase" + + let tokenModifiersToString = function NoModifier -> "0" + + type token = int * int * int * tokenType * tokenModifiers + + type emitter = { + mutable tokens : token list; + mutable lastLine : int; + mutable lastChar : int; + } + + let createEmitter () = {tokens = []; lastLine = 0; lastChar = 0} + + let add ~line ~char ~length ~type_ e = + let modifiers = NoModifier in + e.tokens <- (line, char, length, type_, modifiers) :: e.tokens + + let emitToken buf (line, char, length, type_, modifiers) e = + let deltaLine = line - e.lastLine in + let deltaChar = if deltaLine = 0 then char - e.lastChar else char in + e.lastLine <- line; + e.lastChar <- char; + if Buffer.length buf > 0 then Buffer.add_char buf ','; + Buffer.add_string buf + (string_of_int deltaLine ^ "," ^ string_of_int deltaChar ^ "," + ^ string_of_int length ^ "," ^ tokenTypeToString type_ ^ "," + ^ tokenModifiersToString modifiers) + + let emit e = + let sortedTokens = + e.tokens + |> List.sort (fun (l1, c1, _, _, _) (l2, c2, _, _, _) -> + if l1 = l2 then compare c1 c2 else compare l1 l2) + in + let buf = Buffer.create 1 in + sortedTokens |> List.iter (fun t -> e |> emitToken buf t); + Buffer.contents buf +end + +let locToPositions (loc : Location.t) = + (Utils.tupleOfLexing loc.loc_start, Utils.tupleOfLexing loc.loc_end) + +let posToString (loc, col) = Printf.sprintf "(%d,%d)" loc col + +let locToString (loc : Location.t) = + let posStart, posEnd = locToPositions loc in + Printf.sprintf "%s->%s" (posToString posStart) (posToString posEnd) + +let isLowercaseId id = + id <> "" + && + let c = id.[0] in + c == '_' || (c >= 'a' && c <= 'z') + +let isUppercaseId id = + id <> "" + && + let c = id.[0] in + c >= 'A' && c <= 'Z' + +let emitFromPos posStart posEnd ~type_ emitter = + let length = + if fst posStart = fst posEnd then snd posEnd - snd posStart else 0 + in + if length > 0 then + emitter + |> Token.add ~line:(fst posStart) ~char:(snd posStart) ~length ~type_ + +let emitFromLoc ~loc ~type_ emitter = + let posStart, posEnd = locToPositions loc in + emitter |> emitFromPos posStart posEnd ~type_ + +let emitLongident ?(backwards = false) ?(jsx = false) + ?(lowerCaseToken = if jsx then Token.JsxLowercase else Token.Variable) + ?(upperCaseToken = Token.Class) ?(lastToken = None) ?(posEnd = None) ~pos + ~lid ~debug emitter = + let rec flatten acc lid = + match lid with + | Longident.Lident txt -> txt :: acc + | Ldot (lid, txt) -> + let acc = if jsx && txt = "createElement" then acc else txt :: acc in + flatten acc lid + | _ -> acc + in + let rec loop pos segments = + match segments with + | [id] when isUppercaseId id || isLowercaseId id -> + let type_ = + match lastToken with + | Some type_ -> type_ + | None -> if isUppercaseId id then upperCaseToken else lowerCaseToken + in + let posAfter = (fst pos, snd pos + String.length id) in + let posEnd, lenMismatch = + (* There could be a length mismatch when ids are quoted + e.g. 
variable /"true" or object field {"x":...} *) + match posEnd with + | Some posEnd -> (posEnd, posEnd <> posAfter) + | None -> (posAfter, false) + in + if debug then + Printf.printf "Lident: %s %s%s %s\n" id (posToString pos) + (if lenMismatch then "->" ^ posToString posEnd else "") + (Token.tokenTypeDebug type_); + emitter |> emitFromPos pos posEnd ~type_ + | id :: segments when isUppercaseId id || isLowercaseId id -> + let type_ = if isUppercaseId id then upperCaseToken else lowerCaseToken in + if debug then + Printf.printf "Ldot: %s %s %s\n" id (posToString pos) + (Token.tokenTypeDebug type_); + let length = String.length id in + emitter |> emitFromPos pos (fst pos, snd pos + length) ~type_; + loop (fst pos, snd pos + length + 1) segments + | _ -> () + in + let segments = flatten [] lid in + if backwards then ( + let totalLength = segments |> String.concat "." |> String.length in + if snd pos >= totalLength then + loop (fst pos, snd pos - totalLength) segments) + else loop pos segments + +let emitVariable ~id ~debug ~loc emitter = + if debug then Printf.printf "Variable: %s %s\n" id (locToString loc); + emitter |> emitFromLoc ~loc ~type_:Variable + +let emitJsxOpen ~lid ~debug ~loc emitter = + emitter + |> emitLongident + ~pos:(Utils.tupleOfLexing loc.Location.loc_start) + ~lid ~jsx:true ~debug + +let emitJsxClose ~lid ~debug ~pos emitter = + emitter |> emitLongident ~backwards:true ~pos ~lid ~jsx:true ~debug + +let emitJsxTag ~debug ~name ~pos emitter = + if debug then Printf.printf "JsxTag %s: %s\n" name (posToString pos); + emitter |> emitFromPos pos (fst pos, snd pos + 1) ~type_:Token.JsxTag + +let emitType ~lid ~debug ~loc emitter = + emitter + |> emitLongident ~lowerCaseToken:Token.Type + ~pos:(Utils.tupleOfLexing loc.Location.loc_start) + ~lid ~debug + +let emitRecordLabel ~(label : Longident.t Location.loc) ~debug emitter = + emitter + |> emitLongident ~lowerCaseToken:Token.Property + ~pos:(Utils.tupleOfLexing label.loc.loc_start) + ~posEnd:(Some (Utils.tupleOfLexing label.loc.loc_end)) + ~lid:label.txt ~debug + +let emitVariant ~(name : Longident.t Location.loc) ~debug emitter = + emitter + |> emitLongident ~lastToken:(Some Token.EnumMember) + ~pos:(Utils.tupleOfLexing name.loc.loc_start) + ~lid:name.txt ~debug + +let command ~debug ~emitter ~path = + let processTypeArg (coreType : Parsetree.core_type) = + if debug then Printf.printf "TypeArg: %s\n" (locToString coreType.ptyp_loc) + in + let typ (mapper : Ast_mapper.mapper) (coreType : Parsetree.core_type) = + match coreType.ptyp_desc with + | Ptyp_constr ({txt = lid; loc}, args) -> + emitter |> emitType ~lid ~debug ~loc; + args |> List.iter processTypeArg; + Ast_mapper.default_mapper.typ mapper coreType + | _ -> Ast_mapper.default_mapper.typ mapper coreType + in + let type_declaration (mapper : Ast_mapper.mapper) + (tydecl : Parsetree.type_declaration) = + emitter + |> emitType ~lid:(Lident tydecl.ptype_name.txt) ~debug + ~loc:tydecl.ptype_name.loc; + Ast_mapper.default_mapper.type_declaration mapper tydecl + in + let pat (mapper : Ast_mapper.mapper) (p : Parsetree.pattern) = + match p.ppat_desc with + | Ppat_var {txt = id} -> + if isLowercaseId id then + emitter |> emitVariable ~id ~debug ~loc:p.ppat_loc; + Ast_mapper.default_mapper.pat mapper p + | Ppat_construct ({txt = Lident ("true" | "false")}, _) -> + (* Don't emit true or false *) + Ast_mapper.default_mapper.pat mapper p + | Ppat_record (cases, _) -> + cases + |> List.iter (fun (label, _) -> emitter |> emitRecordLabel ~label ~debug); + Ast_mapper.default_mapper.pat 
mapper p + | Ppat_construct (name, _) -> + emitter |> emitVariant ~name ~debug; + Ast_mapper.default_mapper.pat mapper p + | Ppat_type {txt = lid; loc} -> + emitter |> emitType ~lid ~debug ~loc; + Ast_mapper.default_mapper.pat mapper p + | _ -> Ast_mapper.default_mapper.pat mapper p + in + let expr (mapper : Ast_mapper.mapper) (e : Parsetree.expression) = + match e.pexp_desc with + | Pexp_ident {txt = lid; loc} -> + if lid <> Lident "not" then + emitter + |> emitLongident + ~pos:(Utils.tupleOfLexing loc.loc_start) + ~posEnd:(Some (Utils.tupleOfLexing loc.loc_end)) + ~lid ~debug; + Ast_mapper.default_mapper.expr mapper e + | Pexp_apply ({pexp_desc = Pexp_ident lident; pexp_loc}, args) + when Res_parsetree_viewer.isJsxExpression e -> + (* + Angled brackets: + - These are handled in the grammar: <> + - Here we handle `<` and `>` + + Component names: + - handled like other Longitent.t, except lowercase id is marked Token.JsxLowercase + *) + emitter (* --> emitJsxTag ~debug ~name:"<" + ~pos: + (let pos = Utils.tupleOfLexing e.pexp_loc.loc_start in + (fst pos, snd pos - 1 (* the AST skips the loc of < somehow *))); + emitter |> emitJsxOpen ~lid:lident.txt ~debug ~loc:pexp_loc; + + let posOfGreatherthanAfterProps = + let rec loop = function + | (Asttypes.Labelled "children", {Parsetree.pexp_loc = {loc_start}}) + :: _ -> + Utils.tupleOfLexing loc_start + | _ :: args -> loop args + | [] -> (* should not happen *) (-1, -1) + in + + loop args + in + let posOfFinalGreatherthan = + let pos = Utils.tupleOfLexing e.pexp_loc.loc_end in + (fst pos, snd pos - 1) + in + let selfClosing = + fst posOfGreatherthanAfterProps == fst posOfFinalGreatherthan + && snd posOfGreatherthanAfterProps + 1 == snd posOfFinalGreatherthan + (* there's an off-by one somehow in the AST *) + in + (if not selfClosing then + let lineStart, colStart = Utils.tupleOfLexing pexp_loc.loc_start in + let lineEnd, colEnd = Utils.tupleOfLexing pexp_loc.loc_end in + let length = if lineStart = lineEnd then colEnd - colStart else 0 in + let lineEndWhole, colEndWhole = Utils.tupleOfLexing e.pexp_loc.loc_end in + if length > 0 && colEndWhole > length then ( + emitter + |> emitJsxClose ~debug ~lid:lident.txt + ~pos:(lineEndWhole, colEndWhole - 1); + emitter (* <-- *) + |> emitJsxTag ~debug ~name:">" ~pos:posOfGreatherthanAfterProps; + emitter (* ... 
<-- *) + |> emitJsxTag ~debug ~name:">" ~pos:posOfFinalGreatherthan)); + + let _ = args |> List.map (fun (_lbl, arg) -> mapper.expr mapper arg) in + e + | Pexp_apply + ( { + pexp_desc = + Pexp_ident {txt = Longident.Lident (("<" | ">") as op); loc}; + }, + [_; _] ) -> + if debug then Printf.printf "Binary operator %s %s\n" op (locToString loc); + emitter |> emitFromLoc ~loc ~type_:Operator; + Ast_mapper.default_mapper.expr mapper e + | Pexp_record (cases, _) -> + cases + |> List.iter (fun (label, _) -> emitter |> emitRecordLabel ~label ~debug); + Ast_mapper.default_mapper.expr mapper e + | Pexp_field (_, label) | Pexp_setfield (_, label, _) -> + emitter |> emitRecordLabel ~label ~debug; + Ast_mapper.default_mapper.expr mapper e + | Pexp_construct ({txt = Lident ("true" | "false")}, _) -> + (* Don't emit true or false *) + Ast_mapper.default_mapper.expr mapper e + | Pexp_construct (name, _) -> + emitter |> emitVariant ~name ~debug; + Ast_mapper.default_mapper.expr mapper e + | _ -> Ast_mapper.default_mapper.expr mapper e + in + let module_expr (mapper : Ast_mapper.mapper) (me : Parsetree.module_expr) = + match me.pmod_desc with + | Pmod_ident {txt = lid; loc} -> + emitter + |> emitLongident ~pos:(Utils.tupleOfLexing loc.loc_start) ~lid ~debug; + Ast_mapper.default_mapper.module_expr mapper me + | _ -> Ast_mapper.default_mapper.module_expr mapper me + in + let module_binding (mapper : Ast_mapper.mapper) + (mb : Parsetree.module_binding) = + emitter + |> emitLongident + ~pos:(Utils.tupleOfLexing mb.pmb_name.loc.loc_start) + ~lid:(Longident.Lident mb.pmb_name.txt) ~debug; + Ast_mapper.default_mapper.module_binding mapper mb + in + let module_declaration (mapper : Ast_mapper.mapper) + (md : Parsetree.module_declaration) = + emitter + |> emitLongident + ~pos:(Utils.tupleOfLexing md.pmd_name.loc.loc_start) + ~lid:(Longident.Lident md.pmd_name.txt) ~debug; + Ast_mapper.default_mapper.module_declaration mapper md + in + let module_type (mapper : Ast_mapper.mapper) (mt : Parsetree.module_type) = + match mt.pmty_desc with + | Pmty_ident {txt = lid; loc} -> + emitter + |> emitLongident ~upperCaseToken:Token.Type + ~pos:(Utils.tupleOfLexing loc.loc_start) + ~lid ~debug; + Ast_mapper.default_mapper.module_type mapper mt + | _ -> Ast_mapper.default_mapper.module_type mapper mt + in + let module_type_declaration (mapper : Ast_mapper.mapper) + (mtd : Parsetree.module_type_declaration) = + emitter + |> emitLongident ~upperCaseToken:Token.Type + ~pos:(Utils.tupleOfLexing mtd.pmtd_name.loc.loc_start) + ~lid:(Longident.Lident mtd.pmtd_name.txt) ~debug; + Ast_mapper.default_mapper.module_type_declaration mapper mtd + in + let open_description (mapper : Ast_mapper.mapper) + (od : Parsetree.open_description) = + emitter + |> emitLongident + ~pos:(Utils.tupleOfLexing od.popen_lid.loc.loc_start) + ~lid:od.popen_lid.txt ~debug; + Ast_mapper.default_mapper.open_description mapper od + in + let label_declaration (mapper : Ast_mapper.mapper) + (ld : Parsetree.label_declaration) = + emitter + |> emitRecordLabel + ~label:{loc = ld.pld_name.loc; txt = Longident.Lident ld.pld_name.txt} + ~debug; + Ast_mapper.default_mapper.label_declaration mapper ld + in + let constructor_declaration (mapper : Ast_mapper.mapper) + (cd : Parsetree.constructor_declaration) = + emitter + |> emitVariant + ~name:{loc = cd.pcd_name.loc; txt = Longident.Lident cd.pcd_name.txt} + ~debug; + Ast_mapper.default_mapper.constructor_declaration mapper cd + in + + let mapper = + { + Ast_mapper.default_mapper with + constructor_declaration; + expr; + 
label_declaration; + module_declaration; + module_binding; + module_expr; + module_type; + module_type_declaration; + open_description; + pat; + typ; + type_declaration; + } + in + + if Filename.check_suffix path ".res" then ( + let parser = + Res_driver.parsingEngine.parseImplementation ~forPrinter:false + in + let {Res_driver.parsetree = structure; diagnostics} = + parser ~filename:path + in + if debug then + Printf.printf "structure items:%d diagnostics:%d \n" + (List.length structure) (List.length diagnostics); + mapper.structure mapper structure |> ignore) + else + let parser = Res_driver.parsingEngine.parseInterface ~forPrinter:false in + let {Res_driver.parsetree = signature; diagnostics} = + parser ~filename:path + in + if debug then + Printf.printf "signature items:%d diagnostics:%d \n" + (List.length signature) (List.length diagnostics); + mapper.signature mapper signature |> ignore + +let semanticTokens ~currentFile = + let emitter = Token.createEmitter () in + command ~emitter ~debug:false ~path:currentFile; + Printf.printf "{\"data\":[%s]}" (Token.emit emitter) diff --git a/analysis/tests/src/Highlight.res b/analysis/tests/src/Highlight.res new file mode 100644 index 000000000..c3eff1792 --- /dev/null +++ b/analysis/tests/src/Highlight.res @@ -0,0 +1,134 @@ +module M = { + module C = Component +} + +let _c = + +let _mc = + +let _d =
+
+let _d2 =
+  <div>
+    {React.string("abc")}
+    <div> {React.string("abc")} </div>
+    {React.string("abc")}
+    {React.string("abc")}
+  </div>
+ +type pair<'x, 'y> = ('x, 'y) + +type looooooooooooooooooooooooooooooooooooooong_int = int + +type looooooooooooooooooooooooooooooooooooooong_string = string + +type pairIntString = list< + pair< + looooooooooooooooooooooooooooooooooooooong_int, + looooooooooooooooooooooooooooooooooooooong_string, + >, +> + +let _ = !(3 < 4) || 3 > 4 + +module type MT = { + module DDF: { + + } +} + +module DDF: MT = { + module DDF = { + + } +} + +module XX = { + module YY = { + type t = int + } +} + +open XX.YY + +type tt = t + +// ^hig + +module T = { + type someRecord<'typeParameter> = { + someField: int, + someOtherField: string, + theParam: 'typeParameter, + } + + type someEnum = A | B | C +} + +let foo = x => x.T.someField + +let add = (~hello as x, ~world) => x + world + +let _ = add(~hello=3) + +let _ =
+
+module SomeComponent = {
+  module Nested = {
+    @react.component
+    let make = (~children) => {
+      <> {children} </>
+    }
+  }
+}
+
+let _ = <SomeComponent.Nested> <div /> </SomeComponent.Nested>
+ +// true/false +let _ = true || false + +// to/downto as label +let toAs = (~to as x) => x +let _toEquals = toAs(~to=10) + +let to = 1 +for _ in to + to to to + to { + () +} + +module ToAsProp = { + @react.component + let make = (~to) => { + <> {React.int(to)} + } +} +let _ = + +// quoted identifiers +let \"true" = 4 +let _ = \"true" + +let enumInModule = T.A + +type typeInModule = XX.YY.t + +module QQ = { + type somePolyEnumType = [ + | #someMember + | #AnotherMember + | #SomeMemberWithPayload(list) + | #"fourth Member" + ] +} + +let _ = x => + switch x { + | #stuff => 3 + | #...QQ.somePolyEnumType => 4 + } + +let _ = 3 == 3 || 3 === 3 + +let _ = (~_type_ as _) => () + +let _ = {"abc": 34} diff --git a/analysis/tests/src/expected/Debug.res.txt b/analysis/tests/src/expected/Debug.res.txt index f63aaa815..2897bf280 100644 --- a/analysis/tests/src/expected/Debug.res.txt +++ b/analysis/tests/src/expected/Debug.res.txt @@ -4,7 +4,7 @@ Dependencies: @rescript/react Source directories: tests/node_modules/@rescript/react/src tests/node_modules/@rescript/react/src/legacy Source files: tests/node_modules/@rescript/react/src/React.res tests/node_modules/@rescript/react/src/ReactDOM.res tests/node_modules/@rescript/react/src/ReactDOMServer.res tests/node_modules/@rescript/react/src/ReactDOMStyle.res tests/node_modules/@rescript/react/src/ReactEvent.res tests/node_modules/@rescript/react/src/ReactEvent.resi tests/node_modules/@rescript/react/src/ReactTestUtils.res tests/node_modules/@rescript/react/src/ReactTestUtils.resi tests/node_modules/@rescript/react/src/RescriptReactErrorBoundary.res tests/node_modules/@rescript/react/src/RescriptReactErrorBoundary.resi tests/node_modules/@rescript/react/src/RescriptReactRouter.res tests/node_modules/@rescript/react/src/RescriptReactRouter.resi tests/node_modules/@rescript/react/src/legacy/ReactDOMRe.res tests/node_modules/@rescript/react/src/legacy/ReasonReact.res Source directories: tests/src tests/src/expected -Source files: tests/src/Auto.res tests/src/CompletePrioritize1.res tests/src/CompletePrioritize2.res tests/src/Completion.res tests/src/Component.res tests/src/Component.resi tests/src/Cross.res tests/src/Debug.res tests/src/Definition.res tests/src/DefinitionWithInterface.res tests/src/DefinitionWithInterface.resi tests/src/Div.res tests/src/Fragment.res tests/src/Hover.res tests/src/Jsx.res tests/src/Jsx.resi tests/src/LongIdentTest.res tests/src/Obj.res tests/src/Patterns.res tests/src/RecModules.res tests/src/RecordCompletion.res tests/src/References.res tests/src/ReferencesWithInterface.res tests/src/ReferencesWithInterface.resi tests/src/Rename.res tests/src/RenameWithInterface.res tests/src/RenameWithInterface.resi tests/src/TableclothMap.ml tests/src/TableclothMap.mli tests/src/TypeDefinition.res +Source files: tests/src/Auto.res tests/src/CompletePrioritize1.res tests/src/CompletePrioritize2.res tests/src/Completion.res tests/src/Component.res tests/src/Component.resi tests/src/Cross.res tests/src/Debug.res tests/src/Definition.res tests/src/DefinitionWithInterface.res tests/src/DefinitionWithInterface.resi tests/src/Div.res tests/src/Fragment.res tests/src/Highlight.res tests/src/Hover.res tests/src/Jsx.res tests/src/Jsx.resi tests/src/LongIdentTest.res tests/src/Obj.res tests/src/Patterns.res tests/src/RecModules.res tests/src/RecordCompletion.res tests/src/References.res tests/src/ReferencesWithInterface.res tests/src/ReferencesWithInterface.resi tests/src/Rename.res tests/src/RenameWithInterface.res tests/src/RenameWithInterface.resi 
tests/src/TableclothMap.ml tests/src/TableclothMap.mli tests/src/TypeDefinition.res Impl cmt:tests/lib/bs/src/Auto.cmt res:tests/src/Auto.res Impl cmt:tests/lib/bs/src/CompletePrioritize1.cmt res:tests/src/CompletePrioritize1.res Impl cmt:tests/lib/bs/src/CompletePrioritize2.cmt res:tests/src/CompletePrioritize2.res @@ -16,6 +16,7 @@ Impl cmt:tests/lib/bs/src/Definition.cmt res:tests/src/Definition.res IntfAndImpl cmti:tests/lib/bs/src/DefinitionWithInterface.cmti resi:tests/src/DefinitionWithInterface.resi cmt:tests/lib/bs/src/DefinitionWithInterface.cmt res:tests/src/DefinitionWithInterface.res Impl cmt:tests/lib/bs/src/Div.cmt res:tests/src/Div.res Impl cmt:tests/lib/bs/src/Fragment.cmt res:tests/src/Fragment.res +Impl cmt:tests/lib/bs/src/Highlight.cmt res:tests/src/Highlight.res Impl cmt:tests/lib/bs/src/Hover.cmt res:tests/src/Hover.res IntfAndImpl cmti:tests/lib/bs/src/Jsx.cmti resi:tests/src/Jsx.resi cmt:tests/lib/bs/src/Jsx.cmt res:tests/src/Jsx.res Impl cmt:tests/lib/bs/src/LongIdentTest.cmt res:tests/src/LongIdentTest.res diff --git a/analysis/tests/src/expected/Highlight.res.txt b/analysis/tests/src/expected/Highlight.res.txt new file mode 100644 index 000000000..2c28ad8b0 --- /dev/null +++ b/analysis/tests/src/expected/Highlight.res.txt @@ -0,0 +1,143 @@ +Highlight tests/src/Highlight.res +structure items:38 diagnostics:0 +Lident: M (0,7) Class +Lident: C (1,9) Class +Lident: Component (1,13) Class +JsxTag <: (4,9) +Lident: Component (4,10) Class +Variable: _c (4,4)->(4,6) +JsxTag <: (6,10) +Ldot: M (6,11) Class +Lident: C (6,13) Class +Variable: _mc (6,4)->(6,7) +JsxTag <: (8,9) +Lident: div (8,10) JsxLowercase +Variable: _d (8,4)->(8,6) +JsxTag <: (11,2) +Lident: div (11,3) JsxLowercase +Lident: div (16,4) JsxLowercase +JsxTag >: (11,6) +JsxTag >: (16,7) +Ldot: React (12,5) Class +Lident: string (12,11) Variable +JsxTag <: (13,4) +Lident: div (13,5) JsxLowercase +Lident: div (13,34) JsxLowercase +JsxTag >: (13,8) +JsxTag >: (13,37) +Ldot: React (13,11) Class +Lident: string (13,17) Variable +Ldot: React (14,5) Class +Lident: string (14,11) Variable +Ldot: React (15,5) Class +Lident: string (15,11) Variable +Variable: _d2 (10,4)->(10,7) +Lident: pair (18,5) Type +Lident: looooooooooooooooooooooooooooooooooooooong_int (20,5) Type +Lident: int (20,54) Type +Lident: looooooooooooooooooooooooooooooooooooooong_string (22,5) Type +Lident: string (22,57) Type +Lident: pairIntString (24,5) Type +Lident: list (24,21) Type +TypeArg: (25,2)->(28,3) +Lident: pair (25,2) Type +TypeArg: (26,4)->(26,50) +TypeArg: (27,4)->(27,53) +Lident: looooooooooooooooooooooooooooooooooooooong_int (26,4) Type +Lident: looooooooooooooooooooooooooooooooooooooong_string (27,4) Type +Binary operator < (31,12)->(31,13) +Binary operator > (31,22)->(31,23) +Lident: MT (33,12) Type +Lident: DDF (34,9) Class +Lident: DDF (39,7) Class +Lident: MT (39,12) Type +Lident: DDF (40,9) Class +Lident: XX (45,7) Class +Lident: YY (46,9) Class +Lident: t (47,9) Type +Lident: int (47,13) Type +Ldot: XX (51,5) Class +Lident: YY (51,8) Class +Lident: tt (53,5) Type +Lident: t (53,10) Type +Lident: T (57,7) Class +Lident: someRecord (58,7) Type +Lident: someField (59,4) Property +Lident: int (59,15) Type +Lident: someOtherField (60,4) Property +Lident: string (60,20) Type +Lident: theParam (61,4) Property +Lident: someEnum (64,7) Type +Lident: A (64,18) EnumMember +Lident: B (64,22) EnumMember +Lident: C (64,26) EnumMember +Ldot: T (67,17) Class +Lident: someField (67,19) Property +Lident: x (67,15) Variable +Variable: x 
(67,10)->(67,11) +Variable: foo (67,4)->(67,7) +Lident: x (69,35) Variable +Lident: world (69,39) Variable +Variable: world (69,24)->(69,30) +Variable: x (69,21)->(69,22) +Variable: add (69,4)->(69,7) +Lident: add (71,8) Variable +JsxTag <: (73,8) +Lident: div (73,9) JsxLowercase +Lident: div (73,36) JsxLowercase +JsxTag >: (73,24) +JsxTag >: (73,39) +JsxTag <: (73,26) +Lident: div (73,27) JsxLowercase +Lident: SomeComponent (75,7) Class +Lident: Nested (76,9) Class +Lident: children (79,10) Variable +Variable: children (78,16)->(78,25) +Variable: make (78,8)->(78,12) +JsxTag <: (84,8) +Ldot: SomeComponent (84,9) Class +Lident: Nested (84,23) Class +Ldot: SomeComponent (84,41) Class +Lident: Nested (84,55) Class +JsxTag >: (84,29) +JsxTag >: (84,61) +JsxTag <: (84,31) +Lident: div (84,32) JsxLowercase +Lident: x (90,25) Variable +Variable: x (90,19)->(90,20) +Variable: toAs (90,4)->(90,8) +Lident: toAs (91,16) Variable +Variable: _toEquals (91,4)->(91,13) +Variable: to (93,4)->(93,6) +Lident: to (94,20) Variable +Lident: to (94,25) Variable +Lident: to (94,9) Variable +Lident: to (94,14) Variable +Lident: ToAsProp (98,7) Class +Lident: to (101,18) Variable +Ldot: React (101,8) Class +Lident: int (101,14) Variable +Variable: to (100,14)->(100,17) +Variable: make (100,6)->(100,10) +JsxTag <: (104,8) +Lident: ToAsProp (104,9) Class +Variable: true (107,4)->(107,11) +Lident: true (108,8)->(108,15) Variable +Ldot: T (110,19) Class +Lident: A (110,21) EnumMember +Variable: enumInModule (110,4)->(110,16) +Lident: typeInModule (112,5) Type +Ldot: XX (112,20) Class +Ldot: YY (112,23) Class +Lident: t (112,26) Type +Lident: QQ (114,7) Class +Lident: somePolyEnumType (115,7) Type +Lident: list (118,29) Type +TypeArg: (118,34)->(118,37) +Lident: int (118,34) Type +Ldot: QQ (126,8) Class +Lident: somePolyEnumType (126,11) Type +Lident: x (124,9) Variable +Variable: x (123,8)->(123,9) +Lident: abc (133,9)->(133,14) Property + diff --git a/grammars/rescript.tmLanguage.json b/grammars/rescript.tmLanguage.json index 5932d7eae..9ed6c4f42 100644 --- a/grammars/rescript.tmLanguage.json +++ b/grammars/rescript.tmLanguage.json @@ -3,23 +3,43 @@ "name": "ReScript", "scopeName": "source.rescript", "repository": { - "RE_IDENT": { - "match": "[a-z_][0-9a-zA-Z_]*" - }, - "RE_ATTRIBUTE": { - "match": "[A-Za-z_][A-Za-z0-9_\\.]*" + "RE_KEYWORD_CONTROL": { + "name": "keyword.control", + "match": "\\b(and|as|assert|constraint|downto|else|exception|external|for|if|in|lazy|mutable|rec|switch|to|try|when|while|with)\\b" }, - "RE_MODULE_IDENT": { - "name": "entity.name.namespace", - "match": "[A-Z_][0-9a-zA-Z_]*" + "RE_TO_DOWNTO_AS_LABELS": { + "patterns": [ + { + "match": "(to|downto)\\s*(=)", + "captures": { + "1": { + "name": "variable" + }, + "2": { + "name": "keyword.operator keyword" + } + } + }, + { + "match": "(to|downto)\\s*(as)", + "captures": { + "1": { + "name": "variable" + }, + "2": { + "name": "keyword.control" + } + } + } + ] }, - "RE_KEYWORDS": { - "name": "keyword.control", - "match": "\\b(and|as|assert|constraint|downto|else|exception|external|false|for|if|in|include|lazy|let|module|mutable|of|open|rec|switch|to|true|try|type|when|while|with)\\b" + "RE_CONSTANTS_BOOL": { + "name": "constant.language.boolean", + "match": "\\b(false|true)\\b" }, - "RE_LITERAL": { - "name": "constant.language", - "match": "\\b(true|false)\\b" + "RE_KEYWORD": { + "name": "keyword", + "match": "\\b(include|let|module|of|open|type)\\b" }, "commentLine": { "match": "//.*", @@ -79,25 +99,23 @@ } ] }, - "storage": { - "patterns": [ 
- { - "match": "\\btype\\b", - "name": "storage.type" - } - ] - }, "keyword": { "patterns": [ { - "include": "#RE_KEYWORDS" + "include": "#RE_TO_DOWNTO_AS_LABELS" + }, + { + "include": "#RE_KEYWORD_CONTROL" + }, + { + "include": "#RE_KEYWORD" } ] }, "constant": { "patterns": [ { - "include": "#RE_LITERAL" + "include": "#RE_CONSTANTS_BOOL" } ] }, @@ -147,13 +165,13 @@ "begin": "\\$\\{", "beginCaptures": { "0": { - "name": "punctuation.section.interpolation.begin" + "name": "punctuation.definition.template-expression.begin" } }, "end": "\\}", "endCaptures": { "0": { - "name": "punctuation.section.interpolation.end" + "name": "punctuation.definition.template-expression.end" } }, "patterns": [ @@ -199,6 +217,14 @@ } ] }, + "defaultIdIsVariable": { + "patterns": [ + { + "match": "[A-Za-z_][A-Za-z0-9_]*", + "name": "variable" + } + ] + }, "number": { "patterns": [ { @@ -211,7 +237,7 @@ "patterns": [ { "match": "->|\\|\\||&&|\\+\\+|\\*\\*|\\+\\.|\\+|-\\.|-|\\*\\.|\\*|/\\.|/|\\.\\.\\.|\\.\\.|===|==|\\^|:=|!|>=(?! *\\?)|<=|=", - "name": "keyword.operator keyword" + "name": "keyword.operator" }, { "match": "\\|>", @@ -222,20 +248,24 @@ "constructor": { "patterns": [ { - "match": "\\b[A-Z][0-9a-zA-Z_]*\\b", - "name": "variable.function variable.other" + "match": "(#)\\s*([a-zA-Z][0-9a-zA-Z_]*)\\b", + "captures": { + "1": { + "name": "variable.other.enummember" + }, + "2": { + "name": "variable.other.enummember" + } + } }, { - "match": "(#)(\\.\\.\\.)?([a-zA-Z][0-9a-zA-Z_]*)\\b", + "match": "(#)\\s*(\\.\\.\\.)\\b", "captures": { "1": { - "name": "punctuation.definition.keyword" + "name": "variable.other.enummember" }, "2": { - "name": "punctuation.definition.keyword" - }, - "3": { - "name": "variable.function variable.other" + "name": "variable.other.enummember" } } }, @@ -243,7 +273,7 @@ "match": "(#)", "captures": { "1": { - "name": "punctuation.definition.keyword" + "name": "variable.other.enummember" } } } @@ -286,7 +316,7 @@ "match": "(%%?|@@?)([A-Za-z_][A-Za-z0-9_\\.]*)", "captures": { "1": { - "name": "storage.modifier punctuation.definition.annotation" + "name": "punctuation.decorator" }, "2": { "patterns": [ @@ -305,13 +335,13 @@ "name": "invalid.deprecated" }, "2": { - "name": "variable.annotation" + "name": "entity.name.function" } } }, { "match": "[A-Za-z_][A-Za-z0-9_\\.]*", - "name": "variable.annotation" + "name": "entity.name.function" } ] } @@ -320,118 +350,8 @@ ] }, "jsx": { - "patterns": [ - { - "match": "<>||/>" - }, - { - "match": "||", + "name": "punctuation.definition.tag" } }, "patterns": [ @@ -494,6 +414,9 @@ }, { "include": "#punctuations" + }, + { + "include": "#defaultIdIsVariable" } ] } diff --git a/package.json b/package.json index 637b453b1..17563d644 100644 --- a/package.json +++ b/package.json @@ -28,6 +28,14 @@ ], "main": "./client/out/extension", "contributes": { + "semanticTokenScopes": [ + { + "scopes": { + "jsx-lowercase": ["entity.name.tag"], + "jsx-tag": ["punctuation.definition.tag"] + } + } + ], "jsonValidation": [ { "fileMatch": "bsconfig.json", diff --git a/server/src/server.ts b/server/src/server.ts index a31c2a6b1..4b5c39049 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -15,10 +15,9 @@ import * as utils from "./utils"; import * as c from "./constants"; import * as chokidar from "chokidar"; import { assert } from "console"; -import { fileURLToPath, pathToFileURL } from "url"; +import { fileURLToPath } from "url"; import { ChildProcess } from "child_process"; import { WorkspaceEdit } from "vscode-languageserver"; -import { TextEdit } from 
"vscode-languageserver-types"; // https://microsoft.github.io/language-server-protocol/specification#initialize // According to the spec, there could be requests before the 'initialize' request. Link in comment tells how to handle them. @@ -38,17 +37,17 @@ let projectsFiles: Map< // ^ caching AND states AND distributed system. Why does LSP has to be stupid like this // will be properly defined later depending on the mode (stdio/node-rpc) -let send: (msg: m.Message) => void = (_) => { }; +let send: (msg: m.Message) => void = (_) => {}; interface CreateInterfaceRequestParams { uri: string; } -let createInterfaceRequest = - new v.RequestType< - CreateInterfaceRequestParams, - string, - void>("rescript-vscode.create_interface"); +let createInterfaceRequest = new v.RequestType< + CreateInterfaceRequestParams, + string, + void +>("rescript-vscode.create_interface"); interface OpenCompiledFileParams { uri: string; @@ -66,9 +65,8 @@ let sendUpdatedDiagnostics = () => { path.join(projectRootPath, c.compilerLogPartialPath), { encoding: "utf-8" } ); - let { done, result: filesAndErrors } = utils.parseCompilerLogOutput( - content - ); + let { done, result: filesAndErrors } = + utils.parseCompilerLogOutput(content); // diff Object.keys(filesAndErrors).forEach((file) => { @@ -290,7 +288,12 @@ function typeDefinition(msg: p.RequestMessage) { let filePath = fileURLToPath(params.textDocument.uri); let response = utils.runAnalysisCommand( filePath, - ["typeDefinition", filePath, params.position.line, params.position.character], + [ + "typeDefinition", + filePath, + params.position.line, + params.position.character, + ], msg ); return response; @@ -323,7 +326,7 @@ function prepareRename(msg: p.RequestMessage): m.ResponseMessage { ); let result: p.Range | null = null; if (locations !== null) { - locations.forEach(loc => { + locations.forEach((loc) => { if ( path.normalize(fileURLToPath(loc.uri)) === path.normalize(fileURLToPath(params.textDocument.uri)) @@ -337,14 +340,14 @@ function prepareRename(msg: p.RequestMessage): m.ResponseMessage { end.line >= pos.line ) { result = loc.range; - }; + } } }); - }; + } return { jsonrpc: c.jsonrpcVersion, id: msg.id, - result + result, }; } @@ -352,23 +355,22 @@ function rename(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_rename let params = msg.params as p.RenameParams; let filePath = fileURLToPath(params.textDocument.uri); - let documentChanges: - | (p.RenameFile | p.TextDocumentEdit)[] - | null = utils.runAnalysisAfterSanityCheck(filePath, [ + let documentChanges: (p.RenameFile | p.TextDocumentEdit)[] | null = + utils.runAnalysisAfterSanityCheck(filePath, [ "rename", filePath, params.position.line, params.position.character, - params.newName + params.newName, ]); let result: WorkspaceEdit | null = null; if (documentChanges !== null) { result = { documentChanges }; - }; + } let response: m.ResponseMessage = { jsonrpc: c.jsonrpcVersion, id: msg.id, - result + result, }; return response; } @@ -385,6 +387,23 @@ function documentSymbol(msg: p.RequestMessage) { return response; } +function semanticTokens(msg: p.RequestMessage) { + // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_semanticTokens + let params = msg.params as p.SemanticTokensParams; + let filePath = fileURLToPath(params.textDocument.uri); + let code = getOpenedFileContent(params.textDocument.uri); + let extension = path.extname(params.textDocument.uri); + let tmpname = 
utils.createFileInTempDir(extension); + fs.writeFileSync(tmpname, code, { encoding: "utf-8" }); + let response = utils.runAnalysisCommand( + filePath, + ["semanticTokens", tmpname], + msg + ); + fs.unlink(tmpname, () => null); + return response; +} + function completion(msg: p.RequestMessage) { let params = msg.params as p.ReferenceParams; let filePath = fileURLToPath(params.textDocument.uri); @@ -739,6 +758,24 @@ function onMessage(msg: m.Message) { // disabled right now until we use the parser to show non-stale symbols per keystroke // documentSymbolProvider: true, completionProvider: { triggerCharacters: [".", ">", "@", "~", '"'] }, + semanticTokensProvider: { + legend: { + tokenTypes: [ + "operator", + "variable", + "type", + "jsx-tag", + "class", + "enumMember", + "property", + "jsx-lowercase", + ], + tokenModifiers: [], + }, + documentSelector: null, + // TODO: Support range for full, and add delta support + full: true, + }, }, }; let response: m.ResponseMessage = { @@ -797,6 +834,8 @@ function onMessage(msg: m.Message) { send(documentSymbol(msg)); } else if (msg.method === p.CompletionRequest.method) { send(completion(msg)); + } else if (msg.method === p.SemanticTokensRequest.method) { + send(semanticTokens(msg)); } else if (msg.method === p.DocumentFormattingRequest.method) { let responses = format(msg); responses.forEach((response) => send(response));
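Note on the wire format (illustrative, not part of the patch): the analysis binary's `semanticTokens` command prints `{"data":[...]}` where each token is five integers — deltaLine, deltaChar, length, tokenType, tokenModifiers — as produced by `Token.emitToken`, and the tokenType index must line up with the legend registered in `server.ts` above. The sketch below, using the hypothetical names `decodeSemanticTokens` and `DecodedToken`, shows how a client could turn that flat array back into absolute positions; it is a minimal TypeScript illustration assuming the eight-entry legend from this diff.

// Illustrative decoder (hypothetical helper, not part of this diff).
const tokenTypes = [
  "operator",
  "variable",
  "type",
  "jsx-tag",
  "class",
  "enumMember",
  "property",
  "jsx-lowercase",
];

interface DecodedToken {
  line: number; // 0-based line, matching Token.add ~line
  char: number; // 0-based start character
  length: number; // token length in characters
  tokenType: string; // resolved through the legend above
}

function decodeSemanticTokens(data: number[]): DecodedToken[] {
  const tokens: DecodedToken[] = [];
  let line = 0;
  let char = 0;
  // Each token is 5 integers: deltaLine, deltaChar, length, tokenType, tokenModifiers.
  for (let i = 0; i + 4 < data.length; i += 5) {
    const [deltaLine, deltaChar, length, typeIndex] = data.slice(i, i + 4);
    line += deltaLine;
    // deltaChar is relative to the previous token only when both are on the same line.
    char = deltaLine === 0 ? char + deltaChar : deltaChar;
    tokens.push({ line, char, length, tokenType: tokenTypes[typeIndex] });
  }
  return tokens;
}

// A variable at (0,4) length 1 and a type at (2,5) length 1 are encoded as:
console.log(decodeSemanticTokens([0, 4, 1, 1, 0, 2, 5, 1, 2, 0]));
// -> [ { line: 0, char: 4, length: 1, tokenType: "variable" },
//      { line: 2, char: 5, length: 1, tokenType: "type" } ]

Only `full` requests are implemented in this diff; range and delta requests (noted as a TODO in the server capabilities) would reuse the same encoding.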