diff --git a/.gitignore b/.gitignore index 5dc814def..77951f065 100644 --- a/.gitignore +++ b/.gitignore @@ -24,4 +24,7 @@ rescript-tools.exe _opam/ _build/ -*.tsbuildinfo \ No newline at end of file +*.tsbuildinfo + +# VSCode test downloads +.vscode-test/ \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 54407d177..898393cbc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,34 @@ > - :house: [Internal] > - :nail_care: [Polish] +## [Unreleased] + +#### :bug: Bug fix + +- Fix Code Analyzer cwd/binary lookup in monorepos (run from workspace root). +- Fix monorepo build detection by only watching the workspace root `.compiler.log`. +- Fix Start Build for ReScript v12+ projects by preferring `rescript.exe`. +- Take namespace into account for incremental cleanup. https://github.com/rescript-lang/rescript-vscode/pull/1164 +- Potential race condition in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167 +- Fix extension crash triggered by incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1169 +- Fix file watchers on Windows when using WSL. https://github.com/rescript-lang/rescript-vscode/pull/1178 + +#### :nail_care: Polish + +- Stale .compiler.log can still spill through in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167 + +## 1.72.0 + +#### :bug: Bug fix + +- Fix rewatch lockfile detection on Windows. https://github.com/rescript-lang/rescript-vscode/pull/1160 +- Override default `initialConfiguration` with user specific config. https://github.com/rescript-lang/rescript-vscode/pull/1162 + +#### :nail_care: Polish + +- Resolve symlinks when finding platform binaries. https://github.com/rescript-lang/rescript-vscode/pull/1154 +- Use `window/logMessage` in LSP Server for logging. 
https://github.com/rescript-lang/rescript-vscode/pull/1162 + ## 1.70.0 #### :bug: Bug fix diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 65650bb1e..70cebfb47 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -112,6 +112,59 @@ We call a few binaries and it's tricky to call them properly cross-platform. Her ## Rough Description Of How The Plugin Works +### Text Changes + +The flow below shows how the LSP server reacts to incremental text changes and produces diagnostics: + +```mermaid +flowchart TD + A[Your ReScript file in your editor] + B[LSP Client] + C[LSP Server] + D[bsc] + + A -->|Type a character| B + B -->|textDocument/didChange| C + + subgraph LSP_Server_Internal_Flow["LSP Server"] + C1[triggerIncrementalCompilationOfFile] + C2[compileContents] + C3[figureOutBscArgs] + C4[parse .compiler.log] + end + + C --> C1 + C1 --> C2 --> C3 + C3 -->|invoke| D + D -->|writes| C4 + C4 -->|textDocument/publishDiagnostics| B +``` + +### Completion + +The flow below shows how the LSP server handles completion requests by delegating to the native analysis binary: + +```mermaid +flowchart TD + A[Your ReScript file in your editor] + B[LSP Client] + C[LSP Server] + D[rescript-editor-analysis.exe] + + A -->|Trigger completion| B + B -->|textDocument/completion| C + + subgraph LSP_Server_Internal_Flow["LSP Server"] + C1[shell out to rescript-editor-analysis.exe] + C2[build completion response] + end + + C --> C1 + C1 -->|exec| D + D --> C2 + C2 -->|textDocument/completion response| B +``` + ### Editor Diagnostics They should be synced in from `lib/bs/.compiler.log` build. Don't take them from other places. 
diff --git a/analysis/examples/example-project/.vscode/settings.json b/analysis/examples/example-project/.vscode/settings.json new file mode 100644 index 000000000..6c4df7cf5 --- /dev/null +++ b/analysis/examples/example-project/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "rescript.settings.logLevel": "log" +} diff --git a/analysis/examples/example-project/bsconfig.json b/analysis/examples/example-project/bsconfig.json deleted file mode 100644 index f59290000..000000000 --- a/analysis/examples/example-project/bsconfig.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "tryit", - "sources": "src", - "bsc-flags": ["-bs-super-errors", "-open Belt"], - "warnings": { - "number": "-32-26-27-33" - }, - "bs-dependencies": ["reason-react"], - "reason": { "react-jsx": 3 }, - "namespace": "my-namespace", - "reanalyze": { - "analysis": ["dce", "exception"] - } -} diff --git a/analysis/examples/example-project/package-lock.json b/analysis/examples/example-project/package-lock.json index 61650f529..a20ccc046 100644 --- a/analysis/examples/example-project/package-lock.json +++ b/analysis/examples/example-project/package-lock.json @@ -1,16 +1,170 @@ { + "name": "tryit", + "lockfileVersion": 3, "requires": true, - "lockfileVersion": 1, - "dependencies": { - "reason-react": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/reason-react/-/reason-react-0.9.1.tgz", - "integrity": "sha512-nlH0O2TDy9KzOLOW+vlEQk4ExHOeciyzFdoLcsmmiit6hx6H5+CVDrwJ+8aiaLT/kqK5xFOjy4PS7PftWz4plA==" - }, - "rescript": { - "version": "9.1.2", - "resolved": "https://registry.npmjs.org/rescript/-/rescript-9.1.2.tgz", - "integrity": "sha512-4wHvTDv3nyYnAPJHcg1RGG8z7u3HDiBf6RN3P/dITDv859Qo35aKOzJWQtfBzbAs0EKNafLqei3TnUqiAv6BwQ==" + "packages": { + "": { + "name": "tryit", + "dependencies": { + "@rescript/react": "^0.14.0", + "rescript": "12.0.2" + } + }, + "node_modules/@rescript/darwin-arm64": { + "version": "12.0.2", + "resolved": 
"https://registry.npmjs.org/@rescript/darwin-arm64/-/darwin-arm64-12.0.2.tgz", + "integrity": "sha512-GcfiPE0L2N7smWXMBEW4pvlcqblSbKh2tisZTuOaLPr9+WmaXnBLap6OUT+XoEWzTY0RA4QURfpNJLAgqZPrzw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/darwin-x64": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/darwin-x64/-/darwin-x64-12.0.2.tgz", + "integrity": "sha512-di4eucoDdLsLqz86h7Cwywsmt+xFup7rpVlkx8L7i22RTqyTw+CD6aGjFj1Ddj2sVUd5SSFiE9Hj86QggzUgbg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/linux-arm64": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/linux-arm64/-/linux-arm64-12.0.2.tgz", + "integrity": "sha512-x6WOyimX0kw9DGADQ02R2rmvKu8SQOrskfYrKREfc2CgDT+FOKAH+qxvpUiljKhrvthoHkVBJOXtlKk854OdjQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/linux-x64": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/linux-x64/-/linux-x64-12.0.2.tgz", + "integrity": "sha512-5QHE+Ca1h3qXxDkw343923WTEdEyBeXphrv+8SZLhLOFNabQ5t6m4rWf52RluDqQpa67j6GB8kVAZpqc2rA2Qw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/react": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@rescript/react/-/react-0.14.0.tgz", + "integrity": "sha512-ncOHWK7ujQmff+QMYKRmtwETvJVolzkwRpDa0MFenEXdUz9ZYywNbq+xH9F9RDQeSwC3/4s9JeUQVyTu4fMpHw==", + "license": "MIT", + "peerDependencies": { + "react": ">=19.0.0", + "react-dom": ">=19.0.0" + } + }, + "node_modules/@rescript/runtime": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/runtime/-/runtime-12.0.2.tgz", + "integrity": 
"sha512-Jma1QEgns/WlWeVap/Mv6dVbFEKmftPOrmwQbzLQnnrQ9z6PJjorHIvDMQUigQKnvM/4RhRJ3LiG+CsNtgS9ww==" + }, + "node_modules/@rescript/win32-x64": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/win32-x64/-/win32-x64-12.0.2.tgz", + "integrity": "sha512-QcNQiok3H2Xd+cReB1kpH4hlPXnT7DA6ETsJfZugTUOKOdAYLTJ+Imynv+ojuafkUrwa2v0tZAiqHaQsDjf9IQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/react": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz", + "integrity": "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg==", + "license": "MIT", + "peer": true, + "dependencies": { + "scheduler": "^0.27.0" + }, + "peerDependencies": { + "react": "^19.2.3" + } + }, + "node_modules/rescript": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/rescript/-/rescript-12.0.2.tgz", + "integrity": "sha512-H1K9ovIxg2BVwJcXN1cZv0Q1pJEyFgNcUDKHSkmKmv5eBZMpXBASTZ2+grDt0zFFNoeFA1B9abAjSf+oTTi7hg==", + "license": "SEE LICENSE IN LICENSE", + "workspaces": [ + "packages/playground", + "packages/@rescript/*", + "tests/dependencies/**", + "tests/analysis_tests/**", + "tests/docstring_tests", + "tests/gentype_tests/**", + "tests/tools_tests", + "scripts/res" + ], + "dependencies": { + "@rescript/runtime": "12.0.2" + }, + "bin": { + "bsc": "cli/bsc.js", + "bstracing": "cli/bstracing.js", + "rescript": "cli/rescript.js", + "rescript-legacy": "cli/rescript-legacy.js", + "rescript-tools": "cli/rescript-tools.js" + }, + "engines": { + "node": ">=20.11.0" + }, + 
"optionalDependencies": { + "@rescript/darwin-arm64": "12.0.2", + "@rescript/darwin-x64": "12.0.2", + "@rescript/linux-arm64": "12.0.2", + "@rescript/linux-x64": "12.0.2", + "@rescript/win32-x64": "12.0.2" + } + }, + "node_modules/scheduler": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", + "license": "MIT", + "peer": true } } } diff --git a/analysis/examples/example-project/package.json b/analysis/examples/example-project/package.json index b948ffc3a..39e890488 100644 --- a/analysis/examples/example-project/package.json +++ b/analysis/examples/example-project/package.json @@ -1,11 +1,13 @@ { + "name": "tryit", "dependencies": { - "reason-react": "^0.9.1", - "rescript": "^9.1.2" + "@rescript/react": "^0.14.0", + "rescript": "12.1.0" }, "scripts": { "build": "rescript", "start": "rescript build -w", - "clean": "rescript clean -with-deps" + "clean": "rescript clean", + "format": "rescript format" } } diff --git a/analysis/examples/example-project/rescript.json b/analysis/examples/example-project/rescript.json new file mode 100644 index 000000000..85ac151a8 --- /dev/null +++ b/analysis/examples/example-project/rescript.json @@ -0,0 +1,20 @@ +{ + "name": "tryit", + "sources": "src", + "compiler-flags": [], + "warnings": { + "number": "-32-26-27-33" + }, + "dependencies": [ + "@rescript/react" + ], + "jsx": { + "version": 4 + }, + "namespace": "my-namespace", + "package-specs": { + "module": "esmodule", + "in-source": true, + "suffix": ".res.js" + } +} \ No newline at end of file diff --git a/analysis/examples/example-project/src/B.re b/analysis/examples/example-project/src/B.re deleted file mode 100644 index 2ad1ee830..000000000 --- a/analysis/examples/example-project/src/B.re +++ /dev/null @@ -1,9 +0,0 @@ - - -let x = 12 - - -let y = 44 - - -let z = 123 diff --git 
a/analysis/examples/example-project/src/Hello.res b/analysis/examples/example-project/src/Hello.res index db525a7e7..1a1d13434 100644 --- a/analysis/examples/example-project/src/Hello.res +++ b/analysis/examples/example-project/src/Hello.res @@ -50,7 +50,7 @@ let awesome = let thing = "thing" -let transform = (x, y) => x ++ Js.Float.toString(y) +let transform = (x, y) => x ++ Float.toString(y) let z = transform("hello ", 5.) @@ -64,8 +64,6 @@ let added = open Other -open Hashtbl - @ocaml.doc(" Some more documentation about this ") let awesome = x => x + 2 @@ -93,14 +91,12 @@ let someFunction = (memorableName, {contents}) => { let z = 10 -let z = find - let z = later let m = Other.later for _index in 0 to 10 { - print_endline("hellO") + Console.log("hellO") } module OneOneOneOne = { @@ -170,4 +166,3 @@ type lockfile = { pastVersions: Belt.HashMap.Int.t>, current: list<(shortReference, int)>, } - diff --git a/analysis/examples/example-project/src/Hello.res.js b/analysis/examples/example-project/src/Hello.res.js new file mode 100644 index 000000000..51bcd36b8 --- /dev/null +++ b/analysis/examples/example-project/src/Hello.res.js @@ -0,0 +1,171 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as More$MyNamespace from "./More.res.js"; +import * as Other$MyNamespace from "./Other.res.js"; +import * as JsxRuntime from "react/jsx-runtime"; + +let x = { + a: 3 +}; + +let l = (More$MyNamespace.inner + More$MyNamespace.n | 0) + Other$MyNamespace.inner | 0; + +let me = { + TAG: "People", + _0: "Hie" +}; + +let x$1 = Other$MyNamespace.something + 10 | 0; + +let Something_m = { + name: "Me", + age: 0 +}; + +let Something_animal = { + TAG: "Things", + _0: 10 +}; + +let Something_other = { + TAG: "Things", + _0: 2 +}; + +let Something = { + m: Something_m, + animal: Something_animal, + other: Something_other, + me: me, + x: x$1, + r: "Me", + awesome: 20 +}; + +let aThing = 10 + Other$MyNamespace.something | 0; + +function transform(x, y) { + return x + y.toString(); 
+} + +transform("hello ", 5); + +let added = 110; + +let a_1 = { + hd: "my finefolks", + tl: { + hd: "in boonville", + tl: /* [] */0 + } +}; + +let a = { + hd: "hello", + tl: a_1 +}; + +function div(x, y, children, param) { + return 10; +} + +JsxRuntime.jsx("div", { + x: "10", + y: "20" +}); + +function something(animal) { + +} + +function someFunction(memorableName, param) { + return memorableName + 20 | 0; +} + +for (let _index = 0; _index <= 10; ++_index) { + console.log("hellO"); +} + +let TwoTwoTwoTwo = { + xxxxxxxxxx: 10 +}; + +let OneOneOneOne = { + TwoTwoTwoTwo: TwoTwoTwoTwo +}; + +let y = 15; + +let z = { + contents: 30 +}; + +function thing() { + return 77; +} + +let contents = z.contents; + +let someLongName = 10; + +let otherLongName = "string"; + +let zzz = 1; + +let more = 20; + +let m = Other$MyNamespace.later; + +let r = 10; + +let xxxxxxxxxx = 10; + +let contnets = More$MyNamespace.contnets; + +let inner = More$MyNamespace.inner; + +let n = More$MyNamespace.n; + +let party = { + one: "one", + two: 2 +}; + +let one = "one"; + +let two = 2; + +let awesome = "hello"; + +export { + someLongName, + otherLongName, + x, + l, + Something, + aThing, + transform, + zzz, + more, + added, + a, + div, + something, + someFunction, + m, + OneOneOneOne, + r, + xxxxxxxxxx, + contnets, + inner, + n, + y, + z, + party, + one, + two, + thing, + contents, + awesome, +} +/* Not a pure module */ diff --git a/analysis/examples/example-project/src/Json.res b/analysis/examples/example-project/src/Json.res index 7cbfbbe03..251e249fa 100644 --- a/analysis/examples/example-project/src/Json.res +++ b/analysis/examples/example-project/src/Json.res @@ -32,6 +32,8 @@ * @doc Infix ") +external parseFloat: string => float = "parseFloat" + type rec t = | String(string) | Number(float) @@ -42,9 +44,9 @@ type rec t = | Null let string_of_number = f => { - let s = Js.Float.toString(f) - if String.get(s, String.length(s) - 1) == '.' 
{ - String.sub(s, 0, String.length(s) - 1) + let s = Float.toString(f) + if String.get(s, String.length(s) - 1) == Some(".") { + String.slice(s, ~start=0, ~end=String.length(s) - 1) } else { s } @@ -135,23 +137,24 @@ module Infix = { let escape = text => { let ln = String.length(text) - let buf = Buffer.create(ln) - let rec loop = i => + let rec loop = (i, acc) => if i < ln { - switch String.get(text, i) { - | '\012' => Buffer.add_string(buf, "\\f") - | '\\' => Buffer.add_string(buf, "\\\\") - | '"' => Buffer.add_string(buf, "\\\"") - | '\n' => Buffer.add_string(buf, "\\n") - | '\b' => Buffer.add_string(buf, "\\b") - | '\r' => Buffer.add_string(buf, "\\r") - | '\t' => Buffer.add_string(buf, "\\t") - | c => Buffer.add_char(buf, c) + let next = switch String.get(text, i) { + | Some("\x0c") => acc ++ "\\f" + | Some("\\") => acc ++ "\\\\" + | Some("\"") => acc ++ "\\\"" + | Some("\n") => acc ++ "\\n" + | Some("\b") => acc ++ "\\b" + | Some("\r") => acc ++ "\\r" + | Some("\t") => acc ++ "\\t" + | Some(c) => acc ++ c + | None => acc } - loop(i + 1) + loop(i + 1, next) + } else { + acc } - loop(0) - Buffer.contents(buf) + loop(0, "") } @ocaml.doc(" ``` @@ -165,54 +168,76 @@ let rec stringify = t => switch t { | String(value) => "\"" ++ (escape(value) ++ "\"") | Number(num) => string_of_number(num) - | Array(items) => "[" ++ (String.concat(", ", List.map(items, stringify)) ++ "]") - | Object(items) => - "{" ++ - (String.concat( - ", ", - List.map(items, ((k, v)) => "\"" ++ (String.escaped(k) ++ ("\": " ++ stringify(v)))), - ) ++ - "}") + | Array(items) => { + let rec join = (items, sep) => + switch items { + | list{} => "" + | list{x} => x + | list{x, ...rest} => x ++ sep ++ join(rest, sep) + } + let parts = List.map(items, stringify) + "[" ++ join(parts, ", ") ++ "]" + } + | Object(items) => { + let rec join = (items, sep) => + switch items { + | list{} => "" + | list{x} => x + | list{x, ...rest} => x ++ sep ++ join(rest, sep) + } + let parts = List.map(items, ((k, v)) => 
"\"" ++ (escape(k) ++ ("\": " ++ stringify(v)))) + "{" ++ join(parts, ", ") ++ "}" + } | True => "true" | False => "false" | Null => "null" } let white = n => { - let buffer = Buffer.create(n) - for _ in 0 to n - 1 { - Buffer.add_char(buffer, ' ') - } - Buffer.contents(buffer) + let rec loop = (i, acc) => + if i < n { + loop(i + 1, acc ++ " ") + } else { + acc + } + loop(0, "") } -let rec stringifyPretty = (~indent=0, t) => +let rec stringifyPretty = (~indent=0, t) => { + let rec join = (items, sep) => + switch items { + | list{} => "" + | list{x} => x + | list{x, ...rest} => x ++ sep ++ join(rest, sep) + } switch t { | String(value) => "\"" ++ (escape(value) ++ "\"") | Number(num) => string_of_number(num) | Array(list{}) => "[]" - | Array(items) => - "[\n" ++ - (white(indent) ++ - (String.concat(",\n" ++ white(indent), List.map(items, stringifyPretty(~indent=indent + 2))) ++ - ("\n" ++ - (white(indent) ++ "]")))) + | Array(items) => { + let parts = List.map(items, item => stringifyPretty(~indent=indent + 2, item)) + "[\n" ++ + white(indent + 2) ++ + join(parts, ",\n" ++ white(indent + 2)) ++ + "\n" ++ + white(indent) ++ "]" + } | Object(list{}) => "{}" - | Object(items) => - "{\n" ++ - (white(indent) ++ - (String.concat( - ",\n" ++ white(indent), - List.map(items, ((k, v)) => - "\"" ++ (String.escaped(k) ++ ("\": " ++ stringifyPretty(~indent=indent + 2, v))) - ), - ) ++ - ("\n" ++ - (white(indent) ++ "}")))) + | Object(items) => { + let parts = List.map(items, ((k, v)) => + "\"" ++ (escape(k) ++ ("\": " ++ stringifyPretty(~indent=indent + 2, v))) + ) + "{\n" ++ + white(indent + 2) ++ + join(parts, ",\n" ++ white(indent + 2)) ++ + "\n" ++ + white(indent) ++ "}" + } | True => "true" | False => "false" | Null => "null" } +} let unwrap = (message, t) => switch t { @@ -229,12 +254,12 @@ module Parser = { if last_pos == 0 && !keep_empty { acc } else { - list{String.sub(str, 0, last_pos), ...acc} + list{String.slice(str, ~start=0, ~end=last_pos), ...acc} } } else if 
is_delim(String.get(str, pos)) { let new_len = last_pos - pos - 1 if new_len != 0 || keep_empty { - let v = String.sub(str, pos + 1, new_len) + let v = String.slice(str, ~start=pos + 1, ~end=pos + 1 + new_len) loop(list{v, ...acc}, pos, pos - 1) } else { loop(acc, pos, pos - 1) @@ -245,19 +270,27 @@ module Parser = { loop(list{}, len, len - 1) } let fail = (text, pos, message) => { - let pre = String.sub(text, 0, pos) - let lines = split_by(c => c == '\n', pre) + let pre = String.slice(text, ~start=0, ~end=pos) + let lines = split_by(c => c == Some("\n"), pre) let count = List.length(lines) - let last = count > 0 ? List.getExn(lines, count - 1) : "" + let last = count > 0 ? List.getOrThrow(lines, count - 1) : "" let col = String.length(last) + 1 let line = List.length(lines) - let string = Printf.sprintf("Error \"%s\" at %d:%d -> %s\n", message, line, col, last) + let string = + "Error \"" ++ + message ++ + "\" at " ++ + Int.toString(line) ++ + ":" ++ + Int.toString(col) ++ + " -> " ++ + last ++ "\n" failwith(string) } let rec skipToNewline = (text, pos) => if pos >= String.length(text) { pos - } else if String.get(text, pos) == '\n' { + } else if String.get(text, pos) == Some("\n") { pos + 1 } else { skipToNewline(text, pos + 1) @@ -265,7 +298,7 @@ module Parser = { let stringTail = text => { let len = String.length(text) if len > 1 { - String.sub(text, 1, len - 1) + String.slice(text, ~start=1, ~end=len) } else { "" } @@ -273,7 +306,7 @@ module Parser = { let rec skipToCloseMultilineComment = (text, pos) => if pos + 1 >= String.length(text) { failwith("Unterminated comment") - } else if String.get(text, pos) == '*' && String.get(text, pos + 1) == '/' { + } else if String.get(text, pos) == Some("*") && String.get(text, pos + 1) == Some("/") { pos + 2 } else { skipToCloseMultilineComment(text, pos + 1) @@ -281,43 +314,36 @@ module Parser = { let rec skipWhite = (text, pos) => if ( pos < String.length(text) && - (String.get(text, pos) == ' ' || - (String.get(text, 
pos) == '\t' || - (String.get(text, pos) == '\n' || String.get(text, pos) == '\r'))) + (String.get(text, pos) == Some(" ") || + (String.get(text, pos) == Some("\t") || + (String.get(text, pos) == Some("\n") || String.get(text, pos) == Some("\r")))) ) { skipWhite(text, pos + 1) } else { pos } let parseString = (text, pos) => { - /* let i = ref(pos); */ - let buffer = Buffer.create(String.length(text)) let ln = String.length(text) - let rec loop = i => + let rec loop = (i, acc) => i >= ln ? fail(text, i, "Unterminated string") : switch String.get(text, i) { - | '"' => i + 1 - | '\\' => + | Some("\"") => (i + 1, acc) + | Some("\\") => i + 1 >= ln ? fail(text, i, "Unterminated string") : switch String.get(text, i + 1) { - | '/' => - Buffer.add_char(buffer, '/') - loop(i + 2) - | 'f' => - Buffer.add_char(buffer, '\012') - loop(i + 2) + | Some("/") => loop(i + 2, acc ++ "/") + | Some("f") => loop(i + 2, acc ++ "\x0c") | _ => - Buffer.add_string(buffer, Scanf.unescaped(String.sub(text, i, 2))) - loop(i + 2) + let escaped = String.slice(text, ~start=i, ~end=i + 2) + loop(i + 2, acc ++ escaped) } - | c => - Buffer.add_char(buffer, c) - loop(i + 1) + | Some(c) => loop(i + 1, acc ++ c) + | None => (i, acc) } - let final = loop(pos) - (Buffer.contents(buffer), final) + let (final, result) = loop(pos, "") + (result, final) } let parseDigits = (text, pos) => { let len = String.length(text) @@ -326,7 +352,17 @@ module Parser = { i } else { switch String.get(text, i) { - | '0' .. '9' => loop(i + 1) + | Some("0") + | Some("1") + | Some("2") + | Some("3") + | Some("4") + | Some("5") + | Some("6") + | Some("7") + | Some("8") + | Some("9") => + loop(i + 1) | _ => i } } @@ -334,7 +370,7 @@ module Parser = { } let parseWithDecimal = (text, pos) => { let pos = parseDigits(text, pos) - if pos < String.length(text) && String.get(text, pos) == '.' 
{ + if pos < String.length(text) && String.get(text, pos) == Some(".") { let pos = parseDigits(text, pos + 1) pos } else { @@ -344,10 +380,10 @@ module Parser = { let parseNumber = (text, pos) => { let pos = parseWithDecimal(text, pos) let ln = String.length(text) - if pos < ln - 1 && (String.get(text, pos) == 'E' || String.get(text, pos) == 'e') { + if pos < ln - 1 && (String.get(text, pos) == Some("E") || String.get(text, pos) == Some("e")) { let pos = switch String.get(text, pos + 1) { - | '-' - | '+' => + | Some("-") + | Some("+") => pos + 2 | _ => pos + 1 } @@ -357,22 +393,23 @@ module Parser = { } } let parseNegativeNumber = (text, pos) => { - let final = if String.get(text, pos) == '-' { + let final = if String.get(text, pos) == Some("-") { parseNumber(text, pos + 1) } else { parseNumber(text, pos) } - (Number(float_of_string(String.sub(text, pos, final - pos))), final) + let numStr = String.slice(text, ~start=pos, ~end=final) + (Number(parseFloat(numStr)), final) } let expect = (char, text, pos, message) => - if String.get(text, pos) != char { + if String.get(text, pos) != Some(char) { fail(text, pos, "Expected: " ++ message) } else { pos + 1 } let parseComment: 'a. 
(string, int, (string, int) => 'a) => 'a = (text, pos, next) => - if String.get(text, pos) != '/' { - if String.get(text, pos) == '*' { + if String.get(text, pos) != Some("/") { + if String.get(text, pos) == Some("*") { next(text, skipToCloseMultilineComment(text, pos + 1)) } else { failwith("Invalid syntax") @@ -381,10 +418,10 @@ module Parser = { next(text, skipToNewline(text, pos + 1)) } let maybeSkipComment = (text, pos) => - if pos < String.length(text) && String.get(text, pos) == '/' { - if pos + 1 < String.length(text) && String.get(text, pos + 1) == '/' { + if pos < String.length(text) && String.get(text, pos) == Some("/") { + if pos + 1 < String.length(text) && String.get(text, pos + 1) == Some("/") { skipToNewline(text, pos + 1) - } else if pos + 1 < String.length(text) && String.get(text, pos + 1) == '*' { + } else if pos + 1 < String.length(text) && String.get(text, pos + 1) == Some("*") { skipToCloseMultilineComment(text, pos + 1) } else { fail(text, pos, "Invalid synatx") @@ -396,7 +433,7 @@ module Parser = { if pos == String.length(text) { pos } else { - let n = skipWhite(text, pos) |> maybeSkipComment(text) + let n = maybeSkipComment(text, skipWhite(text, pos)) if n > pos { skip(text, n) } else { @@ -408,37 +445,46 @@ module Parser = { fail(text, pos, "Reached end of file without being done parsing") } else { switch String.get(text, pos) { - | '/' => parseComment(text, pos + 1, parse) - | '[' => parseArray(text, pos + 1) - | '{' => parseObject(text, pos + 1) - | 'n' => - if String.sub(text, pos, 4) == "null" { + | Some("/") => parseComment(text, pos + 1, parse) + | Some("[") => parseArray(text, pos + 1) + | Some("{") => parseObject(text, pos + 1) + | Some("n") => + if String.slice(text, ~start=pos, ~end=pos + 4) == "null" { (Null, pos + 4) } else { fail(text, pos, "unexpected character") } - | 't' => - if String.sub(text, pos, 4) == "true" { + | Some("t") => + if String.slice(text, ~start=pos, ~end=pos + 4) == "true" { (True, pos + 4) } else { 
fail(text, pos, "unexpected character") } - | 'f' => - if String.sub(text, pos, 5) == "false" { + | Some("f") => + if String.slice(text, ~start=pos, ~end=pos + 5) == "false" { (False, pos + 5) } else { fail(text, pos, "unexpected character") } - | '\n' - | '\t' - | ' ' - | '\r' => + | Some("\n") + | Some("\t") + | Some(" ") + | Some("\r") => parse(text, skipWhite(text, pos)) - | '"' => + | Some("\"") => let (s, pos) = parseString(text, pos + 1) (String(s), pos) - | '-' - | '0' .. '9' => + | Some("-") + | Some("0") + | Some("1") + | Some("2") + | Some("3") + | Some("4") + | Some("5") + | Some("6") + | Some("7") + | Some("8") + | Some("9") => parseNegativeNumber(text, pos) | _ => fail(text, pos, "unexpected character") } @@ -448,22 +494,22 @@ module Parser = { let (value, pos) = parse(text, pos) let pos = skip(text, pos) switch String.get(text, pos) { - | ',' => + | Some(",") => let pos = skip(text, pos + 1) - if String.get(text, pos) == ']' { + if String.get(text, pos) == Some("]") { (list{value}, pos + 1) } else { let (rest, pos) = parseArrayValue(text, pos) (list{value, ...rest}, pos) } - | ']' => (list{value}, pos + 1) + | Some("]") => (list{value}, pos + 1) | _ => fail(text, pos, "unexpected character") } } and parseArray = (text, pos) => { let pos = skip(text, pos) switch String.get(text, pos) { - | ']' => (Array(list{}), pos + 1) + | Some("]") => (Array(list{}), pos + 1) | _ => let (items, pos) = parseArrayValue(text, pos) (Array(items), pos) @@ -471,24 +517,24 @@ module Parser = { } and parseObjectValue = (text, pos) => { let pos = skip(text, pos) - if String.get(text, pos) != '"' { + if String.get(text, pos) != Some("\"") { fail(text, pos, "Expected string") } else { let (key, pos) = parseString(text, pos + 1) let pos = skip(text, pos) - let pos = expect(':', text, pos, "Colon") + let pos = expect(":", text, pos, "Colon") let (value, pos) = parse(text, pos) let pos = skip(text, pos) switch String.get(text, pos) { - | ',' => + | Some(",") => let pos = 
skip(text, pos + 1) - if String.get(text, pos) == '}' { + if String.get(text, pos) == Some("}") { (list{(key, value)}, pos + 1) } else { let (rest, pos) = parseObjectValue(text, pos) (list{(key, value), ...rest}, pos) } - | '}' => (list{(key, value)}, pos + 1) + | Some("}") => (list{(key, value)}, pos + 1) | _ => let (rest, pos) = parseObjectValue(text, pos) (list{(key, value), ...rest}, pos) @@ -497,7 +543,7 @@ module Parser = { } and parseObject = (text, pos) => { let pos = skip(text, pos) - if String.get(text, pos) == '}' { + if String.get(text, pos) == Some("}") { (Object(list{}), pos + 1) } else { let (pairs, pos) = parseObjectValue(text, pos) @@ -512,7 +558,8 @@ let parse = text => { let pos = Parser.skip(text, pos) if pos < String.length(text) { failwith( - "Extra data after parse finished: " ++ String.sub(text, pos, String.length(text) - pos), + "Extra data after parse finished: " ++ + String.slice(text, ~start=pos, ~end=String.length(text)), ) } else { item @@ -529,7 +576,14 @@ let bind = (v, fn) => @ocaml.doc(" If `t` is an object, get the value associated with the given string key ") let get = (key, t) => switch t { - | Object(items) => List.getAssoc(items, key, \"=") + | Object(items) => { + let rec find = items => + switch items { + | list{} => None + | list{(k, v), ...rest} => k == key ? 
Some(v) : find(rest) + } + find(items) + } | _ => None } @@ -538,7 +592,7 @@ let nth = (n, t) => switch t { | Array(items) => if n < List.length(items) { - Some(List.getExn(items, n)) + Some(List.getOrThrow(items, n)) } else { None } @@ -601,7 +655,6 @@ let rec parsePath = (keyList, t) => * ``` ") let getPath = (path, t) => { - let keys = Parser.split_by(c => c == '.', path) + let keys = Parser.split_by(c => c == Some("."), path) parsePath(keys, t) } - diff --git a/analysis/examples/example-project/src/Json.res.js b/analysis/examples/example-project/src/Json.res.js new file mode 100644 index 000000000..853074caa --- /dev/null +++ b/analysis/examples/example-project/src/Json.res.js @@ -0,0 +1,908 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as Pervasives from "@rescript/runtime/lib/es6/Pervasives.js"; +import * as Stdlib_List from "@rescript/runtime/lib/es6/Stdlib_List.js"; +import * as Primitive_object from "@rescript/runtime/lib/es6/Primitive_object.js"; +import * as Primitive_option from "@rescript/runtime/lib/es6/Primitive_option.js"; + +function string_of_number(f) { + let s = f.toString(); + if (Primitive_object.equal(s[s.length - 1 | 0], ".")) { + return s.slice(0, s.length - 1 | 0); + } else { + return s; + } +} + +function $pipe$bang(o, d) { + if (o !== undefined) { + return Primitive_option.valFromOption(o); + } else { + return Pervasives.failwith(d); + } +} + +function $pipe$question(o, d) { + if (o !== undefined) { + return Primitive_option.valFromOption(o); + } else { + return d; + } +} + +function $pipe$question$great(o, fn) { + if (o !== undefined) { + return fn(Primitive_option.valFromOption(o)); + } +} + +function $pipe$question$great$great(o, fn) { + if (o !== undefined) { + return Primitive_option.some(fn(Primitive_option.valFromOption(o))); + } +} + +function fold(o, d, f) { + if (o !== undefined) { + return f(Primitive_option.valFromOption(o)); + } else { + return d; + } +} + +let Infix = { + $pipe$bang: $pipe$bang, + 
$pipe$question: $pipe$question, + $pipe$question$great: $pipe$question$great, + $pipe$question$great$great: $pipe$question$great$great, + fold: fold +}; + +function escape(text) { + let ln = text.length; + let _i = 0; + let _acc = ""; + while (true) { + let acc = _acc; + let i = _i; + if (i >= ln) { + return acc; + } + let c = text[i]; + let next; + if (c !== undefined) { + switch (c) { + case "\"" : + next = acc + "\\\""; + break; + case "\\" : + next = acc + "\\\\"; + break; + case "\b" : + next = acc + "\\b"; + break; + case "\n" : + next = acc + "\\n"; + break; + case "\r" : + next = acc + "\\r"; + break; + case "\t" : + next = acc + "\\t"; + break; + case "\x0c" : + next = acc + "\\f"; + break; + default: + next = acc + c; + } + } else { + next = acc; + } + _acc = next; + _i = i + 1 | 0; + continue; + }; +} + +function stringify(t) { + if (typeof t !== "object") { + switch (t) { + case "True" : + return "true"; + case "False" : + return "false"; + case "Null" : + return "null"; + } + } else { + switch (t.TAG) { + case "String" : + return "\"" + (escape(t._0) + "\""); + case "Number" : + return string_of_number(t._0); + case "Array" : + let join = (items, sep) => { + if (items === 0) { + return ""; + } + let rest = items.tl; + let x = items.hd; + if (rest !== 0) { + return x + sep + join(rest, sep); + } else { + return x; + } + }; + let parts = Stdlib_List.map(t._0, stringify); + return "[" + join(parts, ", ") + "]"; + case "Object" : + let join$1 = (items, sep) => { + if (items === 0) { + return ""; + } + let rest = items.tl; + let x = items.hd; + if (rest !== 0) { + return x + sep + join$1(rest, sep); + } else { + return x; + } + }; + let parts$1 = Stdlib_List.map(t._0, param => "\"" + (escape(param[0]) + ("\": " + stringify(param[1])))); + return "{" + join$1(parts$1, ", ") + "}"; + } + } +} + +function white(n) { + let _i = 0; + let _acc = ""; + while (true) { + let acc = _acc; + let i = _i; + if (i >= n) { + return acc; + } + _acc = acc + " "; + _i = i + 1 
| 0; + continue; + }; +} + +function stringifyPretty(indentOpt, t) { + let indent = indentOpt !== undefined ? indentOpt : 0; + let join = (items, sep) => { + if (items === 0) { + return ""; + } + let rest = items.tl; + let x = items.hd; + if (rest !== 0) { + return x + sep + join(rest, sep); + } else { + return x; + } + }; + if (typeof t !== "object") { + switch (t) { + case "True" : + return "true"; + case "False" : + return "false"; + case "Null" : + return "null"; + } + } else { + switch (t.TAG) { + case "String" : + return "\"" + (escape(t._0) + "\""); + case "Number" : + return string_of_number(t._0); + case "Array" : + let items = t._0; + if (items === 0) { + return "[]"; + } + let parts = Stdlib_List.map(items, item => stringifyPretty(indent + 2 | 0, item)); + return "[\n" + white(indent + 2 | 0) + join(parts, ",\n" + white(indent + 2 | 0)) + "\n" + white(indent) + "]"; + case "Object" : + let items$1 = t._0; + if (items$1 === 0) { + return "{}"; + } + let parts$1 = Stdlib_List.map(items$1, param => "\"" + (escape(param[0]) + ("\": " + stringifyPretty(indent + 2 | 0, param[1])))); + return "{\n" + white(indent + 2 | 0) + join(parts$1, ",\n" + white(indent + 2 | 0)) + "\n" + white(indent) + "}"; + } + } +} + +function unwrap(message, t) { + if (t !== undefined) { + return Primitive_option.valFromOption(t); + } else { + return Pervasives.failwith(message); + } +} + +function split_by(keep_emptyOpt, is_delim, str) { + let keep_empty = keep_emptyOpt !== undefined ? 
keep_emptyOpt : false; + let len = str.length; + let _acc = /* [] */0; + let _last_pos = len; + let _pos = len - 1 | 0; + while (true) { + let pos = _pos; + let last_pos = _last_pos; + let acc = _acc; + if (pos === -1) { + if (last_pos === 0 && !keep_empty) { + return acc; + } else { + return { + hd: str.slice(0, last_pos), + tl: acc + }; + } + } + if (is_delim(str[pos])) { + let new_len = (last_pos - pos | 0) - 1 | 0; + if (new_len !== 0 || keep_empty) { + let v = str.slice(pos + 1 | 0, (pos + 1 | 0) + new_len | 0); + _pos = pos - 1 | 0; + _last_pos = pos; + _acc = { + hd: v, + tl: acc + }; + continue; + } + _pos = pos - 1 | 0; + _last_pos = pos; + continue; + } + _pos = pos - 1 | 0; + continue; + }; +} + +function fail(text, pos, message) { + let pre = text.slice(0, pos); + let lines = split_by(undefined, c => Primitive_object.equal(c, "\n"), pre); + let count = Stdlib_List.length(lines); + let last = count > 0 ? Stdlib_List.getOrThrow(lines, count - 1 | 0) : ""; + let col = last.length + 1 | 0; + let line = Stdlib_List.length(lines); + return Pervasives.failwith("Error \"" + message + "\" at " + line.toString() + ":" + col.toString() + " -> " + last + "\n"); +} + +function skipToNewline(text, _pos) { + while (true) { + let pos = _pos; + if (pos >= text.length) { + return pos; + } + if (Primitive_object.equal(text[pos], "\n")) { + return pos + 1 | 0; + } + _pos = pos + 1 | 0; + continue; + }; +} + +function stringTail(text) { + let len = text.length; + if (len > 1) { + return text.slice(1, len); + } else { + return ""; + } +} + +function skipToCloseMultilineComment(text, _pos) { + while (true) { + let pos = _pos; + if ((pos + 1 | 0) >= text.length) { + return Pervasives.failwith("Unterminated comment"); + } + if (Primitive_object.equal(text[pos], "*") && Primitive_object.equal(text[pos + 1 | 0], "/")) { + return pos + 2 | 0; + } + _pos = pos + 1 | 0; + continue; + }; +} + +function skipWhite(text, _pos) { + while (true) { + let pos = _pos; + if (!(pos < 
text.length && (Primitive_object.equal(text[pos], " ") || Primitive_object.equal(text[pos], "\t") || Primitive_object.equal(text[pos], "\n") || Primitive_object.equal(text[pos], "\r")))) { + return pos; + } + _pos = pos + 1 | 0; + continue; + }; +} + +function parseString(text, pos) { + let ln = text.length; + let loop = (_i, _acc) => { + while (true) { + let acc = _acc; + let i = _i; + if (i >= ln) { + return fail(text, i, "Unterminated string"); + } + let c = text[i]; + if (c === undefined) { + return [ + i, + acc + ]; + } + switch (c) { + case "\"" : + return [ + i + 1 | 0, + acc + ]; + case "\\" : + if ((i + 1 | 0) >= ln) { + return fail(text, i, "Unterminated string"); + } + let match = text[i + 1 | 0]; + if (match !== undefined) { + switch (match) { + case "/" : + _acc = acc + "/"; + _i = i + 2 | 0; + continue; + case "f" : + _acc = acc + "\x0c"; + _i = i + 2 | 0; + continue; + } + } + let escaped = text.slice(i, i + 2 | 0); + _acc = acc + escaped; + _i = i + 2 | 0; + continue; + break; + default: + _acc = acc + c; + _i = i + 1 | 0; + continue; + } + }; + }; + let match = loop(pos, ""); + return [ + match[1], + match[0] + ]; +} + +function parseDigits(text, pos) { + let len = text.length; + let _i = pos + 1 | 0; + while (true) { + let i = _i; + if (i >= len) { + return i; + } + let match = text[i]; + if (match === undefined) { + return i; + } + switch (match) { + case "0" : + case "1" : + case "2" : + case "3" : + case "4" : + case "5" : + case "6" : + case "7" : + case "8" : + case "9" : + _i = i + 1 | 0; + continue; + default: + return i; + } + }; +} + +function parseWithDecimal(text, pos) { + let pos$1 = parseDigits(text, pos); + if (pos$1 < text.length && Primitive_object.equal(text[pos$1], ".")) { + return parseDigits(text, pos$1 + 1 | 0); + } else { + return pos$1; + } +} + +function parseNumber(text, pos) { + let pos$1 = parseWithDecimal(text, pos); + let ln = text.length; + if (!(pos$1 < (ln - 1 | 0) && (Primitive_object.equal(text[pos$1], "E") || 
Primitive_object.equal(text[pos$1], "e")))) { + return pos$1; + } + let match = text[pos$1 + 1 | 0]; + let pos$2; + if (match !== undefined) { + switch (match) { + case "+" : + case "-" : + pos$2 = pos$1 + 2 | 0; + break; + default: + pos$2 = pos$1 + 1 | 0; + } + } else { + pos$2 = pos$1 + 1 | 0; + } + return parseDigits(text, pos$2); +} + +function parseNegativeNumber(text, pos) { + let final = Primitive_object.equal(text[pos], "-") ? parseNumber(text, pos + 1 | 0) : parseNumber(text, pos); + let numStr = text.slice(pos, final); + return [ + { + TAG: "Number", + _0: parseFloat(numStr) + }, + final + ]; +} + +function expect(char, text, pos, message) { + if (Primitive_object.notequal(text[pos], char)) { + return fail(text, pos, "Expected: " + message); + } else { + return pos + 1 | 0; + } +} + +function parseComment(text, pos, next) { + if (Primitive_object.notequal(text[pos], "/")) { + if (Primitive_object.equal(text[pos], "*")) { + return next(text, skipToCloseMultilineComment(text, pos + 1 | 0)); + } else { + return Pervasives.failwith("Invalid syntax"); + } + } else { + return next(text, skipToNewline(text, pos + 1 | 0)); + } +} + +function maybeSkipComment(text, pos) { + if (pos < text.length && Primitive_object.equal(text[pos], "/")) { + if ((pos + 1 | 0) < text.length && Primitive_object.equal(text[pos + 1 | 0], "/")) { + return skipToNewline(text, pos + 1 | 0); + } else if ((pos + 1 | 0) < text.length && Primitive_object.equal(text[pos + 1 | 0], "*")) { + return skipToCloseMultilineComment(text, pos + 1 | 0); + } else { + return fail(text, pos, "Invalid synatx"); + } + } else { + return pos; + } +} + +function skip(text, _pos) { + while (true) { + let pos = _pos; + if (pos === text.length) { + return pos; + } + let n = maybeSkipComment(text, skipWhite(text, pos)); + if (n <= pos) { + return n; + } + _pos = n; + continue; + }; +} + +function parse(text, _pos) { + while (true) { + let pos = _pos; + if (pos >= text.length) { + return fail(text, pos, "Reached 
end of file without being done parsing"); + } + let match = text[pos]; + if (match === undefined) { + return fail(text, pos, "unexpected character"); + } + switch (match) { + case "/" : + return parseComment(text, pos + 1 | 0, parse); + case "-" : + case "0" : + case "1" : + case "2" : + case "3" : + case "4" : + case "5" : + case "6" : + case "7" : + case "8" : + case "9" : + return parseNegativeNumber(text, pos); + case "[" : + return parseArray(text, pos + 1 | 0); + case "\"" : + let match$1 = parseString(text, pos + 1 | 0); + return [ + { + TAG: "String", + _0: match$1[0] + }, + match$1[1] + ]; + case " " : + case "\n" : + case "\r" : + case "\t" : + break; + case "f" : + if (text.slice(pos, pos + 5 | 0) === "false") { + return [ + "False", + pos + 5 | 0 + ]; + } else { + return fail(text, pos, "unexpected character"); + } + case "n" : + if (text.slice(pos, pos + 4 | 0) === "null") { + return [ + "Null", + pos + 4 | 0 + ]; + } else { + return fail(text, pos, "unexpected character"); + } + case "t" : + if (text.slice(pos, pos + 4 | 0) === "true") { + return [ + "True", + pos + 4 | 0 + ]; + } else { + return fail(text, pos, "unexpected character"); + } + case "{" : + return parseObject(text, pos + 1 | 0); + default: + return fail(text, pos, "unexpected character"); + } + _pos = skipWhite(text, pos); + continue; + }; +} + +function parseArrayValue(text, pos) { + let pos$1 = skip(text, pos); + let match = parse(text, pos$1); + let value = match[0]; + let pos$2 = skip(text, match[1]); + let match$1 = text[pos$2]; + if (match$1 === undefined) { + return fail(text, pos$2, "unexpected character"); + } + switch (match$1) { + case "," : + let pos$3 = skip(text, pos$2 + 1 | 0); + if (Primitive_object.equal(text[pos$3], "]")) { + return [ + { + hd: value, + tl: /* [] */0 + }, + pos$3 + 1 | 0 + ]; + } + let match$2 = parseArrayValue(text, pos$3); + return [ + { + hd: value, + tl: match$2[0] + }, + match$2[1] + ]; + case "]" : + return [ + { + hd: value, + tl: /* [] */0 + }, 
+ pos$2 + 1 | 0 + ]; + default: + return fail(text, pos$2, "unexpected character"); + } +} + +function parseArray(text, pos) { + let pos$1 = skip(text, pos); + let match = text[pos$1]; + if (match === "]") { + return [ + { + TAG: "Array", + _0: /* [] */0 + }, + pos$1 + 1 | 0 + ]; + } + let match$1 = parseArrayValue(text, pos$1); + return [ + { + TAG: "Array", + _0: match$1[0] + }, + match$1[1] + ]; +} + +function parseObjectValue(text, pos) { + let pos$1 = skip(text, pos); + if (Primitive_object.notequal(text[pos$1], "\"")) { + return fail(text, pos$1, "Expected string"); + } + let match = parseString(text, pos$1 + 1 | 0); + let key = match[0]; + let pos$2 = skip(text, match[1]); + let pos$3 = expect(":", text, pos$2, "Colon"); + let match$1 = parse(text, pos$3); + let value = match$1[0]; + let pos$4 = skip(text, match$1[1]); + let match$2 = text[pos$4]; + if (match$2 !== undefined) { + switch (match$2) { + case "," : + let pos$5 = skip(text, pos$4 + 1 | 0); + if (Primitive_object.equal(text[pos$5], "}")) { + return [ + { + hd: [ + key, + value + ], + tl: /* [] */0 + }, + pos$5 + 1 | 0 + ]; + } + let match$3 = parseObjectValue(text, pos$5); + return [ + { + hd: [ + key, + value + ], + tl: match$3[0] + }, + match$3[1] + ]; + case "}" : + return [ + { + hd: [ + key, + value + ], + tl: /* [] */0 + }, + pos$4 + 1 | 0 + ]; + } + } + let match$4 = parseObjectValue(text, pos$4); + return [ + { + hd: [ + key, + value + ], + tl: match$4[0] + }, + match$4[1] + ]; +} + +function parseObject(text, pos) { + let pos$1 = skip(text, pos); + if (Primitive_object.equal(text[pos$1], "}")) { + return [ + { + TAG: "Object", + _0: /* [] */0 + }, + pos$1 + 1 | 0 + ]; + } + let match = parseObjectValue(text, pos$1); + return [ + { + TAG: "Object", + _0: match[0] + }, + match[1] + ]; +} + +let Parser = { + split_by: split_by, + fail: fail, + skipToNewline: skipToNewline, + stringTail: stringTail, + skipToCloseMultilineComment: skipToCloseMultilineComment, + skipWhite: skipWhite, + 
parseString: parseString, + parseDigits: parseDigits, + parseWithDecimal: parseWithDecimal, + parseNumber: parseNumber, + parseNegativeNumber: parseNegativeNumber, + expect: expect, + parseComment: parseComment, + maybeSkipComment: maybeSkipComment, + skip: skip, + parse: parse, + parseArrayValue: parseArrayValue, + parseArray: parseArray, + parseObjectValue: parseObjectValue, + parseObject: parseObject +}; + +function parse$1(text) { + let match = parse(text, 0); + let pos = skip(text, match[1]); + if (pos < text.length) { + return Pervasives.failwith("Extra data after parse finished: " + text.slice(pos, text.length)); + } else { + return match[0]; + } +} + +function bind(v, fn) { + if (v !== undefined) { + return fn(Primitive_option.valFromOption(v)); + } +} + +function get(key, t) { + if (typeof t !== "object") { + return; + } + if (t.TAG !== "Object") { + return; + } + let _items = t._0; + while (true) { + let items = _items; + if (items === 0) { + return; + } + let match = items.hd; + if (match[0] === key) { + return Primitive_option.some(match[1]); + } + _items = items.tl; + continue; + }; +} + +function nth(n, t) { + if (typeof t !== "object") { + return; + } + if (t.TAG !== "Array") { + return; + } + let items = t._0; + if (n < Stdlib_List.length(items)) { + return Stdlib_List.getOrThrow(items, n); + } +} + +function string(t) { + if (typeof t !== "object" || t.TAG !== "String") { + return; + } else { + return t._0; + } +} + +function number(t) { + if (typeof t !== "object" || t.TAG !== "Number") { + return; + } else { + return t._0; + } +} + +function array(t) { + if (typeof t !== "object" || t.TAG !== "Array") { + return; + } else { + return t._0; + } +} + +function obj(t) { + if (typeof t !== "object" || t.TAG !== "Object") { + return; + } else { + return t._0; + } +} + +function bool(t) { + if (typeof t === "object") { + return; + } + switch (t) { + case "True" : + return true; + case "False" : + return false; + default: + return; + } +} + +function 
$$null(t) { + if (typeof t !== "object" && t === "Null") { + return Primitive_option.some(undefined); + } +} + +function parsePath(_keyList, _t) { + while (true) { + let t = _t; + let keyList = _keyList; + if (keyList === 0) { + return t; + } + let value = get(keyList.hd, t); + if (value === undefined) { + return; + } + _t = value; + _keyList = keyList.tl; + continue; + }; +} + +function getPath(path, t) { + let keys = split_by(undefined, c => Primitive_object.equal(c, "."), path); + return parsePath(keys, t); +} + +export { + string_of_number, + Infix, + escape, + stringify, + white, + stringifyPretty, + unwrap, + Parser, + parse$1 as parse, + bind, + get, + nth, + string, + number, + array, + obj, + bool, + $$null, + parsePath, + getPath, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/ModuleWithDocComment.res.js b/analysis/examples/example-project/src/ModuleWithDocComment.res.js new file mode 100644 index 000000000..3caa9b550 --- /dev/null +++ b/analysis/examples/example-project/src/ModuleWithDocComment.res.js @@ -0,0 +1,19 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + + +let NestedAgain = { + y: 123 +}; + +let Nested = { + x: "123", + NestedAgain: NestedAgain +}; + +let M; + +export { + Nested, + M, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/More.res b/analysis/examples/example-project/src/More.res index a1775e21a..88875a3e0 100644 --- a/analysis/examples/example-project/src/More.res +++ b/analysis/examples/example-project/src/More.res @@ -10,4 +10,3 @@ let n = 10 let party = 30 let awesome = 200 - diff --git a/analysis/examples/example-project/src/More.res.js b/analysis/examples/example-project/src/More.res.js new file mode 100644 index 000000000..bb6a97c61 --- /dev/null +++ b/analysis/examples/example-project/src/More.res.js @@ -0,0 +1,18 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + + +let contnets = "here"; + +let inner = 20; + +let n = 10; + +let party = 30; + +export { + 
contnets, + inner, + n, + party, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/More.resi b/analysis/examples/example-project/src/More.resi index 023a7222c..3f211d74d 100644 --- a/analysis/examples/example-project/src/More.resi +++ b/analysis/examples/example-project/src/More.resi @@ -2,4 +2,3 @@ let contnets: string let inner: int let n: int let party: int - diff --git a/analysis/examples/example-project/src/Other.res b/analysis/examples/example-project/src/Other.res index d7a5bf432..306354483 100644 --- a/analysis/examples/example-project/src/Other.res +++ b/analysis/examples/example-project/src/Other.res @@ -27,4 +27,3 @@ let oo = {person: z, height: 34.2} let show = o => { let m = o.height } - diff --git a/analysis/examples/example-project/src/Other.res.js b/analysis/examples/example-project/src/Other.res.js new file mode 100644 index 000000000..a6f6873fa --- /dev/null +++ b/analysis/examples/example-project/src/Other.res.js @@ -0,0 +1,43 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + + +let z = { + name: "hi", + age: 20 +}; + +function concat(first, second) { + return first + second | 0; +} + +let oo = { + person: z, + height: 34.2 +}; + +function show(o) { + +} + +let something = 10; + +let inner = 10; + +let m = { + TAG: "Things", + _0: 1 +}; + +let later = 20; + +export { + something, + inner, + m, + z, + later, + concat, + oo, + show, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/Serde.res b/analysis/examples/example-project/src/Serde.res deleted file mode 100644 index 1c516c210..000000000 --- a/analysis/examples/example-project/src/Serde.res +++ /dev/null @@ -1,222 +0,0 @@ -let rec deserialize_Hello__TryIt____lockfile: Json.t => Belt.Result.t< - MyNamespace.Hello.lockfile, - string, -> = record => - switch record { - | Json.Object(items) => - switch Belt.List.getAssoc(items, "current", \"=") { - | None => Belt.Result.Error(@reason.raw_literal("No attribute ") "No attribute " ++ "current") - 
| Some(json) => - switch ( - list => - switch list { - | Json.Array(items) => - let transformer = json => - switch json { - | Json.Array(list{arg0, arg1}) => - switch ( - number => - switch number { - | Json.Number(number) => Belt.Result.Ok(int_of_float(number)) - | _ => Error(@reason.raw_literal("Expected a float") "Expected a float") - } - )(arg1) { - | Belt.Result.Ok(arg1) => - switch deserialize_Hello__TryIt____shortReference(arg0) { - | Belt.Result.Ok(arg0) => Belt.Result.Ok(arg0, arg1) - | Error(error) => Error(error) - } - | Error(error) => Error(error) - } - | _ => Belt.Result.Error(@reason.raw_literal("Expected array") "Expected array") - } - let rec loop = items => - switch items { - | list{} => Belt.Result.Ok(list{}) - | list{one, ...rest} => - switch transformer(one) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(value) => - switch loop(rest) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(rest) => Belt.Result.Ok(list{value, ...rest}) - } - } - } - loop(items) - | _ => Belt.Result.Error(@reason.raw_literal("expected an array") "expected an array") - } - )(json) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(attr_current) => - switch Belt.List.getAssoc(items, "pastVersions", \"=") { - | None => - Belt.Result.Error(@reason.raw_literal("No attribute ") "No attribute " ++ "pastVersions") - | Some(json) => - switch deserialize_Belt_HashMapInt____t(list => - switch list { - | Json.Array(items) => - let transformer = json => - switch json { - | Json.Array(list{arg0, arg1}) => - switch ( - number => - switch number { - | Json.Number(number) => Belt.Result.Ok(int_of_float(number)) - | _ => Error(@reason.raw_literal("Expected a float") "Expected a float") - } - )(arg1) { - | Belt.Result.Ok(arg1) => - switch deserialize_Hello__TryIt____shortReference(arg0) { - | Belt.Result.Ok(arg0) => Belt.Result.Ok(arg0, arg1) - | Error(error) => Error(error) - } - | Error(error) => 
Error(error) - } - | _ => Belt.Result.Error(@reason.raw_literal("Expected array") "Expected array") - } - let rec loop = items => - switch items { - | list{} => Belt.Result.Ok(list{}) - | list{one, ...rest} => - switch transformer(one) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(value) => - switch loop(rest) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(rest) => Belt.Result.Ok(list{value, ...rest}) - } - } - } - loop(items) - | _ => Belt.Result.Error(@reason.raw_literal("expected an array") "expected an array") - } - )(json) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(attr_pastVersions) => - switch Belt.List.getAssoc(items, "version", \"=") { - | None => - Belt.Result.Error(@reason.raw_literal("No attribute ") "No attribute " ++ "version") - | Some(json) => - switch ( - number => - switch number { - | Json.Number(number) => Belt.Result.Ok(int_of_float(number)) - | _ => Error(@reason.raw_literal("Expected a float") "Expected a float") - } - )(json) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(attr_version) => - Belt.Result.Ok({ - version: attr_version, - pastVersions: attr_pastVersions, - current: attr_current, - }) - } - } - } - } - } - } - | _ => Belt.Result.Error(@reason.raw_literal("Expected an object") "Expected an object") - } -and deserialize_Hello__TryIt____shortReference: Json.t => Belt.Result.t< - MyNamespace.Hello.shortReference, - string, -> = value => - ( - json => - switch json { - | Json.Array(list{arg0, arg1, arg2}) => - switch ( - string => - switch string { - | Json.String(string) => Belt.Result.Ok(string) - | _ => Error(@reason.raw_literal("epected a string") "epected a string") - } - )(arg2) { - | Belt.Result.Ok(arg2) => - switch ( - list => - switch list { - | Json.Array(items) => - let transformer = string => - switch string { - | Json.String(string) => Belt.Result.Ok(string) - | _ => 
Error(@reason.raw_literal("epected a string") "epected a string") - } - let rec loop = items => - switch items { - | list{} => Belt.Result.Ok(list{}) - | list{one, ...rest} => - switch transformer(one) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(value) => - switch loop(rest) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(rest) => Belt.Result.Ok(list{value, ...rest}) - } - } - } - loop(items) - | _ => Belt.Result.Error(@reason.raw_literal("expected an array") "expected an array") - } - )(arg1) { - | Belt.Result.Ok(arg1) => - switch ( - string => - switch string { - | Json.String(string) => Belt.Result.Ok(string) - | _ => Error(@reason.raw_literal("epected a string") "epected a string") - } - )(arg0) { - | Belt.Result.Ok(arg0) => Belt.Result.Ok(arg0, arg1, arg2) - | Error(error) => Error(error) - } - | Error(error) => Error(error) - } - | Error(error) => Error(error) - } - | _ => Belt.Result.Error(@reason.raw_literal("Expected array") "Expected array") - } - )(value) -and deserialize_Belt_HashMapInt____t: 'arg0. 
( - Json.t => Belt.Result.t<'arg0, string>, - Json.t, -) => Belt.Result.t, string> = bTransformer => - TransformHelpers.deserialize_Belt_HashMapInt____t(bTransformer) -let rec serialize_Hello__TryIt____lockfile: MyNamespace.Hello.lockfile => Json.t = record => Json.Object(list{ - ("version", (i => Json.Number(float_of_int(i)))(record.version)), - ( - "pastVersions", - serialize_Belt_HashMapInt____t(list => Json.Array( - Belt.List.map(list, ((arg0, arg1)) => Json.Array(list{ - serialize_Hello__TryIt____shortReference(arg0), - (i => Json.Number(float_of_int(i)))(arg1), - })), - ))(record.pastVersions), - ), - ( - "current", - ( - list => Json.Array( - Belt.List.map(list, ((arg0, arg1)) => Json.Array(list{ - serialize_Hello__TryIt____shortReference(arg0), - (i => Json.Number(float_of_int(i)))(arg1), - })), - ) - )(record.current), - ), -}) -and serialize_Hello__TryIt____shortReference: MyNamespace.Hello.shortReference => Json.t = value => - ( - ((arg0, arg1, arg2)) => Json.Array(list{ - (s => Json.String(s))(arg0), - (list => Json.Array(Belt.List.map(list, s => Json.String(s))))(arg1), - (s => Json.String(s))(arg2), - }) - )(value) -and serialize_Belt_HashMapInt____t: 'arg0. 
( - 'arg0 => Json.t, - Belt_HashMapInt.t<'arg0>, -) => Json.t = bTransformer => TransformHelpers.serialize_Belt_HashMapInt____t(bTransformer) diff --git a/analysis/examples/example-project/src/TransformHelpers.res b/analysis/examples/example-project/src/TransformHelpers.res index b0ab64685..e789c1a74 100644 --- a/analysis/examples/example-project/src/TransformHelpers.res +++ b/analysis/examples/example-project/src/TransformHelpers.res @@ -1,12 +1,11 @@ -let deserialize_Belt__HashMapInt__t = (transformer, t) => assert false +let deserialize_Belt__HashMapInt__t = (transformer, t) => assert(false) -let deserialize_Belt_HashMapInt____t = (a, b) => assert false +let deserialize_Belt_HashMapInt____t = (a, b) => assert(false) -let deserialize_Belt__HashMap__Int__t = (a, b) => assert false +let deserialize_Belt__HashMap__Int__t = (a, b) => assert(false) -let serialize_Belt_HashMapInt____t = (a, b) => assert false +let serialize_Belt_HashMapInt____t = (a, b) => assert(false) -let serialize_Belt__HashMap__Int__t = (a, b) => assert false - -let serialize_Belt_HashMapInt____t = (transformer, t) => assert false +let serialize_Belt__HashMap__Int__t = (a, b) => assert(false) +let serialize_Belt_HashMapInt____t = (transformer, t) => assert(false) diff --git a/analysis/examples/example-project/src/TransformHelpers.res.js b/analysis/examples/example-project/src/TransformHelpers.res.js new file mode 100644 index 000000000..408a62363 --- /dev/null +++ b/analysis/examples/example-project/src/TransformHelpers.res.js @@ -0,0 +1,71 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + + +function deserialize_Belt__HashMapInt__t(transformer, t) { + throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 1, + 58 + ], + Error: new Error() + }; +} + +function deserialize_Belt_HashMapInt____t(a, b) { + throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 3, + 49 + ], + Error: new Error() + }; +} + +function deserialize_Belt__HashMap__Int__t(a, b) { + 
throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 5, + 50 + ], + Error: new Error() + }; +} + +function serialize_Belt__HashMap__Int__t(a, b) { + throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 9, + 48 + ], + Error: new Error() + }; +} + +function serialize_Belt_HashMapInt____t(transformer, t) { + throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 11, + 57 + ], + Error: new Error() + }; +} + +export { + deserialize_Belt__HashMapInt__t, + deserialize_Belt_HashMapInt____t, + deserialize_Belt__HashMap__Int__t, + serialize_Belt__HashMap__Int__t, + serialize_Belt_HashMapInt____t, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/ZZ.res b/analysis/examples/example-project/src/ZZ.res index 6a3c9ddcd..9eca987ed 100644 --- a/analysis/examples/example-project/src/ZZ.res +++ b/analysis/examples/example-project/src/ZZ.res @@ -15,10 +15,14 @@ let d = module J = { @react.component - export make = (~children: React.element) => React.null + let make = (~children: React.element) => React.null } -let z = {React.string("")} {React.string("")} +let z = + + {React.string("")} + {React.string("")} + type inline = | A({x: int, y: string}) @@ -85,7 +89,7 @@ let v1 = V1 module DoubleNested = ModuleWithDocComment.Nested.NestedAgain -let uncurried = (. 
x) => x + 1 +let uncurried = x => x + 1 module Inner = { type tInner = int @@ -107,7 +111,7 @@ module HoverInsideModuleWithComponent = { } module Lib = { - let foo = (~age, ~name) => name ++ string_of_int(age) + let foo = (~age, ~name) => name ++ Int.toString(age) let next = (~number=0, ~year) => number + year } @@ -122,24 +126,23 @@ module Dep: { let customDouble2 = foo => foo * 2 } -let cc = Dep.customDouble(11) +let customDouble = foo => foo * 2 +let cc = customDouble(11) module O = { module Comp = { @react.component let make = (~first="", ~kas=11, ~foo=3, ~second, ~v) => - React.string(first ++ second ++ string_of_int(foo)) + React.string(first ++ second ++ Int.toString(foo)) } } let comp = -let lll = List.make(3, 4) +let lll = List.make(~length=3, 4) let abc = "abc" let arr = [1, 2, 3] let some7 = Some(7) - - diff --git a/analysis/examples/example-project/src/ZZ.res.js b/analysis/examples/example-project/src/ZZ.res.js new file mode 100644 index 000000000..7d7b87d7b --- /dev/null +++ b/analysis/examples/example-project/src/ZZ.res.js @@ -0,0 +1,197 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as Stdlib_List from "@rescript/runtime/lib/es6/Stdlib_List.js"; +import * as JsxRuntime from "react/jsx-runtime"; + +let b = [ + 1, + 2, + 3, + 12 +]; + +let c = JsxRuntime.jsx("div", {}); + +function s(prim) { + return prim; +} + +function ZZ$M(props) { + return props.x; +} + +let M = { + make: ZZ$M +}; + +let d = JsxRuntime.jsx(ZZ$M, { + x: "abc" +}); + +function ZZ$J(props) { + return null; +} + +let J = { + make: ZZ$J +}; + +let z = JsxRuntime.jsxs(ZZ$J, { + children: [ + "", + "" + ] +}); + +let MSig = { + x: 14 +}; + +let Impl = { + x: 14 +}; + +let Impl2 = { + x: 14 +}; + +function testRecordFields(gr) { + return gr.s; +} + +function uncurried(x) { + return x + 1 | 0; +} + +let Inner = { + vInner: 34 +}; + +function functionWithTypeAnnotation() { + return 1; +} + +function ZZ$HoverInsideModuleWithComponent(props) { + return null; +} + +let 
HoverInsideModuleWithComponent = { + x: 2, + make: ZZ$HoverInsideModuleWithComponent +}; + +function foo(age, name) { + return name + age.toString(); +} + +function next(numberOpt, year) { + let number = numberOpt !== undefined ? numberOpt : 0; + return number + year | 0; +} + +let Lib = { + foo: foo, + next: next +}; + +function customDouble(foo) { + return (foo << 1); +} + +function customDouble2(foo) { + return (foo << 1); +} + +let Dep = { + customDouble: customDouble, + customDouble2: customDouble2 +}; + +function customDouble$1(foo) { + return (foo << 1); +} + +let cc = 22; + +function ZZ$O$Comp(props) { + let __foo = props.foo; + let __first = props.first; + let first = __first !== undefined ? __first : ""; + let foo = __foo !== undefined ? __foo : 3; + return first + props.second + foo.toString(); +} + +let Comp = { + make: ZZ$O$Comp +}; + +let O = { + Comp: Comp +}; + +let comp = JsxRuntime.jsx(ZZ$O$Comp, { + second: "abcc", + v: 12 +}, "12"); + +let lll = Stdlib_List.make(3, 4); + +let arr = [ + 1, + 2, + 3 +]; + +let a = 12; + +let D; + +let E; + +let F; + +let v1 = "V1"; + +let DoubleNested; + +let valueInner = 34; + +let abc = "abc"; + +let some7 = 7; + +export { + a, + b, + c, + s, + M, + d, + J, + z, + MSig, + Impl, + Impl2, + D, + E, + F, + testRecordFields, + v1, + DoubleNested, + uncurried, + Inner, + valueInner, + functionWithTypeAnnotation, + HoverInsideModuleWithComponent, + Lib, + Dep, + customDouble$1 as customDouble, + cc, + O, + comp, + lll, + abc, + arr, + some7, +} +/* c Not a pure module */ diff --git a/analysis/examples/example-project/types.json b/analysis/examples/example-project/types.json deleted file mode 100644 index 904cdf616..000000000 --- a/analysis/examples/example-project/types.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "output": "src/Serde.ml", - "engine": "rex-json", - "entries": [ - { - "file": "src/Hello.re", - "type": "lockfile" - } - ], - "custom": [ - {"module": "Belt_HashMapInt", "path": [], "name": "t", "args": 1} - ] 
-} \ No newline at end of file diff --git a/analysis/examples/monorepo-project/.gitignore b/analysis/examples/monorepo-project/.gitignore new file mode 100644 index 000000000..d66a081c5 --- /dev/null +++ b/analysis/examples/monorepo-project/.gitignore @@ -0,0 +1,2 @@ +lib +.merlin diff --git a/analysis/examples/monorepo-project/.vscode/settings.json b/analysis/examples/monorepo-project/.vscode/settings.json new file mode 100644 index 000000000..6c4df7cf5 --- /dev/null +++ b/analysis/examples/monorepo-project/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "rescript.settings.logLevel": "log" +} diff --git a/analysis/examples/monorepo-project/package-lock.json b/analysis/examples/monorepo-project/package-lock.json new file mode 100644 index 000000000..c79b0d66a --- /dev/null +++ b/analysis/examples/monorepo-project/package-lock.json @@ -0,0 +1,152 @@ +{ + "name": "monorepo-project", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "monorepo-project", + "workspaces": [ + "packages/app", + "packages/lib" + ], + "dependencies": { + "rescript": "12.1.0" + } + }, + "node_modules/@monorepo/app": { + "resolved": "packages/app", + "link": true + }, + "node_modules/@monorepo/lib": { + "resolved": "packages/lib", + "link": true + }, + "node_modules/@rescript/darwin-arm64": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/darwin-arm64/-/darwin-arm64-12.1.0.tgz", + "integrity": "sha512-OuJMT+2h2Lp60n8ONFx1oBAAePSVkM9zl7E/EX4VD2xkQoVTPklz0BpHYOICnFJSCOOdbOhbsTBXdLpo3yvllg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/darwin-x64": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/darwin-x64/-/darwin-x64-12.1.0.tgz", + "integrity": "sha512-r5Iv4ga+LaNq+6g9LODwZG4bwydd9UDXACP/HKxOfrP9XQCITlF/XqB1ZDJWyJOgJLZSJCd7erlG38YtB0VZKA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + 
"engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/linux-arm64": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/linux-arm64/-/linux-arm64-12.1.0.tgz", + "integrity": "sha512-UTZv4GTjbyQ/T5LQDfQiGcumK3SzE1K7+ug6gWpDcGZ7ALc7hCS6BVEFL/LDs8iWVwAwkK/6r456s2zRnvS7wQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/linux-x64": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/linux-x64/-/linux-x64-12.1.0.tgz", + "integrity": "sha512-c0PXuBL09JRSA4nQusYbR4mW5QJrBPqxDrqvIX+M79fk3d6jQmj5x4NsBwk5BavxvmbR/JU1JjYBlSAa3h22Vg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/runtime": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/runtime/-/runtime-12.1.0.tgz", + "integrity": "sha512-bvr9RfvBD+JS/6foWCA4l2fLXmUXN0KGqylXQPHt09QxUghqgoCiaWVHaHSx5dOIk/jAPlGQ7zB5yVeMas/EFQ==" + }, + "node_modules/@rescript/win32-x64": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/win32-x64/-/win32-x64-12.1.0.tgz", + "integrity": "sha512-nQC42QByyAbryfkbyK67iskipUqXVwTPCFrqissY4jJoP0128gg0yG6DydJnV1stXphtFdMFHtmyYE1ffG7UBg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/rescript": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/rescript/-/rescript-12.1.0.tgz", + "integrity": "sha512-n/B43wzIEKV4OmlrWbrlQOL4zZaz0RM/Cc8PG2YvhQvQDW7nscHJliDq1AGeVwHoMX68MeaKKzLDOMOMU9Z6FA==", + "license": "SEE LICENSE IN LICENSE", + "workspaces": [ + "packages/playground", + "packages/@rescript/*", + "tests/dependencies/**", + "tests/analysis_tests/**", + "tests/docstring_tests", + "tests/gentype_tests/**", + "tests/tools_tests", + "scripts/res" + ], + "dependencies": { + "@rescript/runtime": 
"12.1.0" + }, + "bin": { + "bsc": "cli/bsc.js", + "bstracing": "cli/bstracing.js", + "rescript": "cli/rescript.js", + "rescript-legacy": "cli/rescript-legacy.js", + "rescript-tools": "cli/rescript-tools.js" + }, + "engines": { + "node": ">=20.11.0" + }, + "optionalDependencies": { + "@rescript/darwin-arm64": "12.1.0", + "@rescript/darwin-x64": "12.1.0", + "@rescript/linux-arm64": "12.1.0", + "@rescript/linux-x64": "12.1.0", + "@rescript/win32-x64": "12.1.0" + } + }, + "packages/app": { + "name": "@monorepo/app", + "version": "0.0.1", + "dependencies": { + "@monorepo/lib": "*" + } + }, + "packages/lib": { + "name": "@monorepo/lib", + "version": "0.0.1" + } + } +} diff --git a/analysis/examples/monorepo-project/package.json b/analysis/examples/monorepo-project/package.json new file mode 100644 index 000000000..b516ef9fa --- /dev/null +++ b/analysis/examples/monorepo-project/package.json @@ -0,0 +1,16 @@ +{ + "name": "monorepo-project", + "private": true, + "workspaces": [ + "packages/app", + "packages/lib" + ], + "dependencies": { + "rescript": "12.1.0" + }, + "scripts": { + "build": "rescript build", + "watch": "rescript watch", + "clean": "rescript clean" + } +} diff --git a/analysis/examples/monorepo-project/packages/app/.vscode/settings.json b/analysis/examples/monorepo-project/packages/app/.vscode/settings.json new file mode 100644 index 000000000..6c4df7cf5 --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "rescript.settings.logLevel": "log" +} diff --git a/analysis/examples/monorepo-project/packages/app/package.json b/analysis/examples/monorepo-project/packages/app/package.json new file mode 100644 index 000000000..ce8a9af6d --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/package.json @@ -0,0 +1,12 @@ +{ + "name": "@monorepo/app", + "version": "0.0.1", + "scripts": { + "build": "rescript build", + "clean": "rescript clean", + "watch": "rescript watch" + }, + "dependencies": { + 
"@monorepo/lib": "*" + } +} diff --git a/analysis/examples/monorepo-project/packages/app/rescript.json b/analysis/examples/monorepo-project/packages/app/rescript.json new file mode 100644 index 000000000..81c0e2bd7 --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/rescript.json @@ -0,0 +1,13 @@ +{ + "name": "@monorepo/app", + "sources": { + "dir": "src", + "subdirs": true + }, + "package-specs": { + "module": "esmodule", + "in-source": true + }, + "suffix": ".mjs", + "dependencies": ["@monorepo/lib"] +} diff --git a/analysis/examples/monorepo-project/packages/app/src/App.mjs b/analysis/examples/monorepo-project/packages/app/src/App.mjs new file mode 100644 index 000000000..e91955003 --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/src/App.mjs @@ -0,0 +1,22 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as Lib from "@monorepo/lib/src/Lib.mjs"; + +function main() { + let greeting = Lib.greet("World"); + console.log(greeting); + let sum = Lib.add(1, 2); + console.log("Sum: " + sum.toString()); +} + +function unusedAppFunction() { + return "Unused in app"; +} + +main(); + +export { + main, + unusedAppFunction, +} +/* Not a pure module */ diff --git a/analysis/examples/monorepo-project/packages/app/src/App.res b/analysis/examples/monorepo-project/packages/app/src/App.res new file mode 100644 index 000000000..22fb2dd11 --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/src/App.res @@ -0,0 +1,16 @@ +/* monorepo subpackage test */ +// App module - main application + +let main = () => { + let greeting = Lib.greet("World") + Console.log(greeting) + + let sum = Lib.add(1, 2) + Console.log("Sum: " ++ Int.toString(sum)) +} + +// This function is never used (dead code) +let unusedAppFunction = () => "Unused in app" + +// Run main +let _ = main() diff --git a/analysis/examples/monorepo-project/rescript.json b/analysis/examples/monorepo-project/rescript.json new file mode 100644 index 000000000..b22b4976e --- 
/dev/null +++ b/analysis/examples/monorepo-project/rescript.json @@ -0,0 +1,16 @@ +{ + "name": "monorepo-project", + "sources": { + "dir": "src", + "subdirs": true + }, + "package-specs": { + "module": "esmodule", + "in-source": true + }, + "suffix": ".mjs", + "dependencies": [ + "@monorepo/app", + "@monorepo/lib" + ] +} diff --git a/analysis/examples/monorepo-project/src/Root.mjs b/analysis/examples/monorepo-project/src/Root.mjs new file mode 100644 index 000000000..201f61b4b --- /dev/null +++ b/analysis/examples/monorepo-project/src/Root.mjs @@ -0,0 +1,15 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as App from "@monorepo/app/src/App.mjs"; + +App.main(); + +let rootValue = "Root package"; + +let unusedRootValue = 123; + +export { + rootValue, + unusedRootValue, +} +/* Not a pure module */ diff --git a/analysis/examples/monorepo-project/src/Root.res b/analysis/examples/monorepo-project/src/Root.res new file mode 100644 index 000000000..eb3da731e --- /dev/null +++ b/analysis/examples/monorepo-project/src/Root.res @@ -0,0 +1,11 @@ +// Root module - monorepo root + +let rootValue = "Root package" + +// Use something from the app package +let _ = App.main() + +// This is unused (dead code) +let unusedRootValue = 123 + +// let _ = App.unusedAppFunction \ No newline at end of file diff --git a/analysis/examples/rescript9-project/.gitignore b/analysis/examples/rescript9-project/.gitignore new file mode 100644 index 000000000..d66a081c5 --- /dev/null +++ b/analysis/examples/rescript9-project/.gitignore @@ -0,0 +1,2 @@ +lib +.merlin diff --git a/analysis/examples/rescript9-project/.vscode/settings.json b/analysis/examples/rescript9-project/.vscode/settings.json new file mode 100644 index 000000000..6c4df7cf5 --- /dev/null +++ b/analysis/examples/rescript9-project/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "rescript.settings.logLevel": "log" +} diff --git a/analysis/examples/rescript9-project/bsconfig.json 
b/analysis/examples/rescript9-project/bsconfig.json new file mode 100644 index 000000000..d594590e1 --- /dev/null +++ b/analysis/examples/rescript9-project/bsconfig.json @@ -0,0 +1,9 @@ +{ + "name": "rescript9-project", + "sources": ["src"], + "package-specs": { + "module": "es6", + "in-source": true + }, + "suffix": ".bs.js" +} diff --git a/analysis/examples/rescript9-project/package-lock.json b/analysis/examples/rescript9-project/package-lock.json new file mode 100644 index 000000000..7d9d5c10a --- /dev/null +++ b/analysis/examples/rescript9-project/package-lock.json @@ -0,0 +1,28 @@ +{ + "name": "rescript9-project", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "rescript9-project", + "version": "1.0.0", + "dependencies": { + "rescript": "9.1.4" + } + }, + "node_modules/rescript": { + "version": "9.1.4", + "resolved": "https://registry.npmjs.org/rescript/-/rescript-9.1.4.tgz", + "integrity": "sha512-aXANK4IqecJzdnDpJUsU6pxMViCR5ogAxzuqS0mOr8TloMnzAjJFu63fjD6LCkWrKAhlMkFFzQvVQYaAaVkFXw==", + "hasInstallScript": true, + "license": "SEE LICENSE IN LICENSE", + "bin": { + "bsc": "bsc", + "bsrefmt": "bsrefmt", + "bstracing": "lib/bstracing", + "rescript": "rescript" + } + } + } +} diff --git a/analysis/examples/rescript9-project/package.json b/analysis/examples/rescript9-project/package.json new file mode 100644 index 000000000..2571ffabe --- /dev/null +++ b/analysis/examples/rescript9-project/package.json @@ -0,0 +1,12 @@ +{ + "name": "rescript9-project", + "version": "1.0.0", + "scripts": { + "build": "rescript build", + "watch": "rescript build -w", + "clean": "rescript clean" + }, + "dependencies": { + "rescript": "9.1.4" + } +} diff --git a/analysis/examples/rescript9-project/src/Hello.bs.js b/analysis/examples/rescript9-project/src/Hello.bs.js new file mode 100644 index 000000000..0edc63d78 --- /dev/null +++ b/analysis/examples/rescript9-project/src/Hello.bs.js @@ -0,0 +1,18 @@ +// Generated by ReScript, 
PLEASE EDIT WITH CARE + + +function add(a, b) { + return a + b | 0; +} + +var greeting = "Hello from ReScript 9!"; + +var result = 3; + +export { + greeting , + add , + result , + +} +/* No side effect */ diff --git a/analysis/examples/rescript9-project/src/Hello.res b/analysis/examples/rescript9-project/src/Hello.res new file mode 100644 index 000000000..8dace8fd7 --- /dev/null +++ b/analysis/examples/rescript9-project/src/Hello.res @@ -0,0 +1,6 @@ +// Simple ReScript 9 test file +let greeting = "Hello from ReScript 9!" + +let add = (a, b) => a + b + +let result = add(1, 2) diff --git a/client/.vscode-test.mjs b/client/.vscode-test.mjs new file mode 100644 index 000000000..4bdfbda6b --- /dev/null +++ b/client/.vscode-test.mjs @@ -0,0 +1,65 @@ +import { defineConfig } from "@vscode/test-cli"; +import * as path from "path"; + +export default defineConfig([ + { + label: "example-project", + files: "out/client/src/test/suite/exampleProject.test.js", + version: "stable", + extensionDevelopmentPath: path.resolve(import.meta.dirname, ".."), + workspaceFolder: path.resolve( + import.meta.dirname, + "../analysis/examples/example-project", + ), + mocha: { + ui: "tdd", + timeout: 60000, + }, + launchArgs: ["--disable-extensions"], + }, + { + label: "monorepo-root", + files: "out/client/src/test/suite/monorepoRoot.test.js", + version: "stable", + extensionDevelopmentPath: path.resolve(import.meta.dirname, ".."), + workspaceFolder: path.resolve( + import.meta.dirname, + "../analysis/examples/monorepo-project", + ), + mocha: { + ui: "tdd", + timeout: 60000, + }, + launchArgs: ["--disable-extensions"], + }, + { + label: "monorepo-subpackage", + files: "out/client/src/test/suite/monorepoSubpackage.test.js", + version: "stable", + extensionDevelopmentPath: path.resolve(import.meta.dirname, ".."), + workspaceFolder: path.resolve( + import.meta.dirname, + "../analysis/examples/monorepo-project/packages/app", + ), + mocha: { + ui: "tdd", + timeout: 60000, + }, + launchArgs: 
["--disable-extensions"], + }, + { + label: "rescript9-project", + files: "out/client/src/test/suite/rescript9.test.js", + version: "stable", + extensionDevelopmentPath: path.resolve(import.meta.dirname, ".."), + workspaceFolder: path.resolve( + import.meta.dirname, + "../analysis/examples/rescript9-project", + ), + mocha: { + ui: "tdd", + timeout: 60000, + }, + launchArgs: ["--disable-extensions"], + }, +]); diff --git a/client/package.json b/client/package.json index 6f5dc88b8..3a74fdb60 100644 --- a/client/package.json +++ b/client/package.json @@ -6,7 +6,15 @@ "keywords": [], "author": "ReScript Team", "license": "MIT", + "scripts": { + "test": "vscode-test" + }, "dependencies": { "vscode-languageclient": "8.1.0-next.5" + }, + "devDependencies": { + "@types/mocha": "^10.0.10", + "@vscode/test-cli": "^0.0.12", + "@vscode/test-electron": "^2.5.2" } } diff --git a/client/src/commands.ts b/client/src/commands.ts index a94c424a5..f6e97235b 100644 --- a/client/src/commands.ts +++ b/client/src/commands.ts @@ -3,25 +3,34 @@ import { DiagnosticCollection, OutputChannel, StatusBarItem } from "vscode"; import { DiagnosticsResultCodeActionsMap, runCodeAnalysisWithReanalyze, + reanalyzeServers, + stopReanalyzeServer, + stopAllReanalyzeServers, + showReanalyzeServerLog, } from "./commands/code_analysis"; +export { + reanalyzeServers, + stopReanalyzeServer, + stopAllReanalyzeServers, + showReanalyzeServerLog, +}; + export { createInterface } from "./commands/create_interface"; export { openCompiled } from "./commands/open_compiled"; export { switchImplIntf } from "./commands/switch_impl_intf"; export { dumpDebug, dumpDebugRetrigger } from "./commands/dump_debug"; -export { dumpServerState } from "./commands/dump_server_state"; export { pasteAsRescriptJson } from "./commands/paste_as_rescript_json"; export { pasteAsRescriptJsx } from "./commands/paste_as_rescript_jsx"; +// Returns the monorepo root path if a reanalyze server was started, null otherwise. 
export const codeAnalysisWithReanalyze = ( - targetDir: string | null, diagnosticsCollection: DiagnosticCollection, diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap, outputChannel: OutputChannel, codeAnalysisRunningStatusBarItem: StatusBarItem, -) => { - runCodeAnalysisWithReanalyze( - targetDir, +): Promise => { + return runCodeAnalysisWithReanalyze( diagnosticsCollection, diagnosticsResultCodeActions, outputChannel, diff --git a/client/src/commands/code_analysis.ts b/client/src/commands/code_analysis.ts index e739573f0..1fe197af2 100644 --- a/client/src/commands/code_analysis.ts +++ b/client/src/commands/code_analysis.ts @@ -1,5 +1,7 @@ import * as cp from "child_process"; +import * as fs from "fs"; import * as path from "path"; +import * as semver from "semver"; import { window, DiagnosticCollection, @@ -14,7 +16,259 @@ import { OutputChannel, StatusBarItem, } from "vscode"; -import { findProjectRootOfFileInDir, getBinaryPath } from "../utils"; +import { NormalizedPath, normalizePath } from "../utils"; +import { + findBinary, + getMonorepoRootFromBinaryPath, +} from "../../../shared/src/findBinary"; +import { findProjectRootOfFile } from "../../../shared/src/projectRoots"; + +// Reanalyze server constants (matches rescript monorepo) +const REANALYZE_SOCKET_FILENAME = ".rescript-reanalyze.sock"; +const REANALYZE_SERVER_MIN_VERSION = "12.1.0"; + +// Server state per monorepo root +export interface ReanalyzeServerState { + process: cp.ChildProcess | null; + monorepoRoot: string; + socketPath: string; + startedByUs: boolean; + outputChannel: OutputChannel | null; +} + +// Map from monorepo root to server state +export const reanalyzeServers: Map = new Map(); + +// Check if ReScript version supports reanalyze-server +const supportsReanalyzeServer = async ( + monorepoRootPath: string | null, +): Promise => { + if (monorepoRootPath === null) return false; + + try { + const rescriptDir = path.join(monorepoRootPath, "node_modules", "rescript"); + const 
packageJsonPath = path.join(rescriptDir, "package.json"); + const packageJson = JSON.parse( + await fs.promises.readFile(packageJsonPath, "utf-8"), + ); + const version = packageJson.version; + + return ( + semver.valid(version) != null && + semver.gte(version, REANALYZE_SERVER_MIN_VERSION) + ); + } catch { + return false; + } +}; + +// Get socket path for a monorepo root +const getSocketPath = (monorepoRoot: string): string => { + return path.join(monorepoRoot, REANALYZE_SOCKET_FILENAME); +}; + +// Check if server is running (socket file exists) +const isServerRunning = (monorepoRoot: string): boolean => { + const socketPath = getSocketPath(monorepoRoot); + return fs.existsSync(socketPath); +}; + +// Start reanalyze server for a monorepo. +// Note: This should only be called after supportsReanalyzeServer() returns true, +// which ensures ReScript >= 12.1.0 where the reanalyze-server subcommand exists. +export const startReanalyzeServer = async ( + monorepoRoot: string, + binaryPath: string, + clientOutputChannel?: OutputChannel, +): Promise => { + // Check if already running (either by us or externally) + if (isServerRunning(monorepoRoot)) { + // Check if we have a record of starting it + const existing = reanalyzeServers.get(monorepoRoot); + if (existing) { + existing.outputChannel?.appendLine( + "[info] Server already running (started by us)", + ); + return existing; + } + // Server running but not started by us - just record it + clientOutputChannel?.appendLine( + `[info] Found existing reanalyze-server for ${path.basename(monorepoRoot)} (not started by extension)`, + ); + const state: ReanalyzeServerState = { + process: null, + monorepoRoot, + socketPath: getSocketPath(monorepoRoot), + startedByUs: false, + outputChannel: null, + }; + reanalyzeServers.set(monorepoRoot, state); + return state; + } + + // Create output channel for server logs + const outputChannel = window.createOutputChannel( + `ReScript Reanalyze Server (${path.basename(monorepoRoot)})`, + ); 
+ + outputChannel.appendLine( + `[info] Starting reanalyze-server in ${monorepoRoot}`, + ); + + // Start the server + const serverProcess = cp.spawn(binaryPath, ["reanalyze-server"], { + cwd: monorepoRoot, + stdio: ["ignore", "pipe", "pipe"], + }); + + if (serverProcess.pid == null) { + outputChannel.appendLine("[error] Failed to start reanalyze-server"); + return null; + } + + const state: ReanalyzeServerState = { + process: serverProcess, + monorepoRoot, + socketPath: getSocketPath(monorepoRoot), + startedByUs: true, + outputChannel, + }; + + // Log stdout and stderr to output channel + serverProcess.stdout?.on("data", (data) => { + outputChannel.appendLine(`[stdout] ${data.toString().trim()}`); + }); + + serverProcess.stderr?.on("data", (data) => { + outputChannel.appendLine(`[stderr] ${data.toString().trim()}`); + }); + + serverProcess.on("error", (err) => { + outputChannel.appendLine(`[error] Server error: ${err.message}`); + }); + + serverProcess.on("exit", (code, signal) => { + outputChannel.appendLine( + `[info] Server exited with code ${code}, signal ${signal}`, + ); + reanalyzeServers.delete(monorepoRoot); + }); + + reanalyzeServers.set(monorepoRoot, state); + + // Wait briefly for socket file to be created (up to 3 seconds) + for (let i = 0; i < 30; i++) { + if (isServerRunning(monorepoRoot)) { + outputChannel.appendLine(`[info] Server socket ready`); + return state; + } + await new Promise((resolve) => setTimeout(resolve, 100)); + } + + outputChannel.appendLine( + "[warn] Server started but socket not found after 3 seconds", + ); + return state; +}; + +// Clean up socket file if it exists +const cleanupSocketFile = (socketPath: string): void => { + try { + if (fs.existsSync(socketPath)) { + fs.unlinkSync(socketPath); + } + } catch { + // Ignore errors during cleanup + } +}; + +// Stop reanalyze server for a monorepo (only if we started it) +export const stopReanalyzeServer = ( + monorepoRoot: string | null, + clientOutputChannel?: OutputChannel, +): 
void => { + if (monorepoRoot == null) return; + + const state = reanalyzeServers.get(monorepoRoot); + if (!state) return; + + // Only kill the process if we started it + if (state.startedByUs && state.process != null) { + state.process.kill(); + state.outputChannel?.appendLine("[info] Server stopped by extension"); + // Clean up socket file to prevent stale socket issues + cleanupSocketFile(state.socketPath); + } else if (!state.startedByUs) { + clientOutputChannel?.appendLine( + `[info] Leaving external reanalyze-server running for ${path.basename(monorepoRoot)}`, + ); + } + + reanalyzeServers.delete(monorepoRoot); +}; + +// Stop all servers we started +export const stopAllReanalyzeServers = (): void => { + for (const [_monorepoRoot, state] of reanalyzeServers) { + if (state.startedByUs && state.process != null) { + state.process.kill(); + state.outputChannel?.appendLine("[info] Server stopped by extension"); + // Clean up socket file to prevent stale socket issues + cleanupSocketFile(state.socketPath); + } + } + reanalyzeServers.clear(); +}; + +// Show server log for a monorepo +// Returns true if the output channel was shown, false otherwise +// This is an async function because it may need to find the binary to derive monorepo root +export const showReanalyzeServerLog = async ( + monorepoRoot: string | null, +): Promise => { + if (monorepoRoot == null) { + // Try to find any running server + const firstServer = reanalyzeServers.values().next().value; + if (firstServer?.outputChannel) { + firstServer.outputChannel.show(); + return true; + } else { + window.showInformationMessage( + "No reanalyze server is currently running.", + ); + return false; + } + } + + // First try direct lookup + let state = reanalyzeServers.get(monorepoRoot); + if (state?.outputChannel) { + state.outputChannel.show(); + return true; + } + + // If not found, try to derive monorepo root from binary path + // (the server is registered under monorepo root, not subpackage root) + const 
binaryPath = await findBinary({ + projectRootPath: monorepoRoot, + binary: "rescript-tools.exe", + }); + if (binaryPath != null) { + const derivedMonorepoRoot = getMonorepoRootFromBinaryPath(binaryPath); + if (derivedMonorepoRoot != null && derivedMonorepoRoot !== monorepoRoot) { + state = reanalyzeServers.get(derivedMonorepoRoot); + if (state?.outputChannel) { + state.outputChannel.show(); + return true; + } + } + } + + window.showInformationMessage( + `No reanalyze server log available for ${path.basename(monorepoRoot)}`, + ); + return false; +}; export let statusBarItem = { setToStopText: (codeAnalysisRunningStatusBarItem: StatusBarItem) => { @@ -198,43 +452,85 @@ let resultsToDiagnostics = ( }; }; -export const runCodeAnalysisWithReanalyze = ( - targetDir: string | null, +// Returns the monorepo root path if a reanalyze server was started, null otherwise. +// This allows the caller to track which server to stop later. +export const runCodeAnalysisWithReanalyze = async ( diagnosticsCollection: DiagnosticCollection, diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap, outputChannel: OutputChannel, codeAnalysisRunningStatusBarItem: StatusBarItem, -) => { - let currentDocument = window.activeTextEditor.document; - let cwd = targetDir ?? path.dirname(currentDocument.uri.fsPath); +): Promise => { + let currentDocument = window.activeTextEditor?.document; + if (!currentDocument) { + window.showErrorMessage("No active document found."); + return null; + } - let projectRootPath: string | null = findProjectRootOfFileInDir( - currentDocument.uri.fsPath, + let projectRootPath: NormalizedPath | null = normalizePath( + findProjectRootOfFile(currentDocument.uri.fsPath), ); - // This little weird lookup is because in the legacy setup reanalyze needs to be - // run from the analysis binary, whereas in the new setup it's run from the tools - // binary. - let binaryPath = - getBinaryPath("rescript-tools.exe", projectRootPath) ?? 
- getBinaryPath("rescript-editor-analysis.exe"); + // findBinary walks up the directory tree to find node_modules/rescript, + // so it works correctly for monorepos (finds the workspace root's binary) + // Note: rescript-tools.exe (with reanalyze command) is only available in ReScript 12+ + const binaryPath: string | null = await findBinary({ + projectRootPath, + binary: "rescript-tools.exe", + }); if (binaryPath === null) { - window.showErrorMessage("Binary executable not found."); - return; + outputChannel.appendLine( + `[error] rescript-tools.exe not found for project root: ${projectRootPath}. Code analysis requires ReScript 12 or later.`, + ); + window.showErrorMessage( + "Code analysis requires ReScript 12 or later (rescript-tools.exe not found).", + ); + return null; + } + + // Derive monorepo root from binary path - the directory containing node_modules + // This handles monorepos correctly since findBinary walks up to find the binary + const monorepoRootPath: NormalizedPath | null = normalizePath( + getMonorepoRootFromBinaryPath(binaryPath), + ); + + if (monorepoRootPath === null) { + outputChannel.appendLine( + `[error] Could not determine workspace root from binary path: ${binaryPath}`, + ); + window.showErrorMessage("Could not determine workspace root."); + return null; + } + + // Check if we should use reanalyze-server (ReScript >= 12.1.0) + const useServer = await supportsReanalyzeServer(monorepoRootPath); + + if (useServer && monorepoRootPath) { + // Ensure server is running from workspace root + const serverState = await startReanalyzeServer( + monorepoRootPath, + binaryPath, + outputChannel, + ); + if (serverState) { + outputChannel.appendLine( + `[info] Using reanalyze-server for ${path.basename(monorepoRootPath)}`, + ); + } } statusBarItem.setToRunningText(codeAnalysisRunningStatusBarItem); let opts = ["reanalyze", "-json"]; - let p = cp.spawn(binaryPath, opts, { - cwd, - }); + let p = cp.spawn(binaryPath, opts, { cwd: monorepoRootPath }); if 
(p.stdout == null) { + outputChannel.appendLine( + `[error] Failed to spawn reanalyze process: stdout is null. Binary: ${binaryPath}, cwd: ${monorepoRootPath}`, + ); statusBarItem.setToFailed(codeAnalysisRunningStatusBarItem); - window.showErrorMessage("Something went wrong."); - return; + window.showErrorMessage("Failed to start code analysis process."); + return null; } let data = ""; @@ -283,7 +579,7 @@ export const runCodeAnalysisWithReanalyze = ( outputChannel.appendLine( `> To reproduce, run "${binaryPath} ${opts.join( " ", - )}" in directory: "${cwd}"`, + )}" in directory: "${monorepoRootPath}"`, ); outputChannel.appendLine("\n"); } @@ -315,4 +611,7 @@ export const runCodeAnalysisWithReanalyze = ( statusBarItem.setToStopText(codeAnalysisRunningStatusBarItem); }); + + // Return the monorepo root so the caller can track which server to stop + return monorepoRootPath; }; diff --git a/client/src/commands/dump_debug.ts b/client/src/commands/dump_debug.ts index a845848c3..58e4054c2 100644 --- a/client/src/commands/dump_debug.ts +++ b/client/src/commands/dump_debug.ts @@ -11,6 +11,7 @@ import { createFileInTempDir, findProjectRootOfFileInDir, getBinaryPath, + NormalizedPath, } from "../utils"; import * as path from "path"; @@ -136,7 +137,8 @@ export const dumpDebug = async ( const { line: endLine, character: endChar } = editor.selection.end; const filePath = editor.document.uri.fsPath; - let projectRootPath: string | null = findProjectRootOfFileInDir(filePath); + let projectRootPath: NormalizedPath | null = + findProjectRootOfFileInDir(filePath); const binaryPath = getBinaryPath( "rescript-editor-analysis.exe", projectRootPath, diff --git a/client/src/commands/dump_server_state.ts b/client/src/commands/dump_server_state.ts deleted file mode 100644 index 5b61008c8..000000000 --- a/client/src/commands/dump_server_state.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { - ExtensionContext, - StatusBarItem, - Uri, - ViewColumn, - window, -} from "vscode"; -import { 
LanguageClient } from "vscode-languageclient/node"; -import * as fs from "fs"; -import { createFileInTempDir } from "../utils"; - -export async function dumpServerState( - client: LanguageClient, - _context?: ExtensionContext, - _statusBarItem?: StatusBarItem, -) { - try { - const result = await client.sendRequest("rescript/dumpServerState"); - const outputFile = createFileInTempDir("server_state", ".json"); - - // Pretty-print JSON with stable ordering where possible - const replacer = (_key: string, value: any) => { - if (value instanceof Map) return Object.fromEntries(value); - if (value instanceof Set) return Array.from(value); - return value; - }; - - const json = JSON.stringify(result, replacer, 2); - fs.writeFileSync(outputFile, json, { encoding: "utf-8" }); - - await window.showTextDocument(Uri.parse(outputFile), { - viewColumn: ViewColumn.Beside, - preview: false, - }); - } catch (e) { - window.showErrorMessage(`Failed to dump server state: ${String(e)}`); - } -} diff --git a/client/src/extension.ts b/client/src/extension.ts index cd4fee0f8..dd24726f7 100644 --- a/client/src/extension.ts +++ b/client/src/extension.ts @@ -13,6 +13,7 @@ import { WorkspaceEdit, CodeActionKind, Diagnostic, + ViewColumn, } from "vscode"; import { ThemeColor } from "vscode"; @@ -31,6 +32,7 @@ import { } from "./commands/code_analysis"; import { pasteAsRescriptJson } from "./commands/paste_as_rescript_json"; import { pasteAsRescriptJsx } from "./commands/paste_as_rescript_jsx"; +import { findProjectRootOfFile } from "../../shared/src/projectRoots"; let client: LanguageClient; @@ -154,7 +156,6 @@ export function activate(context: ExtensionContext) { client.onNotification("rescript/compilationFinished", () => { if (inCodeAnalysisState.active === true) { customCommands.codeAnalysisWithReanalyze( - inCodeAnalysisState.activatedFromDirectory, diagnosticsCollection, diagnosticsResultCodeActions, outputChannel, @@ -307,8 +308,8 @@ export function activate(context: ExtensionContext) { 
let inCodeAnalysisState: { active: boolean; - activatedFromDirectory: string | null; - } = { active: false, activatedFromDirectory: null }; + currentMonorepoRoot: string | null; + } = { active: false, currentMonorepoRoot: null }; // This code actions provider yields the code actions potentially extracted // from the code analysis to the editor. @@ -373,8 +374,30 @@ export function activate(context: ExtensionContext) { customCommands.dumpDebug(context, debugDumpStatusBarItem); }); - commands.registerCommand("rescript-vscode.dump-server-state", () => { - customCommands.dumpServerState(client, context, debugDumpStatusBarItem); + commands.registerCommand("rescript-vscode.dump-server-state", async () => { + try { + const result = (await client.sendRequest("workspace/executeCommand", { + command: "rescript/dumpServerState", + })) as { content: string }; + + // Create an unsaved document with the server state content + const document = await workspace.openTextDocument({ + content: result.content, + language: "json", + }); + + // Show the document in the editor + await window.showTextDocument(document, { + viewColumn: ViewColumn.Beside, + preview: false, + }); + } catch (e) { + outputChannel.appendLine(`Failed to dump server state: ${String(e)}`); + window.showErrorMessage( + "Failed to dump server state. See 'Output' tab, 'ReScript Language Server' channel for details.", + ); + outputChannel.show(); + } }); commands.registerCommand("rescript-vscode.showProblems", async () => { @@ -410,39 +433,33 @@ export function activate(context: ExtensionContext) { ); // Starts the code analysis mode. - commands.registerCommand("rescript-vscode.start_code_analysis", () => { - // Save the directory this first ran from, and re-use that when continuously - // running the analysis. This is so that the target of the analysis does not - // change on subsequent runs, if there are multiple ReScript projects open - // in the editor. 
- let currentDocument = window.activeTextEditor.document; - + commands.registerCommand("rescript-vscode.start_code_analysis", async () => { inCodeAnalysisState.active = true; - // Pointing reanalyze to the dir of the current file path is fine, because - // reanalyze will walk upwards looking for a bsconfig.json in order to find - // the correct project root. - inCodeAnalysisState.activatedFromDirectory = path.dirname( - currentDocument.uri.fsPath, - ); - codeAnalysisRunningStatusBarItem.command = "rescript-vscode.stop_code_analysis"; codeAnalysisRunningStatusBarItem.show(); statusBarItem.setToStopText(codeAnalysisRunningStatusBarItem); - customCommands.codeAnalysisWithReanalyze( - inCodeAnalysisState.activatedFromDirectory, + // Start code analysis and capture the monorepo root for server management + const monorepoRoot = await customCommands.codeAnalysisWithReanalyze( diagnosticsCollection, diagnosticsResultCodeActions, outputChannel, codeAnalysisRunningStatusBarItem, ); + inCodeAnalysisState.currentMonorepoRoot = monorepoRoot; }); commands.registerCommand("rescript-vscode.stop_code_analysis", () => { inCodeAnalysisState.active = false; - inCodeAnalysisState.activatedFromDirectory = null; + + // Stop server if we started it for this project + customCommands.stopReanalyzeServer( + inCodeAnalysisState.currentMonorepoRoot, + outputChannel, + ); + inCodeAnalysisState.currentMonorepoRoot = null; diagnosticsCollection.clear(); diagnosticsResultCodeActions.clear(); @@ -450,10 +467,50 @@ export function activate(context: ExtensionContext) { codeAnalysisRunningStatusBarItem.hide(); }); + // Show reanalyze server log + commands.registerCommand( + "rescript-vscode.show_reanalyze_server_log", + async () => { + let currentDocument = window.activeTextEditor?.document; + let projectRootPath: string | null = null; + + if (currentDocument) { + projectRootPath = findProjectRootOfFile(currentDocument.uri.fsPath); + } + + return await 
customCommands.showReanalyzeServerLog(projectRootPath); + }, + ); + commands.registerCommand("rescript-vscode.switch-impl-intf", () => { customCommands.switchImplIntf(client); }); + // Start build command + commands.registerCommand("rescript-vscode.start_build", async () => { + let currentDocument = window.activeTextEditor?.document; + if (!currentDocument) { + window.showErrorMessage("No active document found."); + return; + } + + try { + const result = (await client.sendRequest("rescript/startBuild", { + uri: currentDocument.uri.toString(), + })) as { success: boolean }; + + if (result.success) { + window.showInformationMessage("Build watcher started."); + } else { + window.showErrorMessage( + "Failed to start build. Check that a ReScript project is open.", + ); + } + } catch (e) { + window.showErrorMessage(`Failed to start build: ${String(e)}`); + } + }); + commands.registerCommand("rescript-vscode.restart_language_server", () => { client.stop().then(() => { client = createLanguageClient(); @@ -502,6 +559,9 @@ export function activate(context: ExtensionContext) { } export function deactivate(): Thenable | undefined { + // Stop all reanalyze servers we started + customCommands.stopAllReanalyzeServers(); + if (!client) { return undefined; } diff --git a/client/src/test/suite/exampleProject.test.ts b/client/src/test/suite/exampleProject.test.ts new file mode 100644 index 000000000..e3acc7a72 --- /dev/null +++ b/client/src/test/suite/exampleProject.test.ts @@ -0,0 +1,328 @@ +/** + * Example Project Tests (Code Analysis Server Test Suite) + * + * Run these tests: + * cd client && npm run test -- --label example-project + */ +import * as assert from "assert"; +import * as path from "path"; +import * as vscode from "vscode"; +import { + getWorkspaceRoot, + removeRescriptLockFile, + removeReanalyzeSocketFile, + ensureExtensionActivated, + sleep, + getCompilerLogPath, + getFileMtime, + waitForFileUpdate, + insertCommentAndSave, + restoreContentAndSave, + 
startBuildWatcher, + startCodeAnalysis, + stopCodeAnalysis, + showReanalyzeServerLog, + findBuildPromptInLogs, +} from "./helpers"; + +suite("Code Analysis Server Test Suite", () => { + test("Extension should be present", async () => { + const extension = vscode.extensions.getExtension( + "chenglou92.rescript-vscode", + ); + assert.ok(extension, "ReScript extension should be present"); + console.log("Extension found:", extension.id); + }); + + test("Commands should be registered after activation", async () => { + const extension = vscode.extensions.getExtension( + "chenglou92.rescript-vscode", + ); + if (!extension) { + console.log("Extension not found, skipping command test"); + return; + } + + if (!extension.isActive) { + console.log("Activating extension..."); + await extension.activate(); + } + + const commands = await vscode.commands.getCommands(true); + + const expectedCommands = [ + "rescript-vscode.start_code_analysis", + "rescript-vscode.stop_code_analysis", + "rescript-vscode.show_reanalyze_server_log", + "rescript-vscode.start_build", + ]; + + for (const cmd of expectedCommands) { + assert.ok(commands.includes(cmd), `Command ${cmd} should be registered`); + } + console.log("All commands registered successfully!"); + }); + + test("Start Code Analysis should run on a ReScript file", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + console.log("Running start_code_analysis command..."); + await startCodeAnalysis(); + console.log("Code analysis command completed"); + + 
console.log("Running stop_code_analysis command..."); + await stopCodeAnalysis(); + console.log("Test completed successfully"); + }); + + test("Start Build command should start build watcher", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + console.log("Running start_build command..."); + await startBuildWatcher(); + + console.log( + "Test completed - check Language Server log for 'Starting build watcher' or 'Build watcher already running' message", + ); + }); + + test("Build watcher recompiles on file save", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + const editor = await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + console.log("Starting build watcher..."); + await startBuildWatcher(); + console.log("Build watcher started"); + + const compilerLogPath = getCompilerLogPath(workspaceRoot); + const mtimeBefore = getFileMtime(compilerLogPath); + if (mtimeBefore) { + console.log(`compiler.log mtime before: ${mtimeBefore.toISOString()}`); + } else { + console.log("compiler.log does not exist yet"); + } + + console.log("Editing file..."); + const originalContent = 
document.getText(); + await insertCommentAndSave(editor, "/* test comment */\n"); + console.log("File saved with edit"); + + console.log("Waiting for compilation..."); + const mtimeAfter = await waitForFileUpdate(compilerLogPath, mtimeBefore); + if (mtimeAfter) { + console.log(`compiler.log mtime after: ${mtimeAfter.toISOString()}`); + } else { + console.log("compiler.log still does not exist"); + } + + assert.ok(mtimeAfter, "compiler.log should exist after file save"); + if (mtimeBefore && mtimeAfter) { + assert.ok( + mtimeAfter > mtimeBefore, + "compiler.log should be updated after file save", + ); + console.log("SUCCESS: compiler.log was updated after file save"); + } else if (!mtimeBefore && mtimeAfter) { + console.log("SUCCESS: compiler.log was created after file save"); + } + + console.log("Restoring original content..."); + await restoreContentAndSave(editor, originalContent); + console.log("Original content restored"); + + await sleep(1000); + console.log("Test completed"); + }); + + test("Code analysis with incremental updates", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + // Remove stale socket file to ensure a fresh server is started + // Note: Only remove the socket file, not the lock file, to keep the build watcher running + removeReanalyzeSocketFile(workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Step 1: Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + const editor = await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + console.log("Step 2: Starting build..."); + await startBuildWatcher(); + console.log("Build started"); + + console.log("Step 3: Starting code analysis..."); + await startCodeAnalysis(); + 
console.log("Code analysis started"); + + console.log("Step 3b: Opening reanalyze server log..."); + await showReanalyzeServerLog(); + + console.log("Step 4: Checking for diagnostics..."); + const diagnostics = vscode.languages.getDiagnostics(document.uri); + console.log(`Found ${diagnostics.length} diagnostics in Hello.res`); + assert.ok(diagnostics.length > 0, "Should have diagnostics for dead code"); + for (const diag of diagnostics.slice(0, 5)) { + console.log( + ` - Line ${diag.range.start.line + 1}: ${diag.message.substring(0, 80)}...`, + ); + } + if (diagnostics.length > 5) { + console.log(` ... and ${diagnostics.length - 5} more`); + } + const initialDiagnosticsCount = diagnostics.length; + + console.log("Step 5: Adding dead code..."); + const originalContent = document.getText(); + const deadCode = "let testDeadVariable12345 = 999\n"; + + const compilerLogPath = getCompilerLogPath(workspaceRoot); + const compilerLogMtimeBefore = getFileMtime(compilerLogPath); + if (compilerLogMtimeBefore) { + console.log( + `compiler.log mtime before: ${compilerLogMtimeBefore.toISOString()}`, + ); + } else { + console.log("compiler.log does not exist before edit"); + } + + await insertCommentAndSave(editor, deadCode); + console.log("Dead code added and saved"); + + console.log("Step 5a: Waiting for compilation..."); + const mtimeAfter = await waitForFileUpdate( + compilerLogPath, + compilerLogMtimeBefore, + ); + if (mtimeAfter) { + console.log(`compiler.log updated: ${mtimeAfter.toISOString()}`); + } else { + console.log("Warning: compilation may not have completed"); + } + + console.log("Step 5b: Re-running code analysis..."); + await vscode.window.showTextDocument(document); + await startCodeAnalysis(); + console.log("Code analysis re-run complete"); + + console.log("Step 6: Checking for updated diagnostics..."); + const updatedDiagnostics = vscode.languages.getDiagnostics(document.uri); + console.log( + `Found ${updatedDiagnostics.length} diagnostics after edit (was 
${initialDiagnosticsCount})`, + ); + + assert.ok( + updatedDiagnostics.length > initialDiagnosticsCount, + `Diagnostics count should increase after adding dead code (was ${initialDiagnosticsCount}, now ${updatedDiagnostics.length})`, + ); + + const deadVarDiagnostic = updatedDiagnostics.find((d) => + d.message.includes("testDeadVariable12345"), + ); + assert.ok( + deadVarDiagnostic, + "Should find diagnostic for testDeadVariable12345", + ); + console.log( + `Found diagnostic for testDeadVariable12345: ${deadVarDiagnostic.message}`, + ); + + console.log("Step 7: Undoing change..."); + await restoreContentAndSave(editor, originalContent); + console.log("Change undone and saved"); + + await sleep(1000); + + console.log("Step 8: Stopping code analysis..."); + await stopCodeAnalysis(); + console.log("Code analysis stopped"); + + console.log("Step 9: Test completed - check Reanalyze Server log for:"); + console.log(" - [request #1] with 'files: X processed, 0 cached'"); + console.log( + " - [request #2] with 'files: X processed, Y cached' where Y > 0 (incremental)", + ); + }); + + test("Should prompt to start build when no lock file exists", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + removeRescriptLockFile(workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "More.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await sleep(1000); + + const promptResult = findBuildPromptInLogs(); + if (promptResult.found) { + console.log( + `Found prompt message: "Prompting to start build for ${promptResult.path}"`, + ); + } + + assert.ok( + promptResult.found, + "Should find 'Prompting to start build' message in Language Server log", 
+ ); + console.log("SUCCESS: Build prompt was shown"); + }); +}); diff --git a/client/src/test/suite/helpers.ts b/client/src/test/suite/helpers.ts new file mode 100644 index 000000000..32196b9ef --- /dev/null +++ b/client/src/test/suite/helpers.ts @@ -0,0 +1,324 @@ +/** + * Shared test helpers for ReScript VSCode extension tests + */ +import * as assert from "assert"; +import * as fs from "fs"; +import * as path from "path"; +import * as vscode from "vscode"; + +/** + * Get the workspace root folder path + */ +export function getWorkspaceRoot(): string { + const workspaceFolders = vscode.workspace.workspaceFolders; + if (workspaceFolders && workspaceFolders.length > 0) { + return workspaceFolders[0].uri.fsPath; + } + return ""; +} + +/** + * Remove ReScript 12+ lock file (lib/rescript.lock) and reanalyze socket file + */ +export function removeRescriptLockFile(workspaceRoot: string): void { + const filesToRemove = [ + path.join(workspaceRoot, "lib", "rescript.lock"), + // Also remove reanalyze socket file to ensure a fresh server is started + path.join(workspaceRoot, ".rescript-reanalyze.sock"), + ]; + + for (const file of filesToRemove) { + try { + if (fs.existsSync(file)) { + fs.unlinkSync(file); + console.log(`Removed ${path.basename(file)}`); + } + } catch (e) { + console.log(`Could not remove ${path.basename(file)}:`, e); + } + } +} + +/** + * Remove only the reanalyze socket file (not the lock file) + * Use this when you need a fresh reanalyze server but want to keep the build watcher running + */ +export function removeReanalyzeSocketFile(workspaceRoot: string): void { + const socketPath = path.join(workspaceRoot, ".rescript-reanalyze.sock"); + try { + if (fs.existsSync(socketPath)) { + fs.unlinkSync(socketPath); + console.log("Removed .rescript-reanalyze.sock"); + } + } catch (e) { + console.log("Could not remove .rescript-reanalyze.sock:", e); + } +} + +/** + * Remove ReScript 9/10/11 lock file (.bsb.lock) + */ +export function 
removeBsbLockFile(workspaceRoot: string): void { + const lockPath = path.join(workspaceRoot, ".bsb.lock"); + try { + if (fs.existsSync(lockPath)) { + fs.unlinkSync(lockPath); + console.log("Removed .bsb.lock"); + } + } catch (e) { + console.log("Could not remove .bsb.lock:", e); + } +} + +/** + * Remove monorepo lock files (both rewatch.lock and rescript.lock) + */ +export function removeMonorepoLockFiles(monorepoRoot: string): void { + const filesToRemove = [ + path.join(monorepoRoot, "lib", "rewatch.lock"), + path.join(monorepoRoot, "lib", "rescript.lock"), + // Also remove reanalyze socket file to ensure a fresh server is started + path.join(monorepoRoot, ".rescript-reanalyze.sock"), + ]; + + for (const file of filesToRemove) { + try { + if (fs.existsSync(file)) { + fs.unlinkSync(file); + console.log(`Removed ${path.basename(file)}`); + } + } catch (e) { + console.log(`Could not remove ${path.basename(file)}:`, e); + } + } +} + +/** + * Ensure the ReScript extension is activated + */ +export async function ensureExtensionActivated(): Promise< + vscode.Extension | undefined +> { + const extension = vscode.extensions.getExtension( + "chenglou92.rescript-vscode", + ); + if (extension && !extension.isActive) { + await extension.activate(); + } + return extension; +} + +/** + * Open a file in the editor and return the document + */ +export async function openFile(filePath: string): Promise { + const document = await vscode.workspace.openTextDocument(filePath); + await vscode.window.showTextDocument(document); + return document; +} + +/** + * Find the LSP log file in the most recent test logs directory + */ +export function findLspLogContent(): string | null { + // __dirname is client/out/client/src/test/suite when running tests + const vscodeTestDir = path.resolve(__dirname, "../../../../../.vscode-test"); + const logsBaseDir = path.join(vscodeTestDir, "user-data", "logs"); + + try { + // Find the most recent log directory (format: YYYYMMDDTHHMMSS) + const logDirs = 
fs + .readdirSync(logsBaseDir) + .filter((d) => /^\d{8}T\d{6}$/.test(d)) + .sort() + .reverse(); + + for (const logDir of logDirs) { + const outputLoggingDir = path.join( + logsBaseDir, + logDir, + "window1", + "exthost", + ); + if (!fs.existsSync(outputLoggingDir)) continue; + + const outputDirs = fs + .readdirSync(outputLoggingDir) + .filter((d) => d.startsWith("output_logging_")); + + for (const outputDir of outputDirs) { + const lspLogPath = path.join( + outputLoggingDir, + outputDir, + "1-ReScript Language Server.log", + ); + if (fs.existsSync(lspLogPath)) { + console.log("Checking log file:", lspLogPath); + return fs.readFileSync(lspLogPath, "utf-8"); + } + } + } + } catch (e) { + console.log("Error reading logs:", e); + } + + return null; +} + +/** + * Wait for a condition to become true, polling at intervals + */ +export async function waitFor( + condition: () => boolean, + options: { timeout?: number; interval?: number; message?: string } = {}, +): Promise { + const { timeout = 5000, interval = 500, message = "condition" } = options; + const maxAttempts = Math.ceil(timeout / interval); + + for (let i = 0; i < maxAttempts; i++) { + await new Promise((resolve) => setTimeout(resolve, interval)); + const result = condition(); + console.log(`Checking ${message} (attempt ${i + 1}): ${result}`); + if (result) return true; + } + return false; +} + +/** + * Sleep for a given number of milliseconds + */ +export function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +/** + * Get the path to the compiler.log file + */ +export function getCompilerLogPath(workspaceRoot: string): string { + return path.join(workspaceRoot, "lib", "bs", ".compiler.log"); +} + +/** + * Get the mtime of a file, or null if it doesn't exist + */ +export function getFileMtime(filePath: string): Date | null { + try { + return fs.statSync(filePath).mtime; + } catch { + return null; + } +} + +/** + * Wait for a file's mtime to be updated (newer than 
the given mtime) + */ +export async function waitForFileUpdate( + filePath: string, + mtimeBefore: Date | null, + options: { timeout?: number; interval?: number } = {}, +): Promise { + const { timeout = 5000, interval = 500 } = options; + const maxAttempts = Math.ceil(timeout / interval); + + for (let i = 0; i < maxAttempts; i++) { + await sleep(interval); + const mtimeAfter = getFileMtime(filePath); + if (mtimeAfter && (!mtimeBefore || mtimeAfter > mtimeBefore)) { + return mtimeAfter; + } + } + return getFileMtime(filePath); +} + +/** + * Insert a comment at the beginning of a document and save + */ +export async function insertCommentAndSave( + editor: vscode.TextEditor, + comment: string, +): Promise { + await editor.edit((editBuilder) => { + editBuilder.insert(new vscode.Position(0, 0), comment); + }); + await editor.document.save(); +} + +/** + * Restore original content to a document and save + */ +export async function restoreContentAndSave( + editor: vscode.TextEditor, + originalContent: string, +): Promise { + const document = editor.document; + await editor.edit((editBuilder) => { + const fullRange = new vscode.Range( + new vscode.Position(0, 0), + document.lineAt(document.lineCount - 1).range.end, + ); + editBuilder.replace(fullRange, originalContent); + }); + await document.save(); +} + +/** + * Start the build watcher and wait for it to initialize + */ +export async function startBuildWatcher(waitMs: number = 1000): Promise { + await vscode.commands.executeCommand("rescript-vscode.start_build"); + await sleep(waitMs); +} + +/** + * Start code analysis and wait for it to initialize + */ +export async function startCodeAnalysis(waitMs: number = 1000): Promise { + await vscode.commands.executeCommand("rescript-vscode.start_code_analysis"); + await sleep(waitMs); +} + +/** + * Stop code analysis + */ +export async function stopCodeAnalysis(): Promise { + await vscode.commands.executeCommand("rescript-vscode.stop_code_analysis"); +} + +/** + * Show the 
reanalyze server log and assert it returns true + */ +export async function showReanalyzeServerLog(): Promise { + const result = await vscode.commands.executeCommand( + "rescript-vscode.show_reanalyze_server_log", + ); + console.log(`Show reanalyze server log result: ${result}`); + assert.strictEqual( + result, + true, + "Show reanalyze server log should return true when output channel is shown", + ); +} + +/** + * Result of searching for build prompt in logs + */ +export interface BuildPromptResult { + found: boolean; + path: string; +} + +/** + * Find "Prompting to start build" message in LSP logs + */ +export function findBuildPromptInLogs(): BuildPromptResult { + const logContent = findLspLogContent(); + if (logContent) { + const promptMatch = logContent.match( + /\[Info.*\] Prompting to start build for (.+)/, + ); + if (promptMatch) { + return { found: true, path: promptMatch[1] }; + } + } + return { found: false, path: "" }; +} diff --git a/client/src/test/suite/monorepoRoot.test.ts b/client/src/test/suite/monorepoRoot.test.ts new file mode 100644 index 000000000..aafc52cba --- /dev/null +++ b/client/src/test/suite/monorepoRoot.test.ts @@ -0,0 +1,274 @@ +/** + * Monorepo Root Tests (Monorepo Code Analysis Test Suite) + * + * Run these tests: + * cd client && npm run test -- --label monorepo-root + */ +import * as assert from "assert"; +import * as fs from "fs"; +import * as path from "path"; +import * as vscode from "vscode"; +import { + getWorkspaceRoot, + removeMonorepoLockFiles, + ensureExtensionActivated, + sleep, + getCompilerLogPath, + getFileMtime, + waitForFileUpdate, + insertCommentAndSave, + restoreContentAndSave, + startBuildWatcher, + startCodeAnalysis, + stopCodeAnalysis, + showReanalyzeServerLog, + findBuildPromptInLogs, +} from "./helpers"; + +suite("Monorepo Code Analysis Test Suite", () => { + test("Monorepo: Build watcher works when opening root package", async () => { + const monorepoRoot = getWorkspaceRoot(); + console.log("Monorepo 
root:", monorepoRoot); + + const rootResPath = path.join(monorepoRoot, "src", "Root.res"); + if (!fs.existsSync(rootResPath)) { + console.log("Monorepo project not found, skipping test"); + return; + } + + removeMonorepoLockFiles(monorepoRoot); + + console.log("Opening root file:", rootResPath); + const document = await vscode.workspace.openTextDocument(rootResPath); + const editor = await vscode.window.showTextDocument(document); + console.log("Root file opened successfully"); + + await ensureExtensionActivated(); + + console.log("Starting build watcher from root..."); + await startBuildWatcher(); + console.log("Build watcher started"); + + const compilerLogPath = getCompilerLogPath(monorepoRoot); + const mtimeBefore = getFileMtime(compilerLogPath); + if (mtimeBefore) { + console.log( + `Root compiler.log mtime before: ${mtimeBefore.toISOString()}`, + ); + } else { + console.log("Root compiler.log does not exist yet"); + } + + console.log("Editing root file..."); + const originalContent = document.getText(); + await insertCommentAndSave(editor, "/* monorepo root test */\n"); + console.log("Root file saved with edit"); + + const mtimeAfter = await waitForFileUpdate(compilerLogPath, mtimeBefore); + if (mtimeAfter) { + console.log(`Root compiler.log mtime after: ${mtimeAfter.toISOString()}`); + } else { + console.log("Root compiler.log still does not exist"); + } + + assert.ok(mtimeAfter, "Root compiler.log should exist after file save"); + if (mtimeBefore && mtimeAfter) { + assert.ok( + mtimeAfter > mtimeBefore, + "Root compiler.log should be updated after file save", + ); + console.log("SUCCESS: Root compiler.log was updated"); + } + + await restoreContentAndSave(editor, originalContent); + console.log("Original content restored"); + + await sleep(1000); + console.log("Monorepo root test completed"); + }); + + test("Monorepo: Build watcher works when opening subpackage file", async () => { + const monorepoRoot = getWorkspaceRoot(); + console.log("Monorepo root:", 
monorepoRoot); + + const appResPath = path.join( + monorepoRoot, + "packages", + "app", + "src", + "App.res", + ); + if (!fs.existsSync(appResPath)) { + console.log("Monorepo app package not found, skipping test"); + return; + } + + console.log("Opening subpackage file:", appResPath); + const document = await vscode.workspace.openTextDocument(appResPath); + const editor = await vscode.window.showTextDocument(document); + console.log("Subpackage file opened successfully"); + + await ensureExtensionActivated(); + + console.log("Starting build watcher from subpackage..."); + await startBuildWatcher(); + console.log("Build watcher started"); + + const rootCompilerLogPath = getCompilerLogPath(monorepoRoot); + const mtimeBefore = getFileMtime(rootCompilerLogPath); + if (mtimeBefore) { + console.log( + `Root compiler.log mtime before: ${mtimeBefore.toISOString()}`, + ); + } else { + console.log( + "Root compiler.log does not exist yet (expected for monorepo subpackage)", + ); + } + + console.log("Editing subpackage file..."); + const originalContent = document.getText(); + await insertCommentAndSave(editor, "/* monorepo subpackage test */\n"); + console.log("Subpackage file saved with edit"); + + const mtimeAfter = await waitForFileUpdate( + rootCompilerLogPath, + mtimeBefore, + ); + if (mtimeAfter) { + console.log(`Root compiler.log mtime after: ${mtimeAfter.toISOString()}`); + } else { + console.log("Root compiler.log still does not exist"); + } + + assert.ok( + mtimeAfter, + "Root compiler.log should exist after subpackage file save", + ); + if (mtimeBefore && mtimeAfter) { + assert.ok( + mtimeAfter > mtimeBefore, + "Root compiler.log should be updated after subpackage file save", + ); + console.log( + "SUCCESS: Root compiler.log was updated from subpackage edit", + ); + } + + await restoreContentAndSave(editor, originalContent); + console.log("Original content restored"); + + await sleep(1000); + console.log("Monorepo subpackage test completed"); + }); + + 
test("Monorepo: Code analysis works from subpackage", async () => { + const monorepoRoot = getWorkspaceRoot(); + console.log("Monorepo root:", monorepoRoot); + + const libResPath = path.join( + monorepoRoot, + "packages", + "lib", + "src", + "Lib.res", + ); + if (!fs.existsSync(libResPath)) { + console.log("Monorepo lib package not found, skipping test"); + return; + } + + console.log("Opening lib file:", libResPath); + const document = await vscode.workspace.openTextDocument(libResPath); + await vscode.window.showTextDocument(document); + console.log("Lib file opened successfully"); + + await ensureExtensionActivated(); + + console.log("Starting build..."); + await startBuildWatcher(); + + console.log("Starting code analysis..."); + await startCodeAnalysis(); + console.log("Code analysis started"); + + console.log("Opening reanalyze server log..."); + await showReanalyzeServerLog(); + + const diagnostics = vscode.languages.getDiagnostics(document.uri); + console.log(`Found ${diagnostics.length} diagnostics in Lib.res`); + for (const diag of diagnostics.slice(0, 5)) { + console.log( + ` - Line ${diag.range.start.line + 1}: ${diag.message.substring(0, 80)}...`, + ); + } + + assert.ok( + diagnostics.length > 0, + "Should have diagnostics for dead code in Lib.res", + ); + + const deadFuncDiagnostic = diagnostics.find((d) => + d.message.includes("unusedLibFunction"), + ); + assert.ok( + deadFuncDiagnostic, + "Should find diagnostic for unusedLibFunction in monorepo lib", + ); + console.log( + `Found diagnostic for unusedLibFunction: ${deadFuncDiagnostic?.message}`, + ); + + console.log("Stopping code analysis..."); + await stopCodeAnalysis(); + console.log("Code analysis stopped"); + + console.log("Monorepo code analysis test completed"); + }); + + test("Monorepo: Should prompt to start build when opening subpackage without lock file", async () => { + const monorepoRoot = getWorkspaceRoot(); + console.log("Monorepo root:", monorepoRoot); + + const appResPath = 
path.join( + monorepoRoot, + "packages", + "app", + "src", + "App.res", + ); + if (!fs.existsSync(appResPath)) { + console.log("Monorepo app package not found, skipping test"); + return; + } + + console.log("Removing lock file from monorepo root..."); + removeMonorepoLockFiles(monorepoRoot); + + console.log("Opening subpackage file:", appResPath); + const document = await vscode.workspace.openTextDocument(appResPath); + await vscode.window.showTextDocument(document); + console.log("Subpackage file opened successfully"); + + await sleep(1000); + + const promptResult = findBuildPromptInLogs(); + if (promptResult.found) { + console.log( + `Found prompt message: "Prompting to start build for ${promptResult.path}"`, + ); + } + + assert.ok( + promptResult.found, + "Should find 'Prompting to start build' message in Language Server log", + ); + + assert.ok( + promptResult.path.includes("monorepo-project") && + !promptResult.path.includes("packages"), + `Prompt path should be monorepo root, not subpackage. Got: ${promptResult.path}`, + ); + console.log("SUCCESS: Build prompt was shown for monorepo root path"); + }); +}); diff --git a/client/src/test/suite/monorepoSubpackage.test.ts b/client/src/test/suite/monorepoSubpackage.test.ts new file mode 100644 index 000000000..f18a4849a --- /dev/null +++ b/client/src/test/suite/monorepoSubpackage.test.ts @@ -0,0 +1,153 @@ +/** + * Monorepo Subpackage Tests (Monorepo Subpackage Test Suite) + * + * This test suite runs with VSCode opened on a subpackage (packages/app), + * not the monorepo root. It tests that the extension correctly detects + * monorepo structure even when opened from a subpackage. 
+ * + * Run these tests: + * cd client && npm run test -- --label monorepo-subpackage + */ +import * as assert from "assert"; +import * as fs from "fs"; +import * as path from "path"; +import * as vscode from "vscode"; +import { + getWorkspaceRoot, + removeMonorepoLockFiles, + sleep, + startBuildWatcher, + startCodeAnalysis, + stopCodeAnalysis, + showReanalyzeServerLog, + findBuildPromptInLogs, +} from "./helpers"; + +suite("Monorepo Subpackage Test Suite", () => { + test("Subpackage workspace: Should prompt to start build with monorepo root path", async () => { + // In this test, workspaceRoot is packages/app (the subpackage) + const workspaceRoot = getWorkspaceRoot(); + console.log("Workspace root (subpackage):", workspaceRoot); + + // The monorepo root is 2 levels up from packages/app + const monorepoRoot = path.resolve(workspaceRoot, "../.."); + console.log("Monorepo root:", monorepoRoot); + + // Verify we're in the right setup - workspace should be a subpackage + assert.ok( + workspaceRoot.includes("packages"), + `Workspace should be in packages folder, got: ${workspaceRoot}`, + ); + + // Check if the subpackage has a rescript.json + const rescriptJsonPath = path.join(workspaceRoot, "rescript.json"); + if (!fs.existsSync(rescriptJsonPath)) { + console.log("Subpackage rescript.json not found, skipping test"); + return; + } + + // Remove lock file from MONOREPO ROOT to trigger the "Start Build" prompt + console.log("Removing lock file from monorepo root..."); + removeMonorepoLockFiles(monorepoRoot); + + // Open a .res file from the subpackage workspace + const resFilePath = path.join(workspaceRoot, "src", "App.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + // Wait for LSP to process - since no lock file exists, it should prompt for build + await sleep(1000); + + // Read the Language 
Server log to verify the prompt was shown + const promptResult = findBuildPromptInLogs(); + if (promptResult.found) { + console.log( + `Found prompt message: "Prompting to start build for ${promptResult.path}"`, + ); + } + + // Assert that the prompt was shown + assert.ok( + promptResult.found, + "Should find 'Prompting to start build' message in Language Server log", + ); + + // Assert that the prompt path is the monorepo root, not the subpackage + // Even though we opened from packages/app, the build prompt should use monorepo root + assert.ok( + promptResult.path.includes("monorepo-project") && + !promptResult.path.includes("packages"), + `Prompt path should be monorepo root, not subpackage. Got: ${promptResult.path}`, + ); + console.log( + "SUCCESS: Build prompt correctly uses monorepo root path when opened from subpackage", + ); + + // Now start the build and verify the lock file is created at the monorepo root + console.log("Starting build from subpackage workspace..."); + await startBuildWatcher(1500); + console.log("Build started"); + + // Check that the lock file exists at the MONOREPO ROOT, not the subpackage + const monorepoLockPath = path.join(monorepoRoot, "lib", "rescript.lock"); + const subpackageLockPath = path.join(workspaceRoot, "lib", "rescript.lock"); + + const monorepoLockExists = fs.existsSync(monorepoLockPath); + const subpackageLockExists = fs.existsSync(subpackageLockPath); + + console.log( + `Monorepo lock file (${monorepoLockPath}): ${monorepoLockExists ? "EXISTS" : "NOT FOUND"}`, + ); + console.log( + `Subpackage lock file (${subpackageLockPath}): ${subpackageLockExists ? 
"EXISTS" : "NOT FOUND"}`, + ); + + // The lock file should exist at the monorepo root + assert.ok( + monorepoLockExists, + `Lock file should exist at monorepo root: ${monorepoLockPath}`, + ); + + // The lock file should NOT exist at the subpackage level + assert.ok( + !subpackageLockExists, + `Lock file should NOT exist at subpackage: ${subpackageLockPath}`, + ); + + console.log("SUCCESS: Lock file created at monorepo root, not subpackage"); + + // Remove any stale reanalyze socket file from a previous test run + const socketPath = path.join(monorepoRoot, ".rescript-reanalyze.sock"); + if (fs.existsSync(socketPath)) { + fs.unlinkSync(socketPath); + console.log("Removed stale socket file"); + } + + // Start code analysis + console.log("Starting code analysis..."); + await startCodeAnalysis(); + console.log("Code analysis started"); + + // Open the reanalyze server log - verify it returns true (output channel shown) + console.log("Opening reanalyze server log..."); + await showReanalyzeServerLog(); + + // Verify diagnostics are shown (code analysis is working from subpackage) + const diagnostics = vscode.languages.getDiagnostics(document.uri); + console.log(`Found ${diagnostics.length} diagnostics in App.res`); + assert.ok( + diagnostics.length > 0, + "Code analysis should find diagnostics in App.res when run from subpackage", + ); + + // Stop code analysis + console.log("Stopping code analysis..."); + await stopCodeAnalysis(); + console.log("Code analysis stopped"); + + console.log("Test complete - lock file will be cleaned up on LSP shutdown"); + }); +}); diff --git a/client/src/test/suite/rescript9.test.ts b/client/src/test/suite/rescript9.test.ts new file mode 100644 index 000000000..b2d1dd5ad --- /dev/null +++ b/client/src/test/suite/rescript9.test.ts @@ -0,0 +1,241 @@ +/** + * ReScript 9 Tests (ReScript 9 Build Test Suite) + * + * This tests that the build watcher works with older ReScript versions + * that use "rescript build -w" instead of "rescript watch". 
+ * + * Run these tests: + * cd client && npm run test -- --label rescript9-project + */ +import * as assert from "assert"; +import * as fs from "fs"; +import * as path from "path"; +import * as vscode from "vscode"; +import { + getWorkspaceRoot, + removeBsbLockFile, + ensureExtensionActivated, + waitFor, + sleep, + getCompilerLogPath, + getFileMtime, + waitForFileUpdate, + insertCommentAndSave, + restoreContentAndSave, + startBuildWatcher, + findBuildPromptInLogs, +} from "./helpers"; + +suite("ReScript 9 Build Test Suite", () => { + test("ReScript 9: Extension should be present", async () => { + const extension = vscode.extensions.getExtension( + "chenglou92.rescript-vscode", + ); + assert.ok(extension, "ReScript extension should be present"); + console.log("Extension found:", extension.id); + }); + + test("ReScript 9: Build watcher should start with 'rescript build -w'", async () => { + const workspaceRoot = getWorkspaceRoot(); + console.log("Workspace root:", workspaceRoot); + + // Verify we're in the ReScript 9 project + const bsconfigPath = path.join(workspaceRoot, "bsconfig.json"); + if (!fs.existsSync(bsconfigPath)) { + console.log("bsconfig.json not found, skipping test"); + return; + } + + // Check ReScript version + const packageJsonPath = path.join( + workspaceRoot, + "node_modules", + "rescript", + "package.json", + ); + if (fs.existsSync(packageJsonPath)) { + const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8")); + console.log("ReScript version:", packageJson.version); + assert.ok( + packageJson.version.startsWith("9."), + `Expected ReScript 9.x, got ${packageJson.version}`, + ); + } + + // Open a ReScript file + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + // 
Start the build watcher + console.log("Starting build watcher (should use 'rescript build -w')..."); + await startBuildWatcher(1500); + console.log("Build watcher started"); + + // Check if the lock file was created (.bsb.lock for ReScript 9) + const lockPath = path.join(workspaceRoot, ".bsb.lock"); + const lockExists = fs.existsSync(lockPath); + console.log(`.bsb.lock exists: ${lockExists}`); + assert.ok( + lockExists, + ".bsb.lock should exist after starting build watcher", + ); + + console.log("ReScript 9 build watcher test completed"); + }); + + test("ReScript 9: Build watcher recompiles on file save", async () => { + const workspaceRoot = getWorkspaceRoot(); + console.log("Workspace root:", workspaceRoot); + + // Open a ReScript file + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + const editor = await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + // Start the build watcher + console.log("Starting build watcher..."); + await startBuildWatcher(); + console.log("Build watcher started"); + + // Check compiler.log modification time before edit + const compilerLogPath = getCompilerLogPath(workspaceRoot); + const mtimeBefore = getFileMtime(compilerLogPath); + if (mtimeBefore) { + console.log(`compiler.log mtime before: ${mtimeBefore.toISOString()}`); + } else { + console.log("compiler.log does not exist yet"); + } + + // Edit the file and save + console.log("Editing file..."); + const originalContent = document.getText(); + await insertCommentAndSave(editor, "/* rescript 9 test */\n"); + console.log("File saved with edit"); + + // Wait for compilation + console.log("Waiting for compilation..."); + const mtimeAfter = await waitForFileUpdate(compilerLogPath, mtimeBefore, { + timeout: 3000, + }); + if (mtimeAfter) { + console.log(`compiler.log mtime 
after: ${mtimeAfter.toISOString()}`); + } else { + console.log("compiler.log still does not exist"); + } + + // Assert that compiler.log was updated + assert.ok(mtimeAfter, "compiler.log should exist after file save"); + if (mtimeBefore && mtimeAfter) { + assert.ok( + mtimeAfter > mtimeBefore, + "compiler.log should be updated after file save", + ); + console.log("SUCCESS: compiler.log was updated after file save"); + } else if (!mtimeBefore && mtimeAfter) { + console.log("SUCCESS: compiler.log was created after file save"); + } + + // Restore original content + console.log("Restoring original content..."); + await restoreContentAndSave(editor, originalContent); + console.log("Original content restored"); + + await sleep(1000); + console.log("ReScript 9 recompilation test completed"); + }); + + test("ReScript 9: Should prompt to start build when no lock file exists", async () => { + const workspaceRoot = getWorkspaceRoot(); + console.log("Workspace root:", workspaceRoot); + + // Remove lock file to trigger the "Start Build" prompt + removeBsbLockFile(workspaceRoot); + + // Open a .res file to trigger the LSP + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + // Wait for LSP to process and potentially show the prompt + await sleep(1000); + + // Read the Language Server log to verify the prompt was shown + const promptResult = findBuildPromptInLogs(); + if (promptResult.found) { + console.log("Found prompt message in logs"); + } + + assert.ok( + promptResult.found, + "Should find 'Prompting to start build' message in Language Server log", + ); + console.log("SUCCESS: Build prompt was shown for ReScript 9 project"); + }); + + test("ReScript 9: Lock file should be cleaned up on language server restart", async () => { + const workspaceRoot 
= getWorkspaceRoot(); + console.log("Workspace root:", workspaceRoot); + + // First restart language server to ensure clean state + console.log("Restarting language server to ensure clean state..."); + await vscode.commands.executeCommand( + "rescript-vscode.restart_language_server", + ); + await sleep(2000); + + // Open a ReScript file + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + // Start the build watcher + console.log("Starting build watcher..."); + await vscode.commands.executeCommand("rescript-vscode.start_build"); + + // Wait for lock file to appear (poll up to 5 seconds) + await sleep(500); + const lockPath = path.join(workspaceRoot, ".bsb.lock"); + const lockExistsBefore = await waitFor(() => fs.existsSync(lockPath), { + timeout: 5000, + interval: 500, + message: ".bsb.lock", + }); + assert.ok(lockExistsBefore, ".bsb.lock should exist before restart"); + + // Restart language server (this should kill the build watcher and clean up lock file) + console.log("Restarting language server..."); + await vscode.commands.executeCommand( + "rescript-vscode.restart_language_server", + ); + + // Wait for restart to complete + await sleep(2000); + + // Verify lock file is cleaned up + const lockExistsAfter = fs.existsSync(lockPath); + console.log(`.bsb.lock exists after restart: ${lockExistsAfter}`); + assert.ok( + !lockExistsAfter, + ".bsb.lock should be cleaned up after language server restart", + ); + + console.log("SUCCESS: Lock file was cleaned up on language server restart"); + }); +}); diff --git a/client/src/utils.ts b/client/src/utils.ts index 1474888ee..c0c364ff3 100644 --- a/client/src/utils.ts +++ b/client/src/utils.ts @@ -2,6 +2,11 @@ import * as path from "path"; import * as fs from "fs"; import * as os from "os"; import 
{ DocumentUri } from "vscode-languageclient"; +import { findBinary, type BinaryName } from "../../shared/src/findBinary"; +import { + findProjectRootOfFileInDir as findProjectRootOfFileInDirShared, + normalizePath as normalizePathShared, +} from "../../shared/src/projectRoots"; /* * Much of the code in here is duplicated from the server code. @@ -9,6 +14,27 @@ import { DocumentUri } from "vscode-languageclient"; * to the server itself. */ +/** + * Branded type for normalized file paths. + * + * All paths should be normalized to ensure consistent lookups and prevent + * path format mismatches (e.g., trailing slashes, relative vs absolute paths). + * + * Use `normalizePath()` to convert a regular path to a `NormalizedPath`. + */ +export type NormalizedPath = string & { __brand: "NormalizedPath" }; + +/** + * Normalizes a file path and returns it as a `NormalizedPath`. + * + * @param filePath - The path to normalize (can be null) + * @returns The normalized path, or null if input was null + */ +export function normalizePath(filePath: string | null): NormalizedPath | null { + // `path.normalize` ensures we can assume string is now NormalizedPath + return normalizePathShared(filePath) as NormalizedPath | null; +} + type binaryName = "rescript-editor-analysis.exe" | "rescript-tools.exe"; const platformDir = @@ -29,7 +55,7 @@ export const getLegacyBinaryProdPath = (b: binaryName) => export const getBinaryPath = ( binaryName: "rescript-editor-analysis.exe" | "rescript-tools.exe", - projectRootPath: string | null = null, + projectRootPath: NormalizedPath | null = null, ): string | null => { const binaryFromCompilerPackage = path.join( projectRootPath ?? 
"", @@ -60,20 +86,9 @@ export const createFileInTempDir = (prefix = "", extension = "") => { }; export let findProjectRootOfFileInDir = ( - source: DocumentUri, -): null | DocumentUri => { - let dir = path.dirname(source); - if ( - fs.existsSync(path.join(dir, "rescript.json")) || - fs.existsSync(path.join(dir, "bsconfig.json")) - ) { - return dir; - } else { - if (dir === source) { - // reached top - return null; - } else { - return findProjectRootOfFileInDir(dir); - } - } + source: string, +): NormalizedPath | null => { + return normalizePath(findProjectRootOfFileInDirShared(source)); }; + +export { findBinary, BinaryName }; diff --git a/client/tsconfig.json b/client/tsconfig.json index b0924c856..04ed11eee 100644 --- a/client/tsconfig.json +++ b/client/tsconfig.json @@ -4,9 +4,10 @@ "target": "es2019", "lib": ["ES2019"], "outDir": "out", - "rootDir": "src", - "sourceMap": true + "rootDirs": ["src", "../shared/src"], + "sourceMap": true, + "skipLibCheck": true }, - "include": ["src"], + "include": ["src", "../shared/src"], "exclude": ["node_modules"] } diff --git a/package-lock.json b/package-lock.json index 095f591a4..d565f3626 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "rescript-vscode", - "version": "1.70.0", + "version": "1.72.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "rescript-vscode", - "version": "1.70.0", + "version": "1.72.0", "hasInstallScript": true, "license": "MIT", "dependencies": { diff --git a/package.json b/package.json index 5709e3b32..9eea5f2a9 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "ReScript language support (official)", "author": "ReScript Team", "license": "MIT", - "version": "1.70.0", + "version": "1.72.0", "repository": { "type": "git", "url": "https://github.com/rescript-lang/rescript-vscode" @@ -73,10 +73,18 @@ "command": "rescript-vscode.stop_code_analysis", "title": "ReScript: Stop Code Analyzer" }, + { + "command": 
"rescript-vscode.show_reanalyze_server_log", + "title": "ReScript: Show Code Analyzer Server Log" + }, { "command": "rescript-vscode.restart_language_server", "title": "ReScript: Restart Language Server" }, + { + "command": "rescript-vscode.start_build", + "title": "ReScript: Start Build" + }, { "command": "rescript-vscode.switch-impl-intf", "title": "ReScript: Switch implementation/interface", @@ -195,11 +203,6 @@ "default": false, "description": "(beta/experimental) Enable incremental type checking across files, so that unsaved file A gets access to unsaved file B." }, - "rescript.settings.incrementalTypechecking.debugLogging": { - "type": "boolean", - "default": false, - "description": "(debug) Enable debug logging (ends up in the extension output)." - }, "rescript.settings.cache.projectConfig.enable": { "type": "boolean", "default": true, @@ -233,6 +236,17 @@ "type": "boolean", "default": true, "description": "Show compile status in the status bar (compiling/errors/warnings/success)." + }, + "rescript.settings.logLevel": { + "type": "string", + "enum": [ + "error", + "warn", + "info", + "log" + ], + "default": "info", + "description": "Verbosity of ReScript language server logs sent to the Output channel." 
} } }, diff --git a/server/package-lock.json b/server/package-lock.json index 4a6fa75c8..6cfb758d0 100644 --- a/server/package-lock.json +++ b/server/package-lock.json @@ -1,12 +1,12 @@ { "name": "@rescript/language-server", - "version": "1.70.0", + "version": "1.72.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@rescript/language-server", - "version": "1.70.0", + "version": "1.72.0", "license": "MIT", "dependencies": { "semver": "^7.7.2", diff --git a/server/package.json b/server/package.json index 95580ac54..92c17c02d 100644 --- a/server/package.json +++ b/server/package.json @@ -1,7 +1,7 @@ { "name": "@rescript/language-server", "description": "LSP server for ReScript", - "version": "1.70.0", + "version": "1.72.0", "author": "ReScript Team", "license": "MIT", "bin": { diff --git a/server/src/bsc-args/rewatch.ts b/server/src/bsc-args/rewatch.ts index e4dc4ab60..e38b8d2bb 100644 --- a/server/src/bsc-args/rewatch.ts +++ b/server/src/bsc-args/rewatch.ts @@ -3,12 +3,10 @@ import * as utils from "../utils"; import * as cp from "node:child_process"; import * as p from "vscode-languageserver-protocol"; import semver from "semver"; -import { - debug, - IncrementallyCompiledFileInfo, -} from "../incrementalCompilation"; +import { IncrementallyCompiledFileInfo } from "../incrementalCompilation"; import type { projectFiles } from "../projectFiles"; import { jsonrpcVersion } from "../constants"; +import { getLogger } from "../logger"; export type RewatchCompilerArgs = { compiler_args: Array; @@ -17,14 +15,14 @@ export type RewatchCompilerArgs = { async function getRuntimePath( entry: IncrementallyCompiledFileInfo, -): Promise { +): Promise { return utils.getRuntimePathFromWorkspaceRoot(entry.project.workspaceRootPath); } export async function getRewatchBscArgs( send: (msg: p.Message) => void, - bscBinaryLocation: string | null, - projectsFiles: Map, + bscBinaryLocation: utils.NormalizedPath | null, + projectsFiles: Map, entry: 
IncrementallyCompiledFileInfo, ): Promise { const rewatchCacheEntry = entry.buildRewatch; @@ -68,15 +66,11 @@ export async function getRewatchBscArgs( if (rescriptRewatchPath != null) { rewatchPath = rescriptRewatchPath; - if (debug()) { - console.log( - `Found rewatch binary bundled with v12: ${rescriptRewatchPath}`, - ); - } + getLogger().log( + `Found rewatch binary bundled with v12: ${rescriptRewatchPath}`, + ); } else { - if (debug()) { - console.log("Did not find rewatch binary bundled with v12"); - } + getLogger().log("Did not find rewatch binary bundled with v12"); } const rewatchArguments = semver.satisfies( @@ -103,7 +97,8 @@ export async function getRewatchBscArgs( includePrerelease: true, }) ) { - let rescriptRuntime: string | null = await getRuntimePath(entry); + let rescriptRuntime: utils.NormalizedPath | null = + await getRuntimePath(entry); if (rescriptRuntime !== null) { env["RESCRIPT_RUNTIME"] = rescriptRuntime; diff --git a/server/src/codeActions.ts b/server/src/codeActions.ts index fe15b4928..e361f1cac 100644 --- a/server/src/codeActions.ts +++ b/server/src/codeActions.ts @@ -3,18 +3,17 @@ // OCaml binary. 
import * as p from "vscode-languageserver-protocol"; import * as utils from "./utils"; -import { fileURLToPath } from "url"; export type fileCodeActions = { range: p.Range; codeAction: p.CodeAction }; export type filesCodeActions = { - [key: string]: fileCodeActions[]; + [key: utils.FileURI]: fileCodeActions[]; }; interface findCodeActionsConfig { diagnostic: p.Diagnostic; diagnosticMessage: string[]; - file: string; + file: utils.FileURI; range: p.Range; addFoundActionsHere: filesCodeActions; } @@ -190,7 +189,7 @@ interface codeActionExtractorConfig { line: string; index: number; array: string[]; - file: string; + file: utils.FileURI; range: p.Range; diagnostic: p.Diagnostic; codeActions: filesCodeActions; @@ -327,7 +326,7 @@ let handleUndefinedRecordFieldsAction = ({ }: { recordFieldNames: string[]; codeActions: filesCodeActions; - file: string; + file: utils.FileURI; range: p.Range; diagnostic: p.Diagnostic; todoValue: string; @@ -631,7 +630,7 @@ let simpleAddMissingCases: codeActionExtractor = async ({ .join("") .trim(); - let filePath = fileURLToPath(file); + let filePath = utils.uriToNormalizedPath(file); let newSwitchCode = await utils.runAnalysisAfterSanityCheck(filePath, [ "codemod", diff --git a/server/src/config.ts b/server/src/config.ts index 97b89985a..874b95696 100644 --- a/server/src/config.ts +++ b/server/src/config.ts @@ -4,6 +4,7 @@ export type send = (msg: Message) => void; export interface extensionConfiguration { askToStartBuild?: boolean; + logLevel?: "error" | "warn" | "info" | "log"; inlayHints?: { enable?: boolean; maxLength?: number | null; @@ -19,7 +20,6 @@ export interface extensionConfiguration { incrementalTypechecking?: { enable?: boolean; acrossFiles?: boolean; - debugLogging?: boolean; }; cache?: { projectConfig?: { @@ -28,33 +28,35 @@ export interface extensionConfiguration { }; } -// All values here are temporary, and will be overridden as the server is -// initialized, and the current config is received from the client. 
-let config: { extensionConfiguration: extensionConfiguration } = { - extensionConfiguration: { - askToStartBuild: true, - inlayHints: { - enable: false, - maxLength: 25, - }, - codeLens: false, - binaryPath: null, - platformPath: null, - signatureHelp: { - enabled: true, - forConstructorPayloads: true, - }, - incrementalTypechecking: { +export const initialConfiguration: extensionConfiguration = { + askToStartBuild: true, + logLevel: "info", + inlayHints: { + enable: false, + maxLength: 25, + }, + codeLens: false, + binaryPath: null, + platformPath: null, + signatureHelp: { + enabled: true, + forConstructorPayloads: true, + }, + incrementalTypechecking: { + enable: true, + acrossFiles: false, + }, + cache: { + projectConfig: { enable: true, - acrossFiles: false, - debugLogging: false, - }, - cache: { - projectConfig: { - enable: true, - }, }, }, }; +// All values here are temporary, and will be overridden as the server is +// initialized, and the current config is received from the client. 
+let config: { extensionConfiguration: extensionConfiguration } = { + extensionConfiguration: initialConfiguration, +}; + export default config; diff --git a/server/src/find-runtime.ts b/server/src/find-runtime.ts index 42512e044..0f35032bc 100644 --- a/server/src/find-runtime.ts +++ b/server/src/find-runtime.ts @@ -1,6 +1,7 @@ import { readdir, stat as statAsync, readFile } from "fs/promises"; import { join, resolve } from "path"; import { compilerInfoPartialPath } from "./constants"; +import { NormalizedPath, normalizePath } from "./utils"; // Efficient parallel folder traversal to find node_modules directories async function findNodeModulesDirs( @@ -92,14 +93,18 @@ async function findRescriptRuntimeInAlternativeLayout( return results; } -async function findRuntimePath(project: string): Promise { +async function findRuntimePath( + project: NormalizedPath, +): Promise { // Try a compiler-info.json file first const compilerInfo = resolve(project, compilerInfoPartialPath); try { const contents = await readFile(compilerInfo, "utf8"); const compileInfo: { runtime_path?: string } = JSON.parse(contents); if (compileInfo && compileInfo.runtime_path) { - return [compileInfo.runtime_path]; + // We somewhat assume the user to pass down a normalized path, but we cannot be sure of this. + const normalizedRuntimePath = normalizePath(compileInfo.runtime_path); + return normalizedRuntimePath ? 
[normalizedRuntimePath] : []; } } catch { // Ignore errors, fallback to node_modules search @@ -146,24 +151,20 @@ async function findRuntimePath(project: string): Promise { }), ).then((results) => results.flatMap((x) => x)); - return rescriptRuntimeDirs.map((runtime) => resolve(runtime)); -} - -function findRuntimeCached(): (project: string) => Promise { - const cache = new Map(); - return async (project: string) => { - if (cache.has(project)) { - return cache.get(project)!; - } - const runtimes = await findRuntimePath(project); - cache.set(project, runtimes); - return runtimes; - }; + return rescriptRuntimeDirs.map( + // `resolve` ensures we can assume string is now NormalizedPath + (runtime) => resolve(runtime) as NormalizedPath, + ); } /** * Find all installed @rescript/runtime directories in the given project path. * In a perfect world, there should be exactly one. - * This function is cached per project path. + * Note: This function is not cached here. Caching is handled by the caller + * (see getRuntimePathFromWorkspaceRoot in utils.ts). 
*/ -export const findRescriptRuntimesInProject = findRuntimeCached(); +export async function findRescriptRuntimesInProject( + project: NormalizedPath, +): Promise { + return await findRuntimePath(project); +} diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index ee1b82ba3..da65b9d4d 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -1,11 +1,10 @@ import * as path from "path"; import fs from "fs"; import * as utils from "./utils"; -import { pathToFileURL } from "url"; -import readline from "readline"; import { performance } from "perf_hooks"; import * as p from "vscode-languageserver-protocol"; import * as cp from "node:child_process"; +import { promisify } from "node:util"; import semver from "semver"; import * as os from "os"; import config, { send } from "./config"; @@ -14,13 +13,10 @@ import { fileCodeActions } from "./codeActions"; import { projectsFiles } from "./projectFiles"; import { getRewatchBscArgs, RewatchCompilerArgs } from "./bsc-args/rewatch"; import { BsbCompilerArgs, getBsbBscArgs } from "./bsc-args/bsb"; -import { workspaceFolders } from "./server"; +import { NormalizedPath } from "./utils"; +import { getLogger } from "./logger"; -export function debug() { - return ( - config.extensionConfiguration.incrementalTypechecking?.debugLogging ?? false - ); -} +const execFilePromise = promisify(cp.execFile); const INCREMENTAL_FOLDER_NAME = "___incremental"; const INCREMENTAL_FILE_FOLDER_LOCATION = path.join( @@ -32,8 +28,8 @@ export type IncrementallyCompiledFileInfo = { file: { /** File type. */ extension: ".res" | ".resi"; - /** Path to the source file. */ - sourceFilePath: string; + /** Path to the source file (normalized). */ + sourceFilePath: NormalizedPath; /** Name of the source file. */ sourceFileName: string; /** Module name of the source file. */ @@ -41,9 +37,9 @@ export type IncrementallyCompiledFileInfo = { /** Namespaced module name of the source file. 
*/ moduleNameNamespaced: string; /** Path to where the incremental file is saved. */ - incrementalFilePath: string; + incrementalFilePath: NormalizedPath; /** Location of the original type file. */ - originalTypeFileLocation: string; + originalTypeFileLocation: NormalizedPath; }; buildSystem: "bsb" | "rewatch"; /** Cache for build.ninja assets. */ @@ -55,7 +51,7 @@ export type IncrementallyCompiledFileInfo = { } | null; /** Cache for rewatch compiler args. */ buildRewatch: { - lastFile: string; + lastFile: NormalizedPath; compilerArgs: RewatchCompilerArgs; } | null; /** Info of the currently active incremental compilation. `null` if no incremental compilation is active. */ @@ -65,47 +61,57 @@ export type IncrementallyCompiledFileInfo = { /** The trigger token for the currently active compilation. */ triggerToken: number; } | null; - /** Listeners for when compilation of this file is killed. List always cleared after each invocation. */ - killCompilationListeners: Array<() => void>; + /** Mechanism to kill the currently active compilation. */ + abortCompilation: (() => void) | null; /** Project specific information. */ project: { - /** The root path of the project. */ - rootPath: string; + /** The root path of the project (normalized to match projectsFiles keys). */ + rootPath: NormalizedPath; /** The root path of the workspace (if a monorepo) */ - workspaceRootPath: string; + workspaceRootPath: NormalizedPath; /** Computed location of bsc. */ - bscBinaryLocation: string; + bscBinaryLocation: NormalizedPath; /** The arguments needed for bsc, derived from the project configuration/build.ninja. */ callArgs: Promise | null>; /** The location of the incremental folder for this project. */ - incrementalFolderPath: string; + incrementalFolderPath: NormalizedPath; }; /** Any code actions for this incremental file. 
*/ codeActions: Array; }; const incrementallyCompiledFileInfo: Map< - string, + NormalizedPath, IncrementallyCompiledFileInfo > = new Map(); -const hasReportedFeatureFailedError: Set = new Set(); -const originalTypeFileToFilePath: Map = new Map(); +const hasReportedFeatureFailedError: Set = new Set(); +const originalTypeFileToFilePath: Map = + new Map(); + +/** + * Cancels the currently active compilation for an entry. + * Clears the timeout, aborts the compilation, and resets state. + */ +function cancelActiveCompilation(entry: IncrementallyCompiledFileInfo): void { + if (entry.compilation != null) { + clearTimeout(entry.compilation.timeout); + entry.abortCompilation?.(); + entry.compilation = null; + entry.abortCompilation = null; + } +} -export function incrementalCompilationFileChanged(changedPath: string) { +export function incrementalCompilationFileChanged(changedPath: NormalizedPath) { const filePath = originalTypeFileToFilePath.get(changedPath); if (filePath != null) { const entry = incrementallyCompiledFileInfo.get(filePath); if (entry != null) { - if (debug()) { - console.log("[watcher] Cleaning up incremental files for " + filePath); - } + getLogger().log( + "[watcher] Cleaning up incremental files for " + filePath, + ); if (entry.compilation != null) { - if (debug()) { - console.log("[watcher] Was compiling, killing"); - } - clearTimeout(entry.compilation.timeout); - entry.killCompilationListeners.forEach((cb) => cb()); - entry.compilation = null; + getLogger().log("[watcher] Was compiling, killing"); + cancelActiveCompilation(entry); } cleanUpIncrementalFiles( entry.file.sourceFilePath, @@ -116,7 +122,7 @@ export function incrementalCompilationFileChanged(changedPath: string) { } export function removeIncrementalFileFolder( - projectRootPath: string, + projectRootPath: NormalizedPath, onAfterRemove?: () => void, ) { fs.rm( @@ -128,10 +134,8 @@ export function removeIncrementalFileFolder( ); } -export function 
recreateIncrementalFileFolder(projectRootPath: string) { - if (debug()) { - console.log("Recreating incremental file folder"); - } +export function recreateIncrementalFileFolder(projectRootPath: NormalizedPath) { + getLogger().log("Recreating incremental file folder"); removeIncrementalFileFolder(projectRootPath, () => { fs.mkdir( path.resolve(projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION), @@ -142,8 +146,8 @@ export function recreateIncrementalFileFolder(projectRootPath: string) { } export function cleanUpIncrementalFiles( - filePath: string, - projectRootPath: string, + filePath: NormalizedPath, + projectRootPath: NormalizedPath, ) { const ext = filePath.endsWith(".resi") ? ".resi" : ".res"; const namespace = utils.getNamespaceNameFromConfigFile(projectRootPath); @@ -153,9 +157,7 @@ export function cleanUpIncrementalFiles( ? `${fileNameNoExt}-${namespace.result}` : fileNameNoExt; - if (debug()) { - console.log("Cleaning up incremental file assets for: " + fileNameNoExt); - } + getLogger().log("Cleaning up incremental file assets for: " + fileNameNoExt); fs.unlink( path.resolve( @@ -242,7 +244,7 @@ function removeAnsiCodes(s: string): string { return s.replace(ansiEscape, ""); } function triggerIncrementalCompilationOfFile( - filePath: string, + filePath: NormalizedPath, fileContent: string, send: send, onCompilationFinished?: () => void, @@ -252,13 +254,14 @@ function triggerIncrementalCompilationOfFile( // New file const projectRootPath = utils.findProjectRootOfFile(filePath); if (projectRootPath == null) { - if (debug()) - console.log("Did not find project root path for " + filePath); + getLogger().log("Did not find project root path for " + filePath); return; } - const project = projectsFiles.get(projectRootPath); + // projectRootPath is already normalized (NormalizedPath) from findProjectRootOfFile + // Use getProjectFile to verify the project exists + const project = utils.getProjectFile(projectRootPath); if (project == null) { - if (debug()) 
console.log("Did not find open project for " + filePath); + getLogger().log("Did not find open project for " + filePath); return; } @@ -267,29 +270,29 @@ function triggerIncrementalCompilationOfFile( utils.computeWorkspaceRootPathFromLockfile(projectRootPath); // If null, it means either a lockfile was found (local package) or no parent project root exists // In both cases, we default to projectRootPath - const workspaceRootPath = computedWorkspaceRoot ?? projectRootPath; + const workspaceRootPath: NormalizedPath = + computedWorkspaceRoot ?? projectRootPath; // Determine if lockfile was found for debug logging // If computedWorkspaceRoot is null and projectRootPath is not null, check if parent exists const foundRewatchLockfileInProjectRoot = computedWorkspaceRoot == null && projectRootPath != null && - utils.findProjectRootOfFile(projectRootPath, true) != null; + utils.findProjectRootOfDir(projectRootPath) != null; - if (foundRewatchLockfileInProjectRoot && debug()) { - console.log( + if (foundRewatchLockfileInProjectRoot) { + getLogger().log( `Found rewatch/rescript lockfile in project root, treating as local package in workspace`, ); - } else if (!foundRewatchLockfileInProjectRoot && debug()) { - console.log( + } else { + getLogger().log( `Did not find rewatch/rescript lockfile in project root, assuming bsb`, ); } const bscBinaryLocation = project.bscBinaryLocation; if (bscBinaryLocation == null) { - if (debug()) - console.log("Could not find bsc binary location for " + filePath); + getLogger().log("Could not find bsc binary location for " + filePath); return; } const ext = filePath.endsWith(".resi") ? ".resi" : ".res"; @@ -299,21 +302,26 @@ function triggerIncrementalCompilationOfFile( ? 
`${moduleName}-${project.namespaceName}` : moduleName; - const incrementalFolderPath = path.join( + // projectRootPath is already NormalizedPath, appending a constant string still makes it a NormalizedPath + const incrementalFolderPath: NormalizedPath = path.join( projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION, - ); + ) as NormalizedPath; - let originalTypeFileLocation = path.resolve( + const relSourcePath = path.relative(projectRootPath, filePath); + const relSourceDir = path.dirname(relSourcePath); + const typeExt = ext === ".res" ? ".cmt" : ".cmti"; + const originalTypeFileName = + project.namespaceName != null + ? `${moduleName}-${project.namespaceName}${typeExt}` + : `${moduleName}${typeExt}`; + // projectRootPath is already NormalizedPath, appending constant strings still yields a NormalizedPath + const originalTypeFileLocation = path.resolve( projectRootPath, c.compilerDirPartialPath, - path.relative(projectRootPath, filePath), - ); - - const parsed = path.parse(originalTypeFileLocation); - parsed.ext = ext === ".res" ? ".cmt" : ".cmti"; - parsed.base = ""; - originalTypeFileLocation = path.format(parsed); + relSourceDir, + originalTypeFileName, + ) as NormalizedPath; incrementalFileCacheEntry = { file: { @@ -323,10 +331,14 @@ function triggerIncrementalCompilationOfFile( moduleNameNamespaced, sourceFileName: moduleName + ext, sourceFilePath: filePath, - incrementalFilePath: path.join(incrementalFolderPath, moduleName + ext), + // As incrementalFolderPath was a NormalizedPath, path.join ensures we can assume string is now NormalizedPath + incrementalFilePath: path.join( + incrementalFolderPath, + moduleName + ext, + ) as NormalizedPath, }, project: { - workspaceRootPath: workspaceRootPath ?? 
projectRootPath, + workspaceRootPath, rootPath: projectRootPath, callArgs: Promise.resolve([]), bscBinaryLocation, @@ -336,7 +348,7 @@ function triggerIncrementalCompilationOfFile( buildRewatch: null, buildNinja: null, compilation: null, - killCompilationListeners: [], + abortCompilation: null, codeActions: [], }; @@ -353,27 +365,21 @@ function triggerIncrementalCompilationOfFile( if (incrementalFileCacheEntry == null) return; const entry = incrementalFileCacheEntry; - if (entry.compilation != null) { - clearTimeout(entry.compilation.timeout); - entry.killCompilationListeners.forEach((cb) => cb()); - entry.killCompilationListeners = []; - } + cancelActiveCompilation(entry); const triggerToken = performance.now(); const timeout = setTimeout(() => { compileContents(entry, fileContent, send, onCompilationFinished); }, 20); - if (entry.compilation != null) { - entry.compilation.timeout = timeout; - entry.compilation.triggerToken = triggerToken; - } else { - entry.compilation = { - timeout, - triggerToken, - }; - } + entry.compilation = { + timeout, + triggerToken, + }; } -function verifyTriggerToken(filePath: string, triggerToken: number): boolean { +function verifyTriggerToken( + filePath: NormalizedPath, + triggerToken: number, +): boolean { return ( incrementallyCompiledFileInfo.get(filePath)?.compilation?.triggerToken === triggerToken @@ -388,12 +394,9 @@ async function figureOutBscArgs( ) { const project = projectsFiles.get(entry.project.rootPath); if (project?.rescriptVersion == null) { - if (debug()) { - console.log( - "Found no project (or ReScript version) for " + - entry.file.sourceFilePath, - ); - } + getLogger().log( + "Found no project (or ReScript version) for " + entry.file.sourceFilePath, + ); return null; } const res = await getBscArgs(send, entry); @@ -488,6 +491,169 @@ rule mij callArgs.push(entry.file.incrementalFilePath); return callArgs; } + +/** + * Remaps code action file paths from the incremental temp file to the actual source file. 
+ */ +function remapCodeActionsToSourceFile( + codeActions: Record<string, fileCodeActions[]>, + sourceFilePath: NormalizedPath, +): fileCodeActions[] { + const actions = Object.values(codeActions)[0] ?? []; + + // Code actions will point to the locally saved incremental file, so we must remap + // them so the editor understands it's supposed to apply them to the unsaved doc, + // not the saved "dummy" incremental file. + actions.forEach((ca) => { + if (ca.codeAction.edit != null && ca.codeAction.edit.changes != null) { + const change = Object.values(ca.codeAction.edit.changes)[0]; + + ca.codeAction.edit.changes = { + [utils.pathToURI(sourceFilePath)]: change, + }; + } + }); + + return actions; +} + +/** + * Filters diagnostics to remove unwanted parser errors from incremental compilation. + */ +function filterIncrementalDiagnostics( + diagnostics: p.Diagnostic[], + sourceFileName: string, +): { filtered: p.Diagnostic[]; hasIgnoredMessages: boolean } { + let hasIgnoredMessages = false; + + const filtered = diagnostics + .map((d) => ({ + ...d, + message: removeAnsiCodes(d.message), + })) + // Filter out a few unwanted parser errors since we run the parser in ignore mode + .filter((d) => { + if ( + !d.message.startsWith("Uninterpreted extension 'rescript.") && + (!d.message.includes(`/${INCREMENTAL_FOLDER_NAME}/${sourceFileName}`) || + // The `Multiple definition of the name ` type error's + // message includes the filepath with LOC of the duplicate definition + d.message.startsWith("Multiple definition of the") || + // The signature mismatch, with mismatch and ill typed applicative functor + // type errors all include the filepath with LOC + d.message.startsWith("Signature mismatch") || + d.message.startsWith("In this `with' constraint") || + d.message.startsWith("This `with' constraint on")) + ) { + hasIgnoredMessages = true; + return true; + } + return false; + }); + + return { filtered, hasIgnoredMessages }; +} + +/** + * Logs an error when incremental compilation produces unexpected
output. + */ +function logIncrementalCompilationError( + entry: IncrementallyCompiledFileInfo, + stderr: string, + callArgs: string[] | null, + send: (msg: p.Message) => void, +): void { + hasReportedFeatureFailedError.add(entry.project.rootPath); + const logfile = path.resolve( + entry.project.incrementalFolderPath, + "error.log", + ); + + try { + fs.writeFileSync( + logfile, + `== BSC ARGS ==\n${callArgs?.join(" ")}\n\n== OUTPUT ==\n${stderr}`, + ); + + const params: p.ShowMessageParams = { + type: p.MessageType.Warning, + message: `[Incremental typechecking] Something might have gone wrong with incremental type checking. Check out the [error log](file://${logfile}) and report this issue please.`, + }; + + const message: p.NotificationMessage = { + jsonrpc: c.jsonrpcVersion, + method: "window/showMessage", + params: params, + }; + + send(message); + } catch (e) { + console.error(e); + } +} + +/** + * Processes compilation results and publishes diagnostics to the LSP client. + */ +function processAndPublishDiagnostics( + entry: IncrementallyCompiledFileInfo, + result: Record<string, p.Diagnostic[]>, + codeActions: Record<string, fileCodeActions[]>, + stderr: string, + callArgs: string[] | null, + send: (msg: p.Message) => void, +): void { + // Remap code actions to source file + const actions = remapCodeActionsToSourceFile( + codeActions, + entry.file.sourceFilePath, + ); + entry.codeActions = actions; + + // Filter diagnostics + const rawDiagnostics = Object.values(result)[0] ??
[]; + const { filtered: res, hasIgnoredMessages } = filterIncrementalDiagnostics( + rawDiagnostics, + entry.file.sourceFileName, + ); + + // Log error if compilation produced unexpected output + if ( + res.length === 0 && + stderr !== "" && + !hasIgnoredMessages && + !hasReportedFeatureFailedError.has(entry.project.rootPath) + ) { + logIncrementalCompilationError(entry, stderr, callArgs, send); + } + + const fileUri = utils.pathToURI(entry.file.sourceFilePath); + + // Update filesWithDiagnostics to track this file + // entry.project.rootPath is guaranteed to match a key in projectsFiles + // (see triggerIncrementalCompilationOfFile where the entry is created) + const projectFile = projectsFiles.get(entry.project.rootPath); + + if (projectFile != null) { + if (res.length > 0) { + projectFile.filesWithDiagnostics.add(fileUri); + } else { + // Only remove if there are no diagnostics at all + projectFile.filesWithDiagnostics.delete(fileUri); + } + } + + const notification: p.NotificationMessage = { + jsonrpc: c.jsonrpcVersion, + method: "textDocument/publishDiagnostics", + params: { + uri: fileUri, + diagnostics: res, + }, + }; + send(notification); +} + async function compileContents( entry: IncrementallyCompiledFileInfo, fileContent: string, @@ -502,11 +668,9 @@ async function compileContents( callArgs = callArgsRetried; entry.project.callArgs = Promise.resolve(callArgsRetried); } else { - if (debug()) { - console.log( - "Could not figure out call args. Maybe build.ninja does not exist yet?", - ); - } + getLogger().log( + "Could not figure out call args. Maybe build.ninja does not exist yet?", + ); return; } } @@ -524,157 +688,107 @@ async function compileContents( entry.buildSystem === "bsb" ? 
entry.project.rootPath : path.resolve(entry.project.rootPath, c.compilerDirPartialPath); - if (debug()) { - console.log( - `About to invoke bsc from \"${cwd}\", used ${entry.buildSystem}`, + + getLogger().log( + `About to invoke bsc from \"${cwd}\", used ${entry.buildSystem}`, + ); + getLogger().log( + `${entry.project.bscBinaryLocation} ${callArgs.map((c) => `"${c}"`).join(" ")}`, + ); + + // Create AbortController for this compilation + const abortController = new AbortController(); + const { signal } = abortController; + + // Store abort function directly on the entry + entry.abortCompilation = () => { + getLogger().log(`Aborting compilation of ${entry.file.sourceFileName}`); + abortController.abort(); + }; + + try { + const { stderr } = await execFilePromise( + entry.project.bscBinaryLocation, + callArgs, + { cwd, signal }, + ).catch((error) => { + if (error.stderr) { + return { stderr: error.stderr }; + } + throw error; + }); + + getLogger().log( + `Recompiled ${entry.file.sourceFileName} in ${ + (performance.now() - startTime) / 1000 + }s`, ); - console.log( - `${entry.project.bscBinaryLocation} ${callArgs.map((c) => `"${c}"`).join(" ")}`, + + // Verify token after async operation + if ( + triggerToken != null && + !verifyTriggerToken(entry.file.sourceFilePath, triggerToken) + ) { + getLogger().log( + `Discarding stale compilation results for ${entry.file.sourceFileName} (token changed)`, + ); + return; + } + + const { result, codeActions } = await utils.parseCompilerLogOutput( + `${stderr}\n#Done()`, ); - } - const process = cp.execFile( - entry.project.bscBinaryLocation, - callArgs, - { cwd }, - async (error, _stdout, stderr) => { - if (!error?.killed) { - if (debug()) - console.log( - `Recompiled ${entry.file.sourceFileName} in ${ - (performance.now() - startTime) / 1000 - }s`, - ); - } else { - if (debug()) - console.log( - `Compilation of ${entry.file.sourceFileName} was killed.`, - ); - } - let hasIgnoredErrorMessages = false; - if ( - !error?.killed && 
- triggerToken != null && - verifyTriggerToken(entry.file.sourceFilePath, triggerToken) - ) { - if (debug()) { - console.log("Resetting compilation status."); - } - // Reset compilation status as this compilation finished - entry.compilation = null; - const { result, codeActions } = await utils.parseCompilerLogOutput( - `${stderr}\n#Done()`, - ); - const actions = Object.values(codeActions)[0] ?? []; - - // Code actions will point to the locally saved incremental file, so we must remap - // them so the editor understand it's supposed to apply them to the unsaved doc, - // not the saved "dummy" incremental file. - actions.forEach((ca) => { - if ( - ca.codeAction.edit != null && - ca.codeAction.edit.changes != null - ) { - const change = Object.values(ca.codeAction.edit.changes)[0]; - - ca.codeAction.edit.changes = { - [pathToFileURL(entry.file.sourceFilePath).toString()]: change, - }; - } - }); - - entry.codeActions = actions; - - const res = (Object.values(result)[0] ?? []) - .map((d) => ({ - ...d, - message: removeAnsiCodes(d.message), - })) - // Filter out a few unwanted parser errors since we run the parser in ignore mode - .filter((d) => { - if ( - !d.message.startsWith("Uninterpreted extension 'rescript.") && - (!d.message.includes( - `/${INCREMENTAL_FOLDER_NAME}/${entry.file.sourceFileName}`, - ) || - // The `Multiple definition of the name ` type error's - // message includes the filepath with LOC of the duplicate definition - d.message.startsWith("Multiple definition of the") || - // The signature mismatch, with mismatch and ill typed applicative functor - // type errors all include the filepath with LOC - d.message.startsWith("Signature mismatch") || - d.message.startsWith("In this `with' constraint") || - d.message.startsWith("This `with' constraint on")) - ) { - hasIgnoredErrorMessages = true; - return true; - } - return false; - }); - - if ( - res.length === 0 && - stderr !== "" && - !hasIgnoredErrorMessages && - 
!hasReportedFeatureFailedError.has(entry.project.rootPath) - ) { - try { - hasReportedFeatureFailedError.add(entry.project.rootPath); - const logfile = path.resolve( - entry.project.incrementalFolderPath, - "error.log", - ); - fs.writeFileSync( - logfile, - `== BSC ARGS ==\n${callArgs?.join( - " ", - )}\n\n== OUTPUT ==\n${stderr}`, - ); - let params: p.ShowMessageParams = { - type: p.MessageType.Warning, - message: `[Incremental typechecking] Something might have gone wrong with incremental type checking. Check out the [error log](file://${logfile}) and report this issue please.`, - }; - let message: p.NotificationMessage = { - jsonrpc: c.jsonrpcVersion, - method: "window/showMessage", - params: params, - }; - send(message); - } catch (e) { - console.error(e); - } - } - - const notification: p.NotificationMessage = { - jsonrpc: c.jsonrpcVersion, - method: "textDocument/publishDiagnostics", - params: { - uri: pathToFileURL(entry.file.sourceFilePath), - diagnostics: res, - }, - }; - send(notification); - } - onCompilationFinished?.(); - }, - ); - entry.killCompilationListeners.push(() => { - process.kill("SIGKILL"); - }); - } catch (e) { - console.error(e); + // Re-verify again after second async operation + if ( + triggerToken != null && + !verifyTriggerToken(entry.file.sourceFilePath, triggerToken) + ) { + getLogger().log( + `Discarding stale compilation results for ${entry.file.sourceFileName} (token changed after parsing)`, + ); + return; + } + + processAndPublishDiagnostics( + entry, + result, + codeActions, + stderr, + callArgs, + send, + ); + } catch (error: any) { + if (error.name === "AbortError") { + getLogger().log( + `Compilation of ${entry.file.sourceFileName} was aborted.`, + ); + } else { + getLogger().error( + `Unexpected error during compilation of ${entry.file.sourceFileName}: ${error}`, + ); + throw error; + } + } finally { + // Only clean up if this is still the active compilation + if (entry.compilation?.triggerToken === triggerToken) { + 
getLogger().log("Cleaning up compilation status."); + entry.compilation = null; + entry.abortCompilation = null; + } + } + } finally { + onCompilationFinished?.(); } } export function handleUpdateOpenedFile( - filePath: string, + filePath: utils.NormalizedPath, fileContent: string, send: send, onCompilationFinished?: () => void, ) { - if (debug()) { - console.log("Updated: " + filePath); - } + getLogger().log("Updated: " + filePath); triggerIncrementalCompilationOfFile( filePath, fileContent, @@ -683,10 +797,8 @@ export function handleUpdateOpenedFile( ); } -export function handleClosedFile(filePath: string) { - if (debug()) { - console.log("Closed: " + filePath); - } +export function handleClosedFile(filePath: NormalizedPath) { + getLogger().log("Closed: " + filePath); const entry = incrementallyCompiledFileInfo.get(filePath); if (entry == null) return; cleanUpIncrementalFiles(filePath, entry.project.rootPath); @@ -695,7 +807,7 @@ export function handleClosedFile(filePath: string) { } export function getCodeActionsFromIncrementalCompilation( - filePath: string, + filePath: NormalizedPath, ): Array | null { const entry = incrementallyCompiledFileInfo.get(filePath); if (entry != null) { diff --git a/server/src/logger.ts b/server/src/logger.ts new file mode 100644 index 000000000..3653aa5e9 --- /dev/null +++ b/server/src/logger.ts @@ -0,0 +1,89 @@ +import * as p from "vscode-languageserver-protocol"; +import * as c from "./constants"; + +export type LogLevel = "error" | "warn" | "info" | "log"; + +const levelOrder: Record = { + log: 1, + info: 2, + warn: 3, + error: 4, +}; + +export interface Logger { + error(message: string): void; + warn(message: string): void; + info(message: string): void; + log(message: string): void; +} + +class NoOpLogger implements Logger { + error(_message: string): void {} + warn(_message: string): void {} + info(_message: string): void {} + log(_message: string): void {} +} + +class LSPLogger implements Logger { + private logLevel: 
LogLevel = "info"; + + constructor(private send: (msg: p.Message) => void) {} + + setLogLevel(level: LogLevel): void { + this.logLevel = level; + } + + private shouldLog(level: LogLevel): boolean { + return levelOrder[level] >= levelOrder[this.logLevel]; + } + + error(message: string): void { + if (this.shouldLog("error")) { + this.sendLogMessage(message, p.MessageType.Error); + } + } + + warn(message: string): void { + if (this.shouldLog("warn")) { + this.sendLogMessage(message, p.MessageType.Warning); + } + } + + info(message: string): void { + if (this.shouldLog("info")) { + this.sendLogMessage(message, p.MessageType.Info); + } + } + + log(message: string): void { + if (this.shouldLog("log")) { + this.sendLogMessage(message, p.MessageType.Log); + } + } + + private sendLogMessage(message: string, type: p.MessageType): void { + const notification: p.NotificationMessage = { + jsonrpc: c.jsonrpcVersion, + method: "window/logMessage", + params: { type, message }, + }; + this.send(notification); + } +} + +// Default no-op instance +let instance: Logger = new NoOpLogger(); + +export function initializeLogger(send: (msg: p.Message) => void): void { + instance = new LSPLogger(send); +} + +export function setLogLevel(level: LogLevel): void { + if (instance instanceof LSPLogger) { + instance.setLogLevel(level); + } +} + +export function getLogger(): Logger { + return instance; +} diff --git a/server/src/lookup.ts b/server/src/lookup.ts index 38dff3761..6ab3bc6c8 100644 --- a/server/src/lookup.ts +++ b/server/src/lookup.ts @@ -1,9 +1,9 @@ import * as fs from "fs"; import * as path from "path"; -import * as p from "vscode-languageserver-protocol"; import { BuildSchema, ModuleFormat, ModuleFormatObject } from "./buildSchema"; import * as c from "./constants"; +import { NormalizedPath, normalizePath } from "./utils"; const getCompiledFolderName = (moduleFormat: ModuleFormat): string => { switch (moduleFormat) { @@ -23,31 +23,48 @@ export const replaceFileExtension = (filePath: 
string, ext: string): string => { return path.format({ dir: path.dirname(filePath), name, ext }); }; +export const replaceFileExtensionWithNormalizedPath = ( + filePath: NormalizedPath, + ext: string, +): NormalizedPath => { + let name = path.basename(filePath, path.extname(filePath)); + const result = path.format({ dir: path.dirname(filePath), name, ext }); + // path.format() doesn't preserve normalization, so we need to normalize the result + const normalized = normalizePath(result); + if (normalized == null) { + // Should never happen, but handle gracefully + return result as NormalizedPath; + } + return normalized; +}; + // Check if filePartialPath exists at directory and return the joined path, // otherwise recursively check parent directories for it. export const findFilePathFromProjectRoot = ( - directory: p.DocumentUri | null, // This must be a directory and not a file! + directory: NormalizedPath | null, // This must be a directory and not a file! filePartialPath: string, -): null | p.DocumentUri => { +): NormalizedPath | null => { if (directory == null) { return null; } - let filePath: p.DocumentUri = path.join(directory, filePartialPath); + let filePath = path.join(directory, filePartialPath); if (fs.existsSync(filePath)) { - return filePath; + return normalizePath(filePath); } - let parentDir: p.DocumentUri = path.dirname(directory); - if (parentDir === directory) { + let parentDirStr = path.dirname(directory); + if (parentDirStr === directory) { // reached the top return null; } + const parentDir = normalizePath(parentDirStr); + return findFilePathFromProjectRoot(parentDir, filePartialPath); }; -export const readConfig = (projDir: p.DocumentUri): BuildSchema | null => { +export const readConfig = (projDir: NormalizedPath): BuildSchema | null => { try { let rescriptJson = path.join(projDir, c.rescriptJsonPartialPath); let bsconfigJson = path.join(projDir, c.bsconfigPartialPath); @@ -109,9 +126,9 @@ export const getSuffixAndPathFragmentFromBsconfig = 
(bsconfig: BuildSchema) => { }; export const getFilenameFromBsconfig = ( - projDir: string, + projDir: NormalizedPath, partialFilePath: string, -): string | null => { +): NormalizedPath | null => { let bsconfig = readConfig(projDir); if (!bsconfig) { @@ -122,22 +139,26 @@ export const getFilenameFromBsconfig = ( let compiledPartialPath = replaceFileExtension(partialFilePath, suffix); - return path.join(projDir, pathFragment, compiledPartialPath); + const result = path.join(projDir, pathFragment, compiledPartialPath); + return normalizePath(result); }; // Monorepo helpers export const getFilenameFromRootBsconfig = ( - projDir: string, + projDir: NormalizedPath, partialFilePath: string, -): string | null => { +): NormalizedPath | null => { + // Start searching from the parent directory of projDir to find the workspace root + const parentDir = normalizePath(path.dirname(projDir)); + let rootConfigPath = findFilePathFromProjectRoot( - path.join("..", projDir), + parentDir, c.rescriptJsonPartialPath, ); if (!rootConfigPath) { rootConfigPath = findFilePathFromProjectRoot( - path.join("..", projDir), + parentDir, c.bsconfigPartialPath, ); } @@ -146,7 +167,11 @@ export const getFilenameFromRootBsconfig = ( return null; } - let rootConfig = readConfig(path.dirname(rootConfigPath)); + const rootConfigDir = normalizePath(path.dirname(rootConfigPath)); + if (rootConfigDir == null) { + return null; + } + let rootConfig = readConfig(rootConfigDir); if (!rootConfig) { return null; @@ -156,5 +181,6 @@ export const getFilenameFromRootBsconfig = ( let compiledPartialPath = replaceFileExtension(partialFilePath, suffix); - return path.join(projDir, pathFragment, compiledPartialPath); + const result = path.join(projDir, pathFragment, compiledPartialPath); + return normalizePath(result); }; diff --git a/server/src/projectFiles.ts b/server/src/projectFiles.ts index 82582e123..f39edd2fc 100644 --- a/server/src/projectFiles.ts +++ b/server/src/projectFiles.ts @@ -1,20 +1,24 @@ import * as 
cp from "node:child_process"; import * as p from "vscode-languageserver-protocol"; +import { NormalizedPath, FileURI } from "./utils"; export type filesDiagnostics = { - [key: string]: p.Diagnostic[]; + [key: FileURI]: p.Diagnostic[]; }; export interface projectFiles { - openFiles: Set<string>; - filesWithDiagnostics: Set<string>; + openFiles: Set<NormalizedPath>; + filesWithDiagnostics: Set<FileURI>; filesDiagnostics: filesDiagnostics; rescriptVersion: string | undefined; - bscBinaryLocation: string | null; + bscBinaryLocation: NormalizedPath | null; editorAnalysisLocation: string | null; namespaceName: string | null; bsbWatcherByEditor: null | cp.ChildProcess; + // The root path where the build watcher runs (could be monorepo root) + // Used for lock file cleanup when killing the watcher + buildRootPath: NormalizedPath | null; // This keeps track of whether we've prompted the user to start a build // automatically, if there's no build currently running for the project. We @@ -24,5 +28,11 @@ export interface projectFiles { hasPromptedToStartBuild: boolean | "never"; } -export let projectsFiles: Map<string, projectFiles> = // project root path - new Map(); +/** + * Map of project root paths to their project state. + * + * Keys are normalized paths (NormalizedPath) to ensure consistent lookups + * and prevent path format mismatches. All paths should be normalized using + * `normalizePath()` before being used as keys.
+ */ +export let projectsFiles: Map<NormalizedPath, projectFiles> = new Map(); diff --git a/server/src/server.ts b/server/src/server.ts index 16515b415..f4829dd0e 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -5,6 +5,7 @@ import * as rpc from "vscode-jsonrpc/node"; import * as path from "path"; import semver from "semver"; import fs from "fs"; +import fsAsync from "fs/promises"; import { DidChangeWatchedFilesNotification, DidOpenTextDocumentNotification, @@ -22,16 +23,37 @@ import * as utils from "./utils"; import * as codeActions from "./codeActions"; import * as c from "./constants"; import { assert } from "console"; -import { fileURLToPath } from "url"; import { WorkspaceEdit } from "vscode-languageserver"; import { onErrorReported } from "./errorReporter"; import * as ic from "./incrementalCompilation"; -import config, { extensionConfiguration } from "./config"; +import config, { extensionConfiguration, initialConfiguration } from "./config"; import { projectsFiles } from "./projectFiles"; +import { NormalizedPath } from "./utils"; +import { initializeLogger, getLogger, setLogLevel } from "./logger"; + +function applyUserConfiguration(configuration: extensionConfiguration) { + // We always want to spread the initial configuration to ensure all defaults are respected. + config.extensionConfiguration = Object.assign( + {}, + initialConfiguration, + configuration, + ); + + const level = config.extensionConfiguration.logLevel; + + if ( + level === "error" || + level === "warn" || + level === "info" || + level === "log" + ) { + setLogLevel(level); + } +} // Absolute paths to all the workspace folders // Configured during the initialize request -export const workspaceFolders = new Set<string>(); +export const workspaceFolders = new Set<NormalizedPath>(); // This holds client capabilities specific to our extension, and not necessarily // related to the LS protocol.
It's for enabling/disabling features that might @@ -76,7 +98,7 @@ const projectCompilationStates: Map = type CompilationStatusPayload = { project: string; - projectRootPath: string; + projectRootPath: NormalizedPath; status: "compiling" | "success" | "error" | "warning"; errorCount: number; warningCount: number; @@ -92,15 +114,17 @@ const sendCompilationStatus = (payload: CompilationStatusPayload) => { }; let findRescriptBinary = async ( - projectRootPath: p.DocumentUri | null, -): Promise => { + projectRootPath: utils.NormalizedPath | null, +): Promise => { if ( config.extensionConfiguration.binaryPath != null && fs.existsSync( path.join(config.extensionConfiguration.binaryPath, "rescript"), ) ) { - return path.join(config.extensionConfiguration.binaryPath, "rescript"); + return utils.normalizePath( + path.join(config.extensionConfiguration.binaryPath, "rescript"), + ); } return utils.findRescriptBinary(projectRootPath); @@ -118,8 +142,15 @@ let openCompiledFileRequest = new v.RequestType< void >("textDocument/openCompiled"); -let getCurrentCompilerDiagnosticsForFile = ( - fileUri: string, +// Request to start the build watcher for a project +let startBuildRequest = new v.RequestType< + p.TextDocumentIdentifier, + { success: boolean }, + void +>("rescript/startBuild"); + +export let getCurrentCompilerDiagnosticsForFile = ( + fileUri: utils.FileURI, ): p.Diagnostic[] => { let diagnostics: p.Diagnostic[] | null = null; @@ -166,10 +197,12 @@ let sendUpdatedDiagnostics = async () => { codeActionsFromDiagnostics = codeActions; // diff - Object.keys(filesAndErrors).forEach((file) => { + ( + Object.entries(filesAndErrors) as Array<[utils.FileURI, p.Diagnostic[]]> + ).forEach(([fileUri, diagnostics]) => { let params: p.PublishDiagnosticsParams = { - uri: file, - diagnostics: filesAndErrors[file], + uri: fileUri, + diagnostics, }; let notification: p.NotificationMessage = { jsonrpc: c.jsonrpcVersion, @@ -178,7 +211,7 @@ let sendUpdatedDiagnostics = async () => { }; 
send(notification); - filesWithDiagnostics.add(file); + filesWithDiagnostics.add(fileUri); }); if (done) { // clear old files @@ -214,8 +247,10 @@ let sendUpdatedDiagnostics = async () => { let errorCount = 0; let warningCount = 0; - for (const [fileUri, diags] of Object.entries(filesAndErrors)) { - const filePath = fileURLToPath(fileUri); + for (const [fileUri, diags] of Object.entries(filesAndErrors) as Array< + [utils.FileURI, p.Diagnostic[]] + >) { + const filePath = utils.uriToNormalizedPath(fileUri); if (filePath.startsWith(projectRootPath)) { for (const d of diags as v.Diagnostic[]) { if (d.severity === v.DiagnosticSeverity.Error) errorCount++; @@ -282,7 +317,7 @@ let sendUpdatedDiagnostics = async () => { } }; -let deleteProjectDiagnostics = (projectRootPath: string) => { +let deleteProjectDiagnostics = (projectRootPath: utils.NormalizedPath) => { let root = projectsFiles.get(projectRootPath); if (root != null) { root.filesWithDiagnostics.forEach((file) => { @@ -315,31 +350,29 @@ let sendCompilationFinishedMessage = () => { send(notification); }; -let debug = false; - -let syncProjectConfigCache = async (rootPath: string) => { +let syncProjectConfigCache = async (rootPath: utils.NormalizedPath) => { try { - if (debug) console.log("syncing project config cache for " + rootPath); + getLogger().log("syncing project config cache for " + rootPath); await utils.runAnalysisAfterSanityCheck(rootPath, [ "cache-project", rootPath, ]); - if (debug) console.log("OK - synced project config cache for " + rootPath); + getLogger().log("OK - synced project config cache for " + rootPath); } catch (e) { - if (debug) console.error(e); + getLogger().error(String(e)); } }; -let deleteProjectConfigCache = async (rootPath: string) => { +let deleteProjectConfigCache = async (rootPath: utils.NormalizedPath) => { try { - if (debug) console.log("deleting project config cache for " + rootPath); + getLogger().log("deleting project config cache for " + rootPath); await 
utils.runAnalysisAfterSanityCheck(rootPath, [ "cache-delete", rootPath, ]); - if (debug) console.log("OK - deleted project config cache for " + rootPath); + getLogger().log("OK - deleted project config cache for " + rootPath); } catch (e) { - if (debug) console.error(e); + getLogger().error(String(e)); } }; @@ -352,7 +385,9 @@ async function onWorkspaceDidChangeWatchedFiles( if ( config.extensionConfiguration.cache?.projectConfig?.enable === true ) { - let projectRoot = utils.findProjectRootOfFile(change.uri); + let projectRoot = utils.findProjectRootOfFile( + utils.uriToNormalizedPath(change.uri as utils.FileURI), + ); if (projectRoot != null) { await syncProjectConfigCache(projectRoot); } @@ -368,10 +403,12 @@ async function onWorkspaceDidChangeWatchedFiles( sendCodeLensRefresh(); } } catch { - console.log("Error while sending updated diagnostics"); + getLogger().error("Error while sending updated diagnostics"); } } else { - ic.incrementalCompilationFileChanged(fileURLToPath(change.uri)); + ic.incrementalCompilationFileChanged( + utils.uriToNormalizedPath(change.uri as utils.FileURI), + ); } }), ); @@ -379,15 +416,16 @@ async function onWorkspaceDidChangeWatchedFiles( type clientSentBuildAction = { title: string; - projectRootPath: string; + projectRootPath: utils.NormalizedPath; }; -let openedFile = async (fileUri: string, fileContent: string) => { - let filePath = fileURLToPath(fileUri); +let openedFile = async (fileUri: utils.FileURI, fileContent: string) => { + let filePath = utils.uriToNormalizedPath(fileUri); stupidFileContentCache.set(filePath, fileContent); let projectRootPath = utils.findProjectRootOfFile(filePath); if (projectRootPath != null) { + // projectRootPath is already normalized (NormalizedPath) from findProjectRootOfFile let projectRootState = projectsFiles.get(projectRootPath); if (projectRootState == null) { if (config.extensionConfiguration.incrementalTypechecking?.enable) { @@ -405,6 +443,7 @@ let openedFile = async (fileUri: string, 
fileContent: string) => { rescriptVersion: await utils.findReScriptVersionForProjectRoot(projectRootPath), bsbWatcherByEditor: null, + buildRootPath: null, bscBinaryLocation: await utils.findBscExeBinary(projectRootPath), editorAnalysisLocation: await utils.findEditorAnalysisBinary(projectRootPath), @@ -421,21 +460,37 @@ let openedFile = async (fileUri: string, fileContent: string) => { } let root = projectsFiles.get(projectRootPath)!; root.openFiles.add(filePath); - // check if .bsb.lock is still there. If not, start a bsb -w ourselves + // check if a lock file exists. If not, start a build watcher ourselves // because otherwise the diagnostics info we'll display might be stale - let bsbLockPath = path.join(projectRootPath, c.bsbLock); + // ReScript < 12: .bsb.lock in project root + // ReScript >= 12: lib/rescript.lock + // For monorepos, the lock file is at the monorepo root, not the subpackage + let rescriptBinaryPath = await findRescriptBinary(projectRootPath); + let buildRootPath = projectRootPath; + if (rescriptBinaryPath != null) { + const monorepoRootPath = + utils.getMonorepoRootFromBinaryPath(rescriptBinaryPath); + if (monorepoRootPath != null) { + buildRootPath = monorepoRootPath; + } + } + let bsbLockPath = path.join(buildRootPath, c.bsbLock); + let rescriptLockPath = path.join(buildRootPath, c.rescriptLockPartialPath); + let hasLockFile = + fs.existsSync(bsbLockPath) || fs.existsSync(rescriptLockPath); if ( projectRootState.hasPromptedToStartBuild === false && config.extensionConfiguration.askToStartBuild === true && - !fs.existsSync(bsbLockPath) + !hasLockFile ) { // TODO: sometime stale .bsb.lock dangling. bsb -w knows .bsb.lock is // stale. 
Use that logic // TODO: close watcher when lang-server shuts down - if ((await findRescriptBinary(projectRootPath)) != null) { + if (rescriptBinaryPath != null) { + getLogger().info(`Prompting to start build for ${buildRootPath}`); let payload: clientSentBuildAction = { title: c.startBuildAction, - projectRootPath: projectRootPath, + projectRootPath: buildRootPath, }; let params = { type: p.MessageType.Info, @@ -477,8 +532,9 @@ let openedFile = async (fileUri: string, fileContent: string) => { } }; -let closedFile = async (fileUri: string) => { - let filePath = fileURLToPath(fileUri); +let closedFile = async (fileUri: utils.FileURI) => { + let filePath = utils.uriToNormalizedPath(fileUri); + getLogger().log(`Closing file ${filePath}`); if (config.extensionConfiguration.incrementalTypechecking?.enable) { ic.handleClosedFile(filePath); @@ -491,21 +547,36 @@ let closedFile = async (fileUri: string) => { let root = projectsFiles.get(projectRootPath); if (root != null) { root.openFiles.delete(filePath); + getLogger().log( + `Open files remaining for ${projectRootPath}: ${root.openFiles.size}`, + ); // clear diagnostics too if no open files open in said project if (root.openFiles.size === 0) { await deleteProjectConfigCache(projectRootPath); deleteProjectDiagnostics(projectRootPath); if (root.bsbWatcherByEditor !== null) { - root.bsbWatcherByEditor.kill(); + getLogger().info( + `Killing build watcher for ${projectRootPath} (all files closed)`, + ); + utils.killBuildWatcher( + root.bsbWatcherByEditor, + root.buildRootPath ?? 
undefined, + ); root.bsbWatcherByEditor = null; + root.buildRootPath = null; } } + } else { + getLogger().log(`No project state found for ${projectRootPath}`); } } }; -let updateOpenedFile = (fileUri: string, fileContent: string) => { - let filePath = fileURLToPath(fileUri); +let updateOpenedFile = (fileUri: utils.FileURI, fileContent: string) => { + getLogger().info( + `Updating opened file ${fileUri}, incremental TC enabled: ${config.extensionConfiguration.incrementalTypechecking?.enable}`, + ); + let filePath = utils.uriToNormalizedPath(fileUri); assert(stupidFileContentCache.has(filePath)); stupidFileContentCache.set(filePath, fileContent); if (config.extensionConfiguration.incrementalTypechecking?.enable) { @@ -519,8 +590,8 @@ let updateOpenedFile = (fileUri: string, fileContent: string) => { }); } }; -let getOpenedFileContent = (fileUri: string) => { - let filePath = fileURLToPath(fileUri); +let getOpenedFileContent = (fileUri: utils.FileURI) => { + let filePath = utils.uriToNormalizedPath(fileUri); let content = stupidFileContentCache.get(filePath)!; assert(content != null); return content; @@ -536,18 +607,22 @@ export default function listen(useStdio = false) { let reader = new rpc.StreamMessageReader(process.stdin); // proper `this` scope for writer send = (msg: p.Message) => writer.write(msg); + initializeLogger(send); reader.listen(onMessage); } else { // proper `this` scope for process send = (msg: p.Message) => process.send!(msg); + initializeLogger(send); process.on("message", onMessage); } } async function hover(msg: p.RequestMessage) { let params = msg.params as p.HoverParams; - let filePath = fileURLToPath(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(); fs.writeFileSync(tmpname, code, { encoding: 
"utf-8" }); let response = await utils.runAnalysisCommand( @@ -568,7 +643,9 @@ async function hover(msg: p.RequestMessage) { async function inlayHint(msg: p.RequestMessage) { const params = msg.params as p.InlayHintParams; - const filePath = fileURLToPath(params.textDocument.uri); + const filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); const response = await utils.runAnalysisCommand( filePath, @@ -595,7 +672,9 @@ function sendInlayHintsRefresh() { async function codeLens(msg: p.RequestMessage) { const params = msg.params as p.CodeLensParams; - const filePath = fileURLToPath(params.textDocument.uri); + const filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); const response = await utils.runAnalysisCommand( filePath, @@ -616,8 +695,10 @@ function sendCodeLensRefresh() { async function signatureHelp(msg: p.RequestMessage) { let params = msg.params as p.SignatureHelpParams; - let filePath = fileURLToPath(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(); fs.writeFileSync(tmpname, code, { encoding: "utf-8" }); let response = await utils.runAnalysisCommand( @@ -641,7 +722,9 @@ async function signatureHelp(msg: p.RequestMessage) { async function definition(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_definition let params = msg.params as p.DefinitionParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let response = await utils.runAnalysisCommand( filePath, ["definition", filePath, params.position.line, params.position.character], @@ -653,7 +736,9 @@ async function 
definition(msg: p.RequestMessage) { async function typeDefinition(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specification/specification-current/#textDocument_typeDefinition let params = msg.params as p.TypeDefinitionParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let response = await utils.runAnalysisCommand( filePath, [ @@ -670,7 +755,9 @@ async function typeDefinition(msg: p.RequestMessage) { async function references(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_references let params = msg.params as p.ReferenceParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let result: typeof p.ReferencesRequest.type = await utils.getReferencesForPosition(filePath, params.position); let response: p.ResponseMessage = { @@ -687,7 +774,9 @@ async function prepareRename( ): Promise { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_prepareRename let params = msg.params as p.PrepareRenameParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); // `prepareRename` was introduced in 12.0.0-beta.10 let projectRootPath = utils.findProjectRootOfFile(filePath); @@ -727,8 +816,8 @@ async function prepareRename( if (locations !== null) { locations.forEach((loc) => { if ( - path.normalize(fileURLToPath(loc.uri)) === - path.normalize(fileURLToPath(params.textDocument.uri)) + utils.uriToNormalizedPath(loc.uri as utils.FileURI) === + utils.uriToNormalizedPath(params.textDocument.uri as utils.FileURI) ) { let { start, end } = loc.range; let pos = params.position; @@ -753,7 +842,9 @@ async function 
prepareRename( async function rename(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_rename let params = msg.params as p.RenameParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let documentChanges: (p.RenameFile | p.TextDocumentEdit)[] | null = await utils.runAnalysisAfterSanityCheck(filePath, [ "rename", @@ -777,9 +868,11 @@ async function rename(msg: p.RequestMessage) { async function documentSymbol(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_documentSymbol let params = msg.params as p.DocumentSymbolParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let extension = path.extname(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(extension); fs.writeFileSync(tmpname, code, { encoding: "utf-8" }); let response = await utils.runAnalysisCommand( @@ -813,9 +906,11 @@ function askForAllCurrentConfiguration() { async function semanticTokens(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_semanticTokens let params = msg.params as p.SemanticTokensParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let extension = path.extname(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(extension); 
fs.writeFileSync(tmpname, code, { encoding: "utf-8" }); let response = await utils.runAnalysisCommand( @@ -831,8 +926,10 @@ async function semanticTokens(msg: p.RequestMessage) { async function completion(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_completion let params = msg.params as p.ReferenceParams; - let filePath = fileURLToPath(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(); fs.writeFileSync(tmpname, code, { encoding: "utf-8" }); let response = await utils.runAnalysisCommand( @@ -860,8 +957,12 @@ async function completionResolve(msg: p.RequestMessage) { if (item.documentation == null && item.data != null) { const data = item.data as { filePath: string; modulePath: string }; + const normalizedFilePath = utils.normalizePath(data.filePath); + if (normalizedFilePath == null) { + return response; + } let result = await utils.runAnalysisAfterSanityCheck( - data.filePath, + normalizedFilePath, ["completionResolve", data.filePath, data.modulePath], true, ); @@ -873,15 +974,17 @@ async function completionResolve(msg: p.RequestMessage) { async function codeAction(msg: p.RequestMessage): Promise { let params = msg.params as p.CodeActionParams; - let filePath = fileURLToPath(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let extension = path.extname(params.textDocument.uri); let tmpname = utils.createFileInTempDir(extension); // Check local code actions coming from the diagnostics, or from incremental compilation. 
let localResults: v.CodeAction[] = []; const fromDiagnostics = - codeActionsFromDiagnostics[params.textDocument.uri] ?? []; + codeActionsFromDiagnostics[params.textDocument.uri as utils.FileURI] ?? []; const fromIncrementalCompilation = ic.getCodeActionsFromIncrementalCompilation(filePath) ?? []; [...fromDiagnostics, ...fromIncrementalCompilation].forEach( @@ -938,7 +1041,9 @@ function format(msg: p.RequestMessage): Array { result: [], }; let params = msg.params as p.DocumentFormattingParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let extension = path.extname(params.textDocument.uri); if (extension !== c.resExt && extension !== c.resiExt) { let params: p.ShowMessageParams = { @@ -953,7 +1058,7 @@ function format(msg: p.RequestMessage): Array { return [fakeSuccessResponse, response]; } else { // code will always be defined here, even though technically it can be undefined - let code = getOpenedFileContent(params.textDocument.uri); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let projectRootPath = utils.findProjectRootOfFile(filePath); let project = @@ -988,12 +1093,15 @@ function format(msg: p.RequestMessage): Array { } } -let updateDiagnosticSyntax = async (fileUri: string, fileContent: string) => { +let updateDiagnosticSyntax = async ( + fileUri: utils.FileURI, + fileContent: string, +) => { if (config.extensionConfiguration.incrementalTypechecking?.enable) { // The incremental typechecking already sends syntax diagnostics. 
return; } - let filePath = fileURLToPath(fileUri); + let filePath = utils.uriToNormalizedPath(fileUri); let extension = path.extname(filePath); let tmpname = utils.createFileInTempDir(extension); fs.writeFileSync(tmpname, fileContent, { encoding: "utf-8" }); @@ -1011,12 +1119,29 @@ let updateDiagnosticSyntax = async (fileUri: string, fileContent: string) => { tmpname, ]); + let allDiagnostics = [ + ...syntaxDiagnosticsForFile, + ...compilerDiagnosticsForFile, + ]; + + // Update filesWithDiagnostics to track this file + let projectRootPath = utils.findProjectRootOfFile(filePath); + let projectFile = utils.getProjectFile(projectRootPath); + + if (projectFile != null) { + if (allDiagnostics.length > 0) { + projectFile.filesWithDiagnostics.add(fileUri); + } else { + projectFile.filesWithDiagnostics.delete(fileUri); + } + } + let notification: p.NotificationMessage = { jsonrpc: c.jsonrpcVersion, method: "textDocument/publishDiagnostics", params: { uri: fileUri, - diagnostics: [...syntaxDiagnosticsForFile, ...compilerDiagnosticsForFile], + diagnostics: allDiagnostics, }, }; @@ -1028,7 +1153,7 @@ let updateDiagnosticSyntax = async (fileUri: string, fileContent: string) => { async function createInterface(msg: p.RequestMessage): Promise { let params = msg.params as p.TextDocumentIdentifier; let extension = path.extname(params.uri); - let filePath = fileURLToPath(params.uri); + let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI); let projDir = utils.findProjectRootOfFile(filePath); if (projDir === null) { @@ -1115,7 +1240,10 @@ async function createInterface(msg: p.RequestMessage): Promise { let result = typeof response.result === "string" ? 
response.result : ""; try { - let resiPath = lookup.replaceFileExtension(filePath, c.resiExt); + let resiPath = lookup.replaceFileExtensionWithNormalizedPath( + filePath, + c.resiExt, + ); fs.writeFileSync(resiPath, result, { encoding: "utf-8" }); let response: p.ResponseMessage = { jsonrpc: c.jsonrpcVersion, @@ -1138,9 +1266,109 @@ async function createInterface(msg: p.RequestMessage): Promise { } } +// Shared function to start build watcher for a project +// Returns true if watcher was started or already running, false on failure +async function startBuildWatcher( + projectRootPath: utils.NormalizedPath, + rescriptBinaryPath: utils.NormalizedPath, + options?: { + createProjectStateIfMissing?: boolean; + monorepoRootPath?: utils.NormalizedPath | null; + }, +): Promise { + let root = projectsFiles.get(projectRootPath); + + // Create project state if missing and option is set + if (root == null && options?.createProjectStateIfMissing) { + const namespaceName = utils.getNamespaceNameFromConfigFile(projectRootPath); + root = { + openFiles: new Set(), + filesWithDiagnostics: new Set(), + filesDiagnostics: {}, + namespaceName: + namespaceName.kind === "success" ? namespaceName.result : null, + rescriptVersion: + await utils.findReScriptVersionForProjectRoot(projectRootPath), + bsbWatcherByEditor: null, + buildRootPath: null, + bscBinaryLocation: await utils.findBscExeBinary(projectRootPath), + editorAnalysisLocation: + await utils.findEditorAnalysisBinary(projectRootPath), + hasPromptedToStartBuild: true, // Don't prompt since we're starting the build + }; + projectsFiles.set(projectRootPath, root); + } + + if (root == null) { + return false; + } + + // If a build watcher is already running, return success + if (root.bsbWatcherByEditor != null) { + getLogger().info(`Build watcher already running for ${projectRootPath}`); + return true; + } + + // Use monorepo root for cwd (monorepo support), fall back to project root + const buildCwd = options?.monorepoRootPath ?? 
projectRootPath; + + getLogger().info( + `Starting build watcher for ${projectRootPath} in ${buildCwd} (ReScript ${root.rescriptVersion ?? "unknown"})`, + ); + let bsbProcess = utils.runBuildWatcherUsingValidBuildPath( + rescriptBinaryPath, + buildCwd, + root.rescriptVersion, + ); + root.bsbWatcherByEditor = bsbProcess; + root.buildRootPath = buildCwd; + + return true; +} + +async function handleStartBuildRequest( + msg: p.RequestMessage, +): Promise { + let params = msg.params as p.TextDocumentIdentifier; + let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI); + let projectRootPath = utils.findProjectRootOfFile(filePath); + + if (projectRootPath == null) { + return { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + result: { success: false }, + }; + } + + let rescriptBinaryPath = await findRescriptBinary(projectRootPath); + if (rescriptBinaryPath == null) { + return { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + result: { success: false }, + }; + } + + // Derive monorepo root from binary path for monorepo support + const monorepoRootPath = + utils.getMonorepoRootFromBinaryPath(rescriptBinaryPath); + + const success = await startBuildWatcher(projectRootPath, rescriptBinaryPath, { + createProjectStateIfMissing: true, + monorepoRootPath, + }); + + return { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + result: { success }, + }; +} + function openCompiledFile(msg: p.RequestMessage): p.Message { let params = msg.params as p.TextDocumentIdentifier; - let filePath = fileURLToPath(params.uri); + let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI); let projDir = utils.findProjectRootOfFile(filePath); if (projDir === null) { @@ -1194,6 +1422,83 @@ function openCompiledFile(msg: p.RequestMessage): p.Message { return response; } +async function getServerVersion(): Promise { + // Read the server version from package.json + let serverVersion: string | undefined; + try { + const packageJsonPath = path.join(__dirname, "..", "package.json"); + const 
packageJsonContent = await fsAsync.readFile(packageJsonPath, { + encoding: "utf-8", + }); + const packageJson: { version?: unknown } = JSON.parse(packageJsonContent); + serverVersion = + typeof packageJson.version === "string" ? packageJson.version : undefined; + } catch (e) { + // If we can't read the version, that's okay - we'll just omit it + serverVersion = undefined; + } + return serverVersion; +} + +async function dumpServerState( + msg: p.RequestMessage, +): Promise { + // Custom debug endpoint: dump current server state (config + projectsFiles) + try { + const serverVersion = await getServerVersion(); + + const projects = Array.from(projectsFiles.entries()).map( + ([projectRootPath, pf]) => ({ + projectRootPath, + openFiles: Array.from(pf.openFiles), + filesWithDiagnostics: Array.from(pf.filesWithDiagnostics), + filesDiagnostics: pf.filesDiagnostics, + rescriptVersion: pf.rescriptVersion, + bscBinaryLocation: pf.bscBinaryLocation, + editorAnalysisLocation: pf.editorAnalysisLocation, + namespaceName: pf.namespaceName, + hasPromptedToStartBuild: pf.hasPromptedToStartBuild, + bsbWatcherByEditor: + pf.bsbWatcherByEditor != null + ? { pid: pf.bsbWatcherByEditor.pid ?? 
null } + : null, + }), + ); + + const state = { + lspServerVersion: serverVersion, + config: config.extensionConfiguration, + projects, + workspaceFolders: Array.from(workspaceFolders), + runtimePathCache: utils.getRuntimePathCacheSnapshot(), + }; + + // Format JSON with pretty-printing (2-space indent) on the server side + // This ensures consistent formatting and handles any Maps/Sets that might + // have been converted to plain objects/arrays above + const formattedJson = JSON.stringify(state, null, 2); + + // Return the content so the client can create an unsaved document + // This avoids creating temporary files that would never be cleaned up + let response: p.ResponseMessage = { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + result: { content: formattedJson }, + }; + return response; + } catch (e) { + let response: p.ResponseMessage = { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + error: { + code: p.ErrorCodes.InternalError, + message: `Failed to dump server state: ${String(e)}`, + }, + }; + return response; + } +} + async function onMessage(msg: p.Message) { if (p.Message.isNotification(msg)) { // notification message, aka the client ends it and doesn't want a reply @@ -1220,24 +1525,30 @@ async function onMessage(msg: p.Message) { const watchers = Array.from(workspaceFolders).flatMap( (projectRootPath) => [ { - globPattern: path.join( - projectRootPath, - "**", - c.compilerLogPartialPath, - ), + // Only watch the root compiler log for each workspace folder. + // In monorepos, `**/lib/bs/.compiler.log` matches every package and dependency, + // causing a burst of events per save. 
+ globPattern: { + baseUri: utils.pathToURI(projectRootPath), + pattern: c.compilerLogPartialPath, + }, kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete, }, { - globPattern: path.join( - projectRootPath, - "**", - c.buildNinjaPartialPath, - ), + // Watch ninja output + globPattern: { + baseUri: utils.pathToURI(projectRootPath), + pattern: path.join("**", c.buildNinjaPartialPath), + }, kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete, }, { - globPattern: `${path.join(projectRootPath, "**", c.compilerDirPartialPath)}/**/*.{cmt,cmi}`, - kind: p.WatchKind.Change | p.WatchKind.Delete, + // Watch build artifacts + globPattern: { + baseUri: utils.pathToURI(projectRootPath), + pattern: path.join(c.compilerDirPartialPath, "**/*.{cmi,cmt}"), + }, + kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete, }, ], ); @@ -1264,10 +1575,26 @@ async function onMessage(msg: p.Message) { await onWorkspaceDidChangeWatchedFiles(params); } else if (msg.method === DidOpenTextDocumentNotification.method) { let params = msg.params as p.DidOpenTextDocumentParams; - await openedFile(params.textDocument.uri, params.textDocument.text); - await sendUpdatedDiagnostics(); + await openedFile( + params.textDocument.uri as utils.FileURI, + params.textDocument.text, + ); + + if (config.extensionConfiguration.incrementalTypechecking?.enable) { + // We run incremental typechecking to get the most accurate results + // the current file may have deviated from the last compilation. + updateOpenedFile( + params.textDocument.uri as utils.FileURI, + params.textDocument.text, + ); + } else { + // Check the .compiler.log file for diagnostics + // This could be stale data of course. 
+ await sendUpdatedDiagnostics(); + } + await updateDiagnosticSyntax( - params.textDocument.uri, + params.textDocument.uri as utils.FileURI, params.textDocument.text, ); } else if (msg.method === DidChangeTextDocumentNotification.method) { @@ -1280,18 +1607,18 @@ async function onMessage(msg: p.Message) { } else { // we currently only support full changes updateOpenedFile( - params.textDocument.uri, + params.textDocument.uri as utils.FileURI, changes[changes.length - 1].text, ); await updateDiagnosticSyntax( - params.textDocument.uri, + params.textDocument.uri as utils.FileURI, changes[changes.length - 1].text, ); } } } else if (msg.method === DidCloseTextDocumentNotification.method) { let params = msg.params as p.DidCloseTextDocumentParams; - await closedFile(params.textDocument.uri); + await closedFile(params.textDocument.uri as utils.FileURI); } else if (msg.method === DidChangeConfigurationNotification.type.method) { // Can't seem to get this notification to trigger, but if it does this will be here and ensure we're synced up at the server. askForAllCurrentConfiguration(); @@ -1309,17 +1636,22 @@ async function onMessage(msg: p.Message) { }; send(response); } else if (msg.method === "initialize") { + const serverVersion = await getServerVersion(); + getLogger().info( + `Received initialize request from client. 
Server version: ${serverVersion}`, + ); // Save initial configuration, if present let initParams = msg.params as InitializeParams; for (const workspaceFolder of initParams.workspaceFolders || []) { - const workspaceRootPath = fileURLToPath(workspaceFolder.uri); - workspaceFolders.add(workspaceRootPath); + workspaceFolders.add( + utils.uriToNormalizedPath(workspaceFolder.uri as utils.FileURI), + ); } let initialConfiguration = initParams.initializationOptions ?.extensionConfiguration as extensionConfiguration | undefined; if (initialConfiguration != null) { - config.extensionConfiguration = initialConfiguration; + applyUserConfiguration(initialConfiguration); } // These are static configuration options the client can set to enable certain @@ -1388,6 +1720,9 @@ async function onMessage(msg: p.Message) { retriggerCharacters: ["=", ","], } : undefined, + executeCommandProvider: { + commands: ["rescript/dumpServerState"], + }, }, }; let response: p.ResponseMessage = { @@ -1431,6 +1766,19 @@ async function onMessage(msg: p.Message) { clearInterval(pullConfigurationPeriodically); } + // Kill all build watchers on shutdown + for (const [projectPath, projectState] of projectsFiles) { + if (projectState.bsbWatcherByEditor != null) { + getLogger().info(`Killing build watcher for ${projectPath}`); + utils.killBuildWatcher( + projectState.bsbWatcherByEditor, + projectState.buildRootPath ?? 
undefined, + ); + projectState.bsbWatcherByEditor = null; + projectState.buildRootPath = null; + } + } + let response: p.ResponseMessage = { jsonrpc: c.jsonrpcVersion, id: msg.id, @@ -1467,6 +1815,8 @@ async function onMessage(msg: p.Message) { send(await createInterface(msg)); } else if (msg.method === openCompiledFileRequest.method) { send(openCompiledFile(msg)); + } else if (msg.method === startBuildRequest.method) { + send(await handleStartBuildRequest(msg)); } else if (msg.method === p.InlayHintRequest.method) { let params = msg.params as InlayHintParams; let extName = path.extname(params.textDocument.uri); @@ -1485,46 +1835,18 @@ async function onMessage(msg: p.Message) { if (extName === c.resExt) { send(await signatureHelp(msg)); } - } else if (msg.method === "rescript/dumpServerState") { - // Custom debug endpoint: dump current server state (config + projectsFiles) - try { - const projects = Array.from(projectsFiles.entries()).map( - ([projectRootPath, pf]) => ({ - projectRootPath, - openFiles: Array.from(pf.openFiles), - filesWithDiagnostics: Array.from(pf.filesWithDiagnostics), - filesDiagnostics: pf.filesDiagnostics, - rescriptVersion: pf.rescriptVersion, - bscBinaryLocation: pf.bscBinaryLocation, - editorAnalysisLocation: pf.editorAnalysisLocation, - namespaceName: pf.namespaceName, - hasPromptedToStartBuild: pf.hasPromptedToStartBuild, - bsbWatcherByEditor: - pf.bsbWatcherByEditor != null - ? { pid: pf.bsbWatcherByEditor.pid ?? 
null } - : null, - }), - ); - - const result = { - config: config.extensionConfiguration, - projects, - workspaceFolders: Array.from(workspaceFolders), - }; - - let response: p.ResponseMessage = { - jsonrpc: c.jsonrpcVersion, - id: msg.id, - result, - }; - send(response); - } catch (e) { + } else if (msg.method === p.ExecuteCommandRequest.method) { + // Standard LSP executeCommand - supports editor-agnostic command execution + const params = msg.params as p.ExecuteCommandParams; + if (params.command === "rescript/dumpServerState") { + send(await dumpServerState(msg)); + } else { let response: p.ResponseMessage = { jsonrpc: c.jsonrpcVersion, id: msg.id, error: { - code: p.ErrorCodes.InternalError, - message: `Failed to dump server state: ${String(e)}`, + code: p.ErrorCodes.InvalidRequest, + message: `Unknown command: ${params.command}`, }, }; send(response); @@ -1554,7 +1876,7 @@ async function onMessage(msg: p.Message) { extensionConfiguration | null | undefined, ]; if (configuration != null) { - config.extensionConfiguration = configuration; + applyUserConfiguration(configuration); } } } else if ( @@ -1565,20 +1887,33 @@ async function onMessage(msg: p.Message) { msg.result.title === c.startBuildAction ) { let msg_ = msg.result as clientSentBuildAction; - let projectRootPath = msg_.projectRootPath; - // TODO: sometime stale .bsb.lock dangling + // Normalize the path since JSON serialization loses the branded type + // The type says it's NormalizedPath, so we ensure it actually is + let projectRootPath = utils.normalizePath(msg_.projectRootPath); + if (projectRootPath == null) { + // Should never happen, but handle gracefully and log a warning + console.warn( + "[ReScript Language Server] Failed to normalize projectRootPath from clientSentBuildAction:", + msg_.projectRootPath, + ); + return; + } + // TODO: sometime stale lock file dangling // TODO: close watcher when lang-server shuts down. 
However, by Node's // default, these subprocesses are automatically killed when this // language-server process exits let rescriptBinaryPath = await findRescriptBinary(projectRootPath); if (rescriptBinaryPath != null) { - let bsbProcess = utils.runBuildWatcherUsingValidBuildPath( - rescriptBinaryPath, - projectRootPath, - ); - let root = projectsFiles.get(projectRootPath)!; - root.bsbWatcherByEditor = bsbProcess; - // bsbProcess.on("message", (a) => console.log(a)); + // Derive monorepo root from binary path for monorepo support + const monorepoRootPath = + utils.getMonorepoRootFromBinaryPath(rescriptBinaryPath); + // Note: projectRootPath here might be the monorepo root (buildRootPath from prompt), + // which may not have project state if the file was opened from a subpackage. + // Use createProjectStateIfMissing to handle this case. + await startBuildWatcher(projectRootPath, rescriptBinaryPath, { + createProjectStateIfMissing: true, + monorepoRootPath, + }); } } } diff --git a/server/src/utils.ts b/server/src/utils.ts index ac0d346a7..e249f3398 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -10,55 +10,103 @@ import fs from "fs"; import fsAsync from "fs/promises"; import * as os from "os"; import semver from "semver"; +import { fileURLToPath, pathToFileURL } from "url"; +import { getLogger } from "./logger"; +import { + findBinary as findSharedBinary, + getMonorepoRootFromBinaryPath as getMonorepoRootFromBinaryPathShared, + type BinaryName, +} from "../../shared/src/findBinary"; +import { findProjectRootOfFileInDir as findProjectRootOfFileInDirShared } from "../../shared/src/projectRoots"; import * as codeActions from "./codeActions"; import * as c from "./constants"; import * as lookup from "./lookup"; import { reportError } from "./errorReporter"; import config from "./config"; -import { filesDiagnostics, projectsFiles } from "./projectFiles"; +import { filesDiagnostics, projectsFiles, projectFiles } from "./projectFiles"; import { 
workspaceFolders } from "./server"; import { rewatchLockPartialPath, rescriptLockPartialPath } from "./constants"; import { findRescriptRuntimesInProject } from "./find-runtime"; +/** + * Branded type for normalized file paths. + * + * All paths stored as keys in `projectsFiles` are normalized to ensure + * consistent lookups and prevent path format mismatches (e.g., trailing + * slashes, symlinks, relative vs absolute paths). + * + * Use `normalizePath()` to convert a regular path to a `NormalizedPath`. + */ +export type NormalizedPath = string & { __brand: "NormalizedPath" }; + +/** + * Branded type for file URIs (e.g., `file:///path/to/file.res`). + * + * This represents a URI as used in the Language Server Protocol. + * Use `uriToNormalizedPath()` to convert a URI to a normalized file path. + */ +export type FileURI = string & { __brand: "FileURI" }; + +/** + * Normalizes a file path and returns it as a `NormalizedPath`. + * + * This function should be used whenever storing a path as a key in `projectsFiles` + * or when comparing paths that need to match exactly. + * + * @param filePath - The path to normalize (can be null) + * @returns The normalized path, or null if input was null + */ +export function normalizePath(filePath: string | null): NormalizedPath | null { + // `path.normalize` ensures we can assume string is now NormalizedPath + return filePath != null ? (path.normalize(filePath) as NormalizedPath) : null; +} + +/** + * Converts a file URI (e.g., `file:///path/to/file.res`) to a normalized file path. + * + * This is the preferred way to convert LSP DocumentUri values to file paths, + * as it ensures the resulting path is normalized for consistent use throughout + * the codebase. 
+ * + * @param uri - The file URI to convert + * @returns The normalized file path + * @throws If the URI cannot be converted to a file path + */ +export function uriToNormalizedPath(uri: FileURI): NormalizedPath { + const filePath = fileURLToPath(uri); + // fileURLToPath always returns a string (throws on invalid URI), so we can directly normalize + return path.normalize(filePath) as NormalizedPath; +} + let tempFilePrefix = "rescript_format_file_" + process.pid + "_"; let tempFileId = 0; -export let createFileInTempDir = (extension = "") => { +export let createFileInTempDir = (extension = ""): NormalizedPath => { let tempFileName = tempFilePrefix + tempFileId + extension; tempFileId = tempFileId + 1; - return path.join(os.tmpdir(), tempFileName); + // `os.tmpdir` returns an absolute path, so `path.join` ensures we can assume string is now NormalizedPath + return path.join(os.tmpdir(), tempFileName) as NormalizedPath; }; -let findProjectRootOfFileInDir = ( - source: p.DocumentUri, -): null | p.DocumentUri => { - let dir = path.dirname(source); - if ( - fs.existsSync(path.join(dir, c.rescriptJsonPartialPath)) || - fs.existsSync(path.join(dir, c.bsconfigPartialPath)) - ) { - return dir; - } else { - if (dir === source) { - // reached top - return null; - } else { - return findProjectRootOfFileInDir(dir); - } - } -}; +function findProjectRootOfFileInDir( + source: NormalizedPath, +): NormalizedPath | null { + return normalizePath(findProjectRootOfFileInDirShared(source)); +} -// TODO: races here? -// TODO: this doesn't handle file:/// scheme -export let findProjectRootOfFile = ( - source: p.DocumentUri, - allowDir?: boolean, -): null | p.DocumentUri => { - // First look in project files - let foundRootFromProjectFiles: string | null = null; +/** + * Searches for a project root path in projectsFiles that contains the given file path. + * Excludes exact matches - the source must be a file inside a project, not the project root itself. 
+ */ +function findProjectRootContainingFile( + source: NormalizedPath, +): NormalizedPath | null { + let foundRootFromProjectFiles: NormalizedPath | null = null; for (const rootPath of projectsFiles.keys()) { - if (source.startsWith(rootPath) && (!allowDir || source !== rootPath)) { + // Both are normalized, so direct comparison works + // For files, we exclude exact matches (source !== rootPath) + if (source.startsWith(rootPath) && source !== rootPath) { // Prefer the longest path (most nested) if ( foundRootFromProjectFiles == null || @@ -69,135 +117,138 @@ export let findProjectRootOfFile = ( } } - if (foundRootFromProjectFiles != null) { - return foundRootFromProjectFiles; - } else { - const isDir = path.extname(source) === ""; - return findProjectRootOfFileInDir( - isDir && !allowDir ? path.join(source, "dummy.res") : source, - ); - } -}; + return foundRootFromProjectFiles; +} -// If ReScript < 12.0.0-alpha.13, then we want `{project_root}/node_modules/rescript/{c.platformDir}/{binary}`. -// Otherwise, we want to dynamically import `{project_root}/node_modules/rescript` and from `binPaths` get the relevant binary. 
-// We won't know which version is in the project root until we read and parse `{project_root}/node_modules/rescript/package.json` -let findBinary = async ( - projectRootPath: p.DocumentUri | null, - binary: - | "bsc.exe" - | "rescript-editor-analysis.exe" - | "rescript" - | "rewatch.exe" - | "rescript.exe", -) => { - if (config.extensionConfiguration.platformPath != null) { - return path.join(config.extensionConfiguration.platformPath, binary); - } - - if (projectRootPath !== null) { - try { - const compilerInfo = path.resolve( - projectRootPath, - c.compilerInfoPartialPath, - ); - const contents = await fsAsync.readFile(compilerInfo, "utf8"); - const compileInfo = JSON.parse(contents); - if (compileInfo && compileInfo.bsc_path) { - const bsc_path = compileInfo.bsc_path; - if (binary === "bsc.exe") { - return bsc_path; - } else { - const binary_path = path.join(path.dirname(bsc_path), binary); - return binary_path; - } +/** + * Searches for a project root path in projectsFiles that matches the given directory path. + * Allows exact matches - the source can be the project root directory itself. 
+ */ +function findProjectRootMatchingDir( + source: NormalizedPath, +): NormalizedPath | null { + let foundRootFromProjectFiles: NormalizedPath | null = null; + for (const rootPath of projectsFiles.keys()) { + // Both are normalized, so direct comparison works + // For directories, we allow exact matches + if (source.startsWith(rootPath)) { + // Prefer the longest path (most nested) + if ( + foundRootFromProjectFiles == null || + rootPath.length > foundRootFromProjectFiles.length + ) { + foundRootFromProjectFiles = rootPath; } - } catch {} + } } - const rescriptDir = lookup.findFilePathFromProjectRoot( - projectRootPath, - path.join("node_modules", "rescript"), - ); - if (rescriptDir == null) { - return null; - } + return foundRootFromProjectFiles; +} - let rescriptVersion = null; - let rescriptJSWrapperPath = null; - try { - const rescriptPackageJSONPath = path.join(rescriptDir, "package.json"); - const rescriptPackageJSON = JSON.parse( - await fsAsync.readFile(rescriptPackageJSONPath, "utf-8"), - ); - rescriptVersion = rescriptPackageJSON.version; - rescriptJSWrapperPath = rescriptPackageJSON.bin.rescript; - } catch (error) { - return null; +/** + * Finds the project root for a given file path. + * This is the main function used throughout the codebase for finding project roots. 
+ */ +export function findProjectRootOfFile( + source: NormalizedPath, +): NormalizedPath | null { + // First look in project files - exclude exact matches since we're looking for a file + let foundRoot = findProjectRootContainingFile(source); + + if (foundRoot != null) { + return foundRoot; } - let binaryPath: string | null = null; - if (binary == "rescript") { - // Can't use the native bsb/rescript since we might need the watcher -w - // flag, which is only in the JS wrapper - binaryPath = path.join(rescriptDir, rescriptJSWrapperPath); - } else if (semver.gte(rescriptVersion, "12.0.0-alpha.13")) { - // TODO: export `binPaths` from `rescript` package so that we don't need to - // copy the logic for figuring out `target`. - const target = `${process.platform}-${process.arch}`; - const targetPackagePath = path.join( - rescriptDir, - "..", - `@rescript/${target}/bin.js`, - ); - const { binPaths } = await import(targetPackagePath); - - if (binary == "bsc.exe") { - binaryPath = binPaths.bsc_exe; - } else if (binary == "rescript-editor-analysis.exe") { - binaryPath = binPaths.rescript_editor_analysis_exe; - } else if (binary == "rewatch.exe") { - binaryPath = binPaths.rewatch_exe; - } else if (binary == "rescript.exe") { - binaryPath = binPaths.rescript_exe; - } - } else { - binaryPath = path.join(rescriptDir, c.platformDir, binary); + // Fallback: search the filesystem + const foundPath = findProjectRootOfFileInDir(source); + return foundPath; +} + +/** + * Finds the project root for a given directory path. + * This allows exact matches and is used for workspace/lockfile detection. 
+ */ +export function findProjectRootOfDir( + source: NormalizedPath, +): NormalizedPath | null { + // First look in project files - allow exact matches since we're looking for a directory + let foundRoot = findProjectRootMatchingDir(source); + + if (foundRoot != null) { + return foundRoot; } - if (binaryPath != null && fs.existsSync(binaryPath)) { - return binaryPath; - } else { + // Fallback: search the filesystem + const foundPath = findProjectRootOfFileInDir(source); + return foundPath; +} + +/** + * Gets the project file for a given project root path. + * + * All keys in `projectsFiles` are normalized (see `openedFile` in server.ts). + * This function accepts a normalized project root path (or null) and performs a direct lookup. + * The path must already be normalized before calling this function. + * + * @param projectRootPath - The normalized project root path to look up (or null) + * @returns The project file if found, null otherwise + */ +export let getProjectFile = ( + projectRootPath: NormalizedPath | null, +): projectFiles | null => { + if (projectRootPath == null) { return null; } + return projectsFiles.get(projectRootPath) ?? null; +}; + +// If ReScript < 12.0.0-alpha.13, then we want `{project_root}/node_modules/rescript/{c.platformDir}/{binary}`. +// Otherwise, we want to dynamically import `{project_root}/node_modules/rescript` and from `binPaths` get the relevant binary. +// We won't know which version is in the project root until we read and parse `{project_root}/node_modules/rescript/package.json` +let findBinary = async ( + projectRootPath: NormalizedPath | null, + binary: BinaryName, +): Promise => { + const result = await findSharedBinary({ + projectRootPath, + binary, + platformPath: config.extensionConfiguration.platformPath ?? 
null, + }); + return normalizePath(result); }; -export let findRescriptBinary = (projectRootPath: p.DocumentUri | null) => +export let findRescriptBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "rescript"); -export let findBscExeBinary = (projectRootPath: p.DocumentUri | null) => +export let getMonorepoRootFromBinaryPath = ( + binaryPath: string | null, +): NormalizedPath | null => + normalizePath(getMonorepoRootFromBinaryPathShared(binaryPath)); + +export let findBscExeBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "bsc.exe"); -export let findEditorAnalysisBinary = (projectRootPath: p.DocumentUri | null) => - findBinary(projectRootPath, "rescript-editor-analysis.exe"); +export let findEditorAnalysisBinary = ( + projectRootPath: NormalizedPath | null, +) => findBinary(projectRootPath, "rescript-editor-analysis.exe"); -export let findRewatchBinary = (projectRootPath: p.DocumentUri | null) => +export let findRewatchBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "rewatch.exe"); -export let findRescriptExeBinary = (projectRootPath: p.DocumentUri | null) => +export let findRescriptExeBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "rescript.exe"); -type execResult = +type execResult = | { kind: "success"; - result: string; + result: T; } | { kind: "error"; error: string; }; -type formatCodeResult = execResult; +type formatCodeResult = execResult; export let formatCode = ( bscPath: p.DocumentUri | null, @@ -238,7 +289,7 @@ export let formatCode = ( }; export async function findReScriptVersionForProjectRoot( - projectRootPath: string | null, + projectRootPath: NormalizedPath | null, ): Promise { if (projectRootPath == null) { return undefined; @@ -270,7 +321,7 @@ if (fs.existsSync(c.builtinAnalysisDevPath)) { } export let runAnalysisAfterSanityCheck = async ( - filePath: p.DocumentUri, + filePath: NormalizedPath, args: Array, 
projectRequired = false, ) => { @@ -279,8 +330,9 @@ export let runAnalysisAfterSanityCheck = async ( return null; } let rescriptVersion = - projectsFiles.get(projectRootPath ?? "")?.rescriptVersion ?? - (await findReScriptVersionForProjectRoot(projectRootPath)); + (projectRootPath + ? projectsFiles.get(projectRootPath)?.rescriptVersion + : null) ?? (await findReScriptVersionForProjectRoot(projectRootPath)); let binaryPath = builtinBinaryPath; @@ -347,7 +399,7 @@ export let runAnalysisAfterSanityCheck = async ( }; export let runAnalysisCommand = async ( - filePath: p.DocumentUri, + filePath: NormalizedPath, args: Array, msg: RequestMessage, projectRequired = true, @@ -366,7 +418,7 @@ export let runAnalysisCommand = async ( }; export let getReferencesForPosition = async ( - filePath: p.DocumentUri, + filePath: NormalizedPath, position: p.Position, ) => await runAnalysisAfterSanityCheck(filePath, [ @@ -389,8 +441,8 @@ export const toCamelCase = (text: string): string => { * in the workspace, so we return null (which will default to projectRootPath). */ export function computeWorkspaceRootPathFromLockfile( - projectRootPath: string | null, -): string | null { + projectRootPath: NormalizedPath | null, +): NormalizedPath | null { if (projectRootPath == null) { return null; } @@ -413,28 +465,29 @@ export function computeWorkspaceRootPathFromLockfile( // if we find a rewatch.lock in the project root, it's a compilation of a local package // in the workspace. return !foundRewatchLockfileInProjectRoot - ? findProjectRootOfFile(projectRootPath, true) + ? findProjectRootOfDir(projectRootPath) : null; } // Shared cache: key is either workspace root path or project root path -const runtimePathCache = new Map(); +const runtimePathCache = new Map(); /** * Gets the runtime path from a workspace root path. * This function is cached per workspace root path. 
*/ export async function getRuntimePathFromWorkspaceRoot( - workspaceRootPath: string, -): Promise { + workspaceRootPath: NormalizedPath, +): Promise { // Check cache first if (runtimePathCache.has(workspaceRootPath)) { return runtimePathCache.get(workspaceRootPath)!; } // Compute and cache - let rescriptRuntime: string | null = - config.extensionConfiguration.runtimePath ?? null; + let rescriptRuntime: NormalizedPath | null = normalizePath( + config.extensionConfiguration.runtimePath ?? null, + ); if (rescriptRuntime !== null) { runtimePathCache.set(workspaceRootPath, rescriptRuntime); @@ -455,8 +508,8 @@ export async function getRuntimePathFromWorkspaceRoot( * This function is cached per project root path. */ export async function getRuntimePathFromProjectRoot( - projectRootPath: string | null, -): Promise { + projectRootPath: NormalizedPath | null, +): Promise { if (projectRootPath == null) { return null; } @@ -467,7 +520,7 @@ export async function getRuntimePathFromProjectRoot( } // Compute workspace root and resolve runtime - const workspaceRootPath = + const workspaceRootPath: NormalizedPath = computeWorkspaceRootPathFromLockfile(projectRootPath) ?? projectRootPath; // Check cache again with workspace root (might have been cached from a previous call) @@ -486,9 +539,17 @@ export async function getRuntimePathFromProjectRoot( return result; } +/** + * Returns a snapshot of the runtime path cache as a plain object. + * Useful for debugging and state dumps. 
+ */ +export function getRuntimePathCacheSnapshot(): Record { + return Object.fromEntries(runtimePathCache); +} + export const getNamespaceNameFromConfigFile = ( - projDir: p.DocumentUri, -): execResult => { + projDir: NormalizedPath, +): execResult => { let config = lookup.readConfig(projDir); let result = ""; @@ -513,9 +574,9 @@ export const getNamespaceNameFromConfigFile = ( export let getCompiledFilePath = ( filePath: string, - projDir: string, -): execResult => { - let error: execResult = { + projDir: NormalizedPath, +): execResult => { + let error: execResult = { kind: "error", error: "Could not read ReScript config file", }; @@ -543,19 +604,35 @@ export let getCompiledFilePath = ( result = compiledPath; } + // Normalize the path before returning + const normalizedResult = normalizePath(result)!; + return { kind: "success", - result, + result: normalizedResult, }; }; export let runBuildWatcherUsingValidBuildPath = ( buildPath: p.DocumentUri, projectRootPath: p.DocumentUri, + rescriptVersion?: string | null, ) => { let cwdEnv = { cwd: projectRootPath, }; + // ReScript >= 12.0.0 uses "rescript watch" instead of "rescript build -w" + let useWatchCommand = + rescriptVersion != null && + semver.valid(rescriptVersion) != null && + semver.gte(rescriptVersion, "12.0.0"); + let args = useWatchCommand ? 
["watch"] : ["build", "-w"]; + + getLogger().info( + `Running build watcher: ${buildPath} ${args.join(" ")} in ${projectRootPath}`, + ); + + let proc: childProcess.ChildProcess; if (process.platform === "win32") { /* - a node.js script in node_modules/.bin on windows is wrapped in a @@ -569,9 +646,86 @@ export let runBuildWatcherUsingValidBuildPath = ( (since the path might have spaces), which `execFile` would have done for you under the hood */ - return childProcess.exec(`"${buildPath}".cmd build -w`, cwdEnv); + proc = childProcess.exec(`"${buildPath}".cmd ${args.join(" ")}`, cwdEnv); } else { - return childProcess.execFile(buildPath, ["build", "-w"], cwdEnv); + // Use spawn with detached:true so we can kill the entire process group later + // This ensures child processes (like native rescript binary) are also killed + // Use "pipe" for stdin instead of "ignore" because older ReScript versions (9.x, 10.x, 11.x) + // have a handler that exits when stdin closes: `process.stdin.on("close", exitProcess)` + proc = childProcess.spawn(buildPath, args, { + ...cwdEnv, + detached: true, + stdio: ["pipe", "pipe", "pipe"], + }); + } + + proc.on("error", (err) => { + getLogger().error(`Build watcher error: ${err.message}`); + }); + + proc.on("exit", (code, signal) => { + getLogger().info( + `Build watcher exited with code ${code}, signal ${signal}`, + ); + }); + + if (proc.stdout) { + proc.stdout.on("data", (data) => { + getLogger().log(`[build stdout] ${data.toString().trim()}`); + }); + } + + if (proc.stderr) { + proc.stderr.on("data", (data) => { + getLogger().log(`[build stderr] ${data.toString().trim()}`); + }); + } + + return proc; +}; + +/** + * Kill a build watcher process and all its children, and clean up the lock file. + * On Unix, kills the entire process group if the process was started with detached:true. + * Also removes the lock file since the rescript compiler doesn't clean it up on SIGTERM. 
+ */ +export let killBuildWatcher = ( + proc: childProcess.ChildProcess, + buildRootPath?: string, +): void => { + if (proc.pid == null) { + return; + } + try { + if (process.platform !== "win32") { + // Kill the entire process group (negative PID) + // This ensures child processes spawned by the JS wrapper are also killed + process.kill(-proc.pid, "SIGTERM"); + } else { + proc.kill(); + } + } catch (e) { + // Process might already be dead + getLogger().log(`Error killing build watcher: ${e}`); + } + + // Clean up lock files since the rescript compiler doesn't remove them on SIGTERM + // ReScript >= 12 uses lib/rescript.lock, older versions use .bsb.lock + if (buildRootPath != null) { + const lockFiles = [ + path.join(buildRootPath, "lib", "rescript.lock"), + path.join(buildRootPath, ".bsb.lock"), + ]; + for (const lockFilePath of lockFiles) { + try { + if (fs.existsSync(lockFilePath)) { + fs.unlinkSync(lockFilePath); + getLogger().log(`Removed lock file: ${lockFilePath}`); + } + } catch (e) { + getLogger().log(`Error removing lock file: ${e}`); + } + } } }; @@ -617,8 +771,9 @@ export let runBuildWatcherUsingValidBuildPath = ( */ // parser helpers -export let pathToURI = (file: string) => { - return process.platform === "win32" ? `file:\\\\\\${file}` : `file://${file}`; +export let pathToURI = (file: NormalizedPath): FileURI => { + // `pathToFileURL` ensures we can assume string is now FileURI + return pathToFileURL(file).toString() as FileURI; }; let parseFileAndRange = (fileAndRange: string) => { // https://github.com/rescript-lang/rescript-compiler/blob/0a3f4bb32ca81e89cefd5a912b8795878836f883/jscomp/super_errors/super_location.ml#L15-L25 @@ -641,8 +796,9 @@ let parseFileAndRange = (fileAndRange: string) => { let match = trimmedFileAndRange.match(regex); if (match === null) { // no location! 
Though LSP insist that we provide at least a dummy location + const normalizedPath = normalizePath(trimmedFileAndRange)!; return { - file: pathToURI(trimmedFileAndRange), + file: pathToURI(normalizedPath), range: { start: { line: 0, character: 0 }, end: { line: 0, character: 0 }, @@ -686,8 +842,9 @@ let parseFileAndRange = (fileAndRange: string) => { end: { line: parseInt(endLine) - 1, character: parseInt(endChar) }, }; } + const normalizedFile = normalizePath(file)!; return { - file: pathToURI(file), + file: pathToURI(normalizedFile), range, }; }; diff --git a/server/tsconfig.json b/server/tsconfig.json index 5dab877e0..c74b81c04 100644 --- a/server/tsconfig.json +++ b/server/tsconfig.json @@ -7,9 +7,9 @@ "sourceMap": true, "strict": true, "outDir": "out", - "rootDir": "src", + "rootDirs": ["src", "../shared/src"], "esModuleInterop": true }, - "include": ["src"], + "include": ["src", "../shared/src"], "exclude": ["node_modules"] } diff --git a/shared/src/findBinary.ts b/shared/src/findBinary.ts new file mode 100644 index 000000000..090157715 --- /dev/null +++ b/shared/src/findBinary.ts @@ -0,0 +1,165 @@ +import * as fs from "fs"; +import * as fsAsync from "fs/promises"; +import * as path from "path"; +import * as semver from "semver"; +import * as url from "url"; + +export type BinaryName = + | "bsc.exe" + | "rescript-editor-analysis.exe" + | "rescript-tools.exe" + | "rescript" + | "rewatch.exe" + | "rescript.exe"; + +type FindBinaryOptions = { + projectRootPath: string | null; + binary: BinaryName; + platformPath?: string | null; +}; + +const compilerInfoPartialPath = path.join("lib", "bs", "compiler-info.json"); +// For arm64, try the arm64-specific directory first (e.g., darwinarm64), +// then fall back to the generic platform directory (e.g., darwin) for older ReScript versions +const platformDirArm64 = + process.arch === "arm64" ? 
process.platform + process.arch : null; +const platformDirGeneric = process.platform; + +const normalizePath = (filePath: string | null): string | null => { + return filePath != null ? path.normalize(filePath) : null; +}; + +const findFilePathFromProjectRoot = ( + directory: string | null, + filePartialPath: string, +): string | null => { + if (directory == null) { + return null; + } + + const filePath = path.join(directory, filePartialPath); + if (fs.existsSync(filePath)) { + return normalizePath(filePath); + } + + const parentDirStr = path.dirname(directory); + if (parentDirStr === directory) { + return null; + } + + return findFilePathFromProjectRoot( + normalizePath(parentDirStr), + filePartialPath, + ); +}; + +export const findBinary = async ({ + projectRootPath, + binary, + platformPath, +}: FindBinaryOptions): Promise => { + if (platformPath != null) { + const result = path.join(platformPath, binary); + return normalizePath(result); + } + + if (projectRootPath !== null) { + try { + const compilerInfo = path.resolve( + projectRootPath, + compilerInfoPartialPath, + ); + const contents = await fsAsync.readFile(compilerInfo, "utf8"); + const compileInfo = JSON.parse(contents); + if (compileInfo && compileInfo.bsc_path) { + const bscPath = compileInfo.bsc_path; + if (binary === "bsc.exe") { + return normalizePath(bscPath); + } else if (binary !== "rescript") { + // For native binaries (not "rescript" JS wrapper), use the bsc_path directory + const binaryPath = path.join(path.dirname(bscPath), binary); + return normalizePath(binaryPath); + } + // For "rescript", fall through to find the JS wrapper below + } + } catch {} + } + + const rescriptDir = findFilePathFromProjectRoot( + projectRootPath, + path.join("node_modules", "rescript"), + ); + if (rescriptDir == null) { + return null; + } + + let rescriptVersion = null; + let rescriptJSWrapperPath = null; + try { + const rescriptPackageJSONPath = path.join(rescriptDir, "package.json"); + const rescriptPackageJSON = 
JSON.parse( + await fsAsync.readFile(rescriptPackageJSONPath, "utf-8"), + ); + rescriptVersion = rescriptPackageJSON.version; + rescriptJSWrapperPath = rescriptPackageJSON.bin.rescript; + } catch { + return null; + } + + let binaryPath: string | null = null; + if (binary === "rescript") { + binaryPath = path.join(rescriptDir, rescriptJSWrapperPath); + } else if (semver.gte(rescriptVersion, "12.0.0-alpha.13")) { + const target = `${process.platform}-${process.arch}`; + const targetPackagePath = path.join( + fs.realpathSync(rescriptDir), + "..", + `@rescript/${target}/bin.js`, + ); + const { binPaths } = await import(url.fileURLToPath(targetPackagePath)); + + if (binary === "bsc.exe") { + binaryPath = binPaths.bsc_exe; + } else if (binary === "rescript-editor-analysis.exe") { + binaryPath = binPaths.rescript_editor_analysis_exe; + } else if (binary === "rewatch.exe") { + binaryPath = binPaths.rewatch_exe; + } else if (binary === "rescript.exe") { + binaryPath = binPaths.rescript_exe; + } + } else { + // For older ReScript versions (< 12.0.0-alpha.13), try arm64-specific directory first, + // then fall back to generic platform directory (older versions don't have arm64 directories) + if (platformDirArm64 != null) { + const arm64Path = path.join(rescriptDir, platformDirArm64, binary); + if (fs.existsSync(arm64Path)) { + binaryPath = arm64Path; + } + } + if (binaryPath == null) { + binaryPath = path.join(rescriptDir, platformDirGeneric, binary); + } + } + + if (binaryPath != null && fs.existsSync(binaryPath)) { + return normalizePath(binaryPath); + } + + return null; +}; + +/** + * Derives the monorepo root directory from a binary path. + * For a path like `/monorepo/node_modules/.bin/rescript`, returns `/monorepo`. + * This is useful for monorepo support where the binary is in the monorepo root's + * node_modules, but the project root (nearest rescript.json) might be a subpackage. 
+ */ +export const getMonorepoRootFromBinaryPath = ( + binaryPath: string | null, +): string | null => { + if (binaryPath == null) { + return null; + } + const match = binaryPath.match(/^(.*?)[\\/]+node_modules[\\/]+/); + return match ? normalizePath(match[1]) : null; +}; diff --git a/shared/src/projectRoots.ts b/shared/src/projectRoots.ts new file mode 100644 index 000000000..2ddd8a08f --- /dev/null +++ b/shared/src/projectRoots.ts @@ -0,0 +1,40 @@ +import * as fs from "fs"; +import * as path from "path"; + +export const normalizePath = (filePath: string | null): string | null => { + return filePath != null ? path.normalize(filePath) : null; +}; + +export const findProjectRootOfFileInDir = (source: string): string | null => { + const normalizedSource = normalizePath(source); + if (normalizedSource == null) { + return null; + } + + const dir = normalizePath(path.dirname(normalizedSource)); + if (dir == null) { + return null; + } + + if ( + fs.existsSync(path.join(dir, "rescript.json")) || + fs.existsSync(path.join(dir, "bsconfig.json")) + ) { + return dir; + } + + if (dir === normalizedSource) { + return null; + } + + return findProjectRootOfFileInDir(dir); +}; + +export const findProjectRootOfFile = (source: string): string | null => { + const normalizedSource = normalizePath(source); + if (normalizedSource == null) { + return null; + } + + return findProjectRootOfFileInDir(normalizedSource); +}; diff --git a/shared/tsconfig.json b/shared/tsconfig.json new file mode 100644 index 000000000..b0924c856 --- /dev/null +++ b/shared/tsconfig.json @@ -0,0 +1,12 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "es2019", + "lib": ["ES2019"], + "outDir": "out", + "rootDir": "src", + "sourceMap": true + }, + "include": ["src"], + "exclude": ["node_modules"] +} diff --git a/tsconfig.json b/tsconfig.json index 0f5dff06a..7cdfe7c07 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -15,6 +15,9 @@ }, { "path": "./server" + }, + { + "path": "./shared" } ] }