From 9d0cbc0103ce7847bbb701fdb44039525c61e6e3 Mon Sep 17 00:00:00 2001 From: nojaf Date: Fri, 28 Nov 2025 09:47:19 +0100 Subject: [PATCH 01/56] Expose runtime cache in dump command --- server/src/find-runtime.ts | 21 +++++++-------------- server/src/server.ts | 1 + server/src/utils.ts | 8 ++++++++ 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/server/src/find-runtime.ts b/server/src/find-runtime.ts index 42512e044..a863321dd 100644 --- a/server/src/find-runtime.ts +++ b/server/src/find-runtime.ts @@ -149,21 +149,14 @@ async function findRuntimePath(project: string): Promise { return rescriptRuntimeDirs.map((runtime) => resolve(runtime)); } -function findRuntimeCached(): (project: string) => Promise { - const cache = new Map(); - return async (project: string) => { - if (cache.has(project)) { - return cache.get(project)!; - } - const runtimes = await findRuntimePath(project); - cache.set(project, runtimes); - return runtimes; - }; -} - /** * Find all installed @rescript/runtime directories in the given project path. * In a perfect world, there should be exactly one. - * This function is cached per project path. + * Note: This function is not cached here. Caching is handled by the caller + * (see getRuntimePathFromWorkspaceRoot in utils.ts). 
*/ -export const findRescriptRuntimesInProject = findRuntimeCached(); +export async function findRescriptRuntimesInProject( + project: string, +): Promise { + return await findRuntimePath(project); +} diff --git a/server/src/server.ts b/server/src/server.ts index 16515b415..eb29ab634 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1510,6 +1510,7 @@ async function onMessage(msg: p.Message) { config: config.extensionConfiguration, projects, workspaceFolders: Array.from(workspaceFolders), + runtimePathCache: utils.getRuntimePathCacheSnapshot(), }; let response: p.ResponseMessage = { diff --git a/server/src/utils.ts b/server/src/utils.ts index ac0d346a7..9446585cd 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -486,6 +486,14 @@ export async function getRuntimePathFromProjectRoot( return result; } +/** + * Returns a snapshot of the runtime path cache as a plain object. + * Useful for debugging and state dumps. + */ +export function getRuntimePathCacheSnapshot(): Record { + return Object.fromEntries(runtimePathCache); +} + export const getNamespaceNameFromConfigFile = ( projDir: p.DocumentUri, ): execResult => { From e8643207c3c732eb3d4d7ce3ade77d924625c99e Mon Sep 17 00:00:00 2001 From: nojaf Date: Fri, 28 Nov 2025 11:27:47 +0100 Subject: [PATCH 02/56] Fix filesWithDiagnostics tracking for syntax-only errors --- server/src/server.ts | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/server/src/server.ts b/server/src/server.ts index 16515b415..968e3682c 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1011,12 +1011,34 @@ let updateDiagnosticSyntax = async (fileUri: string, fileContent: string) => { tmpname, ]); + let allDiagnostics = [ + ...syntaxDiagnosticsForFile, + ...compilerDiagnosticsForFile, + ]; + + // Update filesWithDiagnostics to track this file + let projectRootPath = utils.findProjectRootOfFile(filePath); + if (projectRootPath != null) { + let projectFile = 
projectsFiles.get(projectRootPath); + if (projectFile != null) { + if (allDiagnostics.length > 0) { + projectFile.filesWithDiagnostics.add(fileUri); + } else { + // Only remove if there are no compiler diagnostics either + // (compiler diagnostics are tracked separately in filesDiagnostics) + if (compilerDiagnosticsForFile.length === 0) { + projectFile.filesWithDiagnostics.delete(fileUri); + } + } + } + } + let notification: p.NotificationMessage = { jsonrpc: c.jsonrpcVersion, method: "textDocument/publishDiagnostics", params: { uri: fileUri, - diagnostics: [...syntaxDiagnosticsForFile, ...compilerDiagnosticsForFile], + diagnostics: allDiagnostics, }, }; From be41c2827b12761997c367e36a7a28095de94d9b Mon Sep 17 00:00:00 2001 From: nojaf Date: Fri, 28 Nov 2025 14:25:02 +0100 Subject: [PATCH 03/56] Use NormalizedPath and FileURI types --- client/src/utils.ts | 4 +- server/src/bsc-args/rewatch.ts | 2 +- server/src/codeActions.ts | 7 +- server/src/incrementalCompilation.ts | 109 ++++++++----- server/src/lookup.ts | 61 +++++--- server/src/projectFiles.ts | 19 ++- server/src/server.ts | 193 ++++++++++++++--------- server/src/utils.ts | 219 ++++++++++++++++++++------- 8 files changed, 421 insertions(+), 193 deletions(-) diff --git a/client/src/utils.ts b/client/src/utils.ts index 1474888ee..443dc7b60 100644 --- a/client/src/utils.ts +++ b/client/src/utils.ts @@ -59,9 +59,7 @@ export const createFileInTempDir = (prefix = "", extension = "") => { return path.join(os.tmpdir(), tempFileName); }; -export let findProjectRootOfFileInDir = ( - source: DocumentUri, -): null | DocumentUri => { +export let findProjectRootOfFileInDir = (source: string): null | string => { let dir = path.dirname(source); if ( fs.existsSync(path.join(dir, "rescript.json")) || diff --git a/server/src/bsc-args/rewatch.ts b/server/src/bsc-args/rewatch.ts index e4dc4ab60..21ee17893 100644 --- a/server/src/bsc-args/rewatch.ts +++ b/server/src/bsc-args/rewatch.ts @@ -23,7 +23,7 @@ async function 
getRuntimePath( export async function getRewatchBscArgs( send: (msg: p.Message) => void, - bscBinaryLocation: string | null, + bscBinaryLocation: utils.NormalizedPath | null, projectsFiles: Map, entry: IncrementallyCompiledFileInfo, ): Promise { diff --git a/server/src/codeActions.ts b/server/src/codeActions.ts index fe15b4928..e3577ddaa 100644 --- a/server/src/codeActions.ts +++ b/server/src/codeActions.ts @@ -3,7 +3,6 @@ // OCaml binary. import * as p from "vscode-languageserver-protocol"; import * as utils from "./utils"; -import { fileURLToPath } from "url"; export type fileCodeActions = { range: p.Range; codeAction: p.CodeAction }; @@ -14,7 +13,7 @@ export type filesCodeActions = { interface findCodeActionsConfig { diagnostic: p.Diagnostic; diagnosticMessage: string[]; - file: string; + file: utils.FileURI; range: p.Range; addFoundActionsHere: filesCodeActions; } @@ -190,7 +189,7 @@ interface codeActionExtractorConfig { line: string; index: number; array: string[]; - file: string; + file: utils.FileURI; range: p.Range; diagnostic: p.Diagnostic; codeActions: filesCodeActions; @@ -631,7 +630,7 @@ let simpleAddMissingCases: codeActionExtractor = async ({ .join("") .trim(); - let filePath = fileURLToPath(file); + let filePath = utils.uriToNormalizedPath(file as utils.FileURI); let newSwitchCode = await utils.runAnalysisAfterSanityCheck(filePath, [ "codemod", diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index ee1b82ba3..cb9fb644c 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -1,7 +1,6 @@ import * as path from "path"; import fs from "fs"; import * as utils from "./utils"; -import { pathToFileURL } from "url"; import readline from "readline"; import { performance } from "perf_hooks"; import * as p from "vscode-languageserver-protocol"; @@ -14,7 +13,8 @@ import { fileCodeActions } from "./codeActions"; import { projectsFiles } from "./projectFiles"; import { 
getRewatchBscArgs, RewatchCompilerArgs } from "./bsc-args/rewatch"; import { BsbCompilerArgs, getBsbBscArgs } from "./bsc-args/bsb"; -import { workspaceFolders } from "./server"; +import { getCurrentCompilerDiagnosticsForFile } from "./server"; +import { NormalizedPath } from "./utils"; export function debug() { return ( @@ -32,8 +32,8 @@ export type IncrementallyCompiledFileInfo = { file: { /** File type. */ extension: ".res" | ".resi"; - /** Path to the source file. */ - sourceFilePath: string; + /** Path to the source file (normalized). */ + sourceFilePath: NormalizedPath; /** Name of the source file. */ sourceFileName: string; /** Module name of the source file. */ @@ -69,29 +69,29 @@ export type IncrementallyCompiledFileInfo = { killCompilationListeners: Array<() => void>; /** Project specific information. */ project: { - /** The root path of the project. */ - rootPath: string; + /** The root path of the project (normalized to match projectsFiles keys). */ + rootPath: NormalizedPath; /** The root path of the workspace (if a monorepo) */ - workspaceRootPath: string; + workspaceRootPath: NormalizedPath; /** Computed location of bsc. */ - bscBinaryLocation: string; + bscBinaryLocation: NormalizedPath; /** The arguments needed for bsc, derived from the project configuration/build.ninja. */ callArgs: Promise | null>; /** The location of the incremental folder for this project. */ - incrementalFolderPath: string; + incrementalFolderPath: NormalizedPath; }; /** Any code actions for this incremental file. 
*/ codeActions: Array; }; const incrementallyCompiledFileInfo: Map< - string, + NormalizedPath, IncrementallyCompiledFileInfo > = new Map(); const hasReportedFeatureFailedError: Set = new Set(); -const originalTypeFileToFilePath: Map = new Map(); +const originalTypeFileToFilePath: Map = new Map(); -export function incrementalCompilationFileChanged(changedPath: string) { +export function incrementalCompilationFileChanged(changedPath: NormalizedPath) { const filePath = originalTypeFileToFilePath.get(changedPath); if (filePath != null) { const entry = incrementallyCompiledFileInfo.get(filePath); @@ -116,7 +116,7 @@ export function incrementalCompilationFileChanged(changedPath: string) { } export function removeIncrementalFileFolder( - projectRootPath: string, + projectRootPath: NormalizedPath, onAfterRemove?: () => void, ) { fs.rm( @@ -128,7 +128,7 @@ export function removeIncrementalFileFolder( ); } -export function recreateIncrementalFileFolder(projectRootPath: string) { +export function recreateIncrementalFileFolder(projectRootPath: NormalizedPath) { if (debug()) { console.log("Recreating incremental file folder"); } @@ -142,8 +142,8 @@ export function recreateIncrementalFileFolder(projectRootPath: string) { } export function cleanUpIncrementalFiles( - filePath: string, - projectRootPath: string, + filePath: NormalizedPath, + projectRootPath: NormalizedPath, ) { const ext = filePath.endsWith(".resi") ? 
".resi" : ".res"; const namespace = utils.getNamespaceNameFromConfigFile(projectRootPath); @@ -242,7 +242,7 @@ function removeAnsiCodes(s: string): string { return s.replace(ansiEscape, ""); } function triggerIncrementalCompilationOfFile( - filePath: string, + filePath: NormalizedPath, fileContent: string, send: send, onCompilationFinished?: () => void, @@ -256,25 +256,31 @@ function triggerIncrementalCompilationOfFile( console.log("Did not find project root path for " + filePath); return; } - const project = projectsFiles.get(projectRootPath); + // projectRootPath is already normalized (NormalizedPath) from findProjectRootOfFile + // Use getProjectFile to verify the project exists + const project = utils.getProjectFile(projectRootPath); if (project == null) { if (debug()) console.log("Did not find open project for " + filePath); return; } + // projectRootPath is already the correct normalized key format + const actualProjectRootPath = projectRootPath; // computeWorkspaceRootPathFromLockfile returns null if lockfile found (local package) or if no parent found - const computedWorkspaceRoot = - utils.computeWorkspaceRootPathFromLockfile(projectRootPath); + const computedWorkspaceRoot = utils.computeWorkspaceRootPathFromLockfile( + actualProjectRootPath, + ); // If null, it means either a lockfile was found (local package) or no parent project root exists - // In both cases, we default to projectRootPath - const workspaceRootPath = computedWorkspaceRoot ?? projectRootPath; + // In both cases, we default to actualProjectRootPath + const workspaceRootPath: NormalizedPath = + computedWorkspaceRoot ?? 
actualProjectRootPath; // Determine if lockfile was found for debug logging - // If computedWorkspaceRoot is null and projectRootPath is not null, check if parent exists + // If computedWorkspaceRoot is null and actualProjectRootPath is not null, check if parent exists const foundRewatchLockfileInProjectRoot = computedWorkspaceRoot == null && - projectRootPath != null && - utils.findProjectRootOfFile(projectRootPath, true) != null; + actualProjectRootPath != null && + utils.findProjectRootOfFile(actualProjectRootPath, true) != null; if (foundRewatchLockfileInProjectRoot && debug()) { console.log( @@ -299,15 +305,15 @@ function triggerIncrementalCompilationOfFile( ? `${moduleName}-${project.namespaceName}` : moduleName; - const incrementalFolderPath = path.join( - projectRootPath, + const incrementalFolderPath: NormalizedPath = path.join( + actualProjectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION, - ); + ) as NormalizedPath; let originalTypeFileLocation = path.resolve( - projectRootPath, + actualProjectRootPath, c.compilerDirPartialPath, - path.relative(projectRootPath, filePath), + path.relative(actualProjectRootPath, filePath), ); const parsed = path.parse(originalTypeFileLocation); @@ -326,8 +332,8 @@ function triggerIncrementalCompilationOfFile( incrementalFilePath: path.join(incrementalFolderPath, moduleName + ext), }, project: { - workspaceRootPath: workspaceRootPath ?? 
projectRootPath, - rootPath: projectRootPath, + workspaceRootPath, + rootPath: actualProjectRootPath, callArgs: Promise.resolve([]), bscBinaryLocation, incrementalFolderPath, @@ -373,7 +379,10 @@ function triggerIncrementalCompilationOfFile( }; } } -function verifyTriggerToken(filePath: string, triggerToken: number): boolean { +function verifyTriggerToken( + filePath: NormalizedPath, + triggerToken: number, +): boolean { return ( incrementallyCompiledFileInfo.get(filePath)?.compilation?.triggerToken === triggerToken @@ -578,7 +587,8 @@ async function compileContents( const change = Object.values(ca.codeAction.edit.changes)[0]; ca.codeAction.edit.changes = { - [pathToFileURL(entry.file.sourceFilePath).toString()]: change, + [utils.pathToURI(entry.file.sourceFilePath) as utils.FileURI]: + change, }; } }); @@ -645,11 +655,34 @@ async function compileContents( } } + const fileUri = utils.pathToURI( + entry.file.sourceFilePath, + ) as utils.FileURI; + + // Update filesWithDiagnostics to track this file + // entry.project.rootPath is guaranteed to match a key in projectsFiles + // (see triggerIncrementalCompilationOfFile where the entry is created) + const projectFile = projectsFiles.get(entry.project.rootPath); + + if (projectFile != null) { + // Get compiler diagnostics from main build (if any) + const compilerDiagnosticsForFile = + getCurrentCompilerDiagnosticsForFile(fileUri); + const allDiagnostics = [...res, ...compilerDiagnosticsForFile]; + + if (allDiagnostics.length > 0) { + projectFile.filesWithDiagnostics.add(fileUri as utils.FileURI); + } else { + // Only remove if there are no diagnostics at all + projectFile.filesWithDiagnostics.delete(fileUri as utils.FileURI); + } + } + const notification: p.NotificationMessage = { jsonrpc: c.jsonrpcVersion, method: "textDocument/publishDiagnostics", params: { - uri: pathToFileURL(entry.file.sourceFilePath), + uri: fileUri, diagnostics: res, }, }; @@ -667,7 +700,7 @@ async function compileContents( } export function 
handleUpdateOpenedFile( - filePath: string, + filePath: utils.NormalizedPath, fileContent: string, send: send, onCompilationFinished?: () => void, @@ -683,7 +716,7 @@ export function handleUpdateOpenedFile( ); } -export function handleClosedFile(filePath: string) { +export function handleClosedFile(filePath: NormalizedPath) { if (debug()) { console.log("Closed: " + filePath); } @@ -695,7 +728,7 @@ export function handleClosedFile(filePath: string) { } export function getCodeActionsFromIncrementalCompilation( - filePath: string, + filePath: NormalizedPath, ): Array | null { const entry = incrementallyCompiledFileInfo.get(filePath); if (entry != null) { diff --git a/server/src/lookup.ts b/server/src/lookup.ts index 38dff3761..893137607 100644 --- a/server/src/lookup.ts +++ b/server/src/lookup.ts @@ -1,9 +1,9 @@ import * as fs from "fs"; import * as path from "path"; -import * as p from "vscode-languageserver-protocol"; import { BuildSchema, ModuleFormat, ModuleFormatObject } from "./buildSchema"; import * as c from "./constants"; +import { NormalizedPath, normalizePath } from "./utils"; const getCompiledFolderName = (moduleFormat: ModuleFormat): string => { switch (moduleFormat) { @@ -18,36 +18,47 @@ const getCompiledFolderName = (moduleFormat: ModuleFormat): string => { } }; -export const replaceFileExtension = (filePath: string, ext: string): string => { +export const replaceFileExtension = ( + filePath: T, + ext: string, +): T => { let name = path.basename(filePath, path.extname(filePath)); - return path.format({ dir: path.dirname(filePath), name, ext }); + const result = path.format({ dir: path.dirname(filePath), name, ext }); + // If input was NormalizedPath, result is still normalized + // If input was string, result is string + return result as T; }; // Check if filePartialPath exists at directory and return the joined path, // otherwise recursively check parent directories for it. 
export const findFilePathFromProjectRoot = ( - directory: p.DocumentUri | null, // This must be a directory and not a file! + directory: NormalizedPath | null, // This must be a directory and not a file! filePartialPath: string, -): null | p.DocumentUri => { +): NormalizedPath | null => { if (directory == null) { return null; } - let filePath: p.DocumentUri = path.join(directory, filePartialPath); + let filePath = path.join(directory, filePartialPath); if (fs.existsSync(filePath)) { - return filePath; + return normalizePath(filePath); } - let parentDir: p.DocumentUri = path.dirname(directory); - if (parentDir === directory) { + let parentDirStr = path.dirname(directory); + if (parentDirStr === directory) { // reached the top return null; } + const parentDir = normalizePath(parentDirStr); + if (parentDir == null) { + return null; + } + return findFilePathFromProjectRoot(parentDir, filePartialPath); }; -export const readConfig = (projDir: p.DocumentUri): BuildSchema | null => { +export const readConfig = (projDir: NormalizedPath): BuildSchema | null => { try { let rescriptJson = path.join(projDir, c.rescriptJsonPartialPath); let bsconfigJson = path.join(projDir, c.bsconfigPartialPath); @@ -109,9 +120,9 @@ export const getSuffixAndPathFragmentFromBsconfig = (bsconfig: BuildSchema) => { }; export const getFilenameFromBsconfig = ( - projDir: string, + projDir: NormalizedPath, partialFilePath: string, -): string | null => { +): NormalizedPath | null => { let bsconfig = readConfig(projDir); if (!bsconfig) { @@ -122,22 +133,29 @@ export const getFilenameFromBsconfig = ( let compiledPartialPath = replaceFileExtension(partialFilePath, suffix); - return path.join(projDir, pathFragment, compiledPartialPath); + const result = path.join(projDir, pathFragment, compiledPartialPath); + return normalizePath(result); }; // Monorepo helpers export const getFilenameFromRootBsconfig = ( - projDir: string, + projDir: NormalizedPath, partialFilePath: string, -): string | null => { +): 
NormalizedPath | null => { + // Start searching from the parent directory of projDir to find the workspace root + const parentDir = normalizePath(path.dirname(projDir)); + if (parentDir == null) { + return null; + } + let rootConfigPath = findFilePathFromProjectRoot( - path.join("..", projDir), + parentDir, c.rescriptJsonPartialPath, ); if (!rootConfigPath) { rootConfigPath = findFilePathFromProjectRoot( - path.join("..", projDir), + parentDir, c.bsconfigPartialPath, ); } @@ -146,7 +164,11 @@ export const getFilenameFromRootBsconfig = ( return null; } - let rootConfig = readConfig(path.dirname(rootConfigPath)); + const rootConfigDir = normalizePath(path.dirname(rootConfigPath)); + if (rootConfigDir == null) { + return null; + } + let rootConfig = readConfig(rootConfigDir); if (!rootConfig) { return null; @@ -156,5 +178,6 @@ export const getFilenameFromRootBsconfig = ( let compiledPartialPath = replaceFileExtension(partialFilePath, suffix); - return path.join(projDir, pathFragment, compiledPartialPath); + const result = path.join(projDir, pathFragment, compiledPartialPath); + return normalizePath(result); }; diff --git a/server/src/projectFiles.ts b/server/src/projectFiles.ts index 82582e123..a8684360a 100644 --- a/server/src/projectFiles.ts +++ b/server/src/projectFiles.ts @@ -1,16 +1,17 @@ import * as cp from "node:child_process"; import * as p from "vscode-languageserver-protocol"; +import { NormalizedPath, FileURI } from "./utils"; export type filesDiagnostics = { - [key: string]: p.Diagnostic[]; + [key: FileURI]: p.Diagnostic[]; }; export interface projectFiles { - openFiles: Set; - filesWithDiagnostics: Set; + openFiles: Set; + filesWithDiagnostics: Set; filesDiagnostics: filesDiagnostics; rescriptVersion: string | undefined; - bscBinaryLocation: string | null; + bscBinaryLocation: NormalizedPath | null; editorAnalysisLocation: string | null; namespaceName: string | null; @@ -24,5 +25,11 @@ export interface projectFiles { hasPromptedToStartBuild: boolean | 
"never"; } -export let projectsFiles: Map = // project root path - new Map(); +/** + * Map of project root paths to their project state. + * + * Keys are normalized paths (NormalizedPath) to ensure consistent lookups + * and prevent path format mismatches. All paths should be normalized using + * `normalizePath()` before being used as keys. + */ +export let projectsFiles: Map = new Map(); diff --git a/server/src/server.ts b/server/src/server.ts index 968e3682c..7692e4fe5 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -22,7 +22,6 @@ import * as utils from "./utils"; import * as codeActions from "./codeActions"; import * as c from "./constants"; import { assert } from "console"; -import { fileURLToPath } from "url"; import { WorkspaceEdit } from "vscode-languageserver"; import { onErrorReported } from "./errorReporter"; import * as ic from "./incrementalCompilation"; @@ -92,15 +91,17 @@ const sendCompilationStatus = (payload: CompilationStatusPayload) => { }; let findRescriptBinary = async ( - projectRootPath: p.DocumentUri | null, -): Promise => { + projectRootPath: utils.NormalizedPath | null, +): Promise => { if ( config.extensionConfiguration.binaryPath != null && fs.existsSync( path.join(config.extensionConfiguration.binaryPath, "rescript"), ) ) { - return path.join(config.extensionConfiguration.binaryPath, "rescript"); + return utils.normalizePath( + path.join(config.extensionConfiguration.binaryPath, "rescript"), + ); } return utils.findRescriptBinary(projectRootPath); @@ -118,8 +119,8 @@ let openCompiledFileRequest = new v.RequestType< void >("textDocument/openCompiled"); -let getCurrentCompilerDiagnosticsForFile = ( - fileUri: string, +export let getCurrentCompilerDiagnosticsForFile = ( + fileUri: utils.FileURI, ): p.Diagnostic[] => { let diagnostics: p.Diagnostic[] | null = null; @@ -167,9 +168,10 @@ let sendUpdatedDiagnostics = async () => { // diff Object.keys(filesAndErrors).forEach((file) => { + const fileUri = file as utils.FileURI; 
let params: p.PublishDiagnosticsParams = { - uri: file, - diagnostics: filesAndErrors[file], + uri: fileUri, + diagnostics: filesAndErrors[fileUri], }; let notification: p.NotificationMessage = { jsonrpc: c.jsonrpcVersion, @@ -178,7 +180,7 @@ let sendUpdatedDiagnostics = async () => { }; send(notification); - filesWithDiagnostics.add(file); + filesWithDiagnostics.add(fileUri); }); if (done) { // clear old files @@ -215,7 +217,7 @@ let sendUpdatedDiagnostics = async () => { let errorCount = 0; let warningCount = 0; for (const [fileUri, diags] of Object.entries(filesAndErrors)) { - const filePath = fileURLToPath(fileUri); + const filePath = utils.uriToNormalizedPath(fileUri as utils.FileURI); if (filePath.startsWith(projectRootPath)) { for (const d of diags as v.Diagnostic[]) { if (d.severity === v.DiagnosticSeverity.Error) errorCount++; @@ -282,7 +284,7 @@ let sendUpdatedDiagnostics = async () => { } }; -let deleteProjectDiagnostics = (projectRootPath: string) => { +let deleteProjectDiagnostics = (projectRootPath: utils.NormalizedPath) => { let root = projectsFiles.get(projectRootPath); if (root != null) { root.filesWithDiagnostics.forEach((file) => { @@ -317,7 +319,7 @@ let sendCompilationFinishedMessage = () => { let debug = false; -let syncProjectConfigCache = async (rootPath: string) => { +let syncProjectConfigCache = async (rootPath: utils.NormalizedPath) => { try { if (debug) console.log("syncing project config cache for " + rootPath); await utils.runAnalysisAfterSanityCheck(rootPath, [ @@ -330,7 +332,7 @@ let syncProjectConfigCache = async (rootPath: string) => { } }; -let deleteProjectConfigCache = async (rootPath: string) => { +let deleteProjectConfigCache = async (rootPath: utils.NormalizedPath) => { try { if (debug) console.log("deleting project config cache for " + rootPath); await utils.runAnalysisAfterSanityCheck(rootPath, [ @@ -352,7 +354,9 @@ async function onWorkspaceDidChangeWatchedFiles( if ( 
config.extensionConfiguration.cache?.projectConfig?.enable === true ) { - let projectRoot = utils.findProjectRootOfFile(change.uri); + let projectRoot = utils.findProjectRootOfFile( + utils.uriToNormalizedPath(change.uri as utils.FileURI), + ); if (projectRoot != null) { await syncProjectConfigCache(projectRoot); } @@ -371,7 +375,9 @@ async function onWorkspaceDidChangeWatchedFiles( console.log("Error while sending updated diagnostics"); } } else { - ic.incrementalCompilationFileChanged(fileURLToPath(change.uri)); + ic.incrementalCompilationFileChanged( + utils.uriToNormalizedPath(change.uri as utils.FileURI), + ); } }), ); @@ -379,15 +385,16 @@ async function onWorkspaceDidChangeWatchedFiles( type clientSentBuildAction = { title: string; - projectRootPath: string; + projectRootPath: utils.NormalizedPath; }; -let openedFile = async (fileUri: string, fileContent: string) => { - let filePath = fileURLToPath(fileUri); +let openedFile = async (fileUri: utils.FileURI, fileContent: string) => { + let filePath = utils.uriToNormalizedPath(fileUri); stupidFileContentCache.set(filePath, fileContent); let projectRootPath = utils.findProjectRootOfFile(filePath); if (projectRootPath != null) { + // projectRootPath is already normalized (NormalizedPath) from findProjectRootOfFile let projectRootState = projectsFiles.get(projectRootPath); if (projectRootState == null) { if (config.extensionConfiguration.incrementalTypechecking?.enable) { @@ -477,8 +484,8 @@ let openedFile = async (fileUri: string, fileContent: string) => { } }; -let closedFile = async (fileUri: string) => { - let filePath = fileURLToPath(fileUri); +let closedFile = async (fileUri: utils.FileURI) => { + let filePath = utils.uriToNormalizedPath(fileUri); if (config.extensionConfiguration.incrementalTypechecking?.enable) { ic.handleClosedFile(filePath); @@ -504,8 +511,8 @@ let closedFile = async (fileUri: string) => { } }; -let updateOpenedFile = (fileUri: string, fileContent: string) => { - let filePath = 
fileURLToPath(fileUri); +let updateOpenedFile = (fileUri: utils.FileURI, fileContent: string) => { + let filePath = utils.uriToNormalizedPath(fileUri); assert(stupidFileContentCache.has(filePath)); stupidFileContentCache.set(filePath, fileContent); if (config.extensionConfiguration.incrementalTypechecking?.enable) { @@ -519,8 +526,8 @@ let updateOpenedFile = (fileUri: string, fileContent: string) => { }); } }; -let getOpenedFileContent = (fileUri: string) => { - let filePath = fileURLToPath(fileUri); +let getOpenedFileContent = (fileUri: utils.FileURI) => { + let filePath = utils.uriToNormalizedPath(fileUri); let content = stupidFileContentCache.get(filePath)!; assert(content != null); return content; @@ -546,8 +553,10 @@ export default function listen(useStdio = false) { async function hover(msg: p.RequestMessage) { let params = msg.params as p.HoverParams; - let filePath = fileURLToPath(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(); fs.writeFileSync(tmpname, code, { encoding: "utf-8" }); let response = await utils.runAnalysisCommand( @@ -568,7 +577,9 @@ async function hover(msg: p.RequestMessage) { async function inlayHint(msg: p.RequestMessage) { const params = msg.params as p.InlayHintParams; - const filePath = fileURLToPath(params.textDocument.uri); + const filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); const response = await utils.runAnalysisCommand( filePath, @@ -595,7 +606,9 @@ function sendInlayHintsRefresh() { async function codeLens(msg: p.RequestMessage) { const params = msg.params as p.CodeLensParams; - const filePath = fileURLToPath(params.textDocument.uri); + const filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); const 
response = await utils.runAnalysisCommand( filePath, @@ -616,8 +629,10 @@ function sendCodeLensRefresh() { async function signatureHelp(msg: p.RequestMessage) { let params = msg.params as p.SignatureHelpParams; - let filePath = fileURLToPath(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(); fs.writeFileSync(tmpname, code, { encoding: "utf-8" }); let response = await utils.runAnalysisCommand( @@ -641,7 +656,9 @@ async function signatureHelp(msg: p.RequestMessage) { async function definition(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_definition let params = msg.params as p.DefinitionParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let response = await utils.runAnalysisCommand( filePath, ["definition", filePath, params.position.line, params.position.character], @@ -653,7 +670,9 @@ async function definition(msg: p.RequestMessage) { async function typeDefinition(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specification/specification-current/#textDocument_typeDefinition let params = msg.params as p.TypeDefinitionParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let response = await utils.runAnalysisCommand( filePath, [ @@ -670,7 +689,9 @@ async function typeDefinition(msg: p.RequestMessage) { async function references(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_references let params = msg.params as 
p.ReferenceParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let result: typeof p.ReferencesRequest.type = await utils.getReferencesForPosition(filePath, params.position); let response: p.ResponseMessage = { @@ -687,7 +708,9 @@ async function prepareRename( ): Promise { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_prepareRename let params = msg.params as p.PrepareRenameParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); // `prepareRename` was introduced in 12.0.0-beta.10 let projectRootPath = utils.findProjectRootOfFile(filePath); @@ -727,8 +750,8 @@ async function prepareRename( if (locations !== null) { locations.forEach((loc) => { if ( - path.normalize(fileURLToPath(loc.uri)) === - path.normalize(fileURLToPath(params.textDocument.uri)) + utils.uriToNormalizedPath(loc.uri as utils.FileURI) === + utils.uriToNormalizedPath(params.textDocument.uri as utils.FileURI) ) { let { start, end } = loc.range; let pos = params.position; @@ -753,7 +776,9 @@ async function prepareRename( async function rename(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_rename let params = msg.params as p.RenameParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let documentChanges: (p.RenameFile | p.TextDocumentEdit)[] | null = await utils.runAnalysisAfterSanityCheck(filePath, [ "rename", @@ -777,9 +802,11 @@ async function rename(msg: p.RequestMessage) { async function documentSymbol(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_documentSymbol let 
params = msg.params as p.DocumentSymbolParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let extension = path.extname(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(extension); fs.writeFileSync(tmpname, code, { encoding: "utf-8" }); let response = await utils.runAnalysisCommand( @@ -813,9 +840,11 @@ function askForAllCurrentConfiguration() { async function semanticTokens(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_semanticTokens let params = msg.params as p.SemanticTokensParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let extension = path.extname(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(extension); fs.writeFileSync(tmpname, code, { encoding: "utf-8" }); let response = await utils.runAnalysisCommand( @@ -831,8 +860,10 @@ async function semanticTokens(msg: p.RequestMessage) { async function completion(msg: p.RequestMessage) { // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_completion let params = msg.params as p.ReferenceParams; - let filePath = fileURLToPath(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let tmpname = utils.createFileInTempDir(); fs.writeFileSync(tmpname, code, { encoding: 
"utf-8" }); let response = await utils.runAnalysisCommand( @@ -860,8 +891,12 @@ async function completionResolve(msg: p.RequestMessage) { if (item.documentation == null && item.data != null) { const data = item.data as { filePath: string; modulePath: string }; + const normalizedFilePath = utils.normalizePath(data.filePath); + if (normalizedFilePath == null) { + return response; + } let result = await utils.runAnalysisAfterSanityCheck( - data.filePath, + normalizedFilePath, ["completionResolve", data.filePath, data.modulePath], true, ); @@ -873,8 +908,10 @@ async function completionResolve(msg: p.RequestMessage) { async function codeAction(msg: p.RequestMessage): Promise { let params = msg.params as p.CodeActionParams; - let filePath = fileURLToPath(params.textDocument.uri); - let code = getOpenedFileContent(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let extension = path.extname(params.textDocument.uri); let tmpname = utils.createFileInTempDir(extension); @@ -938,7 +975,9 @@ function format(msg: p.RequestMessage): Array { result: [], }; let params = msg.params as p.DocumentFormattingParams; - let filePath = fileURLToPath(params.textDocument.uri); + let filePath = utils.uriToNormalizedPath( + params.textDocument.uri as utils.FileURI, + ); let extension = path.extname(params.textDocument.uri); if (extension !== c.resExt && extension !== c.resiExt) { let params: p.ShowMessageParams = { @@ -953,7 +992,7 @@ function format(msg: p.RequestMessage): Array { return [fakeSuccessResponse, response]; } else { // code will always be defined here, even though technically it can be undefined - let code = getOpenedFileContent(params.textDocument.uri); + let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI); let projectRootPath = utils.findProjectRootOfFile(filePath); let project = @@ -988,12 +1027,15 @@ function 
format(msg: p.RequestMessage): Array { } } -let updateDiagnosticSyntax = async (fileUri: string, fileContent: string) => { +let updateDiagnosticSyntax = async ( + fileUri: utils.FileURI, + fileContent: string, +) => { if (config.extensionConfiguration.incrementalTypechecking?.enable) { // The incremental typechecking already sends syntax diagnostics. return; } - let filePath = fileURLToPath(fileUri); + let filePath = utils.uriToNormalizedPath(fileUri); let extension = path.extname(filePath); let tmpname = utils.createFileInTempDir(extension); fs.writeFileSync(tmpname, fileContent, { encoding: "utf-8" }); @@ -1018,17 +1060,16 @@ let updateDiagnosticSyntax = async (fileUri: string, fileContent: string) => { // Update filesWithDiagnostics to track this file let projectRootPath = utils.findProjectRootOfFile(filePath); - if (projectRootPath != null) { - let projectFile = projectsFiles.get(projectRootPath); - if (projectFile != null) { - if (allDiagnostics.length > 0) { - projectFile.filesWithDiagnostics.add(fileUri); - } else { - // Only remove if there are no compiler diagnostics either - // (compiler diagnostics are tracked separately in filesDiagnostics) - if (compilerDiagnosticsForFile.length === 0) { - projectFile.filesWithDiagnostics.delete(fileUri); - } + let projectFile = utils.getProjectFile(projectRootPath); + + if (projectFile != null) { + if (allDiagnostics.length > 0) { + projectFile.filesWithDiagnostics.add(fileUri); + } else { + // Only remove if there are no compiler diagnostics either + // (compiler diagnostics are tracked separately in filesDiagnostics) + if (compilerDiagnosticsForFile.length === 0) { + projectFile.filesWithDiagnostics.delete(fileUri); } } } @@ -1050,7 +1091,7 @@ let updateDiagnosticSyntax = async (fileUri: string, fileContent: string) => { async function createInterface(msg: p.RequestMessage): Promise { let params = msg.params as p.TextDocumentIdentifier; let extension = path.extname(params.uri); - let filePath = 
fileURLToPath(params.uri); + let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI); let projDir = utils.findProjectRootOfFile(filePath); if (projDir === null) { @@ -1162,7 +1203,7 @@ async function createInterface(msg: p.RequestMessage): Promise { function openCompiledFile(msg: p.RequestMessage): p.Message { let params = msg.params as p.TextDocumentIdentifier; - let filePath = fileURLToPath(params.uri); + let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI); let projDir = utils.findProjectRootOfFile(filePath); if (projDir === null) { @@ -1286,10 +1327,13 @@ async function onMessage(msg: p.Message) { await onWorkspaceDidChangeWatchedFiles(params); } else if (msg.method === DidOpenTextDocumentNotification.method) { let params = msg.params as p.DidOpenTextDocumentParams; - await openedFile(params.textDocument.uri, params.textDocument.text); + await openedFile( + params.textDocument.uri as utils.FileURI, + params.textDocument.text, + ); await sendUpdatedDiagnostics(); await updateDiagnosticSyntax( - params.textDocument.uri, + params.textDocument.uri as utils.FileURI, params.textDocument.text, ); } else if (msg.method === DidChangeTextDocumentNotification.method) { @@ -1302,18 +1346,18 @@ async function onMessage(msg: p.Message) { } else { // we currently only support full changes updateOpenedFile( - params.textDocument.uri, + params.textDocument.uri as utils.FileURI, changes[changes.length - 1].text, ); await updateDiagnosticSyntax( - params.textDocument.uri, + params.textDocument.uri as utils.FileURI, changes[changes.length - 1].text, ); } } } else if (msg.method === DidCloseTextDocumentNotification.method) { let params = msg.params as p.DidCloseTextDocumentParams; - await closedFile(params.textDocument.uri); + await closedFile(params.textDocument.uri as utils.FileURI); } else if (msg.method === DidChangeConfigurationNotification.type.method) { // Can't seem to get this notification to trigger, but if it does this will be here and 
ensure we're synced up at the server. askForAllCurrentConfiguration(); @@ -1334,8 +1378,9 @@ async function onMessage(msg: p.Message) { // Save initial configuration, if present let initParams = msg.params as InitializeParams; for (const workspaceFolder of initParams.workspaceFolders || []) { - const workspaceRootPath = fileURLToPath(workspaceFolder.uri); - workspaceFolders.add(workspaceRootPath); + workspaceFolders.add( + utils.uriToNormalizedPath(workspaceFolder.uri as utils.FileURI), + ); } let initialConfiguration = initParams.initializationOptions ?.extensionConfiguration as extensionConfiguration | undefined; @@ -1587,7 +1632,13 @@ async function onMessage(msg: p.Message) { msg.result.title === c.startBuildAction ) { let msg_ = msg.result as clientSentBuildAction; - let projectRootPath = msg_.projectRootPath; + // Normalize the path since JSON serialization loses the branded type + // The type says it's NormalizedPath, so we ensure it actually is + let projectRootPath = utils.normalizePath(msg_.projectRootPath); + if (projectRootPath == null) { + // Should never happen, but handle gracefully + return; + } // TODO: sometime stale .bsb.lock dangling // TODO: close watcher when lang-server shuts down. 
However, by Node's // default, these subprocesses are automatically killed when this diff --git a/server/src/utils.ts b/server/src/utils.ts index ac0d346a7..ba10c5cef 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -10,17 +10,67 @@ import fs from "fs"; import fsAsync from "fs/promises"; import * as os from "os"; import semver from "semver"; +import { fileURLToPath } from "url"; import * as codeActions from "./codeActions"; import * as c from "./constants"; import * as lookup from "./lookup"; import { reportError } from "./errorReporter"; import config from "./config"; -import { filesDiagnostics, projectsFiles } from "./projectFiles"; +import { filesDiagnostics, projectsFiles, projectFiles } from "./projectFiles"; import { workspaceFolders } from "./server"; import { rewatchLockPartialPath, rescriptLockPartialPath } from "./constants"; import { findRescriptRuntimesInProject } from "./find-runtime"; +/** + * Branded type for normalized file paths. + * + * All paths stored as keys in `projectsFiles` are normalized to ensure + * consistent lookups and prevent path format mismatches (e.g., trailing + * slashes, symlinks, relative vs absolute paths). + * + * Use `normalizePath()` to convert a regular path to a `NormalizedPath`. + */ +export type NormalizedPath = string & { __brand: "NormalizedPath" }; + +/** + * Branded type for file URIs (e.g., `file:///path/to/file.res`). + * + * This represents a URI as used in the Language Server Protocol. + * Use `uriToNormalizedPath()` to convert a URI to a normalized file path. + */ +export type FileURI = string & { __brand: "FileURI" }; + +/** + * Normalizes a file path and returns it as a `NormalizedPath`. + * + * This function should be used whenever storing a path as a key in `projectsFiles` + * or when comparing paths that need to match exactly. 
+ * + * @param filePath - The path to normalize (can be null) + * @returns The normalized path, or null if input was null + */ +export function normalizePath(filePath: string | null): NormalizedPath | null { + return filePath != null ? (path.normalize(filePath) as NormalizedPath) : null; +} + +/** + * Converts a file URI (e.g., `file:///path/to/file.res`) to a normalized file path. + * + * This is the preferred way to convert LSP DocumentUri values to file paths, + * as it ensures the resulting path is normalized for consistent use throughout + * the codebase. + * + * @param uri - The file URI to convert + * @returns The normalized file path + * @throws If the URI cannot be converted to a file path + */ +export function uriToNormalizedPath(uri: FileURI): NormalizedPath { + const filePath = fileURLToPath(uri); + // fileURLToPath always returns a string (throws on invalid URI), so we can directly normalize + return path.normalize(filePath) as NormalizedPath; +} + let tempFilePrefix = "rescript_format_file_" + process.pid + "_"; let tempFileId = 0; @@ -31,9 +81,13 @@ export let createFileInTempDir = (extension = "") => { }; let findProjectRootOfFileInDir = ( - source: p.DocumentUri, -): null | p.DocumentUri => { - let dir = path.dirname(source); + source: NormalizedPath, +): NormalizedPath | null => { + let dirStr = path.dirname(source); + const dir = normalizePath(dirStr); + if (dir == null) { + return null; + } if ( fs.existsSync(path.join(dir, c.rescriptJsonPartialPath)) || fs.existsSync(path.join(dir, c.bsconfigPartialPath)) @@ -52,13 +106,20 @@ let findProjectRootOfFileInDir = ( // TODO: races here? 
// TODO: this doesn't handle file:/// scheme export let findProjectRootOfFile = ( - source: p.DocumentUri, + source: NormalizedPath, allowDir?: boolean, -): null | p.DocumentUri => { - // First look in project files - let foundRootFromProjectFiles: string | null = null; +): NormalizedPath | null => { + // source is already normalized + const normalizedSource = source; + + // First look in project files (keys are already normalized) + let foundRootFromProjectFiles: NormalizedPath | null = null; for (const rootPath of projectsFiles.keys()) { - if (source.startsWith(rootPath) && (!allowDir || source !== rootPath)) { + // Both are normalized, so direct comparison works + if ( + normalizedSource.startsWith(rootPath) && + (!allowDir || normalizedSource !== rootPath) + ) { // Prefer the longest path (most nested) if ( foundRootFromProjectFiles == null || @@ -73,26 +134,54 @@ export let findProjectRootOfFile = ( return foundRootFromProjectFiles; } else { const isDir = path.extname(source) === ""; - return findProjectRootOfFileInDir( - isDir && !allowDir ? path.join(source, "dummy.res") : source, - ); + const searchPath = + isDir && !allowDir ? path.join(source, "dummy.res") : source; + const normalizedSearchPath = normalizePath(searchPath); + if (normalizedSearchPath == null) { + return null; + } + const foundPath = findProjectRootOfFileInDir(normalizedSearchPath); + return foundPath; + } +}; + +/** + * Gets the project file for a given project root path. + * + * All keys in `projectsFiles` are normalized (see `openedFile` in server.ts), + * so this function accepts either a normalized or non-normalized path and + * normalizes it before lookup. 
+ * + * @param projectRootPath - The project root path to look up (can be null or already normalized) + * @returns The project file if found, null otherwise + */ +export let getProjectFile = ( + projectRootPath: NormalizedPath | null, +): projectFiles | null => { + if (projectRootPath == null) { + return null; } + return projectsFiles.get(projectRootPath) ?? null; }; // If ReScript < 12.0.0-alpha.13, then we want `{project_root}/node_modules/rescript/{c.platformDir}/{binary}`. // Otherwise, we want to dynamically import `{project_root}/node_modules/rescript` and from `binPaths` get the relevant binary. // We won't know which version is in the project root until we read and parse `{project_root}/node_modules/rescript/package.json` let findBinary = async ( - projectRootPath: p.DocumentUri | null, + projectRootPath: NormalizedPath | null, binary: | "bsc.exe" | "rescript-editor-analysis.exe" | "rescript" | "rewatch.exe" | "rescript.exe", -) => { +): Promise => { if (config.extensionConfiguration.platformPath != null) { - return path.join(config.extensionConfiguration.platformPath, binary); + const result = path.join( + config.extensionConfiguration.platformPath, + binary, + ); + return normalizePath(result); } if (projectRootPath !== null) { @@ -106,10 +195,10 @@ let findBinary = async ( if (compileInfo && compileInfo.bsc_path) { const bsc_path = compileInfo.bsc_path; if (binary === "bsc.exe") { - return bsc_path; + return normalizePath(bsc_path); } else { const binary_path = path.join(path.dirname(bsc_path), binary); - return binary_path; + return normalizePath(binary_path); } } } catch {} @@ -166,38 +255,39 @@ let findBinary = async ( } if (binaryPath != null && fs.existsSync(binaryPath)) { - return binaryPath; + return normalizePath(binaryPath); } else { return null; } }; -export let findRescriptBinary = (projectRootPath: p.DocumentUri | null) => +export let findRescriptBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "rescript"); 
-export let findBscExeBinary = (projectRootPath: p.DocumentUri | null) => +export let findBscExeBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "bsc.exe"); -export let findEditorAnalysisBinary = (projectRootPath: p.DocumentUri | null) => - findBinary(projectRootPath, "rescript-editor-analysis.exe"); +export let findEditorAnalysisBinary = ( + projectRootPath: NormalizedPath | null, +) => findBinary(projectRootPath, "rescript-editor-analysis.exe"); -export let findRewatchBinary = (projectRootPath: p.DocumentUri | null) => +export let findRewatchBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "rewatch.exe"); -export let findRescriptExeBinary = (projectRootPath: p.DocumentUri | null) => +export let findRescriptExeBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "rescript.exe"); -type execResult = +type execResult = | { kind: "success"; - result: string; + result: T; } | { kind: "error"; error: string; }; -type formatCodeResult = execResult; +type formatCodeResult = execResult; export let formatCode = ( bscPath: p.DocumentUri | null, @@ -238,7 +328,7 @@ export let formatCode = ( }; export async function findReScriptVersionForProjectRoot( - projectRootPath: string | null, + projectRootPath: NormalizedPath | null, ): Promise { if (projectRootPath == null) { return undefined; @@ -270,7 +360,7 @@ if (fs.existsSync(c.builtinAnalysisDevPath)) { } export let runAnalysisAfterSanityCheck = async ( - filePath: p.DocumentUri, + filePath: NormalizedPath, args: Array, projectRequired = false, ) => { @@ -279,8 +369,9 @@ export let runAnalysisAfterSanityCheck = async ( return null; } let rescriptVersion = - projectsFiles.get(projectRootPath ?? "")?.rescriptVersion ?? - (await findReScriptVersionForProjectRoot(projectRootPath)); + (projectRootPath + ? projectsFiles.get(projectRootPath)?.rescriptVersion + : null) ?? 
(await findReScriptVersionForProjectRoot(projectRootPath)); let binaryPath = builtinBinaryPath; @@ -347,7 +438,7 @@ export let runAnalysisAfterSanityCheck = async ( }; export let runAnalysisCommand = async ( - filePath: p.DocumentUri, + filePath: NormalizedPath, args: Array, msg: RequestMessage, projectRequired = true, @@ -366,7 +457,7 @@ export let runAnalysisCommand = async ( }; export let getReferencesForPosition = async ( - filePath: p.DocumentUri, + filePath: NormalizedPath, position: p.Position, ) => await runAnalysisAfterSanityCheck(filePath, [ @@ -389,8 +480,8 @@ export const toCamelCase = (text: string): string => { * in the workspace, so we return null (which will default to projectRootPath). */ export function computeWorkspaceRootPathFromLockfile( - projectRootPath: string | null, -): string | null { + projectRootPath: NormalizedPath | null, +): NormalizedPath | null { if (projectRootPath == null) { return null; } @@ -425,7 +516,7 @@ const runtimePathCache = new Map(); * This function is cached per workspace root path. */ export async function getRuntimePathFromWorkspaceRoot( - workspaceRootPath: string, + workspaceRootPath: NormalizedPath, ): Promise { // Check cache first if (runtimePathCache.has(workspaceRootPath)) { @@ -455,7 +546,7 @@ export async function getRuntimePathFromWorkspaceRoot( * This function is cached per project root path. */ export async function getRuntimePathFromProjectRoot( - projectRootPath: string | null, + projectRootPath: NormalizedPath | null, ): Promise { if (projectRootPath == null) { return null; @@ -467,7 +558,7 @@ export async function getRuntimePathFromProjectRoot( } // Compute workspace root and resolve runtime - const workspaceRootPath = + const workspaceRootPath: NormalizedPath = computeWorkspaceRootPathFromLockfile(projectRootPath) ?? 
projectRootPath; // Check cache again with workspace root (might have been cached from a previous call) @@ -487,8 +578,8 @@ export async function getRuntimePathFromProjectRoot( } export const getNamespaceNameFromConfigFile = ( - projDir: p.DocumentUri, -): execResult => { + projDir: NormalizedPath, +): execResult => { let config = lookup.readConfig(projDir); let result = ""; @@ -513,9 +604,9 @@ export const getNamespaceNameFromConfigFile = ( export let getCompiledFilePath = ( filePath: string, - projDir: string, -): execResult => { - let error: execResult = { + projDir: NormalizedPath, +): execResult => { + let error: execResult = { kind: "error", error: "Could not read ReScript config file", }; @@ -543,9 +634,15 @@ export let getCompiledFilePath = ( result = compiledPath; } + // Normalize the path before returning + const normalizedResult = normalizePath(result); + if (normalizedResult == null) { + return error; + } + return { kind: "success", - result, + result: normalizedResult, }; }; @@ -617,8 +714,10 @@ export let runBuildWatcherUsingValidBuildPath = ( */ // parser helpers -export let pathToURI = (file: string) => { - return process.platform === "win32" ? `file:\\\\\\${file}` : `file://${file}`; +export let pathToURI = (file: NormalizedPath): FileURI => { + return ( + process.platform === "win32" ? `file:\\\\\\${file}` : `file://${file}` + ) as FileURI; }; let parseFileAndRange = (fileAndRange: string) => { // https://github.com/rescript-lang/rescript-compiler/blob/0a3f4bb32ca81e89cefd5a912b8795878836f883/jscomp/super_errors/super_location.ml#L15-L25 @@ -641,8 +740,18 @@ let parseFileAndRange = (fileAndRange: string) => { let match = trimmedFileAndRange.match(regex); if (match === null) { // no location! 
Though LSP insist that we provide at least a dummy location + const normalizedPath = normalizePath(trimmedFileAndRange); + if (normalizedPath == null) { + return { + file: pathToURI(normalizePath("")!), + range: { + start: { line: 0, character: 0 }, + end: { line: 0, character: 0 }, + }, + }; + } return { - file: pathToURI(trimmedFileAndRange), + file: pathToURI(normalizedPath), range: { start: { line: 0, character: 0 }, end: { line: 0, character: 0 }, @@ -686,8 +795,15 @@ let parseFileAndRange = (fileAndRange: string) => { end: { line: parseInt(endLine) - 1, character: parseInt(endChar) }, }; } + const normalizedFile = normalizePath(file); + if (normalizedFile == null) { + return { + file: pathToURI(normalizePath("")!), + range, + }; + } return { - file: pathToURI(file), + file: pathToURI(normalizedFile), range, }; }; @@ -854,9 +970,10 @@ export let parseCompilerLogOutput = async ( for (const parsedDiagnostic of parsedDiagnostics) { let [fileAndRangeLine, ...diagnosticMessage] = parsedDiagnostic.content; let { file, range } = parseFileAndRange(fileAndRangeLine); + const fileUri = file as FileURI; - if (result[file] == null) { - result[file] = []; + if (result[fileUri] == null) { + result[fileUri] = []; } // remove start and end whitespaces/newlines @@ -882,7 +999,7 @@ export let parseCompilerLogOutput = async ( range, }); - result[file].push(diagnostic); + result[fileUri].push(diagnostic); } } From a41ae321ce399bb62709f113caa91045aeefba20 Mon Sep 17 00:00:00 2001 From: nojaf Date: Fri, 28 Nov 2025 14:38:55 +0100 Subject: [PATCH 04/56] Remove some redundant casts --- server/src/codeActions.ts | 2 +- server/src/incrementalCompilation.ts | 11 ++++------- server/src/server.ts | 13 ++++++++----- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/server/src/codeActions.ts b/server/src/codeActions.ts index e3577ddaa..30a0c24d2 100644 --- a/server/src/codeActions.ts +++ b/server/src/codeActions.ts @@ -630,7 +630,7 @@ let simpleAddMissingCases: 
codeActionExtractor = async ({ .join("") .trim(); - let filePath = utils.uriToNormalizedPath(file as utils.FileURI); + let filePath = utils.uriToNormalizedPath(file); let newSwitchCode = await utils.runAnalysisAfterSanityCheck(filePath, [ "codemod", diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index cb9fb644c..06e4a90ae 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -587,8 +587,7 @@ async function compileContents( const change = Object.values(ca.codeAction.edit.changes)[0]; ca.codeAction.edit.changes = { - [utils.pathToURI(entry.file.sourceFilePath) as utils.FileURI]: - change, + [utils.pathToURI(entry.file.sourceFilePath)]: change, }; } }); @@ -655,9 +654,7 @@ async function compileContents( } } - const fileUri = utils.pathToURI( - entry.file.sourceFilePath, - ) as utils.FileURI; + const fileUri = utils.pathToURI(entry.file.sourceFilePath); // Update filesWithDiagnostics to track this file // entry.project.rootPath is guaranteed to match a key in projectsFiles @@ -671,10 +668,10 @@ async function compileContents( const allDiagnostics = [...res, ...compilerDiagnosticsForFile]; if (allDiagnostics.length > 0) { - projectFile.filesWithDiagnostics.add(fileUri as utils.FileURI); + projectFile.filesWithDiagnostics.add(fileUri); } else { // Only remove if there are no diagnostics at all - projectFile.filesWithDiagnostics.delete(fileUri as utils.FileURI); + projectFile.filesWithDiagnostics.delete(fileUri); } } diff --git a/server/src/server.ts b/server/src/server.ts index 7692e4fe5..7d20e4bcc 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -167,11 +167,12 @@ let sendUpdatedDiagnostics = async () => { codeActionsFromDiagnostics = codeActions; // diff - Object.keys(filesAndErrors).forEach((file) => { - const fileUri = file as utils.FileURI; + ( + Object.entries(filesAndErrors) as Array<[utils.FileURI, p.Diagnostic[]]> + ).forEach(([fileUri, diagnostics]) => { let 
params: p.PublishDiagnosticsParams = { uri: fileUri, - diagnostics: filesAndErrors[fileUri], + diagnostics, }; let notification: p.NotificationMessage = { jsonrpc: c.jsonrpcVersion, @@ -216,8 +217,10 @@ let sendUpdatedDiagnostics = async () => { let errorCount = 0; let warningCount = 0; - for (const [fileUri, diags] of Object.entries(filesAndErrors)) { - const filePath = utils.uriToNormalizedPath(fileUri as utils.FileURI); + for (const [fileUri, diags] of Object.entries(filesAndErrors) as Array< + [utils.FileURI, p.Diagnostic[]] + >) { + const filePath = utils.uriToNormalizedPath(fileUri); if (filePath.startsWith(projectRootPath)) { for (const d of diags as v.Diagnostic[]) { if (d.severity === v.DiagnosticSeverity.Error) errorCount++; From 36121d6b85cc4ba8d959f44d31a10f10de6f92c5 Mon Sep 17 00:00:00 2001 From: nojaf Date: Fri, 28 Nov 2025 14:47:51 +0100 Subject: [PATCH 05/56] Code review feedback --- server/src/bsc-args/rewatch.ts | 2 +- server/src/incrementalCompilation.ts | 21 +++++++++------------ server/src/lookup.ts | 21 +++++++++++++++------ server/src/server.ts | 5 ++++- server/src/utils.ts | 26 +++++++------------------- 5 files changed, 36 insertions(+), 39 deletions(-) diff --git a/server/src/bsc-args/rewatch.ts b/server/src/bsc-args/rewatch.ts index 21ee17893..ae607c070 100644 --- a/server/src/bsc-args/rewatch.ts +++ b/server/src/bsc-args/rewatch.ts @@ -24,7 +24,7 @@ async function getRuntimePath( export async function getRewatchBscArgs( send: (msg: p.Message) => void, bscBinaryLocation: utils.NormalizedPath | null, - projectsFiles: Map, + projectsFiles: Map, entry: IncrementallyCompiledFileInfo, ): Promise { const rewatchCacheEntry = entry.buildRewatch; diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 06e4a90ae..da1bd4e56 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -263,24 +263,21 @@ function triggerIncrementalCompilationOfFile( if (debug()) 
console.log("Did not find open project for " + filePath); return; } - // projectRootPath is already the correct normalized key format - const actualProjectRootPath = projectRootPath; // computeWorkspaceRootPathFromLockfile returns null if lockfile found (local package) or if no parent found - const computedWorkspaceRoot = utils.computeWorkspaceRootPathFromLockfile( - actualProjectRootPath, - ); + const computedWorkspaceRoot = + utils.computeWorkspaceRootPathFromLockfile(projectRootPath); // If null, it means either a lockfile was found (local package) or no parent project root exists // In both cases, we default to actualProjectRootPath const workspaceRootPath: NormalizedPath = - computedWorkspaceRoot ?? actualProjectRootPath; + computedWorkspaceRoot ?? projectRootPath; // Determine if lockfile was found for debug logging // If computedWorkspaceRoot is null and actualProjectRootPath is not null, check if parent exists const foundRewatchLockfileInProjectRoot = computedWorkspaceRoot == null && - actualProjectRootPath != null && - utils.findProjectRootOfFile(actualProjectRootPath, true) != null; + projectRootPath != null && + utils.findProjectRootOfFile(projectRootPath, true) != null; if (foundRewatchLockfileInProjectRoot && debug()) { console.log( @@ -306,14 +303,14 @@ function triggerIncrementalCompilationOfFile( : moduleName; const incrementalFolderPath: NormalizedPath = path.join( - actualProjectRootPath, + projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION, ) as NormalizedPath; let originalTypeFileLocation = path.resolve( - actualProjectRootPath, + projectRootPath, c.compilerDirPartialPath, - path.relative(actualProjectRootPath, filePath), + path.relative(projectRootPath, filePath), ); const parsed = path.parse(originalTypeFileLocation); @@ -333,7 +330,7 @@ function triggerIncrementalCompilationOfFile( }, project: { workspaceRootPath, - rootPath: actualProjectRootPath, + rootPath: projectRootPath, callArgs: Promise.resolve([]), bscBinaryLocation, 
incrementalFolderPath, diff --git a/server/src/lookup.ts b/server/src/lookup.ts index 893137607..68c9910e7 100644 --- a/server/src/lookup.ts +++ b/server/src/lookup.ts @@ -18,15 +18,24 @@ const getCompiledFolderName = (moduleFormat: ModuleFormat): string => { } }; -export const replaceFileExtension = ( - filePath: T, +export const replaceFileExtension = (filePath: string, ext: string): string => { + let name = path.basename(filePath, path.extname(filePath)); + return path.format({ dir: path.dirname(filePath), name, ext }); +}; + +export const replaceFileExtensionWithNormalizedPath = ( + filePath: NormalizedPath, ext: string, -): T => { +): NormalizedPath => { let name = path.basename(filePath, path.extname(filePath)); const result = path.format({ dir: path.dirname(filePath), name, ext }); - // If input was NormalizedPath, result is still normalized - // If input was string, result is string - return result as T; + // path.format() doesn't preserve normalization, so we need to normalize the result + const normalized = normalizePath(result); + if (normalized == null) { + // Should never happen, but handle gracefully + return result as NormalizedPath; + } + return normalized; }; // Check if filePartialPath exists at directory and return the joined path, diff --git a/server/src/server.ts b/server/src/server.ts index 7d20e4bcc..ce6ea4667 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1181,7 +1181,10 @@ async function createInterface(msg: p.RequestMessage): Promise { let result = typeof response.result === "string" ? 
response.result : ""; try { - let resiPath = lookup.replaceFileExtension(filePath, c.resiExt); + let resiPath = lookup.replaceFileExtensionWithNormalizedPath( + filePath, + c.resiExt, + ); fs.writeFileSync(resiPath, result, { encoding: "utf-8" }); let response: p.ResponseMessage = { jsonrpc: c.jsonrpcVersion, diff --git a/server/src/utils.ts b/server/src/utils.ts index ba10c5cef..ee7eb11c2 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -104,22 +104,15 @@ let findProjectRootOfFileInDir = ( }; // TODO: races here? -// TODO: this doesn't handle file:/// scheme export let findProjectRootOfFile = ( source: NormalizedPath, allowDir?: boolean, ): NormalizedPath | null => { - // source is already normalized - const normalizedSource = source; - // First look in project files (keys are already normalized) let foundRootFromProjectFiles: NormalizedPath | null = null; for (const rootPath of projectsFiles.keys()) { // Both are normalized, so direct comparison works - if ( - normalizedSource.startsWith(rootPath) && - (!allowDir || normalizedSource !== rootPath) - ) { + if (source.startsWith(rootPath) && (!allowDir || source !== rootPath)) { // Prefer the longest path (most nested) if ( foundRootFromProjectFiles == null || @@ -742,13 +735,9 @@ let parseFileAndRange = (fileAndRange: string) => { // no location! 
Though LSP insist that we provide at least a dummy location const normalizedPath = normalizePath(trimmedFileAndRange); if (normalizedPath == null) { - return { - file: pathToURI(normalizePath("")!), - range: { - start: { line: 0, character: 0 }, - end: { line: 0, character: 0 }, - }, - }; + // If we can't normalize the file path, throw an error + // This should never happen in practice, but we need to handle it + throw new Error(`Failed to normalize file path: ${trimmedFileAndRange}`); } return { file: pathToURI(normalizedPath), @@ -797,10 +786,9 @@ let parseFileAndRange = (fileAndRange: string) => { } const normalizedFile = normalizePath(file); if (normalizedFile == null) { - return { - file: pathToURI(normalizePath("")!), - range, - }; + // If we can't normalize the file path, throw an error + // This should never happen in practice, but we need to handle it + throw new Error(`Failed to normalize file path: ${file}`); } return { file: pathToURI(normalizedFile), From 30b57e0cea19e9b5acc27dd42aafbe29fc682e41 Mon Sep 17 00:00:00 2001 From: nojaf Date: Fri, 28 Nov 2025 15:14:11 +0100 Subject: [PATCH 06/56] Fixed inconsistency between diagnostic tracking and publishing. 
--- server/src/incrementalCompilation.ts | 12 ++++++------ server/src/lookup.ts | 6 ------ 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index da1bd4e56..f730e9042 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -653,17 +653,17 @@ async function compileContents( const fileUri = utils.pathToURI(entry.file.sourceFilePath); + // Get compiler diagnostics from main build (if any) and combine with incremental diagnostics + const compilerDiagnosticsForFile = + getCurrentCompilerDiagnosticsForFile(fileUri); + const allDiagnostics = [...res, ...compilerDiagnosticsForFile]; + // Update filesWithDiagnostics to track this file // entry.project.rootPath is guaranteed to match a key in projectsFiles // (see triggerIncrementalCompilationOfFile where the entry is created) const projectFile = projectsFiles.get(entry.project.rootPath); if (projectFile != null) { - // Get compiler diagnostics from main build (if any) - const compilerDiagnosticsForFile = - getCurrentCompilerDiagnosticsForFile(fileUri); - const allDiagnostics = [...res, ...compilerDiagnosticsForFile]; - if (allDiagnostics.length > 0) { projectFile.filesWithDiagnostics.add(fileUri); } else { @@ -677,7 +677,7 @@ async function compileContents( method: "textDocument/publishDiagnostics", params: { uri: fileUri, - diagnostics: res, + diagnostics: allDiagnostics, }, }; send(notification); diff --git a/server/src/lookup.ts b/server/src/lookup.ts index 68c9910e7..6ab3bc6c8 100644 --- a/server/src/lookup.ts +++ b/server/src/lookup.ts @@ -60,9 +60,6 @@ export const findFilePathFromProjectRoot = ( } const parentDir = normalizePath(parentDirStr); - if (parentDir == null) { - return null; - } return findFilePathFromProjectRoot(parentDir, filePartialPath); }; @@ -153,9 +150,6 @@ export const getFilenameFromRootBsconfig = ( ): NormalizedPath | null => { // Start searching from the parent 
directory of projDir to find the workspace root const parentDir = normalizePath(path.dirname(projDir)); - if (parentDir == null) { - return null; - } let rootConfigPath = findFilePathFromProjectRoot( parentDir, From 5a2f31571556057e6479207f4c0aec8ec38c44fa Mon Sep 17 00:00:00 2001 From: Matt Champagne Date: Fri, 28 Nov 2025 23:09:17 -0500 Subject: [PATCH 07/56] fix: resolve symlinks when finding platform binaries This fixes an issue where the extension fails to find ReScript platform binaries (like bsc.exe) when using package managers that use symlinked node_modules structures, such as Deno and pnpm. The issue occurs because `path.join(rescriptDir, "..")` performs string manipulation rather than following the symlink. When `node_modules/rescript` is a symlink (e.g., to `.deno/rescript@12.0.0/node_modules/rescript`), the parent directory resolves to `node_modules/` instead of the actual parent where `@rescript/darwin-arm64` etc. are located. Using `fs.realpathSync()` to resolve the symlink first ensures the correct parent directory is found. --- server/src/utils.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/utils.ts b/server/src/utils.ts index 9446585cd..09134571f 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -145,8 +145,10 @@ let findBinary = async ( // TODO: export `binPaths` from `rescript` package so that we don't need to // copy the logic for figuring out `target`. const target = `${process.platform}-${process.arch}`; + // Use realpathSync to resolve symlinks, which is necessary for package + // managers like Deno and pnpm that use symlinked node_modules structures. 
const targetPackagePath = path.join( - rescriptDir, + fs.realpathSync(rescriptDir), "..", `@rescript/${target}/bin.js`, ); From fbbdd8e0685514443527555e456b9b098276213c Mon Sep 17 00:00:00 2001 From: nojaf Date: Sun, 30 Nov 2025 11:26:54 +0100 Subject: [PATCH 08/56] Add changelog --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 54407d177..11efe49d5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,12 @@ > - :house: [Internal] > - :nail_care: [Polish] +## [Unreleased] + +#### :nail_care: Polish + +- Resolve symlinks when finding platform binaries. https://github.com/rescript-lang/rescript-vscode/pull/1154 + ## 1.70.0 #### :bug: Bug fix From 3013116cfb1d15aea730ee7c4bb95638342e8986 Mon Sep 17 00:00:00 2001 From: nojaf Date: Sun, 30 Nov 2025 11:35:37 +0100 Subject: [PATCH 09/56] Remove null checks --- server/src/utils.ts | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/server/src/utils.ts b/server/src/utils.ts index ee7eb11c2..6f1c15ed5 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -628,10 +628,7 @@ export let getCompiledFilePath = ( } // Normalize the path before returning - const normalizedResult = normalizePath(result); - if (normalizedResult == null) { - return error; - } + const normalizedResult = normalizePath(result)!; return { kind: "success", @@ -733,12 +730,7 @@ let parseFileAndRange = (fileAndRange: string) => { let match = trimmedFileAndRange.match(regex); if (match === null) { // no location! 
Though LSP insist that we provide at least a dummy location - const normalizedPath = normalizePath(trimmedFileAndRange); - if (normalizedPath == null) { - // If we can't normalize the file path, throw an error - // This should never happen in practice, but we need to handle it - throw new Error(`Failed to normalize file path: ${trimmedFileAndRange}`); - } + const normalizedPath = normalizePath(trimmedFileAndRange)!; return { file: pathToURI(normalizedPath), range: { @@ -784,12 +776,7 @@ let parseFileAndRange = (fileAndRange: string) => { end: { line: parseInt(endLine) - 1, character: parseInt(endChar) }, }; } - const normalizedFile = normalizePath(file); - if (normalizedFile == null) { - // If we can't normalize the file path, throw an error - // This should never happen in practice, but we need to handle it - throw new Error(`Failed to normalize file path: ${file}`); - } + const normalizedFile = normalizePath(file)!; return { file: pathToURI(normalizedFile), range, From a6056d28c58eef71cf868c74fe7e93ca42798a36 Mon Sep 17 00:00:00 2001 From: Florian Verdonck Date: Sun, 30 Nov 2025 11:36:36 +0100 Subject: [PATCH 10/56] Update server/src/utils.ts Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- server/src/utils.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/utils.ts b/server/src/utils.ts index 6f1c15ed5..439185896 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -141,11 +141,11 @@ export let findProjectRootOfFile = ( /** * Gets the project file for a given project root path. * - * All keys in `projectsFiles` are normalized (see `openedFile` in server.ts), - * so this function accepts either a normalized or non-normalized path and - * normalizes it before lookup. + * All keys in `projectsFiles` are normalized (see `openedFile` in server.ts). + * This function accepts a normalized project root path (or null) and performs a direct lookup. 
+ * The path must already be normalized before calling this function. * - * @param projectRootPath - The project root path to look up (can be null or already normalized) + * @param projectRootPath - The normalized project root path to look up (or null) * @returns The project file if found, null otherwise */ export let getProjectFile = ( From 01e1549d84c06da3012b87fe17ef23fc7a1c02d9 Mon Sep 17 00:00:00 2001 From: Florian Verdonck Date: Sun, 30 Nov 2025 11:37:17 +0100 Subject: [PATCH 11/56] Update server/src/server.ts Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- server/src/server.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/server/src/server.ts b/server/src/server.ts index ce6ea4667..73417a4b3 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1642,7 +1642,11 @@ async function onMessage(msg: p.Message) { // The type says it's NormalizedPath, so we ensure it actually is let projectRootPath = utils.normalizePath(msg_.projectRootPath); if (projectRootPath == null) { - // Should never happen, but handle gracefully + // Should never happen, but handle gracefully and log a warning + console.warn( + "[ReScript Language Server] Failed to normalize projectRootPath from clientSentBuildAction:", + msg_.projectRootPath + ); return; } // TODO: sometime stale .bsb.lock dangling From 483aae34a95b8d9890c5dedf1dd95b5be602383e Mon Sep 17 00:00:00 2001 From: Florian Verdonck Date: Sun, 30 Nov 2025 11:37:30 +0100 Subject: [PATCH 12/56] Update server/src/incrementalCompilation.ts Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- server/src/incrementalCompilation.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index f730e9042..1e6eb6099 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -273,7 +273,7 @@ function triggerIncrementalCompilationOfFile( 
computedWorkspaceRoot ?? projectRootPath; // Determine if lockfile was found for debug logging - // If computedWorkspaceRoot is null and actualProjectRootPath is not null, check if parent exists + // If computedWorkspaceRoot is null and projectRootPath is not null, check if parent exists const foundRewatchLockfileInProjectRoot = computedWorkspaceRoot == null && projectRootPath != null && From 718a6f74f02f7593bc94dd4339d5dd3e9b2119c7 Mon Sep 17 00:00:00 2001 From: Florian Verdonck Date: Sun, 30 Nov 2025 11:40:56 +0100 Subject: [PATCH 13/56] Update server/src/incrementalCompilation.ts Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- server/src/incrementalCompilation.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 1e6eb6099..d0d46a230 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -268,7 +268,7 @@ function triggerIncrementalCompilationOfFile( const computedWorkspaceRoot = utils.computeWorkspaceRootPathFromLockfile(projectRootPath); // If null, it means either a lockfile was found (local package) or no parent project root exists - // In both cases, we default to actualProjectRootPath + // In both cases, we default to projectRootPath const workspaceRootPath: NormalizedPath = computedWorkspaceRoot ?? 
projectRootPath; From d7936795baeef088bd43b47bfb3236de9ac76b4b Mon Sep 17 00:00:00 2001 From: nojaf Date: Sun, 30 Nov 2025 11:41:37 +0100 Subject: [PATCH 14/56] clean up --- server/src/server.ts | 8 ++------ server/src/utils.ts | 3 +-- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/server/src/server.ts b/server/src/server.ts index 73417a4b3..ae3e9c9a2 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1069,11 +1069,7 @@ let updateDiagnosticSyntax = async ( if (allDiagnostics.length > 0) { projectFile.filesWithDiagnostics.add(fileUri); } else { - // Only remove if there are no compiler diagnostics either - // (compiler diagnostics are tracked separately in filesDiagnostics) - if (compilerDiagnosticsForFile.length === 0) { - projectFile.filesWithDiagnostics.delete(fileUri); - } + projectFile.filesWithDiagnostics.delete(fileUri); } } @@ -1645,7 +1641,7 @@ async function onMessage(msg: p.Message) { // Should never happen, but handle gracefully and log a warning console.warn( "[ReScript Language Server] Failed to normalize projectRootPath from clientSentBuildAction:", - msg_.projectRootPath + msg_.projectRootPath, ); return; } diff --git a/server/src/utils.ts b/server/src/utils.ts index 439185896..bbec24ae3 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -83,8 +83,7 @@ export let createFileInTempDir = (extension = "") => { let findProjectRootOfFileInDir = ( source: NormalizedPath, ): NormalizedPath | null => { - let dirStr = path.dirname(source); - const dir = normalizePath(dirStr); + const dir = normalizePath(path.dirname(source)); if (dir == null) { return null; } From add55969f7e23bab575abddd4146501b74c89eac Mon Sep 17 00:00:00 2001 From: nojaf Date: Sun, 30 Nov 2025 11:43:54 +0100 Subject: [PATCH 15/56] Correct pathToURI --- server/src/utils.ts | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/server/src/utils.ts b/server/src/utils.ts index bbec24ae3..8b375f5f6 100644 --- a/server/src/utils.ts +++ 
b/server/src/utils.ts @@ -10,7 +10,7 @@ import fs from "fs"; import fsAsync from "fs/promises"; import * as os from "os"; import semver from "semver"; -import { fileURLToPath } from "url"; +import { fileURLToPath, pathToFileURL } from "url"; import * as codeActions from "./codeActions"; import * as c from "./constants"; @@ -704,9 +704,7 @@ export let runBuildWatcherUsingValidBuildPath = ( // parser helpers export let pathToURI = (file: NormalizedPath): FileURI => { - return ( - process.platform === "win32" ? `file:\\\\\\${file}` : `file://${file}` - ) as FileURI; + return pathToFileURL(file).toString() as FileURI; }; let parseFileAndRange = (fileAndRange: string) => { // https://github.com/rescript-lang/rescript-compiler/blob/0a3f4bb32ca81e89cefd5a912b8795878836f883/jscomp/super_errors/super_location.ml#L15-L25 From b142a6381e52f6101f05be487f6b818703154574 Mon Sep 17 00:00:00 2001 From: nojaf Date: Sun, 30 Nov 2025 12:09:24 +0100 Subject: [PATCH 16/56] More usage of NormalizedPath --- client/src/commands/code_analysis.ts | 8 ++++-- client/src/commands/dump_debug.ts | 4 ++- client/src/utils.ts | 37 +++++++++++++++++++++++++--- server/src/bsc-args/rewatch.ts | 5 ++-- server/src/codeActions.ts | 4 +-- server/src/find-runtime.ts | 19 +++++++++----- server/src/incrementalCompilation.ts | 21 +++++++++------- server/src/server.ts | 7 +++--- server/src/utils.ts | 15 +++++------ 9 files changed, 84 insertions(+), 36 deletions(-) diff --git a/client/src/commands/code_analysis.ts b/client/src/commands/code_analysis.ts index e739573f0..61bb4f156 100644 --- a/client/src/commands/code_analysis.ts +++ b/client/src/commands/code_analysis.ts @@ -14,7 +14,11 @@ import { OutputChannel, StatusBarItem, } from "vscode"; -import { findProjectRootOfFileInDir, getBinaryPath } from "../utils"; +import { + findProjectRootOfFileInDir, + getBinaryPath, + NormalizedPath, +} from "../utils"; export let statusBarItem = { setToStopText: (codeAnalysisRunningStatusBarItem: StatusBarItem) => { @@ 
-208,7 +212,7 @@ export const runCodeAnalysisWithReanalyze = ( let currentDocument = window.activeTextEditor.document; let cwd = targetDir ?? path.dirname(currentDocument.uri.fsPath); - let projectRootPath: string | null = findProjectRootOfFileInDir( + let projectRootPath: NormalizedPath | null = findProjectRootOfFileInDir( currentDocument.uri.fsPath, ); diff --git a/client/src/commands/dump_debug.ts b/client/src/commands/dump_debug.ts index a845848c3..58e4054c2 100644 --- a/client/src/commands/dump_debug.ts +++ b/client/src/commands/dump_debug.ts @@ -11,6 +11,7 @@ import { createFileInTempDir, findProjectRootOfFileInDir, getBinaryPath, + NormalizedPath, } from "../utils"; import * as path from "path"; @@ -136,7 +137,8 @@ export const dumpDebug = async ( const { line: endLine, character: endChar } = editor.selection.end; const filePath = editor.document.uri.fsPath; - let projectRootPath: string | null = findProjectRootOfFileInDir(filePath); + let projectRootPath: NormalizedPath | null = + findProjectRootOfFileInDir(filePath); const binaryPath = getBinaryPath( "rescript-editor-analysis.exe", projectRootPath, diff --git a/client/src/utils.ts b/client/src/utils.ts index 443dc7b60..bf2756aa4 100644 --- a/client/src/utils.ts +++ b/client/src/utils.ts @@ -9,6 +9,26 @@ import { DocumentUri } from "vscode-languageclient"; * to the server itself. */ +/** + * Branded type for normalized file paths. + * + * All paths should be normalized to ensure consistent lookups and prevent + * path format mismatches (e.g., trailing slashes, relative vs absolute paths). + * + * Use `normalizePath()` to convert a regular path to a `NormalizedPath`. + */ +export type NormalizedPath = string & { __brand: "NormalizedPath" }; + +/** + * Normalizes a file path and returns it as a `NormalizedPath`. 
+ * + * @param filePath - The path to normalize (can be null) + * @returns The normalized path, or null if input was null + */ +export function normalizePath(filePath: string | null): NormalizedPath | null { + return filePath != null ? (path.normalize(filePath) as NormalizedPath) : null; +} + type binaryName = "rescript-editor-analysis.exe" | "rescript-tools.exe"; const platformDir = @@ -29,7 +49,7 @@ export const getLegacyBinaryProdPath = (b: binaryName) => export const getBinaryPath = ( binaryName: "rescript-editor-analysis.exe" | "rescript-tools.exe", - projectRootPath: string | null = null, + projectRootPath: NormalizedPath | null = null, ): string | null => { const binaryFromCompilerPackage = path.join( projectRootPath ?? "", @@ -59,15 +79,24 @@ export const createFileInTempDir = (prefix = "", extension = "") => { return path.join(os.tmpdir(), tempFileName); }; -export let findProjectRootOfFileInDir = (source: string): null | string => { - let dir = path.dirname(source); +export let findProjectRootOfFileInDir = ( + source: string, +): NormalizedPath | null => { + const normalizedSource = normalizePath(source); + if (normalizedSource == null) { + return null; + } + const dir = normalizePath(path.dirname(normalizedSource)); + if (dir == null) { + return null; + } if ( fs.existsSync(path.join(dir, "rescript.json")) || fs.existsSync(path.join(dir, "bsconfig.json")) ) { return dir; } else { - if (dir === source) { + if (dir === normalizedSource) { // reached top return null; } else { diff --git a/server/src/bsc-args/rewatch.ts b/server/src/bsc-args/rewatch.ts index ae607c070..4cd229d5a 100644 --- a/server/src/bsc-args/rewatch.ts +++ b/server/src/bsc-args/rewatch.ts @@ -17,7 +17,7 @@ export type RewatchCompilerArgs = { async function getRuntimePath( entry: IncrementallyCompiledFileInfo, -): Promise { +): Promise { return utils.getRuntimePathFromWorkspaceRoot(entry.project.workspaceRootPath); } @@ -103,7 +103,8 @@ export async function getRewatchBscArgs( 
includePrerelease: true, }) ) { - let rescriptRuntime: string | null = await getRuntimePath(entry); + let rescriptRuntime: utils.NormalizedPath | null = + await getRuntimePath(entry); if (rescriptRuntime !== null) { env["RESCRIPT_RUNTIME"] = rescriptRuntime; diff --git a/server/src/codeActions.ts b/server/src/codeActions.ts index 30a0c24d2..e361f1cac 100644 --- a/server/src/codeActions.ts +++ b/server/src/codeActions.ts @@ -7,7 +7,7 @@ import * as utils from "./utils"; export type fileCodeActions = { range: p.Range; codeAction: p.CodeAction }; export type filesCodeActions = { - [key: string]: fileCodeActions[]; + [key: utils.FileURI]: fileCodeActions[]; }; interface findCodeActionsConfig { @@ -326,7 +326,7 @@ let handleUndefinedRecordFieldsAction = ({ }: { recordFieldNames: string[]; codeActions: filesCodeActions; - file: string; + file: utils.FileURI; range: p.Range; diagnostic: p.Diagnostic; todoValue: string; diff --git a/server/src/find-runtime.ts b/server/src/find-runtime.ts index 42512e044..abb68e517 100644 --- a/server/src/find-runtime.ts +++ b/server/src/find-runtime.ts @@ -1,6 +1,7 @@ import { readdir, stat as statAsync, readFile } from "fs/promises"; import { join, resolve } from "path"; import { compilerInfoPartialPath } from "./constants"; +import { NormalizedPath } from "./utils"; // Efficient parallel folder traversal to find node_modules directories async function findNodeModulesDirs( @@ -92,14 +93,16 @@ async function findRescriptRuntimeInAlternativeLayout( return results; } -async function findRuntimePath(project: string): Promise { +async function findRuntimePath( + project: NormalizedPath, +): Promise { // Try a compiler-info.json file first const compilerInfo = resolve(project, compilerInfoPartialPath); try { const contents = await readFile(compilerInfo, "utf8"); const compileInfo: { runtime_path?: string } = JSON.parse(contents); if (compileInfo && compileInfo.runtime_path) { - return [compileInfo.runtime_path]; + return 
[compileInfo.runtime_path as NormalizedPath]; } } catch { // Ignore errors, fallback to node_modules search @@ -146,12 +149,16 @@ async function findRuntimePath(project: string): Promise { }), ).then((results) => results.flatMap((x) => x)); - return rescriptRuntimeDirs.map((runtime) => resolve(runtime)); + return rescriptRuntimeDirs.map( + (runtime) => resolve(runtime) as NormalizedPath, + ); } -function findRuntimeCached(): (project: string) => Promise { - const cache = new Map(); - return async (project: string) => { +function findRuntimeCached(): ( + project: NormalizedPath, +) => Promise { + const cache = new Map(); + return async (project: NormalizedPath) => { if (cache.has(project)) { return cache.get(project)!; } diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index d0d46a230..8fc409c7f 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -1,7 +1,6 @@ import * as path from "path"; import fs from "fs"; import * as utils from "./utils"; -import readline from "readline"; import { performance } from "perf_hooks"; import * as p from "vscode-languageserver-protocol"; import * as cp from "node:child_process"; @@ -41,9 +40,9 @@ export type IncrementallyCompiledFileInfo = { /** Namespaced module name of the source file. */ moduleNameNamespaced: string; /** Path to where the incremental file is saved. */ - incrementalFilePath: string; + incrementalFilePath: NormalizedPath; /** Location of the original type file. */ - originalTypeFileLocation: string; + originalTypeFileLocation: NormalizedPath; }; buildSystem: "bsb" | "rewatch"; /** Cache for build.ninja assets. */ @@ -55,7 +54,7 @@ export type IncrementallyCompiledFileInfo = { } | null; /** Cache for rewatch compiler args. */ buildRewatch: { - lastFile: string; + lastFile: NormalizedPath; compilerArgs: RewatchCompilerArgs; } | null; /** Info of the currently active incremental compilation. 
`null` if no incremental compilation is active. */ @@ -88,8 +87,9 @@ const incrementallyCompiledFileInfo: Map< NormalizedPath, IncrementallyCompiledFileInfo > = new Map(); -const hasReportedFeatureFailedError: Set = new Set(); -const originalTypeFileToFilePath: Map = new Map(); +const hasReportedFeatureFailedError: Set = new Set(); +const originalTypeFileToFilePath: Map = + new Map(); export function incrementalCompilationFileChanged(changedPath: NormalizedPath) { const filePath = originalTypeFileToFilePath.get(changedPath); @@ -311,12 +311,12 @@ function triggerIncrementalCompilationOfFile( projectRootPath, c.compilerDirPartialPath, path.relative(projectRootPath, filePath), - ); + ) as NormalizedPath; const parsed = path.parse(originalTypeFileLocation); parsed.ext = ext === ".res" ? ".cmt" : ".cmti"; parsed.base = ""; - originalTypeFileLocation = path.format(parsed); + originalTypeFileLocation = path.format(parsed) as NormalizedPath; incrementalFileCacheEntry = { file: { @@ -326,7 +326,10 @@ function triggerIncrementalCompilationOfFile( moduleNameNamespaced, sourceFileName: moduleName + ext, sourceFilePath: filePath, - incrementalFilePath: path.join(incrementalFolderPath, moduleName + ext), + incrementalFilePath: path.join( + incrementalFolderPath, + moduleName + ext, + ) as NormalizedPath, }, project: { workspaceRootPath, diff --git a/server/src/server.ts b/server/src/server.ts index ae3e9c9a2..f747d630b 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -27,10 +27,11 @@ import { onErrorReported } from "./errorReporter"; import * as ic from "./incrementalCompilation"; import config, { extensionConfiguration } from "./config"; import { projectsFiles } from "./projectFiles"; +import { NormalizedPath } from "./utils"; // Absolute paths to all the workspace folders // Configured during the initialize request -export const workspaceFolders = new Set(); +export const workspaceFolders = new Set(); // This holds client capabilities specific to our extension, 
and not necessarily // related to the LS protocol. It's for enabling/disabling features that might @@ -75,7 +76,7 @@ const projectCompilationStates: Map = type CompilationStatusPayload = { project: string; - projectRootPath: string; + projectRootPath: NormalizedPath; status: "compiling" | "success" | "error" | "warning"; errorCount: number; warningCount: number; @@ -921,7 +922,7 @@ async function codeAction(msg: p.RequestMessage): Promise { // Check local code actions coming from the diagnostics, or from incremental compilation. let localResults: v.CodeAction[] = []; const fromDiagnostics = - codeActionsFromDiagnostics[params.textDocument.uri] ?? []; + codeActionsFromDiagnostics[params.textDocument.uri as utils.FileURI] ?? []; const fromIncrementalCompilation = ic.getCodeActionsFromIncrementalCompilation(filePath) ?? []; [...fromDiagnostics, ...fromIncrementalCompilation].forEach( diff --git a/server/src/utils.ts b/server/src/utils.ts index 8b375f5f6..b2e414309 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -74,10 +74,10 @@ export function uriToNormalizedPath(uri: FileURI): NormalizedPath { let tempFilePrefix = "rescript_format_file_" + process.pid + "_"; let tempFileId = 0; -export let createFileInTempDir = (extension = "") => { +export let createFileInTempDir = (extension = ""): NormalizedPath => { let tempFileName = tempFilePrefix + tempFileId + extension; tempFileId = tempFileId + 1; - return path.join(os.tmpdir(), tempFileName); + return path.join(os.tmpdir(), tempFileName) as NormalizedPath; }; let findProjectRootOfFileInDir = ( @@ -501,7 +501,7 @@ export function computeWorkspaceRootPathFromLockfile( } // Shared cache: key is either workspace root path or project root path -const runtimePathCache = new Map(); +const runtimePathCache = new Map(); /** * Gets the runtime path from a workspace root path. 
@@ -509,15 +509,16 @@ const runtimePathCache = new Map(); */ export async function getRuntimePathFromWorkspaceRoot( workspaceRootPath: NormalizedPath, -): Promise { +): Promise { // Check cache first if (runtimePathCache.has(workspaceRootPath)) { return runtimePathCache.get(workspaceRootPath)!; } // Compute and cache - let rescriptRuntime: string | null = - config.extensionConfiguration.runtimePath ?? null; + let rescriptRuntime: NormalizedPath | null = normalizePath( + config.extensionConfiguration.runtimePath ?? null, + ); if (rescriptRuntime !== null) { runtimePathCache.set(workspaceRootPath, rescriptRuntime); @@ -539,7 +540,7 @@ export async function getRuntimePathFromWorkspaceRoot( */ export async function getRuntimePathFromProjectRoot( projectRootPath: NormalizedPath | null, -): Promise { +): Promise { if (projectRootPath == null) { return null; } From ef8269f37b57c21b96468509363e17a6fafc1241 Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 3 Dec 2025 17:13:18 +0100 Subject: [PATCH 17/56] Add code comments for certain castings --- client/src/utils.ts | 1 + server/src/find-runtime.ts | 7 +++++-- server/src/incrementalCompilation.ts | 4 ++++ server/src/utils.ts | 10 ++++++---- 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/client/src/utils.ts b/client/src/utils.ts index bf2756aa4..17135d14a 100644 --- a/client/src/utils.ts +++ b/client/src/utils.ts @@ -26,6 +26,7 @@ export type NormalizedPath = string & { __brand: "NormalizedPath" }; * @returns The normalized path, or null if input was null */ export function normalizePath(filePath: string | null): NormalizedPath | null { + // `path.normalize` ensures we can assume string is now NormalizedPath return filePath != null ? 
(path.normalize(filePath) as NormalizedPath) : null; } diff --git a/server/src/find-runtime.ts b/server/src/find-runtime.ts index 9d9110139..0f35032bc 100644 --- a/server/src/find-runtime.ts +++ b/server/src/find-runtime.ts @@ -1,7 +1,7 @@ import { readdir, stat as statAsync, readFile } from "fs/promises"; import { join, resolve } from "path"; import { compilerInfoPartialPath } from "./constants"; -import { NormalizedPath } from "./utils"; +import { NormalizedPath, normalizePath } from "./utils"; // Efficient parallel folder traversal to find node_modules directories async function findNodeModulesDirs( @@ -102,7 +102,9 @@ async function findRuntimePath( const contents = await readFile(compilerInfo, "utf8"); const compileInfo: { runtime_path?: string } = JSON.parse(contents); if (compileInfo && compileInfo.runtime_path) { - return [compileInfo.runtime_path as NormalizedPath]; + // We somewhat assume the user to pass down a normalized path, but we cannot be sure of this. + const normalizedRuntimePath = normalizePath(compileInfo.runtime_path); + return normalizedRuntimePath ? [normalizedRuntimePath] : []; } } catch { // Ignore errors, fallback to node_modules search @@ -150,6 +152,7 @@ async function findRuntimePath( ).then((results) => results.flatMap((x) => x)); return rescriptRuntimeDirs.map( + // `resolve` ensures we can assume string is now NormalizedPath (runtime) => resolve(runtime) as NormalizedPath, ); } diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 8fc409c7f..b7e328978 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -302,11 +302,13 @@ function triggerIncrementalCompilationOfFile( ? 
`${moduleName}-${project.namespaceName}` : moduleName; + // projectRootPath is already NormalizedPath, appending a constant string still makes it a NormalizedPath const incrementalFolderPath: NormalizedPath = path.join( projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION, ) as NormalizedPath; + // projectRootPath is already NormalizedPath, appending a constant string still makes it a NormalizedPath let originalTypeFileLocation = path.resolve( projectRootPath, c.compilerDirPartialPath, @@ -316,6 +318,7 @@ function triggerIncrementalCompilationOfFile( const parsed = path.parse(originalTypeFileLocation); parsed.ext = ext === ".res" ? ".cmt" : ".cmti"; parsed.base = ""; + // As originalTypeFileLocation was a NormalizedPath, path.format ensures we can assume string is now NormalizedPath originalTypeFileLocation = path.format(parsed) as NormalizedPath; incrementalFileCacheEntry = { @@ -326,6 +329,7 @@ function triggerIncrementalCompilationOfFile( moduleNameNamespaced, sourceFileName: moduleName + ext, sourceFilePath: filePath, + // As incrementalFolderPath was a NormalizedPath, path.join ensures we can assume string is now NormalizedPath incrementalFilePath: path.join( incrementalFolderPath, moduleName + ext, diff --git a/server/src/utils.ts b/server/src/utils.ts index 28ed7b82f..ac47a2c58 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -51,6 +51,7 @@ export type FileURI = string & { __brand: "FileURI" }; * @returns The normalized path, or null if input was null */ export function normalizePath(filePath: string | null): NormalizedPath | null { + // `path.normalize` ensures we can assume string is now NormalizedPath return filePath != null ? 
(path.normalize(filePath) as NormalizedPath) : null; } @@ -77,6 +78,7 @@ let tempFileId = 0; export let createFileInTempDir = (extension = ""): NormalizedPath => { let tempFileName = tempFilePrefix + tempFileId + extension; tempFileId = tempFileId + 1; + // `os.tmpdir` returns an absolute path, so `path.join` ensures we can assume string is now NormalizedPath return path.join(os.tmpdir(), tempFileName) as NormalizedPath; }; @@ -715,6 +717,7 @@ export let runBuildWatcherUsingValidBuildPath = ( // parser helpers export let pathToURI = (file: NormalizedPath): FileURI => { + // `pathToFileURL` ensures we can assume string is now FileURI return pathToFileURL(file).toString() as FileURI; }; let parseFileAndRange = (fileAndRange: string) => { @@ -953,10 +956,9 @@ export let parseCompilerLogOutput = async ( for (const parsedDiagnostic of parsedDiagnostics) { let [fileAndRangeLine, ...diagnosticMessage] = parsedDiagnostic.content; let { file, range } = parseFileAndRange(fileAndRangeLine); - const fileUri = file as FileURI; - if (result[fileUri] == null) { - result[fileUri] = []; + if (result[file] == null) { + result[file] = []; } // remove start and end whitespaces/newlines @@ -982,7 +984,7 @@ export let parseCompilerLogOutput = async ( range, }); - result[fileUri].push(diagnostic); + result[file].push(diagnostic); } } From af371e15e783420f073f473d3a3ebc09372b9d1a Mon Sep 17 00:00:00 2001 From: nojaf Date: Sat, 13 Dec 2025 21:03:57 +0100 Subject: [PATCH 18/56] Fix rewatch lockfile detection on Windows by correcting project root lookup logic Refactor findProjectRootOfFile to use separate functions for file vs directory lookups, eliminating confusing boolean parameter and fixing the case where exact directory matches were incorrectly excluded. 
--- server/src/incrementalCompilation.ts | 2 +- server/src/utils.ts | 110 +++++++++++++++++++++------ 2 files changed, 89 insertions(+), 23 deletions(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index b7e328978..624b3d47e 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -277,7 +277,7 @@ function triggerIncrementalCompilationOfFile( const foundRewatchLockfileInProjectRoot = computedWorkspaceRoot == null && projectRootPath != null && - utils.findProjectRootOfFile(projectRootPath, true) != null; + utils.findProjectRootOfDir(projectRootPath) != null; if (foundRewatchLockfileInProjectRoot && debug()) { console.log( diff --git a/server/src/utils.ts b/server/src/utils.ts index ac47a2c58..e3d36d028 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -82,9 +82,9 @@ export let createFileInTempDir = (extension = ""): NormalizedPath => { return path.join(os.tmpdir(), tempFileName) as NormalizedPath; }; -let findProjectRootOfFileInDir = ( +function findProjectRootOfFileInDir( source: NormalizedPath, -): NormalizedPath | null => { +): NormalizedPath | null { const dir = normalizePath(path.dirname(source)); if (dir == null) { return null; @@ -102,18 +102,20 @@ let findProjectRootOfFileInDir = ( return findProjectRootOfFileInDir(dir); } } -}; +} -// TODO: races here? -export let findProjectRootOfFile = ( +/** + * Searches for a project root path in projectsFiles that contains the given file path. + * Excludes exact matches - the source must be a file inside a project, not the project root itself. 
+ */ +function findProjectRootContainingFile( source: NormalizedPath, - allowDir?: boolean, -): NormalizedPath | null => { - // First look in project files (keys are already normalized) +): NormalizedPath | null { let foundRootFromProjectFiles: NormalizedPath | null = null; for (const rootPath of projectsFiles.keys()) { // Both are normalized, so direct comparison works - if (source.startsWith(rootPath) && (!allowDir || source !== rootPath)) { + // For files, we exclude exact matches (source !== rootPath) + if (source.startsWith(rootPath) && source !== rootPath) { // Prefer the longest path (most nested) if ( foundRootFromProjectFiles == null || @@ -124,20 +126,77 @@ export let findProjectRootOfFile = ( } } - if (foundRootFromProjectFiles != null) { - return foundRootFromProjectFiles; - } else { - const isDir = path.extname(source) === ""; - const searchPath = - isDir && !allowDir ? path.join(source, "dummy.res") : source; - const normalizedSearchPath = normalizePath(searchPath); - if (normalizedSearchPath == null) { - return null; + return foundRootFromProjectFiles; +} + +/** + * Searches for a project root path in projectsFiles that matches the given directory path. + * Allows exact matches - the source can be the project root directory itself. 
+ */ +function findProjectRootMatchingDir( + source: NormalizedPath, +): NormalizedPath | null { + console.log({ + source, + searchType: "directory", + projectsFilesKeys: Array.from(projectsFiles.keys()), + }); + + let foundRootFromProjectFiles: NormalizedPath | null = null; + for (const rootPath of projectsFiles.keys()) { + // Both are normalized, so direct comparison works + // For directories, we allow exact matches + if (source.startsWith(rootPath)) { + // Prefer the longest path (most nested) + if ( + foundRootFromProjectFiles == null || + rootPath.length > foundRootFromProjectFiles.length + ) { + foundRootFromProjectFiles = rootPath; + } } - const foundPath = findProjectRootOfFileInDir(normalizedSearchPath); - return foundPath; } -}; + + return foundRootFromProjectFiles; +} + +/** + * Finds the project root for a given file path. + * This is the main function used throughout the codebase for finding project roots. + */ +export function findProjectRootOfFile( + source: NormalizedPath, +): NormalizedPath | null { + // First look in project files - exclude exact matches since we're looking for a file + let foundRoot = findProjectRootContainingFile(source); + + if (foundRoot != null) { + return foundRoot; + } + + // Fallback: search the filesystem + const foundPath = findProjectRootOfFileInDir(source); + return foundPath; +} + +/** + * Finds the project root for a given directory path. + * This allows exact matches and is used for workspace/lockfile detection. + */ +export function findProjectRootOfDir( + source: NormalizedPath, +): NormalizedPath | null { + // First look in project files - allow exact matches since we're looking for a directory + let foundRoot = findProjectRootMatchingDir(source); + + if (foundRoot != null) { + return foundRoot; + } + + // Fallback: search the filesystem + const foundPath = findProjectRootOfFileInDir(source); + return foundPath; +} /** * Gets the project file for a given project root path. 
@@ -493,14 +552,21 @@ export function computeWorkspaceRootPathFromLockfile( path.resolve(projectRootPath, rescriptLockPartialPath), ]; + console.log(projectRewatchLockfiles); + const foundRewatchLockfileInProjectRoot = projectRewatchLockfiles.some( (lockFile) => fs.existsSync(lockFile), ); + console.log( + "foundRewatchLockfileInProjectRoot", + foundRewatchLockfileInProjectRoot, + ); + // if we find a rewatch.lock in the project root, it's a compilation of a local package // in the workspace. return !foundRewatchLockfileInProjectRoot - ? findProjectRootOfFile(projectRootPath, true) + ? findProjectRootOfDir(projectRootPath) : null; } From aa2c6cf041f7c5deb359fadb6505ca3a9b8138b5 Mon Sep 17 00:00:00 2001 From: nojaf Date: Sat, 13 Dec 2025 21:11:15 +0100 Subject: [PATCH 19/56] Remove logs --- server/src/utils.ts | 8 -------- 1 file changed, 8 deletions(-) diff --git a/server/src/utils.ts b/server/src/utils.ts index e3d36d028..fb95ae605 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -136,12 +136,6 @@ function findProjectRootContainingFile( function findProjectRootMatchingDir( source: NormalizedPath, ): NormalizedPath | null { - console.log({ - source, - searchType: "directory", - projectsFilesKeys: Array.from(projectsFiles.keys()), - }); - let foundRootFromProjectFiles: NormalizedPath | null = null; for (const rootPath of projectsFiles.keys()) { // Both are normalized, so direct comparison works @@ -552,8 +546,6 @@ export function computeWorkspaceRootPathFromLockfile( path.resolve(projectRootPath, rescriptLockPartialPath), ]; - console.log(projectRewatchLockfiles); - const foundRewatchLockfileInProjectRoot = projectRewatchLockfiles.some( (lockFile) => fs.existsSync(lockFile), ); From 801f81309c59976b454b4b031364f8f9d36019c2 Mon Sep 17 00:00:00 2001 From: nojaf Date: Sat, 13 Dec 2025 21:11:45 +0100 Subject: [PATCH 20/56] Remove logs --- server/src/utils.ts | 5 ----- 1 file changed, 5 deletions(-) diff --git a/server/src/utils.ts b/server/src/utils.ts 
index fb95ae605..a5662cf14 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -550,11 +550,6 @@ export function computeWorkspaceRootPathFromLockfile( (lockFile) => fs.existsSync(lockFile), ); - console.log( - "foundRewatchLockfileInProjectRoot", - foundRewatchLockfileInProjectRoot, - ); - // if we find a rewatch.lock in the project root, it's a compilation of a local package // in the workspace. return !foundRewatchLockfileInProjectRoot From 894205f33559843fb08f675a5a81e11fe63acd13 Mon Sep 17 00:00:00 2001 From: nojaf Date: Sat, 13 Dec 2025 22:26:28 +0100 Subject: [PATCH 21/56] changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 11efe49d5..758a5fd36 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,10 @@ ## [Unreleased] +#### :bug: Bug fix + +- Fix rewatch lockfile detection on Windows. https://github.com/rescript-lang/rescript-vscode/pull/1160 + #### :nail_care: Polish - Resolve symlinks when finding platform binaries. 
https://github.com/rescript-lang/rescript-vscode/pull/1154 From 663190e9464dbc7e2fcb50c3bb225c281c8c199f Mon Sep 17 00:00:00 2001 From: nojaf Date: Sun, 14 Dec 2025 11:48:17 +0100 Subject: [PATCH 22/56] Refactor dump command --- client/src/commands.ts | 1 - client/src/commands/dump_server_state.ts | 38 ------- client/src/extension.ts | 11 +- server/src/server.ts | 136 +++++++++++++++++------ 4 files changed, 108 insertions(+), 78 deletions(-) delete mode 100644 client/src/commands/dump_server_state.ts diff --git a/client/src/commands.ts b/client/src/commands.ts index a94c424a5..6a795c467 100644 --- a/client/src/commands.ts +++ b/client/src/commands.ts @@ -9,7 +9,6 @@ export { createInterface } from "./commands/create_interface"; export { openCompiled } from "./commands/open_compiled"; export { switchImplIntf } from "./commands/switch_impl_intf"; export { dumpDebug, dumpDebugRetrigger } from "./commands/dump_debug"; -export { dumpServerState } from "./commands/dump_server_state"; export { pasteAsRescriptJson } from "./commands/paste_as_rescript_json"; export { pasteAsRescriptJsx } from "./commands/paste_as_rescript_jsx"; diff --git a/client/src/commands/dump_server_state.ts b/client/src/commands/dump_server_state.ts deleted file mode 100644 index 5b61008c8..000000000 --- a/client/src/commands/dump_server_state.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { - ExtensionContext, - StatusBarItem, - Uri, - ViewColumn, - window, -} from "vscode"; -import { LanguageClient } from "vscode-languageclient/node"; -import * as fs from "fs"; -import { createFileInTempDir } from "../utils"; - -export async function dumpServerState( - client: LanguageClient, - _context?: ExtensionContext, - _statusBarItem?: StatusBarItem, -) { - try { - const result = await client.sendRequest("rescript/dumpServerState"); - const outputFile = createFileInTempDir("server_state", ".json"); - - // Pretty-print JSON with stable ordering where possible - const replacer = (_key: string, value: any) => { - if 
(value instanceof Map) return Object.fromEntries(value); - if (value instanceof Set) return Array.from(value); - return value; - }; - - const json = JSON.stringify(result, replacer, 2); - fs.writeFileSync(outputFile, json, { encoding: "utf-8" }); - - await window.showTextDocument(Uri.parse(outputFile), { - viewColumn: ViewColumn.Beside, - preview: false, - }); - } catch (e) { - window.showErrorMessage(`Failed to dump server state: ${String(e)}`); - } -} diff --git a/client/src/extension.ts b/client/src/extension.ts index cd4fee0f8..5ceed6f6f 100644 --- a/client/src/extension.ts +++ b/client/src/extension.ts @@ -373,8 +373,15 @@ export function activate(context: ExtensionContext) { customCommands.dumpDebug(context, debugDumpStatusBarItem); }); - commands.registerCommand("rescript-vscode.dump-server-state", () => { - customCommands.dumpServerState(client, context, debugDumpStatusBarItem); + commands.registerCommand("rescript-vscode.dump-server-state", async () => { + // Server handles everything: writing to disk and opening the file via window/showDocument + try { + await client.sendRequest("workspace/executeCommand", { + command: "rescript/dumpServerState", + }); + } catch (e) { + console.error("Failed to dump server state:", e); + } }); commands.registerCommand("rescript-vscode.showProblems", async () => { diff --git a/server/src/server.ts b/server/src/server.ts index fccd88916..01401428b 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -5,6 +5,7 @@ import * as rpc from "vscode-jsonrpc/node"; import * as path from "path"; import semver from "semver"; import fs from "fs"; +import fsAsync from "fs/promises"; import { DidChangeWatchedFilesNotification, DidOpenTextDocumentNotification, @@ -1260,6 +1261,93 @@ function openCompiledFile(msg: p.RequestMessage): p.Message { return response; } +async function dumpServerState( + msg: p.RequestMessage, +): Promise { + // Custom debug endpoint: dump current server state (config + projectsFiles) + try { + // Read 
the server version from package.json + let serverVersion: string | undefined; + try { + const packageJsonPath = path.join(__dirname, "..", "package.json"); + const packageJsonContent = await fsAsync.readFile(packageJsonPath, { + encoding: "utf-8", + }); + const packageJson = JSON.parse(packageJsonContent); + serverVersion = packageJson.version; + } catch (e) { + // If we can't read the version, that's okay - we'll just omit it + serverVersion = undefined; + } + + const projects = Array.from(projectsFiles.entries()).map( + ([projectRootPath, pf]) => ({ + projectRootPath, + openFiles: Array.from(pf.openFiles), + filesWithDiagnostics: Array.from(pf.filesWithDiagnostics), + filesDiagnostics: pf.filesDiagnostics, + rescriptVersion: pf.rescriptVersion, + bscBinaryLocation: pf.bscBinaryLocation, + editorAnalysisLocation: pf.editorAnalysisLocation, + namespaceName: pf.namespaceName, + hasPromptedToStartBuild: pf.hasPromptedToStartBuild, + bsbWatcherByEditor: + pf.bsbWatcherByEditor != null + ? { pid: pf.bsbWatcherByEditor.pid ?? 
null } + : null, + }), + ); + + const state = { + lspServerVersion: serverVersion, + config: config.extensionConfiguration, + projects, + workspaceFolders: Array.from(workspaceFolders), + runtimePathCache: utils.getRuntimePathCacheSnapshot(), + }; + + // Format JSON with pretty-printing (2-space indent) on the server side + // This ensures consistent formatting and handles any Maps/Sets that might + // have been converted to plain objects/arrays above + const formattedJson = JSON.stringify(state, null, 2); + + // Write the file to disk on the server side + const outputFile = utils.createFileInTempDir("_server_state.json"); + fs.writeFileSync(outputFile, formattedJson, { encoding: "utf-8" }); + + // Request the client to open the document + const fileUri = utils.pathToURI(outputFile); + const showDocumentRequest: p.RequestMessage = { + jsonrpc: c.jsonrpcVersion, + id: serverSentRequestIdCounter++, + method: "window/showDocument", + params: { + uri: fileUri, + external: false, + takeFocus: true, + }, + }; + send(showDocumentRequest); + + let response: p.ResponseMessage = { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + result: { uri: fileUri }, + }; + return response; + } catch (e) { + let response: p.ResponseMessage = { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + error: { + code: p.ErrorCodes.InternalError, + message: `Failed to dump server state: ${String(e)}`, + }, + }; + return response; + } +} + async function onMessage(msg: p.Message) { if (p.Message.isNotification(msg)) { // notification message, aka the client ends it and doesn't want a reply @@ -1458,6 +1546,9 @@ async function onMessage(msg: p.Message) { retriggerCharacters: ["=", ","], } : undefined, + executeCommandProvider: { + commands: ["rescript/dumpServerState"], + }, }, }; let response: p.ResponseMessage = { @@ -1555,47 +1646,18 @@ async function onMessage(msg: p.Message) { if (extName === c.resExt) { send(await signatureHelp(msg)); } - } else if (msg.method === "rescript/dumpServerState") { - // 
Custom debug endpoint: dump current server state (config + projectsFiles) - try { - const projects = Array.from(projectsFiles.entries()).map( - ([projectRootPath, pf]) => ({ - projectRootPath, - openFiles: Array.from(pf.openFiles), - filesWithDiagnostics: Array.from(pf.filesWithDiagnostics), - filesDiagnostics: pf.filesDiagnostics, - rescriptVersion: pf.rescriptVersion, - bscBinaryLocation: pf.bscBinaryLocation, - editorAnalysisLocation: pf.editorAnalysisLocation, - namespaceName: pf.namespaceName, - hasPromptedToStartBuild: pf.hasPromptedToStartBuild, - bsbWatcherByEditor: - pf.bsbWatcherByEditor != null - ? { pid: pf.bsbWatcherByEditor.pid ?? null } - : null, - }), - ); - - const result = { - config: config.extensionConfiguration, - projects, - workspaceFolders: Array.from(workspaceFolders), - runtimePathCache: utils.getRuntimePathCacheSnapshot(), - }; - - let response: p.ResponseMessage = { - jsonrpc: c.jsonrpcVersion, - id: msg.id, - result, - }; - send(response); - } catch (e) { + } else if (msg.method === p.ExecuteCommandRequest.method) { + // Standard LSP executeCommand - supports editor-agnostic command execution + const params = msg.params as p.ExecuteCommandParams; + if (params.command === "rescript/dumpServerState") { + send(await dumpServerState(msg)); + } else { let response: p.ResponseMessage = { jsonrpc: c.jsonrpcVersion, id: msg.id, error: { - code: p.ErrorCodes.InternalError, - message: `Failed to dump server state: ${String(e)}`, + code: p.ErrorCodes.InvalidRequest, + message: `Unknown command: ${params.command}`, }, }; send(response); From 17721fe37c399bf4874b9d481e47347144f5ca1c Mon Sep 17 00:00:00 2001 From: nojaf Date: Sun, 14 Dec 2025 12:14:14 +0100 Subject: [PATCH 23/56] Avoid temp file and other code review feedback --- client/src/extension.ts | 22 +++++++++++++++++++--- server/src/server.ts | 29 ++++++++--------------------- 2 files changed, 27 insertions(+), 24 deletions(-) diff --git a/client/src/extension.ts b/client/src/extension.ts 
index 5ceed6f6f..c31a8d170 100644 --- a/client/src/extension.ts +++ b/client/src/extension.ts @@ -13,6 +13,7 @@ import { WorkspaceEdit, CodeActionKind, Diagnostic, + ViewColumn, } from "vscode"; import { ThemeColor } from "vscode"; @@ -374,13 +375,28 @@ export function activate(context: ExtensionContext) { }); commands.registerCommand("rescript-vscode.dump-server-state", async () => { - // Server handles everything: writing to disk and opening the file via window/showDocument try { - await client.sendRequest("workspace/executeCommand", { + const result = (await client.sendRequest("workspace/executeCommand", { command: "rescript/dumpServerState", + })) as { content: string }; + + // Create an unsaved document with the server state content + const document = await workspace.openTextDocument({ + content: result.content, + language: "json", + }); + + // Show the document in the editor + await window.showTextDocument(document, { + viewColumn: ViewColumn.Beside, + preview: false, }); } catch (e) { - console.error("Failed to dump server state:", e); + outputChannel.appendLine(`Failed to dump server state: ${String(e)}`); + window.showErrorMessage( + "Failed to dump server state. See 'Output' tab, 'ReScript Language Server' channel for details.", + ); + outputChannel.show(); } }); diff --git a/server/src/server.ts b/server/src/server.ts index 01401428b..77eaffcda 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1273,8 +1273,11 @@ async function dumpServerState( const packageJsonContent = await fsAsync.readFile(packageJsonPath, { encoding: "utf-8", }); - const packageJson = JSON.parse(packageJsonContent); - serverVersion = packageJson.version; + const packageJson: { version?: unknown } = JSON.parse(packageJsonContent); + serverVersion = + typeof packageJson.version === "string" + ? 
packageJson.version + : undefined; } catch (e) { // If we can't read the version, that's okay - we'll just omit it serverVersion = undefined; @@ -1311,28 +1314,12 @@ async function dumpServerState( // have been converted to plain objects/arrays above const formattedJson = JSON.stringify(state, null, 2); - // Write the file to disk on the server side - const outputFile = utils.createFileInTempDir("_server_state.json"); - fs.writeFileSync(outputFile, formattedJson, { encoding: "utf-8" }); - - // Request the client to open the document - const fileUri = utils.pathToURI(outputFile); - const showDocumentRequest: p.RequestMessage = { - jsonrpc: c.jsonrpcVersion, - id: serverSentRequestIdCounter++, - method: "window/showDocument", - params: { - uri: fileUri, - external: false, - takeFocus: true, - }, - }; - send(showDocumentRequest); - + // Return the content so the client can create an unsaved document + // This avoids creating temporary files that would never be cleaned up let response: p.ResponseMessage = { jsonrpc: c.jsonrpcVersion, id: msg.id, - result: { uri: fileUri }, + result: { content: formattedJson }, }; return response; } catch (e) { From b111560764c652fa18fb76a6fc6f135fc8f1d6fe Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 17 Dec 2025 11:03:27 +0100 Subject: [PATCH 24/56] Add initial logger --- package.json | 6 +++ server/src/config.ts | 2 + server/src/logger.ts | 89 ++++++++++++++++++++++++++++++++++++++++++++ server/src/server.ts | 17 +++++++++ 4 files changed, 114 insertions(+) create mode 100644 server/src/logger.ts diff --git a/package.json b/package.json index 5709e3b32..152548221 100644 --- a/package.json +++ b/package.json @@ -233,6 +233,12 @@ "type": "boolean", "default": true, "description": "Show compile status in the status bar (compiling/errors/warnings/success)." 
+ }, + "rescript.settings.logLevel": { + "type": "string", + "enum": ["error", "warn", "info", "log", "debug"], + "default": "info", + "description": "Verbosity of ReScript language server logs sent to the Output channel." } } }, diff --git a/server/src/config.ts b/server/src/config.ts index 97b89985a..35502dd4e 100644 --- a/server/src/config.ts +++ b/server/src/config.ts @@ -4,6 +4,7 @@ export type send = (msg: Message) => void; export interface extensionConfiguration { askToStartBuild?: boolean; + logLevel?: "error" | "warn" | "info" | "log"; inlayHints?: { enable?: boolean; maxLength?: number | null; @@ -33,6 +34,7 @@ export interface extensionConfiguration { let config: { extensionConfiguration: extensionConfiguration } = { extensionConfiguration: { askToStartBuild: true, + logLevel: "info", inlayHints: { enable: false, maxLength: 25, diff --git a/server/src/logger.ts b/server/src/logger.ts new file mode 100644 index 000000000..3653aa5e9 --- /dev/null +++ b/server/src/logger.ts @@ -0,0 +1,89 @@ +import * as p from "vscode-languageserver-protocol"; +import * as c from "./constants"; + +export type LogLevel = "error" | "warn" | "info" | "log"; + +const levelOrder: Record = { + log: 1, + info: 2, + warn: 3, + error: 4, +}; + +export interface Logger { + error(message: string): void; + warn(message: string): void; + info(message: string): void; + log(message: string): void; +} + +class NoOpLogger implements Logger { + error(_message: string): void {} + warn(_message: string): void {} + info(_message: string): void {} + log(_message: string): void {} +} + +class LSPLogger implements Logger { + private logLevel: LogLevel = "info"; + + constructor(private send: (msg: p.Message) => void) {} + + setLogLevel(level: LogLevel): void { + this.logLevel = level; + } + + private shouldLog(level: LogLevel): boolean { + return levelOrder[level] >= levelOrder[this.logLevel]; + } + + error(message: string): void { + if (this.shouldLog("error")) { + this.sendLogMessage(message, 
p.MessageType.Error); + } + } + + warn(message: string): void { + if (this.shouldLog("warn")) { + this.sendLogMessage(message, p.MessageType.Warning); + } + } + + info(message: string): void { + if (this.shouldLog("info")) { + this.sendLogMessage(message, p.MessageType.Info); + } + } + + log(message: string): void { + if (this.shouldLog("log")) { + this.sendLogMessage(message, p.MessageType.Log); + } + } + + private sendLogMessage(message: string, type: p.MessageType): void { + const notification: p.NotificationMessage = { + jsonrpc: c.jsonrpcVersion, + method: "window/logMessage", + params: { type, message }, + }; + this.send(notification); + } +} + +// Default no-op instance +let instance: Logger = new NoOpLogger(); + +export function initializeLogger(send: (msg: p.Message) => void): void { + instance = new LSPLogger(send); +} + +export function setLogLevel(level: LogLevel): void { + if (instance instanceof LSPLogger) { + instance.setLogLevel(level); + } +} + +export function getLogger(): Logger { + return instance; +} diff --git a/server/src/server.ts b/server/src/server.ts index 77eaffcda..290abb30d 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -29,6 +29,7 @@ import * as ic from "./incrementalCompilation"; import config, { extensionConfiguration } from "./config"; import { projectsFiles } from "./projectFiles"; import { NormalizedPath } from "./utils"; +import { initializeLogger, getLogger, setLogLevel, LogLevel } from "./logger"; // Absolute paths to all the workspace folders // Configured during the initialize request @@ -548,10 +549,12 @@ export default function listen(useStdio = false) { let reader = new rpc.StreamMessageReader(process.stdin); // proper `this` scope for writer send = (msg: p.Message) => writer.write(msg); + initializeLogger(send); reader.listen(onMessage); } else { // proper `this` scope for process send = (msg: p.Message) => process.send!(msg); + initializeLogger(send); process.on("message", onMessage); } } @@ -1453,6 
+1456,7 @@ async function onMessage(msg: p.Message) { }; send(response); } else if (msg.method === "initialize") { + getLogger().info("Received initialize request from client."); // Save initial configuration, if present let initParams = msg.params as InitializeParams; for (const workspaceFolder of initParams.workspaceFolders || []) { @@ -1465,6 +1469,19 @@ async function onMessage(msg: p.Message) { if (initialConfiguration != null) { config.extensionConfiguration = initialConfiguration; + + let initialLogLevel = initialConfiguration.logLevel as + | LogLevel + | undefined; + + if ( + initialLogLevel === "error" || + initialLogLevel === "warn" || + initialLogLevel === "info" || + initialLogLevel === "log" + ) { + setLogLevel(initialLogLevel); + } } // These are static configuration options the client can set to enable certain From a0dd178a017778ba15fde2605f1c97d18b313e6f Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 17 Dec 2025 11:06:14 +0100 Subject: [PATCH 25/56] Update level when config changes --- server/src/server.ts | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/server/src/server.ts b/server/src/server.ts index 290abb30d..0d4e7f8e3 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1692,6 +1692,17 @@ async function onMessage(msg: p.Message) { ]; if (configuration != null) { config.extensionConfiguration = configuration; + + let updatedLogLevel = configuration.logLevel as LogLevel | undefined; + + if ( + updatedLogLevel === "error" || + updatedLogLevel === "warn" || + updatedLogLevel === "info" || + updatedLogLevel === "log" + ) { + setLogLevel(updatedLogLevel); + } } } } else if ( From b3ca0e68578730f066fef264c6f367d503dc6f76 Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 17 Dec 2025 11:18:43 +0100 Subject: [PATCH 26/56] Refactor `debug()` from incrementalCompilation --- package.json | 8 ++- server/src/bsc-args/rewatch.ts | 18 ++--- server/src/incrementalCompilation.ts | 102 ++++++++++----------------- server/src/server.ts | 48 
++++++------- 4 files changed, 74 insertions(+), 102 deletions(-) diff --git a/package.json b/package.json index 152548221..922e56506 100644 --- a/package.json +++ b/package.json @@ -236,7 +236,13 @@ }, "rescript.settings.logLevel": { "type": "string", - "enum": ["error", "warn", "info", "log", "debug"], + "enum": [ + "error", + "warn", + "info", + "log", + "debug" + ], "default": "info", "description": "Verbosity of ReScript language server logs sent to the Output channel." } diff --git a/server/src/bsc-args/rewatch.ts b/server/src/bsc-args/rewatch.ts index 4cd229d5a..e38b8d2bb 100644 --- a/server/src/bsc-args/rewatch.ts +++ b/server/src/bsc-args/rewatch.ts @@ -3,12 +3,10 @@ import * as utils from "../utils"; import * as cp from "node:child_process"; import * as p from "vscode-languageserver-protocol"; import semver from "semver"; -import { - debug, - IncrementallyCompiledFileInfo, -} from "../incrementalCompilation"; +import { IncrementallyCompiledFileInfo } from "../incrementalCompilation"; import type { projectFiles } from "../projectFiles"; import { jsonrpcVersion } from "../constants"; +import { getLogger } from "../logger"; export type RewatchCompilerArgs = { compiler_args: Array; @@ -68,15 +66,11 @@ export async function getRewatchBscArgs( if (rescriptRewatchPath != null) { rewatchPath = rescriptRewatchPath; - if (debug()) { - console.log( - `Found rewatch binary bundled with v12: ${rescriptRewatchPath}`, - ); - } + getLogger().log( + `Found rewatch binary bundled with v12: ${rescriptRewatchPath}`, + ); } else { - if (debug()) { - console.log("Did not find rewatch binary bundled with v12"); - } + getLogger().log("Did not find rewatch binary bundled with v12"); } const rewatchArguments = semver.satisfies( diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 624b3d47e..030cdea8e 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -14,12 +14,7 @@ import { getRewatchBscArgs, 
RewatchCompilerArgs } from "./bsc-args/rewatch"; import { BsbCompilerArgs, getBsbBscArgs } from "./bsc-args/bsb"; import { getCurrentCompilerDiagnosticsForFile } from "./server"; import { NormalizedPath } from "./utils"; - -export function debug() { - return ( - config.extensionConfiguration.incrementalTypechecking?.debugLogging ?? false - ); -} +import { getLogger } from "./logger"; const INCREMENTAL_FOLDER_NAME = "___incremental"; const INCREMENTAL_FILE_FOLDER_LOCATION = path.join( @@ -96,13 +91,11 @@ export function incrementalCompilationFileChanged(changedPath: NormalizedPath) { if (filePath != null) { const entry = incrementallyCompiledFileInfo.get(filePath); if (entry != null) { - if (debug()) { - console.log("[watcher] Cleaning up incremental files for " + filePath); - } + getLogger().log( + "[watcher] Cleaning up incremental files for " + filePath, + ); if (entry.compilation != null) { - if (debug()) { - console.log("[watcher] Was compiling, killing"); - } + getLogger().log("[watcher] Was compiling, killing"); clearTimeout(entry.compilation.timeout); entry.killCompilationListeners.forEach((cb) => cb()); entry.compilation = null; @@ -129,9 +122,7 @@ export function removeIncrementalFileFolder( } export function recreateIncrementalFileFolder(projectRootPath: NormalizedPath) { - if (debug()) { - console.log("Recreating incremental file folder"); - } + getLogger().log("Recreating incremental file folder"); removeIncrementalFileFolder(projectRootPath, () => { fs.mkdir( path.resolve(projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION), @@ -153,9 +144,7 @@ export function cleanUpIncrementalFiles( ? 
`${fileNameNoExt}-${namespace.result}` : fileNameNoExt; - if (debug()) { - console.log("Cleaning up incremental file assets for: " + fileNameNoExt); - } + getLogger().log("Cleaning up incremental file assets for: " + fileNameNoExt); fs.unlink( path.resolve( @@ -252,15 +241,14 @@ function triggerIncrementalCompilationOfFile( // New file const projectRootPath = utils.findProjectRootOfFile(filePath); if (projectRootPath == null) { - if (debug()) - console.log("Did not find project root path for " + filePath); + getLogger().log("Did not find project root path for " + filePath); return; } // projectRootPath is already normalized (NormalizedPath) from findProjectRootOfFile // Use getProjectFile to verify the project exists const project = utils.getProjectFile(projectRootPath); if (project == null) { - if (debug()) console.log("Did not find open project for " + filePath); + getLogger().log("Did not find open project for " + filePath); return; } @@ -279,20 +267,19 @@ function triggerIncrementalCompilationOfFile( projectRootPath != null && utils.findProjectRootOfDir(projectRootPath) != null; - if (foundRewatchLockfileInProjectRoot && debug()) { - console.log( + if (foundRewatchLockfileInProjectRoot) { + getLogger().log( `Found rewatch/rescript lockfile in project root, treating as local package in workspace`, ); - } else if (!foundRewatchLockfileInProjectRoot && debug()) { - console.log( + } else { + getLogger().log( `Did not find rewatch/rescript lockfile in project root, assuming bsb`, ); } const bscBinaryLocation = project.bscBinaryLocation; if (bscBinaryLocation == null) { - if (debug()) - console.log("Could not find bsc binary location for " + filePath); + getLogger().log("Could not find bsc binary location for " + filePath); return; } const ext = filePath.endsWith(".resi") ? 
".resi" : ".res"; @@ -401,12 +388,9 @@ async function figureOutBscArgs( ) { const project = projectsFiles.get(entry.project.rootPath); if (project?.rescriptVersion == null) { - if (debug()) { - console.log( - "Found no project (or ReScript version) for " + - entry.file.sourceFilePath, - ); - } + getLogger().log( + "Found no project (or ReScript version) for " + entry.file.sourceFilePath, + ); return null; } const res = await getBscArgs(send, entry); @@ -515,11 +499,9 @@ async function compileContents( callArgs = callArgsRetried; entry.project.callArgs = Promise.resolve(callArgsRetried); } else { - if (debug()) { - console.log( - "Could not figure out call args. Maybe build.ninja does not exist yet?", - ); - } + getLogger().log( + "Could not figure out call args. Maybe build.ninja does not exist yet?", + ); return; } } @@ -537,31 +519,27 @@ async function compileContents( entry.buildSystem === "bsb" ? entry.project.rootPath : path.resolve(entry.project.rootPath, c.compilerDirPartialPath); - if (debug()) { - console.log( - `About to invoke bsc from \"${cwd}\", used ${entry.buildSystem}`, - ); - console.log( - `${entry.project.bscBinaryLocation} ${callArgs.map((c) => `"${c}"`).join(" ")}`, - ); - } + getLogger().log( + `About to invoke bsc from \"${cwd}\", used ${entry.buildSystem}`, + ); + getLogger().log( + `${entry.project.bscBinaryLocation} ${callArgs.map((c) => `"${c}"`).join(" ")}`, + ); const process = cp.execFile( entry.project.bscBinaryLocation, callArgs, { cwd }, async (error, _stdout, stderr) => { if (!error?.killed) { - if (debug()) - console.log( - `Recompiled ${entry.file.sourceFileName} in ${ - (performance.now() - startTime) / 1000 - }s`, - ); + getLogger().log( + `Recompiled ${entry.file.sourceFileName} in ${ + (performance.now() - startTime) / 1000 + }s`, + ); } else { - if (debug()) - console.log( - `Compilation of ${entry.file.sourceFileName} was killed.`, - ); + getLogger().log( + `Compilation of ${entry.file.sourceFileName} was killed.`, + ); } 
let hasIgnoredErrorMessages = false; if ( @@ -569,9 +547,7 @@ async function compileContents( triggerToken != null && verifyTriggerToken(entry.file.sourceFilePath, triggerToken) ) { - if (debug()) { - console.log("Resetting compilation status."); - } + getLogger().log("Resetting compilation status."); // Reset compilation status as this compilation finished entry.compilation = null; const { result, codeActions } = await utils.parseCompilerLogOutput( @@ -706,9 +682,7 @@ export function handleUpdateOpenedFile( send: send, onCompilationFinished?: () => void, ) { - if (debug()) { - console.log("Updated: " + filePath); - } + getLogger().log("Updated: " + filePath); triggerIncrementalCompilationOfFile( filePath, fileContent, @@ -718,9 +692,7 @@ export function handleUpdateOpenedFile( } export function handleClosedFile(filePath: NormalizedPath) { - if (debug()) { - console.log("Closed: " + filePath); - } + getLogger().log("Closed: " + filePath); const entry = incrementallyCompiledFileInfo.get(filePath); if (entry == null) return; cleanUpIncrementalFiles(filePath, entry.project.rootPath); diff --git a/server/src/server.ts b/server/src/server.ts index 0d4e7f8e3..811eb2cdf 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -31,6 +31,28 @@ import { projectsFiles } from "./projectFiles"; import { NormalizedPath } from "./utils"; import { initializeLogger, getLogger, setLogLevel, LogLevel } from "./logger"; +function applyLogLevel(configuration: extensionConfiguration) { + const debugLoggingEnabled = + configuration.incrementalTypechecking?.debugLogging === true; + + if (debugLoggingEnabled) { + // incrementalTypechecking.debugLogging overrides logLevel to retain legacy behavior + setLogLevel("log"); + return; + } + + const level = configuration.logLevel as LogLevel | undefined; + + if ( + level === "error" || + level === "warn" || + level === "info" || + level === "log" + ) { + setLogLevel(level); + } +} + // Absolute paths to all the workspace folders // 
Configured during the initialize request export const workspaceFolders = new Set(); @@ -1469,19 +1491,7 @@ async function onMessage(msg: p.Message) { if (initialConfiguration != null) { config.extensionConfiguration = initialConfiguration; - - let initialLogLevel = initialConfiguration.logLevel as - | LogLevel - | undefined; - - if ( - initialLogLevel === "error" || - initialLogLevel === "warn" || - initialLogLevel === "info" || - initialLogLevel === "log" - ) { - setLogLevel(initialLogLevel); - } + applyLogLevel(initialConfiguration); } // These are static configuration options the client can set to enable certain @@ -1692,17 +1702,7 @@ async function onMessage(msg: p.Message) { ]; if (configuration != null) { config.extensionConfiguration = configuration; - - let updatedLogLevel = configuration.logLevel as LogLevel | undefined; - - if ( - updatedLogLevel === "error" || - updatedLogLevel === "warn" || - updatedLogLevel === "info" || - updatedLogLevel === "log" - ) { - setLogLevel(updatedLogLevel); - } + applyLogLevel(configuration); } } } else if ( From 4c915badaa53b8031aba0d7fae671a87bc4391ca Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 17 Dec 2025 11:22:04 +0100 Subject: [PATCH 27/56] Remove console.log from server --- server/src/server.ts | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/server/src/server.ts b/server/src/server.ts index 811eb2cdf..96679baf1 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -345,31 +345,29 @@ let sendCompilationFinishedMessage = () => { send(notification); }; -let debug = false; - let syncProjectConfigCache = async (rootPath: utils.NormalizedPath) => { try { - if (debug) console.log("syncing project config cache for " + rootPath); + getLogger().log("syncing project config cache for " + rootPath); await utils.runAnalysisAfterSanityCheck(rootPath, [ "cache-project", rootPath, ]); - if (debug) console.log("OK - synced project config cache for " + rootPath); + getLogger().log("OK - 
synced project config cache for " + rootPath); } catch (e) { - if (debug) console.error(e); + getLogger().error(JSON.stringify(e)); } }; let deleteProjectConfigCache = async (rootPath: utils.NormalizedPath) => { try { - if (debug) console.log("deleting project config cache for " + rootPath); + getLogger().log("deleting project config cache for " + rootPath); await utils.runAnalysisAfterSanityCheck(rootPath, [ "cache-delete", rootPath, ]); - if (debug) console.log("OK - deleted project config cache for " + rootPath); + getLogger().log("OK - deleted project config cache for " + rootPath); } catch (e) { - if (debug) console.error(e); + getLogger().error(JSON.stringify(e)); } }; @@ -400,7 +398,7 @@ async function onWorkspaceDidChangeWatchedFiles( sendCodeLensRefresh(); } } catch { - console.log("Error while sending updated diagnostics"); + getLogger().error("Error while sending updated diagnostics"); } } else { ic.incrementalCompilationFileChanged( From 7334e1e2a86bfdf06ac7da42b19e21525826d47e Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 17 Dec 2025 12:07:16 +0100 Subject: [PATCH 28/56] Always override default config --- server/src/config.ts | 52 +++++++++++++++++++++++--------------------- server/src/server.ts | 20 ++++++++++++----- 2 files changed, 41 insertions(+), 31 deletions(-) diff --git a/server/src/config.ts b/server/src/config.ts index 35502dd4e..9e6805c43 100644 --- a/server/src/config.ts +++ b/server/src/config.ts @@ -29,34 +29,36 @@ export interface extensionConfiguration { }; } -// All values here are temporary, and will be overridden as the server is -// initialized, and the current config is received from the client. 
-let config: { extensionConfiguration: extensionConfiguration } = { - extensionConfiguration: { - askToStartBuild: true, - logLevel: "info", - inlayHints: { - enable: false, - maxLength: 25, - }, - codeLens: false, - binaryPath: null, - platformPath: null, - signatureHelp: { - enabled: true, - forConstructorPayloads: true, - }, - incrementalTypechecking: { +export const initialConfiguration: extensionConfiguration = { + askToStartBuild: true, + logLevel: "info", + inlayHints: { + enable: false, + maxLength: 25, + }, + codeLens: false, + binaryPath: null, + platformPath: null, + signatureHelp: { + enabled: true, + forConstructorPayloads: true, + }, + incrementalTypechecking: { + enable: true, + acrossFiles: false, + debugLogging: false, + }, + cache: { + projectConfig: { enable: true, - acrossFiles: false, - debugLogging: false, - }, - cache: { - projectConfig: { - enable: true, - }, }, }, }; +// All values here are temporary, and will be overridden as the server is +// initialized, and the current config is received from the client. 
+let config: { extensionConfiguration: extensionConfiguration } = { + extensionConfiguration: initialConfiguration, +}; + export default config; diff --git a/server/src/server.ts b/server/src/server.ts index 96679baf1..bb3be042c 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -26,12 +26,19 @@ import { assert } from "console"; import { WorkspaceEdit } from "vscode-languageserver"; import { onErrorReported } from "./errorReporter"; import * as ic from "./incrementalCompilation"; -import config, { extensionConfiguration } from "./config"; +import config, { extensionConfiguration, initialConfiguration } from "./config"; import { projectsFiles } from "./projectFiles"; import { NormalizedPath } from "./utils"; import { initializeLogger, getLogger, setLogLevel, LogLevel } from "./logger"; -function applyLogLevel(configuration: extensionConfiguration) { +function applyUserConfiguration(configuration: extensionConfiguration) { + // We always want to spread the initial configuration to ensure all defaults are respected. 
+ config.extensionConfiguration = Object.assign( + {}, + initialConfiguration, + configuration, + ); + const debugLoggingEnabled = configuration.incrementalTypechecking?.debugLogging === true; @@ -538,6 +545,9 @@ let closedFile = async (fileUri: utils.FileURI) => { }; let updateOpenedFile = (fileUri: utils.FileURI, fileContent: string) => { + getLogger().info( + `Updating opened file ${fileUri}, incremental TC enabled: ${config.extensionConfiguration.incrementalTypechecking?.enable}`, + ); let filePath = utils.uriToNormalizedPath(fileUri); assert(stupidFileContentCache.has(filePath)); stupidFileContentCache.set(filePath, fileContent); @@ -1488,8 +1498,7 @@ async function onMessage(msg: p.Message) { ?.extensionConfiguration as extensionConfiguration | undefined; if (initialConfiguration != null) { - config.extensionConfiguration = initialConfiguration; - applyLogLevel(initialConfiguration); + applyUserConfiguration(initialConfiguration); } // These are static configuration options the client can set to enable certain @@ -1699,8 +1708,7 @@ async function onMessage(msg: p.Message) { extensionConfiguration | null | undefined, ]; if (configuration != null) { - config.extensionConfiguration = configuration; - applyLogLevel(configuration); + applyUserConfiguration(configuration); } } } else if ( From 430117552b688797c560a115d6baa68041c94bc5 Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 17 Dec 2025 12:21:27 +0100 Subject: [PATCH 29/56] Code review feedback --- package.json | 3 +-- server/src/server.ts | 6 +++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 922e56506..58c5bb7c3 100644 --- a/package.json +++ b/package.json @@ -240,8 +240,7 @@ "error", "warn", "info", - "log", - "debug" + "log" ], "default": "info", "description": "Verbosity of ReScript language server logs sent to the Output channel." 
diff --git a/server/src/server.ts b/server/src/server.ts index bb3be042c..1020d38f3 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -48,7 +48,7 @@ function applyUserConfiguration(configuration: extensionConfiguration) { return; } - const level = configuration.logLevel as LogLevel | undefined; + const level = config.extensionConfiguration.logLevel as LogLevel | undefined; if ( level === "error" || @@ -361,7 +361,7 @@ let syncProjectConfigCache = async (rootPath: utils.NormalizedPath) => { ]); getLogger().log("OK - synced project config cache for " + rootPath); } catch (e) { - getLogger().error(JSON.stringify(e)); + getLogger().error(String(e)); } }; @@ -374,7 +374,7 @@ let deleteProjectConfigCache = async (rootPath: utils.NormalizedPath) => { ]); getLogger().log("OK - deleted project config cache for " + rootPath); } catch (e) { - getLogger().error(JSON.stringify(e)); + getLogger().error(String(e)); } }; From 3c72247d99e9b60ad425b467f7179668a990c6f0 Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 17 Dec 2025 13:20:03 +0100 Subject: [PATCH 30/56] Remove incrementalTypechecking?.debugLogging --- package.json | 5 ----- server/src/config.ts | 2 -- server/src/server.ts | 11 +---------- 3 files changed, 1 insertion(+), 17 deletions(-) diff --git a/package.json b/package.json index 58c5bb7c3..7dc17150e 100644 --- a/package.json +++ b/package.json @@ -195,11 +195,6 @@ "default": false, "description": "(beta/experimental) Enable incremental type checking across files, so that unsaved file A gets access to unsaved file B." }, - "rescript.settings.incrementalTypechecking.debugLogging": { - "type": "boolean", - "default": false, - "description": "(debug) Enable debug logging (ends up in the extension output)." 
- }, "rescript.settings.cache.projectConfig.enable": { "type": "boolean", "default": true, diff --git a/server/src/config.ts b/server/src/config.ts index 9e6805c43..874b95696 100644 --- a/server/src/config.ts +++ b/server/src/config.ts @@ -20,7 +20,6 @@ export interface extensionConfiguration { incrementalTypechecking?: { enable?: boolean; acrossFiles?: boolean; - debugLogging?: boolean; }; cache?: { projectConfig?: { @@ -46,7 +45,6 @@ export const initialConfiguration: extensionConfiguration = { incrementalTypechecking: { enable: true, acrossFiles: false, - debugLogging: false, }, cache: { projectConfig: { diff --git a/server/src/server.ts b/server/src/server.ts index 1020d38f3..5eedee868 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -39,16 +39,7 @@ function applyUserConfiguration(configuration: extensionConfiguration) { configuration, ); - const debugLoggingEnabled = - configuration.incrementalTypechecking?.debugLogging === true; - - if (debugLoggingEnabled) { - // incrementalTypechecking.debugLogging overrides logLevel to retain legacy behavior - setLogLevel("log"); - return; - } - - const level = config.extensionConfiguration.logLevel as LogLevel | undefined; + const level = config.extensionConfiguration.logLevel; if ( level === "error" || From 74430e002b2c451aaaefb1eb0c55d64d3df47f72 Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 17 Dec 2025 13:21:28 +0100 Subject: [PATCH 31/56] Add changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 758a5fd36..b61263b41 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,10 +15,12 @@ #### :bug: Bug fix - Fix rewatch lockfile detection on Windows. https://github.com/rescript-lang/rescript-vscode/pull/1160 +- Override default `initialConfiguration` with user specific config. https://github.com/rescript-lang/rescript-vscode/pull/1162 #### :nail_care: Polish - Resolve symlinks when finding platform binaries. 
https://github.com/rescript-lang/rescript-vscode/pull/1154 +- Use `window/logMessage` in LSP Server for logging. https://github.com/rescript-lang/rescript-vscode/pull/1162 ## 1.70.0 From a238ec26fa9eabbc72bb11590782228554e1cc8c Mon Sep 17 00:00:00 2001 From: nojaf Date: Wed, 17 Dec 2025 14:05:03 +0100 Subject: [PATCH 32/56] Bump to version 1.72 --- CHANGELOG.md | 2 +- package-lock.json | 4 ++-- package.json | 2 +- server/package-lock.json | 4 ++-- server/package.json | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b61263b41..01f980611 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,7 +10,7 @@ > - :house: [Internal] > - :nail_care: [Polish] -## [Unreleased] +## 1.72.0 #### :bug: Bug fix diff --git a/package-lock.json b/package-lock.json index 095f591a4..d565f3626 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "rescript-vscode", - "version": "1.70.0", + "version": "1.72.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "rescript-vscode", - "version": "1.70.0", + "version": "1.72.0", "hasInstallScript": true, "license": "MIT", "dependencies": { diff --git a/package.json b/package.json index 7dc17150e..fa51db7df 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "ReScript language support (official)", "author": "ReScript Team", "license": "MIT", - "version": "1.70.0", + "version": "1.72.0", "repository": { "type": "git", "url": "https://github.com/rescript-lang/rescript-vscode" diff --git a/server/package-lock.json b/server/package-lock.json index 4a6fa75c8..6cfb758d0 100644 --- a/server/package-lock.json +++ b/server/package-lock.json @@ -1,12 +1,12 @@ { "name": "@rescript/language-server", - "version": "1.70.0", + "version": "1.72.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@rescript/language-server", - "version": "1.70.0", + "version": "1.72.0", "license": "MIT", "dependencies": { "semver": 
"^7.7.2", diff --git a/server/package.json b/server/package.json index 95580ac54..92c17c02d 100644 --- a/server/package.json +++ b/server/package.json @@ -1,7 +1,7 @@ { "name": "@rescript/language-server", "description": "LSP server for ReScript", - "version": "1.70.0", + "version": "1.72.0", "author": "ReScript Team", "license": "MIT", "bin": { From 092e35e51898beeaf16fc5cf7ebbb73b29d33f29 Mon Sep 17 00:00:00 2001 From: nojaf Date: Thu, 18 Dec 2025 09:28:25 +0100 Subject: [PATCH 33/56] Take namespace into account for incremental cleanup --- server/src/incrementalCompilation.ts | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 030cdea8e..2575f39e5 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -295,19 +295,21 @@ function triggerIncrementalCompilationOfFile( INCREMENTAL_FILE_FOLDER_LOCATION, ) as NormalizedPath; - // projectRootPath is already NormalizedPath, appending a constant string still makes it a NormalizedPath - let originalTypeFileLocation = path.resolve( + const relSourcePath = path.relative(projectRootPath, filePath); + const relSourceDir = path.dirname(relSourcePath); + const typeExt = ext === ".res" ? ".cmt" : ".cmti"; + const originalTypeFileName = + project.namespaceName != null + ? `${moduleName}-${project.namespaceName}${typeExt}` + : `${moduleName}${typeExt}`; + // projectRootPath is already NormalizedPath, appending constant strings still yields a NormalizedPath + const originalTypeFileLocation = path.resolve( projectRootPath, c.compilerDirPartialPath, - path.relative(projectRootPath, filePath), + relSourceDir, + originalTypeFileName, ) as NormalizedPath; - const parsed = path.parse(originalTypeFileLocation); - parsed.ext = ext === ".res" ? 
".cmt" : ".cmti"; - parsed.base = ""; - // As originalTypeFileLocation was a NormalizedPath, path.format ensures we can assume string is now NormalizedPath - originalTypeFileLocation = path.format(parsed) as NormalizedPath; - incrementalFileCacheEntry = { file: { originalTypeFileLocation, From 0898a6b8b3d6b02c077a2453afe75d79e4a91b62 Mon Sep 17 00:00:00 2001 From: nojaf Date: Thu, 18 Dec 2025 09:41:48 +0100 Subject: [PATCH 34/56] Add changelog entry --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01f980611..9930445ca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,12 @@ > - :house: [Internal] > - :nail_care: [Polish] +## [Unreleased] + +#### :bug: Bug fix + +- Take namespace into account for incremental cleanup. https://github.com/rescript-lang/rescript-vscode/pull/1164 + ## 1.72.0 #### :bug: Bug fix From 750a28b0e75571fc54fa349b59014102b36ebc23 Mon Sep 17 00:00:00 2001 From: nojaf Date: Thu, 18 Dec 2025 09:50:43 +0100 Subject: [PATCH 35/56] Log server version on initialize --- server/src/server.ts | 42 ++++++++++++++++++++++++------------------ 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/server/src/server.ts b/server/src/server.ts index 5eedee868..66572de0b 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -29,7 +29,7 @@ import * as ic from "./incrementalCompilation"; import config, { extensionConfiguration, initialConfiguration } from "./config"; import { projectsFiles } from "./projectFiles"; import { NormalizedPath } from "./utils"; -import { initializeLogger, getLogger, setLogLevel, LogLevel } from "./logger"; +import { initializeLogger, getLogger, setLogLevel } from "./logger"; function applyUserConfiguration(configuration: extensionConfiguration) { // We always want to spread the initial configuration to ensure all defaults are respected. 
@@ -1285,27 +1285,30 @@ function openCompiledFile(msg: p.RequestMessage): p.Message { return response; } +async function getServerVersion(): Promise { + // Read the server version from package.json + let serverVersion: string | undefined; + try { + const packageJsonPath = path.join(__dirname, "..", "package.json"); + const packageJsonContent = await fsAsync.readFile(packageJsonPath, { + encoding: "utf-8", + }); + const packageJson: { version?: unknown } = JSON.parse(packageJsonContent); + serverVersion = + typeof packageJson.version === "string" ? packageJson.version : undefined; + } catch (e) { + // If we can't read the version, that's okay - we'll just omit it + serverVersion = undefined; + } + return serverVersion; +} + async function dumpServerState( msg: p.RequestMessage, ): Promise { // Custom debug endpoint: dump current server state (config + projectsFiles) try { - // Read the server version from package.json - let serverVersion: string | undefined; - try { - const packageJsonPath = path.join(__dirname, "..", "package.json"); - const packageJsonContent = await fsAsync.readFile(packageJsonPath, { - encoding: "utf-8", - }); - const packageJson: { version?: unknown } = JSON.parse(packageJsonContent); - serverVersion = - typeof packageJson.version === "string" - ? packageJson.version - : undefined; - } catch (e) { - // If we can't read the version, that's okay - we'll just omit it - serverVersion = undefined; - } + const serverVersion = await getServerVersion(); const projects = Array.from(projectsFiles.entries()).map( ([projectRootPath, pf]) => ({ @@ -1477,7 +1480,10 @@ async function onMessage(msg: p.Message) { }; send(response); } else if (msg.method === "initialize") { - getLogger().info("Received initialize request from client."); + const serverVersion = await getServerVersion(); + getLogger().info( + `Received initialize request from client. 
Server version: ${serverVersion}`, + ); // Save initial configuration, if present let initParams = msg.params as InitializeParams; for (const workspaceFolder of initParams.workspaceFolders || []) { From f033a10899f048ed6469fa7c21b9b67f291950e2 Mon Sep 17 00:00:00 2001 From: nojaf Date: Thu, 18 Dec 2025 13:17:54 +0100 Subject: [PATCH 36/56] Add Mermaid diagrams --- CONTRIBUTING.md | 53 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 65650bb1e..70cebfb47 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -112,6 +112,59 @@ We call a few binaries and it's tricky to call them properly cross-platform. Her ## Rough Description Of How The Plugin Works +### Text Changes + +The flow below shows how the LSP server reacts to incremental text changes and produces diagnostics: + +```mermaid +flowchart TD + A[Your ReScript file in your editor] + B[LSP Client] + C[LSP Server] + D[bsc] + + A -->|Type a character| B + B -->|textDocument/didChange| C + + subgraph LSP_Server_Internal_Flow["LSP Server"] + C1[triggerIncrementalCompilationOfFile] + C2[compileContents] + C3[figureOutBscArgs] + C4[parse .compiler.log] + end + + C --> C1 + C1 --> C2 --> C3 + C3 -->|invoke| D + D -->|writes| C4 + C4 -->|textDocument/publishDiagnostics| B +``` + +### Completion + +The flow below shows how the LSP server handles completion requests by delegating to the native analysis binary: + +```mermaid +flowchart TD + A[Your ReScript file in your editor] + B[LSP Client] + C[LSP Server] + D[rescript-editor-analysis.exe] + + A -->|Trigger completion| B + B -->|textDocument/completion| C + + subgraph LSP_Server_Internal_Flow["LSP Server"] + C1[shell out to rescript-editor-analysis.exe] + C2[build completion response] + end + + C --> C1 + C1 -->|exec| D + D --> C2 + C2 -->|textDocument/completion response| B +``` + ### Editor Diagnostics They should be synced in from `lib/bs/.compiler.log` build. 
Don't take them from other places. From 45c61199597e113388e19fa2bfb18516c7654e5c Mon Sep 17 00:00:00 2001 From: nojaf Date: Mon, 22 Dec 2025 08:53:39 +0100 Subject: [PATCH 37/56] reverify token after parsing compiler log output. --- server/src/incrementalCompilation.ts | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 2575f39e5..e4685c370 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -556,6 +556,14 @@ async function compileContents( `${stderr}\n#Done()`, ); + // reverify: Token may have changed during the await above + if (!verifyTriggerToken(entry.file.sourceFilePath, triggerToken)) { + getLogger().log( + `Discarding stale compilation results for ${entry.file.sourceFileName} (token mismatch after parsing)`, + ); + return; + } + const actions = Object.values(codeActions)[0] ?? []; // Code actions will point to the locally saved incremental file, so we must remap From 200460fd60b8a66432f7da361ebac9b2caccc336 Mon Sep 17 00:00:00 2001 From: nojaf Date: Mon, 22 Dec 2025 08:57:14 +0100 Subject: [PATCH 38/56] Extract code to parse and publish diagnostic from compileContents --- server/src/incrementalCompilation.ts | 286 ++++++++++++++++----------- 1 file changed, 176 insertions(+), 110 deletions(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index e4685c370..3086b89ed 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -487,6 +487,174 @@ rule mij callArgs.push(entry.file.incrementalFilePath); return callArgs; } + +/** + * Remaps code action file paths from the incremental temp file to the actual source file. + */ +function remapCodeActionsToSourceFile( + codeActions: Record, + sourceFilePath: NormalizedPath, +): fileCodeActions[] { + const actions = Object.values(codeActions)[0] ?? 
[]; + + // Code actions will point to the locally saved incremental file, so we must remap + // them so the editor understand it's supposed to apply them to the unsaved doc, + // not the saved "dummy" incremental file. + actions.forEach((ca) => { + if (ca.codeAction.edit != null && ca.codeAction.edit.changes != null) { + const change = Object.values(ca.codeAction.edit.changes)[0]; + + ca.codeAction.edit.changes = { + [utils.pathToURI(sourceFilePath)]: change, + }; + } + }); + + return actions; +} + +/** + * Filters diagnostics to remove unwanted parser errors from incremental compilation. + */ +function filterIncrementalDiagnostics( + diagnostics: p.Diagnostic[], + sourceFileName: string, +): { filtered: p.Diagnostic[]; hasIgnoredMessages: boolean } { + let hasIgnoredMessages = false; + + const filtered = diagnostics + .map((d) => ({ + ...d, + message: removeAnsiCodes(d.message), + })) + // Filter out a few unwanted parser errors since we run the parser in ignore mode + .filter((d) => { + if ( + !d.message.startsWith("Uninterpreted extension 'rescript.") && + (!d.message.includes(`/${INCREMENTAL_FOLDER_NAME}/${sourceFileName}`) || + // The `Multiple definition of the name ` type error's + // message includes the filepath with LOC of the duplicate definition + d.message.startsWith("Multiple definition of the") || + // The signature mismatch, with mismatch and ill typed applicative functor + // type errors all include the filepath with LOC + d.message.startsWith("Signature mismatch") || + d.message.startsWith("In this `with' constraint") || + d.message.startsWith("This `with' constraint on")) + ) { + hasIgnoredMessages = true; + return true; + } + return false; + }); + + return { filtered, hasIgnoredMessages }; +} + +/** + * Logs an error when incremental compilation produces unexpected output. 
+ */ +function logIncrementalCompilationError( + entry: IncrementallyCompiledFileInfo, + stderr: string, + callArgs: string[] | null, + send: (msg: p.Message) => void, +): void { + hasReportedFeatureFailedError.add(entry.project.rootPath); + const logfile = path.resolve( + entry.project.incrementalFolderPath, + "error.log", + ); + + try { + fs.writeFileSync( + logfile, + `== BSC ARGS ==\n${callArgs?.join(" ")}\n\n== OUTPUT ==\n${stderr}`, + ); + + const params: p.ShowMessageParams = { + type: p.MessageType.Warning, + message: `[Incremental typechecking] Something might have gone wrong with incremental type checking. Check out the [error log](file://${logfile}) and report this issue please.`, + }; + + const message: p.NotificationMessage = { + jsonrpc: c.jsonrpcVersion, + method: "window/showMessage", + params: params, + }; + + send(message); + } catch (e) { + console.error(e); + } +} + +/** + * Processes compilation results and publishes diagnostics to the LSP client. + */ +function processAndPublishDiagnostics( + entry: IncrementallyCompiledFileInfo, + result: Record, + codeActions: Record, + stderr: string, + callArgs: string[] | null, + send: (msg: p.Message) => void, +): void { + // Remap code actions to source file + const actions = remapCodeActionsToSourceFile( + codeActions, + entry.file.sourceFilePath, + ); + entry.codeActions = actions; + + // Filter diagnostics + const rawDiagnostics = Object.values(result)[0] ?? 
[]; + const { filtered: res, hasIgnoredMessages } = filterIncrementalDiagnostics( + rawDiagnostics, + entry.file.sourceFileName, + ); + + // Log error if compilation produced unexpected output + if ( + res.length === 0 && + stderr !== "" && + !hasIgnoredMessages && + !hasReportedFeatureFailedError.has(entry.project.rootPath) + ) { + logIncrementalCompilationError(entry, stderr, callArgs, send); + } + + const fileUri = utils.pathToURI(entry.file.sourceFilePath); + + // Get compiler diagnostics from main build (if any) and combine with incremental diagnostics + const compilerDiagnosticsForFile = + getCurrentCompilerDiagnosticsForFile(fileUri); + const allDiagnostics = [...res, ...compilerDiagnosticsForFile]; + + // Update filesWithDiagnostics to track this file + // entry.project.rootPath is guaranteed to match a key in projectsFiles + // (see triggerIncrementalCompilationOfFile where the entry is created) + const projectFile = projectsFiles.get(entry.project.rootPath); + + if (projectFile != null) { + if (allDiagnostics.length > 0) { + projectFile.filesWithDiagnostics.add(fileUri); + } else { + // Only remove if there are no diagnostics at all + projectFile.filesWithDiagnostics.delete(fileUri); + } + } + + const notification: p.NotificationMessage = { + jsonrpc: c.jsonrpcVersion, + method: "textDocument/publishDiagnostics", + params: { + uri: fileUri, + diagnostics: allDiagnostics, + }, + }; + send(notification); +} + async function compileContents( entry: IncrementallyCompiledFileInfo, fileContent: string, @@ -564,116 +732,14 @@ async function compileContents( return; } - const actions = Object.values(codeActions)[0] ?? []; - - // Code actions will point to the locally saved incremental file, so we must remap - // them so the editor understand it's supposed to apply them to the unsaved doc, - // not the saved "dummy" incremental file. 
- actions.forEach((ca) => { - if ( - ca.codeAction.edit != null && - ca.codeAction.edit.changes != null - ) { - const change = Object.values(ca.codeAction.edit.changes)[0]; - - ca.codeAction.edit.changes = { - [utils.pathToURI(entry.file.sourceFilePath)]: change, - }; - } - }); - - entry.codeActions = actions; - - const res = (Object.values(result)[0] ?? []) - .map((d) => ({ - ...d, - message: removeAnsiCodes(d.message), - })) - // Filter out a few unwanted parser errors since we run the parser in ignore mode - .filter((d) => { - if ( - !d.message.startsWith("Uninterpreted extension 'rescript.") && - (!d.message.includes( - `/${INCREMENTAL_FOLDER_NAME}/${entry.file.sourceFileName}`, - ) || - // The `Multiple definition of the name ` type error's - // message includes the filepath with LOC of the duplicate definition - d.message.startsWith("Multiple definition of the") || - // The signature mismatch, with mismatch and ill typed applicative functor - // type errors all include the filepath with LOC - d.message.startsWith("Signature mismatch") || - d.message.startsWith("In this `with' constraint") || - d.message.startsWith("This `with' constraint on")) - ) { - hasIgnoredErrorMessages = true; - return true; - } - return false; - }); - - if ( - res.length === 0 && - stderr !== "" && - !hasIgnoredErrorMessages && - !hasReportedFeatureFailedError.has(entry.project.rootPath) - ) { - try { - hasReportedFeatureFailedError.add(entry.project.rootPath); - const logfile = path.resolve( - entry.project.incrementalFolderPath, - "error.log", - ); - fs.writeFileSync( - logfile, - `== BSC ARGS ==\n${callArgs?.join( - " ", - )}\n\n== OUTPUT ==\n${stderr}`, - ); - let params: p.ShowMessageParams = { - type: p.MessageType.Warning, - message: `[Incremental typechecking] Something might have gone wrong with incremental type checking. 
Check out the [error log](file://${logfile}) and report this issue please.`, - }; - let message: p.NotificationMessage = { - jsonrpc: c.jsonrpcVersion, - method: "window/showMessage", - params: params, - }; - send(message); - } catch (e) { - console.error(e); - } - } - - const fileUri = utils.pathToURI(entry.file.sourceFilePath); - - // Get compiler diagnostics from main build (if any) and combine with incremental diagnostics - const compilerDiagnosticsForFile = - getCurrentCompilerDiagnosticsForFile(fileUri); - const allDiagnostics = [...res, ...compilerDiagnosticsForFile]; - - // Update filesWithDiagnostics to track this file - // entry.project.rootPath is guaranteed to match a key in projectsFiles - // (see triggerIncrementalCompilationOfFile where the entry is created) - const projectFile = projectsFiles.get(entry.project.rootPath); - - if (projectFile != null) { - if (allDiagnostics.length > 0) { - projectFile.filesWithDiagnostics.add(fileUri); - } else { - // Only remove if there are no diagnostics at all - projectFile.filesWithDiagnostics.delete(fileUri); - } - } - - const notification: p.NotificationMessage = { - jsonrpc: c.jsonrpcVersion, - method: "textDocument/publishDiagnostics", - params: { - uri: fileUri, - diagnostics: allDiagnostics, - }, - }; - send(notification); + processAndPublishDiagnostics( + entry, + result, + codeActions, + stderr, + callArgs, + send, + ); } onCompilationFinished?.(); }, From a99ee848326b79d3ca020efdc1b32828dda36221 Mon Sep 17 00:00:00 2001 From: nojaf Date: Mon, 22 Dec 2025 09:18:04 +0100 Subject: [PATCH 39/56] Refactor to use abortController --- server/src/incrementalCompilation.ts | 170 ++++++++++++++++----------- 1 file changed, 99 insertions(+), 71 deletions(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 3086b89ed..523c50817 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -4,6 +4,7 @@ import * as utils from "./utils"; 
import { performance } from "perf_hooks"; import * as p from "vscode-languageserver-protocol"; import * as cp from "node:child_process"; +import { promisify } from "node:util"; import semver from "semver"; import * as os from "os"; import config, { send } from "./config"; @@ -16,6 +17,8 @@ import { getCurrentCompilerDiagnosticsForFile } from "./server"; import { NormalizedPath } from "./utils"; import { getLogger } from "./logger"; +const execFilePromise = promisify(cp.execFile); + const INCREMENTAL_FOLDER_NAME = "___incremental"; const INCREMENTAL_FILE_FOLDER_LOCATION = path.join( c.compilerDirPartialPath, @@ -59,8 +62,8 @@ export type IncrementallyCompiledFileInfo = { /** The trigger token for the currently active compilation. */ triggerToken: number; } | null; - /** Listeners for when compilation of this file is killed. List always cleared after each invocation. */ - killCompilationListeners: Array<() => void>; + /** Mechanism to kill the currently active compilation. */ + abortCompilation: (() => void) | null; /** Project specific information. */ project: { /** The root path of the project (normalized to match projectsFiles keys). */ @@ -86,6 +89,19 @@ const hasReportedFeatureFailedError: Set = new Set(); const originalTypeFileToFilePath: Map = new Map(); +/** + * Cancels the currently active compilation for an entry. + * Clears the timeout, aborts the compilation, and resets state. 
+ */ +function cancelActiveCompilation(entry: IncrementallyCompiledFileInfo): void { + if (entry.compilation != null) { + clearTimeout(entry.compilation.timeout); + entry.abortCompilation?.(); + entry.compilation = null; + entry.abortCompilation = null; + } +} + export function incrementalCompilationFileChanged(changedPath: NormalizedPath) { const filePath = originalTypeFileToFilePath.get(changedPath); if (filePath != null) { @@ -96,9 +112,7 @@ export function incrementalCompilationFileChanged(changedPath: NormalizedPath) { ); if (entry.compilation != null) { getLogger().log("[watcher] Was compiling, killing"); - clearTimeout(entry.compilation.timeout); - entry.killCompilationListeners.forEach((cb) => cb()); - entry.compilation = null; + cancelActiveCompilation(entry); } cleanUpIncrementalFiles( entry.file.sourceFilePath, @@ -335,7 +349,7 @@ function triggerIncrementalCompilationOfFile( buildRewatch: null, buildNinja: null, compilation: null, - killCompilationListeners: [], + abortCompilation: null, codeActions: [], }; @@ -352,25 +366,16 @@ function triggerIncrementalCompilationOfFile( if (incrementalFileCacheEntry == null) return; const entry = incrementalFileCacheEntry; - if (entry.compilation != null) { - clearTimeout(entry.compilation.timeout); - entry.killCompilationListeners.forEach((cb) => cb()); - entry.killCompilationListeners = []; - } + cancelActiveCompilation(entry); const triggerToken = performance.now(); const timeout = setTimeout(() => { compileContents(entry, fileContent, send, onCompilationFinished); }, 20); - if (entry.compilation != null) { - entry.compilation.timeout = timeout; - entry.compilation.triggerToken = triggerToken; - } else { - entry.compilation = { - timeout, - triggerToken, - }; - } + entry.compilation = { + timeout, + triggerToken, + }; } function verifyTriggerToken( filePath: NormalizedPath, @@ -689,66 +694,89 @@ async function compileContents( entry.buildSystem === "bsb" ? 
entry.project.rootPath : path.resolve(entry.project.rootPath, c.compilerDirPartialPath); + getLogger().log( `About to invoke bsc from \"${cwd}\", used ${entry.buildSystem}`, ); getLogger().log( `${entry.project.bscBinaryLocation} ${callArgs.map((c) => `"${c}"`).join(" ")}`, ); - const process = cp.execFile( - entry.project.bscBinaryLocation, - callArgs, - { cwd }, - async (error, _stdout, stderr) => { - if (!error?.killed) { - getLogger().log( - `Recompiled ${entry.file.sourceFileName} in ${ - (performance.now() - startTime) / 1000 - }s`, - ); - } else { - getLogger().log( - `Compilation of ${entry.file.sourceFileName} was killed.`, - ); - } - let hasIgnoredErrorMessages = false; - if ( - !error?.killed && - triggerToken != null && - verifyTriggerToken(entry.file.sourceFilePath, triggerToken) - ) { - getLogger().log("Resetting compilation status."); - // Reset compilation status as this compilation finished - entry.compilation = null; - const { result, codeActions } = await utils.parseCompilerLogOutput( - `${stderr}\n#Done()`, - ); - // reverify: Token may have changed during the await above - if (!verifyTriggerToken(entry.file.sourceFilePath, triggerToken)) { - getLogger().log( - `Discarding stale compilation results for ${entry.file.sourceFileName} (token mismatch after parsing)`, - ); - return; - } - - processAndPublishDiagnostics( - entry, - result, - codeActions, - stderr, - callArgs, - send, - ); - } - onCompilationFinished?.(); - }, - ); - entry.killCompilationListeners.push(() => { - process.kill("SIGKILL"); - }); + // Create AbortController for this compilation + const abortController = new AbortController(); + const { signal } = abortController; + + // Store abort function directly on the entry + entry.abortCompilation = () => { + getLogger().log(`Aborting compilation of ${entry.file.sourceFileName}`); + abortController.abort(); + }; + + try { + const { stdout, stderr } = await execFilePromise( + entry.project.bscBinaryLocation, + callArgs, + { cwd, 
signal }, + ); + + getLogger().log( + `Recompiled ${entry.file.sourceFileName} in ${ + (performance.now() - startTime) / 1000 + }s`, + ); + + // Verify token after async operation + if ( + triggerToken != null && + !verifyTriggerToken(entry.file.sourceFilePath, triggerToken) + ) { + getLogger().log( + `Discarding stale compilation results for ${entry.file.sourceFileName} (token changed)`, + ); + return; + } + + getLogger().log("Resetting compilation status."); + // Reset compilation status as this compilation finished + entry.compilation = null; + entry.abortCompilation = null; + + const { result, codeActions } = await utils.parseCompilerLogOutput( + `${stderr}\n#Done()`, + ); + + // Re-verify again after second async operation + if ( + triggerToken != null && + !verifyTriggerToken(entry.file.sourceFilePath, triggerToken) + ) { + getLogger().log( + `Discarding stale compilation results for ${entry.file.sourceFileName} (token changed after parsing)`, + ); + return; + } + + processAndPublishDiagnostics( + entry, + result, + codeActions, + stderr, + callArgs, + send, + ); + } catch (error: any) { + if (error.name === "AbortError") { + getLogger().log( + `Compilation of ${entry.file.sourceFileName} was aborted.`, + ); + } else { + throw error; + } + } } catch (e) { console.error(e); + } finally { + onCompilationFinished?.(); } } From a954b6f2fa7e9bdb9640d949b7b2d905a05bb917 Mon Sep 17 00:00:00 2001 From: nojaf Date: Mon, 22 Dec 2025 09:37:20 +0100 Subject: [PATCH 40/56] Code review feedback --- server/src/incrementalCompilation.ts | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 523c50817..4e681fc25 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -503,7 +503,7 @@ function remapCodeActionsToSourceFile( const actions = Object.values(codeActions)[0] ?? 
[]; // Code actions will point to the locally saved incremental file, so we must remap - // them so the editor understand it's supposed to apply them to the unsaved doc, + // them so the editor understands it's supposed to apply them to the unsaved doc, // not the saved "dummy" incremental file. actions.forEach((ca) => { if (ca.codeAction.edit != null && ca.codeAction.edit.changes != null) { @@ -713,7 +713,7 @@ async function compileContents( }; try { - const { stdout, stderr } = await execFilePromise( + const { stderr } = await execFilePromise( entry.project.bscBinaryLocation, callArgs, { cwd, signal }, @@ -736,11 +736,6 @@ async function compileContents( return; } - getLogger().log("Resetting compilation status."); - // Reset compilation status as this compilation finished - entry.compilation = null; - entry.abortCompilation = null; - const { result, codeActions } = await utils.parseCompilerLogOutput( `${stderr}\n#Done()`, ); @@ -756,6 +751,11 @@ async function compileContents( return; } + getLogger().log("Resetting compilation status."); + // Reset compilation status as this compilation finished + entry.compilation = null; + entry.abortCompilation = null; + processAndPublishDiagnostics( entry, result, From f94574da6233af832a80b1990e2cc940a07d5c2d Mon Sep 17 00:00:00 2001 From: nojaf Date: Mon, 22 Dec 2025 11:10:28 +0100 Subject: [PATCH 41/56] Don't merge in the original diagnostics --- server/src/incrementalCompilation.ts | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 4e681fc25..a1a090a51 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -630,18 +630,13 @@ function processAndPublishDiagnostics( const fileUri = utils.pathToURI(entry.file.sourceFilePath); - // Get compiler diagnostics from main build (if any) and combine with incremental diagnostics - const compilerDiagnosticsForFile = - 
getCurrentCompilerDiagnosticsForFile(fileUri); - const allDiagnostics = [...res, ...compilerDiagnosticsForFile]; - // Update filesWithDiagnostics to track this file // entry.project.rootPath is guaranteed to match a key in projectsFiles // (see triggerIncrementalCompilationOfFile where the entry is created) const projectFile = projectsFiles.get(entry.project.rootPath); if (projectFile != null) { - if (allDiagnostics.length > 0) { + if (res.length > 0) { projectFile.filesWithDiagnostics.add(fileUri); } else { // Only remove if there are no diagnostics at all @@ -654,7 +649,7 @@ function processAndPublishDiagnostics( method: "textDocument/publishDiagnostics", params: { uri: fileUri, - diagnostics: allDiagnostics, + diagnostics: res, }, }; send(notification); From 092863d0caea9f418887c8dff413ef63551dc637 Mon Sep 17 00:00:00 2001 From: nojaf Date: Mon, 22 Dec 2025 11:19:39 +0100 Subject: [PATCH 42/56] Run incremental typechecking on file open --- server/src/server.ts | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/server/src/server.ts b/server/src/server.ts index 66572de0b..b4445f68a 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1436,7 +1436,20 @@ async function onMessage(msg: p.Message) { params.textDocument.uri as utils.FileURI, params.textDocument.text, ); - await sendUpdatedDiagnostics(); + + if (config.extensionConfiguration.incrementalTypechecking?.enable) { + // We run incremental typechecking to get the most accurate results + // the current file may have deviated from the last compilation. + updateOpenedFile( + params.textDocument.uri as utils.FileURI, + params.textDocument.text, + ); + } else { + // Check the .compiler.log file for diagnostics + // This could be stale data of course. 
+ await sendUpdatedDiagnostics(); + } + await updateDiagnosticSyntax( params.textDocument.uri as utils.FileURI, params.textDocument.text, From 9276c2e652253fccd7a83aab022550e9754b6881 Mon Sep 17 00:00:00 2001 From: nojaf Date: Tue, 23 Dec 2025 10:03:24 +0100 Subject: [PATCH 43/56] Clean up entry in finally block --- server/src/incrementalCompilation.ts | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index a1a090a51..1cc4e1ce8 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -746,11 +746,6 @@ async function compileContents( return; } - getLogger().log("Resetting compilation status."); - // Reset compilation status as this compilation finished - entry.compilation = null; - entry.abortCompilation = null; - processAndPublishDiagnostics( entry, result, @@ -765,11 +760,19 @@ async function compileContents( `Compilation of ${entry.file.sourceFileName} was aborted.`, ); } else { + getLogger().error( + `Unexpected error during compilation of ${entry.file.sourceFileName}: ${error}`, + ); throw error; } + } finally { + // Only clean up if this is still the active compilation + if (entry.compilation?.triggerToken === triggerToken) { + getLogger().log("Cleaning up compilation status."); + entry.compilation = null; + entry.abortCompilation = null; + } } - } catch (e) { - console.error(e); } finally { onCompilationFinished?.(); } From 97a799402596c673881d639d09c99fb8a1394149 Mon Sep 17 00:00:00 2001 From: nojaf Date: Tue, 23 Dec 2025 10:58:29 +0100 Subject: [PATCH 44/56] changelog --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9930445ca..650bed246 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,11 @@ #### :bug: Bug fix - Take namespace into account for incremental cleanup. 
https://github.com/rescript-lang/rescript-vscode/pull/1164 +- Potential race condition in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167 + +#### :nail_care: Polish + +- Stale .compiler.log can still spill through in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167 ## 1.72.0 From dc278f53d08d78b3d2a38a62656aa7d201cc8465 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A9di-R=C3=A9mi=20Hashim?= Date: Wed, 24 Dec 2025 20:53:16 +0100 Subject: [PATCH 45/56] Promisify'd child_process.execFile throws if exit code is non-zero --- server/src/incrementalCompilation.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 1cc4e1ce8..14dddc498 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -712,7 +712,12 @@ async function compileContents( entry.project.bscBinaryLocation, callArgs, { cwd, signal }, - ); + ).catch((error) => { + if (error.stderr) { + return { stderr: error.stderr }; + } + throw error; + }); getLogger().log( `Recompiled ${entry.file.sourceFileName} in ${ From e9d5071a80d9e92db004f911edf62c2c3d89dae4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A9di-R=C3=A9mi=20Hashim?= Date: Wed, 24 Dec 2025 20:53:31 +0100 Subject: [PATCH 46/56] Remove unused import --- server/src/incrementalCompilation.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts index 14dddc498..da65b9d4d 100644 --- a/server/src/incrementalCompilation.ts +++ b/server/src/incrementalCompilation.ts @@ -13,7 +13,6 @@ import { fileCodeActions } from "./codeActions"; import { projectsFiles } from "./projectFiles"; import { getRewatchBscArgs, RewatchCompilerArgs } from "./bsc-args/rewatch"; import { BsbCompilerArgs, getBsbBscArgs } from "./bsc-args/bsb"; -import { getCurrentCompilerDiagnosticsForFile } from "./server"; 
import { NormalizedPath } from "./utils"; import { getLogger } from "./logger"; From cacee028ec961bd49645bfa07589cd186557c245 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A9di-R=C3=A9mi=20Hashim?= Date: Wed, 24 Dec 2025 21:04:00 +0100 Subject: [PATCH 47/56] Add CHANGELOG entry --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 650bed246..3e095205f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ - Take namespace into account for incremental cleanup. https://github.com/rescript-lang/rescript-vscode/pull/1164 - Potential race condition in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167 +- Fix extension crash triggered by incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1169 #### :nail_care: Polish From dd218539f39e31aef00f7f5d65c453fce420603a Mon Sep 17 00:00:00 2001 From: Cristiano Calcagno Date: Sun, 4 Jan 2026 08:33:52 +0100 Subject: [PATCH 48/56] Migrate example-project to ReScript 12 --- .../examples/example-project/bsconfig.json | 14 - .../example-project/package-lock.json | 176 +++- .../examples/example-project/package.json | 8 +- .../examples/example-project/rescript.json | 18 + analysis/examples/example-project/src/B.re | 9 - .../examples/example-project/src/Hello.res | 9 +- .../examples/example-project/src/Hello.res.js | 171 ++++ .../examples/example-project/src/Json.res | 299 +++--- .../examples/example-project/src/Json.res.js | 908 ++++++++++++++++++ .../src/ModuleWithDocComment.res.js | 19 + .../examples/example-project/src/More.res | 1 - .../examples/example-project/src/More.res.js | 18 + .../examples/example-project/src/More.resi | 1 - .../examples/example-project/src/Other.res | 1 - .../examples/example-project/src/Other.res.js | 43 + .../examples/example-project/src/Serde.res | 222 ----- .../example-project/src/TransformHelpers.res | 13 +- .../src/TransformHelpers.res.js | 71 ++ 
analysis/examples/example-project/src/ZZ.res | 21 +- .../examples/example-project/src/ZZ.res.js | 197 ++++ analysis/examples/example-project/types.json | 13 - 21 files changed, 1811 insertions(+), 421 deletions(-) delete mode 100644 analysis/examples/example-project/bsconfig.json create mode 100644 analysis/examples/example-project/rescript.json delete mode 100644 analysis/examples/example-project/src/B.re create mode 100644 analysis/examples/example-project/src/Hello.res.js create mode 100644 analysis/examples/example-project/src/Json.res.js create mode 100644 analysis/examples/example-project/src/ModuleWithDocComment.res.js create mode 100644 analysis/examples/example-project/src/More.res.js create mode 100644 analysis/examples/example-project/src/Other.res.js delete mode 100644 analysis/examples/example-project/src/Serde.res create mode 100644 analysis/examples/example-project/src/TransformHelpers.res.js create mode 100644 analysis/examples/example-project/src/ZZ.res.js delete mode 100644 analysis/examples/example-project/types.json diff --git a/analysis/examples/example-project/bsconfig.json b/analysis/examples/example-project/bsconfig.json deleted file mode 100644 index f59290000..000000000 --- a/analysis/examples/example-project/bsconfig.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "tryit", - "sources": "src", - "bsc-flags": ["-bs-super-errors", "-open Belt"], - "warnings": { - "number": "-32-26-27-33" - }, - "bs-dependencies": ["reason-react"], - "reason": { "react-jsx": 3 }, - "namespace": "my-namespace", - "reanalyze": { - "analysis": ["dce", "exception"] - } -} diff --git a/analysis/examples/example-project/package-lock.json b/analysis/examples/example-project/package-lock.json index 61650f529..a20ccc046 100644 --- a/analysis/examples/example-project/package-lock.json +++ b/analysis/examples/example-project/package-lock.json @@ -1,16 +1,170 @@ { + "name": "tryit", + "lockfileVersion": 3, "requires": true, - "lockfileVersion": 1, - "dependencies": { - 
"reason-react": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/reason-react/-/reason-react-0.9.1.tgz", - "integrity": "sha512-nlH0O2TDy9KzOLOW+vlEQk4ExHOeciyzFdoLcsmmiit6hx6H5+CVDrwJ+8aiaLT/kqK5xFOjy4PS7PftWz4plA==" - }, - "rescript": { - "version": "9.1.2", - "resolved": "https://registry.npmjs.org/rescript/-/rescript-9.1.2.tgz", - "integrity": "sha512-4wHvTDv3nyYnAPJHcg1RGG8z7u3HDiBf6RN3P/dITDv859Qo35aKOzJWQtfBzbAs0EKNafLqei3TnUqiAv6BwQ==" + "packages": { + "": { + "name": "tryit", + "dependencies": { + "@rescript/react": "^0.14.0", + "rescript": "12.0.2" + } + }, + "node_modules/@rescript/darwin-arm64": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/darwin-arm64/-/darwin-arm64-12.0.2.tgz", + "integrity": "sha512-GcfiPE0L2N7smWXMBEW4pvlcqblSbKh2tisZTuOaLPr9+WmaXnBLap6OUT+XoEWzTY0RA4QURfpNJLAgqZPrzw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/darwin-x64": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/darwin-x64/-/darwin-x64-12.0.2.tgz", + "integrity": "sha512-di4eucoDdLsLqz86h7Cwywsmt+xFup7rpVlkx8L7i22RTqyTw+CD6aGjFj1Ddj2sVUd5SSFiE9Hj86QggzUgbg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/linux-arm64": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/linux-arm64/-/linux-arm64-12.0.2.tgz", + "integrity": "sha512-x6WOyimX0kw9DGADQ02R2rmvKu8SQOrskfYrKREfc2CgDT+FOKAH+qxvpUiljKhrvthoHkVBJOXtlKk854OdjQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/linux-x64": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/linux-x64/-/linux-x64-12.0.2.tgz", + "integrity": 
"sha512-5QHE+Ca1h3qXxDkw343923WTEdEyBeXphrv+8SZLhLOFNabQ5t6m4rWf52RluDqQpa67j6GB8kVAZpqc2rA2Qw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/react": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@rescript/react/-/react-0.14.0.tgz", + "integrity": "sha512-ncOHWK7ujQmff+QMYKRmtwETvJVolzkwRpDa0MFenEXdUz9ZYywNbq+xH9F9RDQeSwC3/4s9JeUQVyTu4fMpHw==", + "license": "MIT", + "peerDependencies": { + "react": ">=19.0.0", + "react-dom": ">=19.0.0" + } + }, + "node_modules/@rescript/runtime": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/runtime/-/runtime-12.0.2.tgz", + "integrity": "sha512-Jma1QEgns/WlWeVap/Mv6dVbFEKmftPOrmwQbzLQnnrQ9z6PJjorHIvDMQUigQKnvM/4RhRJ3LiG+CsNtgS9ww==" + }, + "node_modules/@rescript/win32-x64": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@rescript/win32-x64/-/win32-x64-12.0.2.tgz", + "integrity": "sha512-QcNQiok3H2Xd+cReB1kpH4hlPXnT7DA6ETsJfZugTUOKOdAYLTJ+Imynv+ojuafkUrwa2v0tZAiqHaQsDjf9IQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/react": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz", + "integrity": "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg==", + "license": "MIT", + "peer": true, + "dependencies": { + "scheduler": "^0.27.0" + }, + "peerDependencies": { + "react": "^19.2.3" + } + }, + "node_modules/rescript": { + "version": "12.0.2", + "resolved": 
"https://registry.npmjs.org/rescript/-/rescript-12.0.2.tgz", + "integrity": "sha512-H1K9ovIxg2BVwJcXN1cZv0Q1pJEyFgNcUDKHSkmKmv5eBZMpXBASTZ2+grDt0zFFNoeFA1B9abAjSf+oTTi7hg==", + "license": "SEE LICENSE IN LICENSE", + "workspaces": [ + "packages/playground", + "packages/@rescript/*", + "tests/dependencies/**", + "tests/analysis_tests/**", + "tests/docstring_tests", + "tests/gentype_tests/**", + "tests/tools_tests", + "scripts/res" + ], + "dependencies": { + "@rescript/runtime": "12.0.2" + }, + "bin": { + "bsc": "cli/bsc.js", + "bstracing": "cli/bstracing.js", + "rescript": "cli/rescript.js", + "rescript-legacy": "cli/rescript-legacy.js", + "rescript-tools": "cli/rescript-tools.js" + }, + "engines": { + "node": ">=20.11.0" + }, + "optionalDependencies": { + "@rescript/darwin-arm64": "12.0.2", + "@rescript/darwin-x64": "12.0.2", + "@rescript/linux-arm64": "12.0.2", + "@rescript/linux-x64": "12.0.2", + "@rescript/win32-x64": "12.0.2" + } + }, + "node_modules/scheduler": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", + "license": "MIT", + "peer": true } } } diff --git a/analysis/examples/example-project/package.json b/analysis/examples/example-project/package.json index b948ffc3a..22f77c1a6 100644 --- a/analysis/examples/example-project/package.json +++ b/analysis/examples/example-project/package.json @@ -1,11 +1,13 @@ { + "name": "tryit", "dependencies": { - "reason-react": "^0.9.1", - "rescript": "^9.1.2" + "@rescript/react": "^0.14.0", + "rescript": "12.0.2" }, "scripts": { "build": "rescript", "start": "rescript build -w", - "clean": "rescript clean -with-deps" + "clean": "rescript clean", + "format": "rescript format" } } diff --git a/analysis/examples/example-project/rescript.json b/analysis/examples/example-project/rescript.json new file mode 100644 index 000000000..de34db8b8 --- /dev/null +++ 
b/analysis/examples/example-project/rescript.json @@ -0,0 +1,18 @@ +{ + "name": "tryit", + "sources": "src", + "compiler-flags": [], + "warnings": { + "number": "-32-26-27-33" + }, + "dependencies": ["@rescript/react"], + "jsx": { + "version": 4 + }, + "namespace": "my-namespace", + "package-specs": { + "module": "esmodule", + "in-source": true, + "suffix": ".res.js" + } +} diff --git a/analysis/examples/example-project/src/B.re b/analysis/examples/example-project/src/B.re deleted file mode 100644 index 2ad1ee830..000000000 --- a/analysis/examples/example-project/src/B.re +++ /dev/null @@ -1,9 +0,0 @@ - - -let x = 12 - - -let y = 44 - - -let z = 123 diff --git a/analysis/examples/example-project/src/Hello.res b/analysis/examples/example-project/src/Hello.res index db525a7e7..1a1d13434 100644 --- a/analysis/examples/example-project/src/Hello.res +++ b/analysis/examples/example-project/src/Hello.res @@ -50,7 +50,7 @@ let awesome = let thing = "thing" -let transform = (x, y) => x ++ Js.Float.toString(y) +let transform = (x, y) => x ++ Float.toString(y) let z = transform("hello ", 5.) 
@@ -64,8 +64,6 @@ let added = open Other -open Hashtbl - @ocaml.doc(" Some more documentation about this ") let awesome = x => x + 2 @@ -93,14 +91,12 @@ let someFunction = (memorableName, {contents}) => { let z = 10 -let z = find - let z = later let m = Other.later for _index in 0 to 10 { - print_endline("hellO") + Console.log("hellO") } module OneOneOneOne = { @@ -170,4 +166,3 @@ type lockfile = { pastVersions: Belt.HashMap.Int.t>, current: list<(shortReference, int)>, } - diff --git a/analysis/examples/example-project/src/Hello.res.js b/analysis/examples/example-project/src/Hello.res.js new file mode 100644 index 000000000..51bcd36b8 --- /dev/null +++ b/analysis/examples/example-project/src/Hello.res.js @@ -0,0 +1,171 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as More$MyNamespace from "./More.res.js"; +import * as Other$MyNamespace from "./Other.res.js"; +import * as JsxRuntime from "react/jsx-runtime"; + +let x = { + a: 3 +}; + +let l = (More$MyNamespace.inner + More$MyNamespace.n | 0) + Other$MyNamespace.inner | 0; + +let me = { + TAG: "People", + _0: "Hie" +}; + +let x$1 = Other$MyNamespace.something + 10 | 0; + +let Something_m = { + name: "Me", + age: 0 +}; + +let Something_animal = { + TAG: "Things", + _0: 10 +}; + +let Something_other = { + TAG: "Things", + _0: 2 +}; + +let Something = { + m: Something_m, + animal: Something_animal, + other: Something_other, + me: me, + x: x$1, + r: "Me", + awesome: 20 +}; + +let aThing = 10 + Other$MyNamespace.something | 0; + +function transform(x, y) { + return x + y.toString(); +} + +transform("hello ", 5); + +let added = 110; + +let a_1 = { + hd: "my finefolks", + tl: { + hd: "in boonville", + tl: /* [] */0 + } +}; + +let a = { + hd: "hello", + tl: a_1 +}; + +function div(x, y, children, param) { + return 10; +} + +JsxRuntime.jsx("div", { + x: "10", + y: "20" +}); + +function something(animal) { + +} + +function someFunction(memorableName, param) { + return memorableName + 20 | 0; +} + +for (let 
_index = 0; _index <= 10; ++_index) { + console.log("hellO"); +} + +let TwoTwoTwoTwo = { + xxxxxxxxxx: 10 +}; + +let OneOneOneOne = { + TwoTwoTwoTwo: TwoTwoTwoTwo +}; + +let y = 15; + +let z = { + contents: 30 +}; + +function thing() { + return 77; +} + +let contents = z.contents; + +let someLongName = 10; + +let otherLongName = "string"; + +let zzz = 1; + +let more = 20; + +let m = Other$MyNamespace.later; + +let r = 10; + +let xxxxxxxxxx = 10; + +let contnets = More$MyNamespace.contnets; + +let inner = More$MyNamespace.inner; + +let n = More$MyNamespace.n; + +let party = { + one: "one", + two: 2 +}; + +let one = "one"; + +let two = 2; + +let awesome = "hello"; + +export { + someLongName, + otherLongName, + x, + l, + Something, + aThing, + transform, + zzz, + more, + added, + a, + div, + something, + someFunction, + m, + OneOneOneOne, + r, + xxxxxxxxxx, + contnets, + inner, + n, + y, + z, + party, + one, + two, + thing, + contents, + awesome, +} +/* Not a pure module */ diff --git a/analysis/examples/example-project/src/Json.res b/analysis/examples/example-project/src/Json.res index 7cbfbbe03..251e249fa 100644 --- a/analysis/examples/example-project/src/Json.res +++ b/analysis/examples/example-project/src/Json.res @@ -32,6 +32,8 @@ * @doc Infix ") +external parseFloat: string => float = "parseFloat" + type rec t = | String(string) | Number(float) @@ -42,9 +44,9 @@ type rec t = | Null let string_of_number = f => { - let s = Js.Float.toString(f) - if String.get(s, String.length(s) - 1) == '.' 
{ - String.sub(s, 0, String.length(s) - 1) + let s = Float.toString(f) + if String.get(s, String.length(s) - 1) == Some(".") { + String.slice(s, ~start=0, ~end=String.length(s) - 1) } else { s } @@ -135,23 +137,24 @@ module Infix = { let escape = text => { let ln = String.length(text) - let buf = Buffer.create(ln) - let rec loop = i => + let rec loop = (i, acc) => if i < ln { - switch String.get(text, i) { - | '\012' => Buffer.add_string(buf, "\\f") - | '\\' => Buffer.add_string(buf, "\\\\") - | '"' => Buffer.add_string(buf, "\\\"") - | '\n' => Buffer.add_string(buf, "\\n") - | '\b' => Buffer.add_string(buf, "\\b") - | '\r' => Buffer.add_string(buf, "\\r") - | '\t' => Buffer.add_string(buf, "\\t") - | c => Buffer.add_char(buf, c) + let next = switch String.get(text, i) { + | Some("\x0c") => acc ++ "\\f" + | Some("\\") => acc ++ "\\\\" + | Some("\"") => acc ++ "\\\"" + | Some("\n") => acc ++ "\\n" + | Some("\b") => acc ++ "\\b" + | Some("\r") => acc ++ "\\r" + | Some("\t") => acc ++ "\\t" + | Some(c) => acc ++ c + | None => acc } - loop(i + 1) + loop(i + 1, next) + } else { + acc } - loop(0) - Buffer.contents(buf) + loop(0, "") } @ocaml.doc(" ``` @@ -165,54 +168,76 @@ let rec stringify = t => switch t { | String(value) => "\"" ++ (escape(value) ++ "\"") | Number(num) => string_of_number(num) - | Array(items) => "[" ++ (String.concat(", ", List.map(items, stringify)) ++ "]") - | Object(items) => - "{" ++ - (String.concat( - ", ", - List.map(items, ((k, v)) => "\"" ++ (String.escaped(k) ++ ("\": " ++ stringify(v)))), - ) ++ - "}") + | Array(items) => { + let rec join = (items, sep) => + switch items { + | list{} => "" + | list{x} => x + | list{x, ...rest} => x ++ sep ++ join(rest, sep) + } + let parts = List.map(items, stringify) + "[" ++ join(parts, ", ") ++ "]" + } + | Object(items) => { + let rec join = (items, sep) => + switch items { + | list{} => "" + | list{x} => x + | list{x, ...rest} => x ++ sep ++ join(rest, sep) + } + let parts = List.map(items, ((k, v)) => 
"\"" ++ (escape(k) ++ ("\": " ++ stringify(v)))) + "{" ++ join(parts, ", ") ++ "}" + } | True => "true" | False => "false" | Null => "null" } let white = n => { - let buffer = Buffer.create(n) - for _ in 0 to n - 1 { - Buffer.add_char(buffer, ' ') - } - Buffer.contents(buffer) + let rec loop = (i, acc) => + if i < n { + loop(i + 1, acc ++ " ") + } else { + acc + } + loop(0, "") } -let rec stringifyPretty = (~indent=0, t) => +let rec stringifyPretty = (~indent=0, t) => { + let rec join = (items, sep) => + switch items { + | list{} => "" + | list{x} => x + | list{x, ...rest} => x ++ sep ++ join(rest, sep) + } switch t { | String(value) => "\"" ++ (escape(value) ++ "\"") | Number(num) => string_of_number(num) | Array(list{}) => "[]" - | Array(items) => - "[\n" ++ - (white(indent) ++ - (String.concat(",\n" ++ white(indent), List.map(items, stringifyPretty(~indent=indent + 2))) ++ - ("\n" ++ - (white(indent) ++ "]")))) + | Array(items) => { + let parts = List.map(items, item => stringifyPretty(~indent=indent + 2, item)) + "[\n" ++ + white(indent + 2) ++ + join(parts, ",\n" ++ white(indent + 2)) ++ + "\n" ++ + white(indent) ++ "]" + } | Object(list{}) => "{}" - | Object(items) => - "{\n" ++ - (white(indent) ++ - (String.concat( - ",\n" ++ white(indent), - List.map(items, ((k, v)) => - "\"" ++ (String.escaped(k) ++ ("\": " ++ stringifyPretty(~indent=indent + 2, v))) - ), - ) ++ - ("\n" ++ - (white(indent) ++ "}")))) + | Object(items) => { + let parts = List.map(items, ((k, v)) => + "\"" ++ (escape(k) ++ ("\": " ++ stringifyPretty(~indent=indent + 2, v))) + ) + "{\n" ++ + white(indent + 2) ++ + join(parts, ",\n" ++ white(indent + 2)) ++ + "\n" ++ + white(indent) ++ "}" + } | True => "true" | False => "false" | Null => "null" } +} let unwrap = (message, t) => switch t { @@ -229,12 +254,12 @@ module Parser = { if last_pos == 0 && !keep_empty { acc } else { - list{String.sub(str, 0, last_pos), ...acc} + list{String.slice(str, ~start=0, ~end=last_pos), ...acc} } } else if 
is_delim(String.get(str, pos)) { let new_len = last_pos - pos - 1 if new_len != 0 || keep_empty { - let v = String.sub(str, pos + 1, new_len) + let v = String.slice(str, ~start=pos + 1, ~end=pos + 1 + new_len) loop(list{v, ...acc}, pos, pos - 1) } else { loop(acc, pos, pos - 1) @@ -245,19 +270,27 @@ module Parser = { loop(list{}, len, len - 1) } let fail = (text, pos, message) => { - let pre = String.sub(text, 0, pos) - let lines = split_by(c => c == '\n', pre) + let pre = String.slice(text, ~start=0, ~end=pos) + let lines = split_by(c => c == Some("\n"), pre) let count = List.length(lines) - let last = count > 0 ? List.getExn(lines, count - 1) : "" + let last = count > 0 ? List.getOrThrow(lines, count - 1) : "" let col = String.length(last) + 1 let line = List.length(lines) - let string = Printf.sprintf("Error \"%s\" at %d:%d -> %s\n", message, line, col, last) + let string = + "Error \"" ++ + message ++ + "\" at " ++ + Int.toString(line) ++ + ":" ++ + Int.toString(col) ++ + " -> " ++ + last ++ "\n" failwith(string) } let rec skipToNewline = (text, pos) => if pos >= String.length(text) { pos - } else if String.get(text, pos) == '\n' { + } else if String.get(text, pos) == Some("\n") { pos + 1 } else { skipToNewline(text, pos + 1) @@ -265,7 +298,7 @@ module Parser = { let stringTail = text => { let len = String.length(text) if len > 1 { - String.sub(text, 1, len - 1) + String.slice(text, ~start=1, ~end=len) } else { "" } @@ -273,7 +306,7 @@ module Parser = { let rec skipToCloseMultilineComment = (text, pos) => if pos + 1 >= String.length(text) { failwith("Unterminated comment") - } else if String.get(text, pos) == '*' && String.get(text, pos + 1) == '/' { + } else if String.get(text, pos) == Some("*") && String.get(text, pos + 1) == Some("/") { pos + 2 } else { skipToCloseMultilineComment(text, pos + 1) @@ -281,43 +314,36 @@ module Parser = { let rec skipWhite = (text, pos) => if ( pos < String.length(text) && - (String.get(text, pos) == ' ' || - (String.get(text, 
pos) == '\t' || - (String.get(text, pos) == '\n' || String.get(text, pos) == '\r'))) + (String.get(text, pos) == Some(" ") || + (String.get(text, pos) == Some("\t") || + (String.get(text, pos) == Some("\n") || String.get(text, pos) == Some("\r")))) ) { skipWhite(text, pos + 1) } else { pos } let parseString = (text, pos) => { - /* let i = ref(pos); */ - let buffer = Buffer.create(String.length(text)) let ln = String.length(text) - let rec loop = i => + let rec loop = (i, acc) => i >= ln ? fail(text, i, "Unterminated string") : switch String.get(text, i) { - | '"' => i + 1 - | '\\' => + | Some("\"") => (i + 1, acc) + | Some("\\") => i + 1 >= ln ? fail(text, i, "Unterminated string") : switch String.get(text, i + 1) { - | '/' => - Buffer.add_char(buffer, '/') - loop(i + 2) - | 'f' => - Buffer.add_char(buffer, '\012') - loop(i + 2) + | Some("/") => loop(i + 2, acc ++ "/") + | Some("f") => loop(i + 2, acc ++ "\x0c") | _ => - Buffer.add_string(buffer, Scanf.unescaped(String.sub(text, i, 2))) - loop(i + 2) + let escaped = String.slice(text, ~start=i, ~end=i + 2) + loop(i + 2, acc ++ escaped) } - | c => - Buffer.add_char(buffer, c) - loop(i + 1) + | Some(c) => loop(i + 1, acc ++ c) + | None => (i, acc) } - let final = loop(pos) - (Buffer.contents(buffer), final) + let (final, result) = loop(pos, "") + (result, final) } let parseDigits = (text, pos) => { let len = String.length(text) @@ -326,7 +352,17 @@ module Parser = { i } else { switch String.get(text, i) { - | '0' .. '9' => loop(i + 1) + | Some("0") + | Some("1") + | Some("2") + | Some("3") + | Some("4") + | Some("5") + | Some("6") + | Some("7") + | Some("8") + | Some("9") => + loop(i + 1) | _ => i } } @@ -334,7 +370,7 @@ module Parser = { } let parseWithDecimal = (text, pos) => { let pos = parseDigits(text, pos) - if pos < String.length(text) && String.get(text, pos) == '.' 
{ + if pos < String.length(text) && String.get(text, pos) == Some(".") { let pos = parseDigits(text, pos + 1) pos } else { @@ -344,10 +380,10 @@ module Parser = { let parseNumber = (text, pos) => { let pos = parseWithDecimal(text, pos) let ln = String.length(text) - if pos < ln - 1 && (String.get(text, pos) == 'E' || String.get(text, pos) == 'e') { + if pos < ln - 1 && (String.get(text, pos) == Some("E") || String.get(text, pos) == Some("e")) { let pos = switch String.get(text, pos + 1) { - | '-' - | '+' => + | Some("-") + | Some("+") => pos + 2 | _ => pos + 1 } @@ -357,22 +393,23 @@ module Parser = { } } let parseNegativeNumber = (text, pos) => { - let final = if String.get(text, pos) == '-' { + let final = if String.get(text, pos) == Some("-") { parseNumber(text, pos + 1) } else { parseNumber(text, pos) } - (Number(float_of_string(String.sub(text, pos, final - pos))), final) + let numStr = String.slice(text, ~start=pos, ~end=final) + (Number(parseFloat(numStr)), final) } let expect = (char, text, pos, message) => - if String.get(text, pos) != char { + if String.get(text, pos) != Some(char) { fail(text, pos, "Expected: " ++ message) } else { pos + 1 } let parseComment: 'a. 
(string, int, (string, int) => 'a) => 'a = (text, pos, next) => - if String.get(text, pos) != '/' { - if String.get(text, pos) == '*' { + if String.get(text, pos) != Some("/") { + if String.get(text, pos) == Some("*") { next(text, skipToCloseMultilineComment(text, pos + 1)) } else { failwith("Invalid syntax") @@ -381,10 +418,10 @@ module Parser = { next(text, skipToNewline(text, pos + 1)) } let maybeSkipComment = (text, pos) => - if pos < String.length(text) && String.get(text, pos) == '/' { - if pos + 1 < String.length(text) && String.get(text, pos + 1) == '/' { + if pos < String.length(text) && String.get(text, pos) == Some("/") { + if pos + 1 < String.length(text) && String.get(text, pos + 1) == Some("/") { skipToNewline(text, pos + 1) - } else if pos + 1 < String.length(text) && String.get(text, pos + 1) == '*' { + } else if pos + 1 < String.length(text) && String.get(text, pos + 1) == Some("*") { skipToCloseMultilineComment(text, pos + 1) } else { fail(text, pos, "Invalid synatx") @@ -396,7 +433,7 @@ module Parser = { if pos == String.length(text) { pos } else { - let n = skipWhite(text, pos) |> maybeSkipComment(text) + let n = maybeSkipComment(text, skipWhite(text, pos)) if n > pos { skip(text, n) } else { @@ -408,37 +445,46 @@ module Parser = { fail(text, pos, "Reached end of file without being done parsing") } else { switch String.get(text, pos) { - | '/' => parseComment(text, pos + 1, parse) - | '[' => parseArray(text, pos + 1) - | '{' => parseObject(text, pos + 1) - | 'n' => - if String.sub(text, pos, 4) == "null" { + | Some("/") => parseComment(text, pos + 1, parse) + | Some("[") => parseArray(text, pos + 1) + | Some("{") => parseObject(text, pos + 1) + | Some("n") => + if String.slice(text, ~start=pos, ~end=pos + 4) == "null" { (Null, pos + 4) } else { fail(text, pos, "unexpected character") } - | 't' => - if String.sub(text, pos, 4) == "true" { + | Some("t") => + if String.slice(text, ~start=pos, ~end=pos + 4) == "true" { (True, pos + 4) } else { 
fail(text, pos, "unexpected character") } - | 'f' => - if String.sub(text, pos, 5) == "false" { + | Some("f") => + if String.slice(text, ~start=pos, ~end=pos + 5) == "false" { (False, pos + 5) } else { fail(text, pos, "unexpected character") } - | '\n' - | '\t' - | ' ' - | '\r' => + | Some("\n") + | Some("\t") + | Some(" ") + | Some("\r") => parse(text, skipWhite(text, pos)) - | '"' => + | Some("\"") => let (s, pos) = parseString(text, pos + 1) (String(s), pos) - | '-' - | '0' .. '9' => + | Some("-") + | Some("0") + | Some("1") + | Some("2") + | Some("3") + | Some("4") + | Some("5") + | Some("6") + | Some("7") + | Some("8") + | Some("9") => parseNegativeNumber(text, pos) | _ => fail(text, pos, "unexpected character") } @@ -448,22 +494,22 @@ module Parser = { let (value, pos) = parse(text, pos) let pos = skip(text, pos) switch String.get(text, pos) { - | ',' => + | Some(",") => let pos = skip(text, pos + 1) - if String.get(text, pos) == ']' { + if String.get(text, pos) == Some("]") { (list{value}, pos + 1) } else { let (rest, pos) = parseArrayValue(text, pos) (list{value, ...rest}, pos) } - | ']' => (list{value}, pos + 1) + | Some("]") => (list{value}, pos + 1) | _ => fail(text, pos, "unexpected character") } } and parseArray = (text, pos) => { let pos = skip(text, pos) switch String.get(text, pos) { - | ']' => (Array(list{}), pos + 1) + | Some("]") => (Array(list{}), pos + 1) | _ => let (items, pos) = parseArrayValue(text, pos) (Array(items), pos) @@ -471,24 +517,24 @@ module Parser = { } and parseObjectValue = (text, pos) => { let pos = skip(text, pos) - if String.get(text, pos) != '"' { + if String.get(text, pos) != Some("\"") { fail(text, pos, "Expected string") } else { let (key, pos) = parseString(text, pos + 1) let pos = skip(text, pos) - let pos = expect(':', text, pos, "Colon") + let pos = expect(":", text, pos, "Colon") let (value, pos) = parse(text, pos) let pos = skip(text, pos) switch String.get(text, pos) { - | ',' => + | Some(",") => let pos = 
skip(text, pos + 1) - if String.get(text, pos) == '}' { + if String.get(text, pos) == Some("}") { (list{(key, value)}, pos + 1) } else { let (rest, pos) = parseObjectValue(text, pos) (list{(key, value), ...rest}, pos) } - | '}' => (list{(key, value)}, pos + 1) + | Some("}") => (list{(key, value)}, pos + 1) | _ => let (rest, pos) = parseObjectValue(text, pos) (list{(key, value), ...rest}, pos) @@ -497,7 +543,7 @@ module Parser = { } and parseObject = (text, pos) => { let pos = skip(text, pos) - if String.get(text, pos) == '}' { + if String.get(text, pos) == Some("}") { (Object(list{}), pos + 1) } else { let (pairs, pos) = parseObjectValue(text, pos) @@ -512,7 +558,8 @@ let parse = text => { let pos = Parser.skip(text, pos) if pos < String.length(text) { failwith( - "Extra data after parse finished: " ++ String.sub(text, pos, String.length(text) - pos), + "Extra data after parse finished: " ++ + String.slice(text, ~start=pos, ~end=String.length(text)), ) } else { item @@ -529,7 +576,14 @@ let bind = (v, fn) => @ocaml.doc(" If `t` is an object, get the value associated with the given string key ") let get = (key, t) => switch t { - | Object(items) => List.getAssoc(items, key, \"=") + | Object(items) => { + let rec find = items => + switch items { + | list{} => None + | list{(k, v), ...rest} => k == key ? 
Some(v) : find(rest) + } + find(items) + } | _ => None } @@ -538,7 +592,7 @@ let nth = (n, t) => switch t { | Array(items) => if n < List.length(items) { - Some(List.getExn(items, n)) + Some(List.getOrThrow(items, n)) } else { None } @@ -601,7 +655,6 @@ let rec parsePath = (keyList, t) => * ``` ") let getPath = (path, t) => { - let keys = Parser.split_by(c => c == '.', path) + let keys = Parser.split_by(c => c == Some("."), path) parsePath(keys, t) } - diff --git a/analysis/examples/example-project/src/Json.res.js b/analysis/examples/example-project/src/Json.res.js new file mode 100644 index 000000000..853074caa --- /dev/null +++ b/analysis/examples/example-project/src/Json.res.js @@ -0,0 +1,908 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as Pervasives from "@rescript/runtime/lib/es6/Pervasives.js"; +import * as Stdlib_List from "@rescript/runtime/lib/es6/Stdlib_List.js"; +import * as Primitive_object from "@rescript/runtime/lib/es6/Primitive_object.js"; +import * as Primitive_option from "@rescript/runtime/lib/es6/Primitive_option.js"; + +function string_of_number(f) { + let s = f.toString(); + if (Primitive_object.equal(s[s.length - 1 | 0], ".")) { + return s.slice(0, s.length - 1 | 0); + } else { + return s; + } +} + +function $pipe$bang(o, d) { + if (o !== undefined) { + return Primitive_option.valFromOption(o); + } else { + return Pervasives.failwith(d); + } +} + +function $pipe$question(o, d) { + if (o !== undefined) { + return Primitive_option.valFromOption(o); + } else { + return d; + } +} + +function $pipe$question$great(o, fn) { + if (o !== undefined) { + return fn(Primitive_option.valFromOption(o)); + } +} + +function $pipe$question$great$great(o, fn) { + if (o !== undefined) { + return Primitive_option.some(fn(Primitive_option.valFromOption(o))); + } +} + +function fold(o, d, f) { + if (o !== undefined) { + return f(Primitive_option.valFromOption(o)); + } else { + return d; + } +} + +let Infix = { + $pipe$bang: $pipe$bang, + 
$pipe$question: $pipe$question, + $pipe$question$great: $pipe$question$great, + $pipe$question$great$great: $pipe$question$great$great, + fold: fold +}; + +function escape(text) { + let ln = text.length; + let _i = 0; + let _acc = ""; + while (true) { + let acc = _acc; + let i = _i; + if (i >= ln) { + return acc; + } + let c = text[i]; + let next; + if (c !== undefined) { + switch (c) { + case "\"" : + next = acc + "\\\""; + break; + case "\\" : + next = acc + "\\\\"; + break; + case "\b" : + next = acc + "\\b"; + break; + case "\n" : + next = acc + "\\n"; + break; + case "\r" : + next = acc + "\\r"; + break; + case "\t" : + next = acc + "\\t"; + break; + case "\x0c" : + next = acc + "\\f"; + break; + default: + next = acc + c; + } + } else { + next = acc; + } + _acc = next; + _i = i + 1 | 0; + continue; + }; +} + +function stringify(t) { + if (typeof t !== "object") { + switch (t) { + case "True" : + return "true"; + case "False" : + return "false"; + case "Null" : + return "null"; + } + } else { + switch (t.TAG) { + case "String" : + return "\"" + (escape(t._0) + "\""); + case "Number" : + return string_of_number(t._0); + case "Array" : + let join = (items, sep) => { + if (items === 0) { + return ""; + } + let rest = items.tl; + let x = items.hd; + if (rest !== 0) { + return x + sep + join(rest, sep); + } else { + return x; + } + }; + let parts = Stdlib_List.map(t._0, stringify); + return "[" + join(parts, ", ") + "]"; + case "Object" : + let join$1 = (items, sep) => { + if (items === 0) { + return ""; + } + let rest = items.tl; + let x = items.hd; + if (rest !== 0) { + return x + sep + join$1(rest, sep); + } else { + return x; + } + }; + let parts$1 = Stdlib_List.map(t._0, param => "\"" + (escape(param[0]) + ("\": " + stringify(param[1])))); + return "{" + join$1(parts$1, ", ") + "}"; + } + } +} + +function white(n) { + let _i = 0; + let _acc = ""; + while (true) { + let acc = _acc; + let i = _i; + if (i >= n) { + return acc; + } + _acc = acc + " "; + _i = i + 1 
| 0; + continue; + }; +} + +function stringifyPretty(indentOpt, t) { + let indent = indentOpt !== undefined ? indentOpt : 0; + let join = (items, sep) => { + if (items === 0) { + return ""; + } + let rest = items.tl; + let x = items.hd; + if (rest !== 0) { + return x + sep + join(rest, sep); + } else { + return x; + } + }; + if (typeof t !== "object") { + switch (t) { + case "True" : + return "true"; + case "False" : + return "false"; + case "Null" : + return "null"; + } + } else { + switch (t.TAG) { + case "String" : + return "\"" + (escape(t._0) + "\""); + case "Number" : + return string_of_number(t._0); + case "Array" : + let items = t._0; + if (items === 0) { + return "[]"; + } + let parts = Stdlib_List.map(items, item => stringifyPretty(indent + 2 | 0, item)); + return "[\n" + white(indent + 2 | 0) + join(parts, ",\n" + white(indent + 2 | 0)) + "\n" + white(indent) + "]"; + case "Object" : + let items$1 = t._0; + if (items$1 === 0) { + return "{}"; + } + let parts$1 = Stdlib_List.map(items$1, param => "\"" + (escape(param[0]) + ("\": " + stringifyPretty(indent + 2 | 0, param[1])))); + return "{\n" + white(indent + 2 | 0) + join(parts$1, ",\n" + white(indent + 2 | 0)) + "\n" + white(indent) + "}"; + } + } +} + +function unwrap(message, t) { + if (t !== undefined) { + return Primitive_option.valFromOption(t); + } else { + return Pervasives.failwith(message); + } +} + +function split_by(keep_emptyOpt, is_delim, str) { + let keep_empty = keep_emptyOpt !== undefined ? 
keep_emptyOpt : false; + let len = str.length; + let _acc = /* [] */0; + let _last_pos = len; + let _pos = len - 1 | 0; + while (true) { + let pos = _pos; + let last_pos = _last_pos; + let acc = _acc; + if (pos === -1) { + if (last_pos === 0 && !keep_empty) { + return acc; + } else { + return { + hd: str.slice(0, last_pos), + tl: acc + }; + } + } + if (is_delim(str[pos])) { + let new_len = (last_pos - pos | 0) - 1 | 0; + if (new_len !== 0 || keep_empty) { + let v = str.slice(pos + 1 | 0, (pos + 1 | 0) + new_len | 0); + _pos = pos - 1 | 0; + _last_pos = pos; + _acc = { + hd: v, + tl: acc + }; + continue; + } + _pos = pos - 1 | 0; + _last_pos = pos; + continue; + } + _pos = pos - 1 | 0; + continue; + }; +} + +function fail(text, pos, message) { + let pre = text.slice(0, pos); + let lines = split_by(undefined, c => Primitive_object.equal(c, "\n"), pre); + let count = Stdlib_List.length(lines); + let last = count > 0 ? Stdlib_List.getOrThrow(lines, count - 1 | 0) : ""; + let col = last.length + 1 | 0; + let line = Stdlib_List.length(lines); + return Pervasives.failwith("Error \"" + message + "\" at " + line.toString() + ":" + col.toString() + " -> " + last + "\n"); +} + +function skipToNewline(text, _pos) { + while (true) { + let pos = _pos; + if (pos >= text.length) { + return pos; + } + if (Primitive_object.equal(text[pos], "\n")) { + return pos + 1 | 0; + } + _pos = pos + 1 | 0; + continue; + }; +} + +function stringTail(text) { + let len = text.length; + if (len > 1) { + return text.slice(1, len); + } else { + return ""; + } +} + +function skipToCloseMultilineComment(text, _pos) { + while (true) { + let pos = _pos; + if ((pos + 1 | 0) >= text.length) { + return Pervasives.failwith("Unterminated comment"); + } + if (Primitive_object.equal(text[pos], "*") && Primitive_object.equal(text[pos + 1 | 0], "/")) { + return pos + 2 | 0; + } + _pos = pos + 1 | 0; + continue; + }; +} + +function skipWhite(text, _pos) { + while (true) { + let pos = _pos; + if (!(pos < 
text.length && (Primitive_object.equal(text[pos], " ") || Primitive_object.equal(text[pos], "\t") || Primitive_object.equal(text[pos], "\n") || Primitive_object.equal(text[pos], "\r")))) { + return pos; + } + _pos = pos + 1 | 0; + continue; + }; +} + +function parseString(text, pos) { + let ln = text.length; + let loop = (_i, _acc) => { + while (true) { + let acc = _acc; + let i = _i; + if (i >= ln) { + return fail(text, i, "Unterminated string"); + } + let c = text[i]; + if (c === undefined) { + return [ + i, + acc + ]; + } + switch (c) { + case "\"" : + return [ + i + 1 | 0, + acc + ]; + case "\\" : + if ((i + 1 | 0) >= ln) { + return fail(text, i, "Unterminated string"); + } + let match = text[i + 1 | 0]; + if (match !== undefined) { + switch (match) { + case "/" : + _acc = acc + "/"; + _i = i + 2 | 0; + continue; + case "f" : + _acc = acc + "\x0c"; + _i = i + 2 | 0; + continue; + } + } + let escaped = text.slice(i, i + 2 | 0); + _acc = acc + escaped; + _i = i + 2 | 0; + continue; + break; + default: + _acc = acc + c; + _i = i + 1 | 0; + continue; + } + }; + }; + let match = loop(pos, ""); + return [ + match[1], + match[0] + ]; +} + +function parseDigits(text, pos) { + let len = text.length; + let _i = pos + 1 | 0; + while (true) { + let i = _i; + if (i >= len) { + return i; + } + let match = text[i]; + if (match === undefined) { + return i; + } + switch (match) { + case "0" : + case "1" : + case "2" : + case "3" : + case "4" : + case "5" : + case "6" : + case "7" : + case "8" : + case "9" : + _i = i + 1 | 0; + continue; + default: + return i; + } + }; +} + +function parseWithDecimal(text, pos) { + let pos$1 = parseDigits(text, pos); + if (pos$1 < text.length && Primitive_object.equal(text[pos$1], ".")) { + return parseDigits(text, pos$1 + 1 | 0); + } else { + return pos$1; + } +} + +function parseNumber(text, pos) { + let pos$1 = parseWithDecimal(text, pos); + let ln = text.length; + if (!(pos$1 < (ln - 1 | 0) && (Primitive_object.equal(text[pos$1], "E") || 
Primitive_object.equal(text[pos$1], "e")))) { + return pos$1; + } + let match = text[pos$1 + 1 | 0]; + let pos$2; + if (match !== undefined) { + switch (match) { + case "+" : + case "-" : + pos$2 = pos$1 + 2 | 0; + break; + default: + pos$2 = pos$1 + 1 | 0; + } + } else { + pos$2 = pos$1 + 1 | 0; + } + return parseDigits(text, pos$2); +} + +function parseNegativeNumber(text, pos) { + let final = Primitive_object.equal(text[pos], "-") ? parseNumber(text, pos + 1 | 0) : parseNumber(text, pos); + let numStr = text.slice(pos, final); + return [ + { + TAG: "Number", + _0: parseFloat(numStr) + }, + final + ]; +} + +function expect(char, text, pos, message) { + if (Primitive_object.notequal(text[pos], char)) { + return fail(text, pos, "Expected: " + message); + } else { + return pos + 1 | 0; + } +} + +function parseComment(text, pos, next) { + if (Primitive_object.notequal(text[pos], "/")) { + if (Primitive_object.equal(text[pos], "*")) { + return next(text, skipToCloseMultilineComment(text, pos + 1 | 0)); + } else { + return Pervasives.failwith("Invalid syntax"); + } + } else { + return next(text, skipToNewline(text, pos + 1 | 0)); + } +} + +function maybeSkipComment(text, pos) { + if (pos < text.length && Primitive_object.equal(text[pos], "/")) { + if ((pos + 1 | 0) < text.length && Primitive_object.equal(text[pos + 1 | 0], "/")) { + return skipToNewline(text, pos + 1 | 0); + } else if ((pos + 1 | 0) < text.length && Primitive_object.equal(text[pos + 1 | 0], "*")) { + return skipToCloseMultilineComment(text, pos + 1 | 0); + } else { + return fail(text, pos, "Invalid synatx"); + } + } else { + return pos; + } +} + +function skip(text, _pos) { + while (true) { + let pos = _pos; + if (pos === text.length) { + return pos; + } + let n = maybeSkipComment(text, skipWhite(text, pos)); + if (n <= pos) { + return n; + } + _pos = n; + continue; + }; +} + +function parse(text, _pos) { + while (true) { + let pos = _pos; + if (pos >= text.length) { + return fail(text, pos, "Reached 
end of file without being done parsing"); + } + let match = text[pos]; + if (match === undefined) { + return fail(text, pos, "unexpected character"); + } + switch (match) { + case "/" : + return parseComment(text, pos + 1 | 0, parse); + case "-" : + case "0" : + case "1" : + case "2" : + case "3" : + case "4" : + case "5" : + case "6" : + case "7" : + case "8" : + case "9" : + return parseNegativeNumber(text, pos); + case "[" : + return parseArray(text, pos + 1 | 0); + case "\"" : + let match$1 = parseString(text, pos + 1 | 0); + return [ + { + TAG: "String", + _0: match$1[0] + }, + match$1[1] + ]; + case " " : + case "\n" : + case "\r" : + case "\t" : + break; + case "f" : + if (text.slice(pos, pos + 5 | 0) === "false") { + return [ + "False", + pos + 5 | 0 + ]; + } else { + return fail(text, pos, "unexpected character"); + } + case "n" : + if (text.slice(pos, pos + 4 | 0) === "null") { + return [ + "Null", + pos + 4 | 0 + ]; + } else { + return fail(text, pos, "unexpected character"); + } + case "t" : + if (text.slice(pos, pos + 4 | 0) === "true") { + return [ + "True", + pos + 4 | 0 + ]; + } else { + return fail(text, pos, "unexpected character"); + } + case "{" : + return parseObject(text, pos + 1 | 0); + default: + return fail(text, pos, "unexpected character"); + } + _pos = skipWhite(text, pos); + continue; + }; +} + +function parseArrayValue(text, pos) { + let pos$1 = skip(text, pos); + let match = parse(text, pos$1); + let value = match[0]; + let pos$2 = skip(text, match[1]); + let match$1 = text[pos$2]; + if (match$1 === undefined) { + return fail(text, pos$2, "unexpected character"); + } + switch (match$1) { + case "," : + let pos$3 = skip(text, pos$2 + 1 | 0); + if (Primitive_object.equal(text[pos$3], "]")) { + return [ + { + hd: value, + tl: /* [] */0 + }, + pos$3 + 1 | 0 + ]; + } + let match$2 = parseArrayValue(text, pos$3); + return [ + { + hd: value, + tl: match$2[0] + }, + match$2[1] + ]; + case "]" : + return [ + { + hd: value, + tl: /* [] */0 + }, 
+ pos$2 + 1 | 0 + ]; + default: + return fail(text, pos$2, "unexpected character"); + } +} + +function parseArray(text, pos) { + let pos$1 = skip(text, pos); + let match = text[pos$1]; + if (match === "]") { + return [ + { + TAG: "Array", + _0: /* [] */0 + }, + pos$1 + 1 | 0 + ]; + } + let match$1 = parseArrayValue(text, pos$1); + return [ + { + TAG: "Array", + _0: match$1[0] + }, + match$1[1] + ]; +} + +function parseObjectValue(text, pos) { + let pos$1 = skip(text, pos); + if (Primitive_object.notequal(text[pos$1], "\"")) { + return fail(text, pos$1, "Expected string"); + } + let match = parseString(text, pos$1 + 1 | 0); + let key = match[0]; + let pos$2 = skip(text, match[1]); + let pos$3 = expect(":", text, pos$2, "Colon"); + let match$1 = parse(text, pos$3); + let value = match$1[0]; + let pos$4 = skip(text, match$1[1]); + let match$2 = text[pos$4]; + if (match$2 !== undefined) { + switch (match$2) { + case "," : + let pos$5 = skip(text, pos$4 + 1 | 0); + if (Primitive_object.equal(text[pos$5], "}")) { + return [ + { + hd: [ + key, + value + ], + tl: /* [] */0 + }, + pos$5 + 1 | 0 + ]; + } + let match$3 = parseObjectValue(text, pos$5); + return [ + { + hd: [ + key, + value + ], + tl: match$3[0] + }, + match$3[1] + ]; + case "}" : + return [ + { + hd: [ + key, + value + ], + tl: /* [] */0 + }, + pos$4 + 1 | 0 + ]; + } + } + let match$4 = parseObjectValue(text, pos$4); + return [ + { + hd: [ + key, + value + ], + tl: match$4[0] + }, + match$4[1] + ]; +} + +function parseObject(text, pos) { + let pos$1 = skip(text, pos); + if (Primitive_object.equal(text[pos$1], "}")) { + return [ + { + TAG: "Object", + _0: /* [] */0 + }, + pos$1 + 1 | 0 + ]; + } + let match = parseObjectValue(text, pos$1); + return [ + { + TAG: "Object", + _0: match[0] + }, + match[1] + ]; +} + +let Parser = { + split_by: split_by, + fail: fail, + skipToNewline: skipToNewline, + stringTail: stringTail, + skipToCloseMultilineComment: skipToCloseMultilineComment, + skipWhite: skipWhite, + 
parseString: parseString, + parseDigits: parseDigits, + parseWithDecimal: parseWithDecimal, + parseNumber: parseNumber, + parseNegativeNumber: parseNegativeNumber, + expect: expect, + parseComment: parseComment, + maybeSkipComment: maybeSkipComment, + skip: skip, + parse: parse, + parseArrayValue: parseArrayValue, + parseArray: parseArray, + parseObjectValue: parseObjectValue, + parseObject: parseObject +}; + +function parse$1(text) { + let match = parse(text, 0); + let pos = skip(text, match[1]); + if (pos < text.length) { + return Pervasives.failwith("Extra data after parse finished: " + text.slice(pos, text.length)); + } else { + return match[0]; + } +} + +function bind(v, fn) { + if (v !== undefined) { + return fn(Primitive_option.valFromOption(v)); + } +} + +function get(key, t) { + if (typeof t !== "object") { + return; + } + if (t.TAG !== "Object") { + return; + } + let _items = t._0; + while (true) { + let items = _items; + if (items === 0) { + return; + } + let match = items.hd; + if (match[0] === key) { + return Primitive_option.some(match[1]); + } + _items = items.tl; + continue; + }; +} + +function nth(n, t) { + if (typeof t !== "object") { + return; + } + if (t.TAG !== "Array") { + return; + } + let items = t._0; + if (n < Stdlib_List.length(items)) { + return Stdlib_List.getOrThrow(items, n); + } +} + +function string(t) { + if (typeof t !== "object" || t.TAG !== "String") { + return; + } else { + return t._0; + } +} + +function number(t) { + if (typeof t !== "object" || t.TAG !== "Number") { + return; + } else { + return t._0; + } +} + +function array(t) { + if (typeof t !== "object" || t.TAG !== "Array") { + return; + } else { + return t._0; + } +} + +function obj(t) { + if (typeof t !== "object" || t.TAG !== "Object") { + return; + } else { + return t._0; + } +} + +function bool(t) { + if (typeof t === "object") { + return; + } + switch (t) { + case "True" : + return true; + case "False" : + return false; + default: + return; + } +} + +function 
$$null(t) { + if (typeof t !== "object" && t === "Null") { + return Primitive_option.some(undefined); + } +} + +function parsePath(_keyList, _t) { + while (true) { + let t = _t; + let keyList = _keyList; + if (keyList === 0) { + return t; + } + let value = get(keyList.hd, t); + if (value === undefined) { + return; + } + _t = value; + _keyList = keyList.tl; + continue; + }; +} + +function getPath(path, t) { + let keys = split_by(undefined, c => Primitive_object.equal(c, "."), path); + return parsePath(keys, t); +} + +export { + string_of_number, + Infix, + escape, + stringify, + white, + stringifyPretty, + unwrap, + Parser, + parse$1 as parse, + bind, + get, + nth, + string, + number, + array, + obj, + bool, + $$null, + parsePath, + getPath, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/ModuleWithDocComment.res.js b/analysis/examples/example-project/src/ModuleWithDocComment.res.js new file mode 100644 index 000000000..3caa9b550 --- /dev/null +++ b/analysis/examples/example-project/src/ModuleWithDocComment.res.js @@ -0,0 +1,19 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + + +let NestedAgain = { + y: 123 +}; + +let Nested = { + x: "123", + NestedAgain: NestedAgain +}; + +let M; + +export { + Nested, + M, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/More.res b/analysis/examples/example-project/src/More.res index a1775e21a..88875a3e0 100644 --- a/analysis/examples/example-project/src/More.res +++ b/analysis/examples/example-project/src/More.res @@ -10,4 +10,3 @@ let n = 10 let party = 30 let awesome = 200 - diff --git a/analysis/examples/example-project/src/More.res.js b/analysis/examples/example-project/src/More.res.js new file mode 100644 index 000000000..bb6a97c61 --- /dev/null +++ b/analysis/examples/example-project/src/More.res.js @@ -0,0 +1,18 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + + +let contnets = "here"; + +let inner = 20; + +let n = 10; + +let party = 30; + +export { + 
contnets, + inner, + n, + party, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/More.resi b/analysis/examples/example-project/src/More.resi index 023a7222c..3f211d74d 100644 --- a/analysis/examples/example-project/src/More.resi +++ b/analysis/examples/example-project/src/More.resi @@ -2,4 +2,3 @@ let contnets: string let inner: int let n: int let party: int - diff --git a/analysis/examples/example-project/src/Other.res b/analysis/examples/example-project/src/Other.res index d7a5bf432..306354483 100644 --- a/analysis/examples/example-project/src/Other.res +++ b/analysis/examples/example-project/src/Other.res @@ -27,4 +27,3 @@ let oo = {person: z, height: 34.2} let show = o => { let m = o.height } - diff --git a/analysis/examples/example-project/src/Other.res.js b/analysis/examples/example-project/src/Other.res.js new file mode 100644 index 000000000..a6f6873fa --- /dev/null +++ b/analysis/examples/example-project/src/Other.res.js @@ -0,0 +1,43 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + + +let z = { + name: "hi", + age: 20 +}; + +function concat(first, second) { + return first + second | 0; +} + +let oo = { + person: z, + height: 34.2 +}; + +function show(o) { + +} + +let something = 10; + +let inner = 10; + +let m = { + TAG: "Things", + _0: 1 +}; + +let later = 20; + +export { + something, + inner, + m, + z, + later, + concat, + oo, + show, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/Serde.res b/analysis/examples/example-project/src/Serde.res deleted file mode 100644 index 1c516c210..000000000 --- a/analysis/examples/example-project/src/Serde.res +++ /dev/null @@ -1,222 +0,0 @@ -let rec deserialize_Hello__TryIt____lockfile: Json.t => Belt.Result.t< - MyNamespace.Hello.lockfile, - string, -> = record => - switch record { - | Json.Object(items) => - switch Belt.List.getAssoc(items, "current", \"=") { - | None => Belt.Result.Error(@reason.raw_literal("No attribute ") "No attribute " ++ "current") - 
| Some(json) => - switch ( - list => - switch list { - | Json.Array(items) => - let transformer = json => - switch json { - | Json.Array(list{arg0, arg1}) => - switch ( - number => - switch number { - | Json.Number(number) => Belt.Result.Ok(int_of_float(number)) - | _ => Error(@reason.raw_literal("Expected a float") "Expected a float") - } - )(arg1) { - | Belt.Result.Ok(arg1) => - switch deserialize_Hello__TryIt____shortReference(arg0) { - | Belt.Result.Ok(arg0) => Belt.Result.Ok(arg0, arg1) - | Error(error) => Error(error) - } - | Error(error) => Error(error) - } - | _ => Belt.Result.Error(@reason.raw_literal("Expected array") "Expected array") - } - let rec loop = items => - switch items { - | list{} => Belt.Result.Ok(list{}) - | list{one, ...rest} => - switch transformer(one) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(value) => - switch loop(rest) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(rest) => Belt.Result.Ok(list{value, ...rest}) - } - } - } - loop(items) - | _ => Belt.Result.Error(@reason.raw_literal("expected an array") "expected an array") - } - )(json) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(attr_current) => - switch Belt.List.getAssoc(items, "pastVersions", \"=") { - | None => - Belt.Result.Error(@reason.raw_literal("No attribute ") "No attribute " ++ "pastVersions") - | Some(json) => - switch deserialize_Belt_HashMapInt____t(list => - switch list { - | Json.Array(items) => - let transformer = json => - switch json { - | Json.Array(list{arg0, arg1}) => - switch ( - number => - switch number { - | Json.Number(number) => Belt.Result.Ok(int_of_float(number)) - | _ => Error(@reason.raw_literal("Expected a float") "Expected a float") - } - )(arg1) { - | Belt.Result.Ok(arg1) => - switch deserialize_Hello__TryIt____shortReference(arg0) { - | Belt.Result.Ok(arg0) => Belt.Result.Ok(arg0, arg1) - | Error(error) => Error(error) - } - | Error(error) => 
Error(error) - } - | _ => Belt.Result.Error(@reason.raw_literal("Expected array") "Expected array") - } - let rec loop = items => - switch items { - | list{} => Belt.Result.Ok(list{}) - | list{one, ...rest} => - switch transformer(one) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(value) => - switch loop(rest) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(rest) => Belt.Result.Ok(list{value, ...rest}) - } - } - } - loop(items) - | _ => Belt.Result.Error(@reason.raw_literal("expected an array") "expected an array") - } - )(json) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(attr_pastVersions) => - switch Belt.List.getAssoc(items, "version", \"=") { - | None => - Belt.Result.Error(@reason.raw_literal("No attribute ") "No attribute " ++ "version") - | Some(json) => - switch ( - number => - switch number { - | Json.Number(number) => Belt.Result.Ok(int_of_float(number)) - | _ => Error(@reason.raw_literal("Expected a float") "Expected a float") - } - )(json) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(attr_version) => - Belt.Result.Ok({ - version: attr_version, - pastVersions: attr_pastVersions, - current: attr_current, - }) - } - } - } - } - } - } - | _ => Belt.Result.Error(@reason.raw_literal("Expected an object") "Expected an object") - } -and deserialize_Hello__TryIt____shortReference: Json.t => Belt.Result.t< - MyNamespace.Hello.shortReference, - string, -> = value => - ( - json => - switch json { - | Json.Array(list{arg0, arg1, arg2}) => - switch ( - string => - switch string { - | Json.String(string) => Belt.Result.Ok(string) - | _ => Error(@reason.raw_literal("epected a string") "epected a string") - } - )(arg2) { - | Belt.Result.Ok(arg2) => - switch ( - list => - switch list { - | Json.Array(items) => - let transformer = string => - switch string { - | Json.String(string) => Belt.Result.Ok(string) - | _ => 
Error(@reason.raw_literal("epected a string") "epected a string") - } - let rec loop = items => - switch items { - | list{} => Belt.Result.Ok(list{}) - | list{one, ...rest} => - switch transformer(one) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(value) => - switch loop(rest) { - | Belt.Result.Error(error) => Belt.Result.Error(error) - | Belt.Result.Ok(rest) => Belt.Result.Ok(list{value, ...rest}) - } - } - } - loop(items) - | _ => Belt.Result.Error(@reason.raw_literal("expected an array") "expected an array") - } - )(arg1) { - | Belt.Result.Ok(arg1) => - switch ( - string => - switch string { - | Json.String(string) => Belt.Result.Ok(string) - | _ => Error(@reason.raw_literal("epected a string") "epected a string") - } - )(arg0) { - | Belt.Result.Ok(arg0) => Belt.Result.Ok(arg0, arg1, arg2) - | Error(error) => Error(error) - } - | Error(error) => Error(error) - } - | Error(error) => Error(error) - } - | _ => Belt.Result.Error(@reason.raw_literal("Expected array") "Expected array") - } - )(value) -and deserialize_Belt_HashMapInt____t: 'arg0. 
( - Json.t => Belt.Result.t<'arg0, string>, - Json.t, -) => Belt.Result.t, string> = bTransformer => - TransformHelpers.deserialize_Belt_HashMapInt____t(bTransformer) -let rec serialize_Hello__TryIt____lockfile: MyNamespace.Hello.lockfile => Json.t = record => Json.Object(list{ - ("version", (i => Json.Number(float_of_int(i)))(record.version)), - ( - "pastVersions", - serialize_Belt_HashMapInt____t(list => Json.Array( - Belt.List.map(list, ((arg0, arg1)) => Json.Array(list{ - serialize_Hello__TryIt____shortReference(arg0), - (i => Json.Number(float_of_int(i)))(arg1), - })), - ))(record.pastVersions), - ), - ( - "current", - ( - list => Json.Array( - Belt.List.map(list, ((arg0, arg1)) => Json.Array(list{ - serialize_Hello__TryIt____shortReference(arg0), - (i => Json.Number(float_of_int(i)))(arg1), - })), - ) - )(record.current), - ), -}) -and serialize_Hello__TryIt____shortReference: MyNamespace.Hello.shortReference => Json.t = value => - ( - ((arg0, arg1, arg2)) => Json.Array(list{ - (s => Json.String(s))(arg0), - (list => Json.Array(Belt.List.map(list, s => Json.String(s))))(arg1), - (s => Json.String(s))(arg2), - }) - )(value) -and serialize_Belt_HashMapInt____t: 'arg0. 
( - 'arg0 => Json.t, - Belt_HashMapInt.t<'arg0>, -) => Json.t = bTransformer => TransformHelpers.serialize_Belt_HashMapInt____t(bTransformer) diff --git a/analysis/examples/example-project/src/TransformHelpers.res b/analysis/examples/example-project/src/TransformHelpers.res index b0ab64685..e789c1a74 100644 --- a/analysis/examples/example-project/src/TransformHelpers.res +++ b/analysis/examples/example-project/src/TransformHelpers.res @@ -1,12 +1,11 @@ -let deserialize_Belt__HashMapInt__t = (transformer, t) => assert false +let deserialize_Belt__HashMapInt__t = (transformer, t) => assert(false) -let deserialize_Belt_HashMapInt____t = (a, b) => assert false +let deserialize_Belt_HashMapInt____t = (a, b) => assert(false) -let deserialize_Belt__HashMap__Int__t = (a, b) => assert false +let deserialize_Belt__HashMap__Int__t = (a, b) => assert(false) -let serialize_Belt_HashMapInt____t = (a, b) => assert false +let serialize_Belt_HashMapInt____t = (a, b) => assert(false) -let serialize_Belt__HashMap__Int__t = (a, b) => assert false - -let serialize_Belt_HashMapInt____t = (transformer, t) => assert false +let serialize_Belt__HashMap__Int__t = (a, b) => assert(false) +let serialize_Belt_HashMapInt____t = (transformer, t) => assert(false) diff --git a/analysis/examples/example-project/src/TransformHelpers.res.js b/analysis/examples/example-project/src/TransformHelpers.res.js new file mode 100644 index 000000000..408a62363 --- /dev/null +++ b/analysis/examples/example-project/src/TransformHelpers.res.js @@ -0,0 +1,71 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + + +function deserialize_Belt__HashMapInt__t(transformer, t) { + throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 1, + 58 + ], + Error: new Error() + }; +} + +function deserialize_Belt_HashMapInt____t(a, b) { + throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 3, + 49 + ], + Error: new Error() + }; +} + +function deserialize_Belt__HashMap__Int__t(a, b) { + 
throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 5, + 50 + ], + Error: new Error() + }; +} + +function serialize_Belt__HashMap__Int__t(a, b) { + throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 9, + 48 + ], + Error: new Error() + }; +} + +function serialize_Belt_HashMapInt____t(transformer, t) { + throw { + RE_EXN_ID: "Assert_failure", + _1: [ + "TransformHelpers.res", + 11, + 57 + ], + Error: new Error() + }; +} + +export { + deserialize_Belt__HashMapInt__t, + deserialize_Belt_HashMapInt____t, + deserialize_Belt__HashMap__Int__t, + serialize_Belt__HashMap__Int__t, + serialize_Belt_HashMapInt____t, +} +/* No side effect */ diff --git a/analysis/examples/example-project/src/ZZ.res b/analysis/examples/example-project/src/ZZ.res index 6a3c9ddcd..9eca987ed 100644 --- a/analysis/examples/example-project/src/ZZ.res +++ b/analysis/examples/example-project/src/ZZ.res @@ -15,10 +15,14 @@ let d = module J = { @react.component - export make = (~children: React.element) => React.null + let make = (~children: React.element) => React.null } -let z = {React.string("")} {React.string("")} +let z = + + {React.string("")} + {React.string("")} + type inline = | A({x: int, y: string}) @@ -85,7 +89,7 @@ let v1 = V1 module DoubleNested = ModuleWithDocComment.Nested.NestedAgain -let uncurried = (. 
x) => x + 1 +let uncurried = x => x + 1 module Inner = { type tInner = int @@ -107,7 +111,7 @@ module HoverInsideModuleWithComponent = { } module Lib = { - let foo = (~age, ~name) => name ++ string_of_int(age) + let foo = (~age, ~name) => name ++ Int.toString(age) let next = (~number=0, ~year) => number + year } @@ -122,24 +126,23 @@ module Dep: { let customDouble2 = foo => foo * 2 } -let cc = Dep.customDouble(11) +let customDouble = foo => foo * 2 +let cc = customDouble(11) module O = { module Comp = { @react.component let make = (~first="", ~kas=11, ~foo=3, ~second, ~v) => - React.string(first ++ second ++ string_of_int(foo)) + React.string(first ++ second ++ Int.toString(foo)) } } let comp = -let lll = List.make(3, 4) +let lll = List.make(~length=3, 4) let abc = "abc" let arr = [1, 2, 3] let some7 = Some(7) - - diff --git a/analysis/examples/example-project/src/ZZ.res.js b/analysis/examples/example-project/src/ZZ.res.js new file mode 100644 index 000000000..7d7b87d7b --- /dev/null +++ b/analysis/examples/example-project/src/ZZ.res.js @@ -0,0 +1,197 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as Stdlib_List from "@rescript/runtime/lib/es6/Stdlib_List.js"; +import * as JsxRuntime from "react/jsx-runtime"; + +let b = [ + 1, + 2, + 3, + 12 +]; + +let c = JsxRuntime.jsx("div", {}); + +function s(prim) { + return prim; +} + +function ZZ$M(props) { + return props.x; +} + +let M = { + make: ZZ$M +}; + +let d = JsxRuntime.jsx(ZZ$M, { + x: "abc" +}); + +function ZZ$J(props) { + return null; +} + +let J = { + make: ZZ$J +}; + +let z = JsxRuntime.jsxs(ZZ$J, { + children: [ + "", + "" + ] +}); + +let MSig = { + x: 14 +}; + +let Impl = { + x: 14 +}; + +let Impl2 = { + x: 14 +}; + +function testRecordFields(gr) { + return gr.s; +} + +function uncurried(x) { + return x + 1 | 0; +} + +let Inner = { + vInner: 34 +}; + +function functionWithTypeAnnotation() { + return 1; +} + +function ZZ$HoverInsideModuleWithComponent(props) { + return null; +} + +let 
HoverInsideModuleWithComponent = { + x: 2, + make: ZZ$HoverInsideModuleWithComponent +}; + +function foo(age, name) { + return name + age.toString(); +} + +function next(numberOpt, year) { + let number = numberOpt !== undefined ? numberOpt : 0; + return number + year | 0; +} + +let Lib = { + foo: foo, + next: next +}; + +function customDouble(foo) { + return (foo << 1); +} + +function customDouble2(foo) { + return (foo << 1); +} + +let Dep = { + customDouble: customDouble, + customDouble2: customDouble2 +}; + +function customDouble$1(foo) { + return (foo << 1); +} + +let cc = 22; + +function ZZ$O$Comp(props) { + let __foo = props.foo; + let __first = props.first; + let first = __first !== undefined ? __first : ""; + let foo = __foo !== undefined ? __foo : 3; + return first + props.second + foo.toString(); +} + +let Comp = { + make: ZZ$O$Comp +}; + +let O = { + Comp: Comp +}; + +let comp = JsxRuntime.jsx(ZZ$O$Comp, { + second: "abcc", + v: 12 +}, "12"); + +let lll = Stdlib_List.make(3, 4); + +let arr = [ + 1, + 2, + 3 +]; + +let a = 12; + +let D; + +let E; + +let F; + +let v1 = "V1"; + +let DoubleNested; + +let valueInner = 34; + +let abc = "abc"; + +let some7 = 7; + +export { + a, + b, + c, + s, + M, + d, + J, + z, + MSig, + Impl, + Impl2, + D, + E, + F, + testRecordFields, + v1, + DoubleNested, + uncurried, + Inner, + valueInner, + functionWithTypeAnnotation, + HoverInsideModuleWithComponent, + Lib, + Dep, + customDouble$1 as customDouble, + cc, + O, + comp, + lll, + abc, + arr, + some7, +} +/* c Not a pure module */ diff --git a/analysis/examples/example-project/types.json b/analysis/examples/example-project/types.json deleted file mode 100644 index 904cdf616..000000000 --- a/analysis/examples/example-project/types.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "output": "src/Serde.ml", - "engine": "rex-json", - "entries": [ - { - "file": "src/Hello.re", - "type": "lockfile" - } - ], - "custom": [ - {"module": "Belt_HashMapInt", "path": [], "name": "t", "args": 1} - ] 
-} \ No newline at end of file From 07d912f92a1d0d5ab2a17254d57fe289d0cfe84f Mon Sep 17 00:00:00 2001 From: Cristiano Calcagno Date: Sun, 4 Jan 2026 08:45:05 +0100 Subject: [PATCH 49/56] example project: use legacy build to enable renalyze to run, and in fact repro the crash --- analysis/examples/example-project/package.json | 6 +++--- analysis/examples/example-project/rescript.json | 6 ++++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/analysis/examples/example-project/package.json b/analysis/examples/example-project/package.json index 22f77c1a6..7f60ced76 100644 --- a/analysis/examples/example-project/package.json +++ b/analysis/examples/example-project/package.json @@ -5,9 +5,9 @@ "rescript": "12.0.2" }, "scripts": { - "build": "rescript", - "start": "rescript build -w", - "clean": "rescript clean", + "build": "rescript-legacy", + "start": "rescript-legacy build -w", + "clean": "rescript-legacy clean -with-deps", "format": "rescript format" } } diff --git a/analysis/examples/example-project/rescript.json b/analysis/examples/example-project/rescript.json index de34db8b8..85ac151a8 100644 --- a/analysis/examples/example-project/rescript.json +++ b/analysis/examples/example-project/rescript.json @@ -5,7 +5,9 @@ "warnings": { "number": "-32-26-27-33" }, - "dependencies": ["@rescript/react"], + "dependencies": [ + "@rescript/react" + ], "jsx": { "version": 4 }, @@ -15,4 +17,4 @@ "in-source": true, "suffix": ".res.js" } -} +} \ No newline at end of file From 92dae41291c958e8c9ef9052825f01896c169423 Mon Sep 17 00:00:00 2001 From: Cristiano Calcagno Date: Sun, 4 Jan 2026 13:54:50 +0100 Subject: [PATCH 50/56] fix: Code Analyzer binary lookup for ReScript v12+ For v12+, rescript-tools.exe is at @rescript/{platform}-{arch}/bin/. Try this path first, then fall back to legacy paths. 
--- CHANGELOG.md | 1 + client/src/commands/code_analysis.ts | 28 ++++++++++++++++++++++------ 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e095205f..543e509e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ #### :bug: Bug fix +- Fix Code Analyzer binary lookup for ReScript v12+ projects. - Take namespace into account for incremental cleanup. https://github.com/rescript-lang/rescript-vscode/pull/1164 - Potential race condition in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167 - Fix extension crash triggered by incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1169 diff --git a/client/src/commands/code_analysis.ts b/client/src/commands/code_analysis.ts index 61bb4f156..559f89c67 100644 --- a/client/src/commands/code_analysis.ts +++ b/client/src/commands/code_analysis.ts @@ -1,4 +1,5 @@ import * as cp from "child_process"; +import * as fs from "fs"; import * as path from "path"; import { window, @@ -216,12 +217,27 @@ export const runCodeAnalysisWithReanalyze = ( currentDocument.uri.fsPath, ); - // This little weird lookup is because in the legacy setup reanalyze needs to be - // run from the analysis binary, whereas in the new setup it's run from the tools - // binary. - let binaryPath = - getBinaryPath("rescript-tools.exe", projectRootPath) ?? - getBinaryPath("rescript-editor-analysis.exe"); + // Try v12+ path first: @rescript/{platform}-{arch}/bin/rescript-tools.exe + // Then fall back to legacy paths via getBinaryPath + let binaryPath: string | null = null; + if (projectRootPath != null) { + const v12Path = path.join( + projectRootPath, + "node_modules", + "@rescript", + `${process.platform}-${process.arch}`, + "bin", + "rescript-tools.exe", + ); + if (fs.existsSync(v12Path)) { + binaryPath = v12Path; + } + } + if (binaryPath == null) { + binaryPath = + getBinaryPath("rescript-tools.exe", projectRootPath) ?? 
+ getBinaryPath("rescript-editor-analysis.exe", projectRootPath); + } if (binaryPath === null) { window.showErrorMessage("Binary executable not found."); From 0c1b2d78438718641a45723c1cc5b11fc8b40632 Mon Sep 17 00:00:00 2001 From: Cristiano Calcagno Date: Wed, 14 Jan 2026 12:11:29 +0100 Subject: [PATCH 51/56] Fix monorepo build detection: watch only root .compiler.log; run reanalyze from workspace root - Server: restrict .compiler.log watching to ROOT/lib/bs/.compiler.log per workspace folder to avoid event floods from packages/** and node_modules/**.\n- Server: fix Start Build spawning for v12 layouts by preferring rescript.exe and falling back to rescript (also respects rescript.settings.binaryPath).\n- Client: anchor Start Code Analysis to the VSCode workspace folder root rather than the active file directory.\n- Client: resolve code-analysis binary relative to the chosen cwd (workspace root) to avoid subpackage-anchored tool lookup.\n\nFiles: server/src/server.ts, client/src/extension.ts, client/src/commands/code_analysis.ts --- client/src/commands/code_analysis.ts | 7 ++++++- client/src/extension.ts | 11 +++++----- server/src/server.ts | 31 +++++++++++++++++++--------- 3 files changed, 32 insertions(+), 17 deletions(-) diff --git a/client/src/commands/code_analysis.ts b/client/src/commands/code_analysis.ts index 559f89c67..ed9feafd0 100644 --- a/client/src/commands/code_analysis.ts +++ b/client/src/commands/code_analysis.ts @@ -213,9 +213,14 @@ export const runCodeAnalysisWithReanalyze = ( let currentDocument = window.activeTextEditor.document; let cwd = targetDir ?? path.dirname(currentDocument.uri.fsPath); + // Resolve the project root from `cwd` (which is the workspace root when code analysis is started), + // rather than from the currently-open file (which may be in a subpackage). 
let projectRootPath: NormalizedPath | null = findProjectRootOfFileInDir( - currentDocument.uri.fsPath, + path.join(cwd, "bsconfig.json"), ); + if (projectRootPath == null) { + projectRootPath = findProjectRootOfFileInDir(currentDocument.uri.fsPath); + } // Try v12+ path first: @rescript/{platform}-{arch}/bin/rescript-tools.exe // Then fall back to legacy paths via getBinaryPath diff --git a/client/src/extension.ts b/client/src/extension.ts index c31a8d170..41e7ecc66 100644 --- a/client/src/extension.ts +++ b/client/src/extension.ts @@ -442,12 +442,11 @@ export function activate(context: ExtensionContext) { inCodeAnalysisState.active = true; - // Pointing reanalyze to the dir of the current file path is fine, because - // reanalyze will walk upwards looking for a bsconfig.json in order to find - // the correct project root. - inCodeAnalysisState.activatedFromDirectory = path.dirname( - currentDocument.uri.fsPath, - ); + // Run reanalyze from the workspace root (so monorepos consistently analyze the root project), + // instead of from whatever file happened to be active when analysis was started. + const wsFolder = workspace.getWorkspaceFolder(currentDocument.uri); + inCodeAnalysisState.activatedFromDirectory = + wsFolder?.uri.fsPath ?? 
path.dirname(currentDocument.uri.fsPath); codeAnalysisRunningStatusBarItem.command = "rescript-vscode.stop_code_analysis"; diff --git a/server/src/server.ts b/server/src/server.ts index b4445f68a..8dad29ea8 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -118,16 +118,28 @@ let findRescriptBinary = async ( ): Promise => { if ( config.extensionConfiguration.binaryPath != null && - fs.existsSync( - path.join(config.extensionConfiguration.binaryPath, "rescript"), - ) + (fs.existsSync( + path.join(config.extensionConfiguration.binaryPath, "rescript.exe"), + ) || + fs.existsSync( + path.join(config.extensionConfiguration.binaryPath, "rescript"), + )) ) { return utils.normalizePath( - path.join(config.extensionConfiguration.binaryPath, "rescript"), + fs.existsSync( + path.join(config.extensionConfiguration.binaryPath, "rescript.exe"), + ) + ? path.join(config.extensionConfiguration.binaryPath, "rescript.exe") + : path.join(config.extensionConfiguration.binaryPath, "rescript"), ); } - return utils.findRescriptBinary(projectRootPath); + // Prefer the native rescript.exe (v12+) for spawning `build -w`. + // Fall back to the legacy/JS wrapper `rescript` path if needed. + return ( + (await utils.findRescriptExeBinary(projectRootPath)) ?? + (await utils.findRescriptBinary(projectRootPath)) + ); }; let createInterfaceRequest = new v.RequestType< @@ -1388,11 +1400,10 @@ async function onMessage(msg: p.Message) { const watchers = Array.from(workspaceFolders).flatMap( (projectRootPath) => [ { - globPattern: path.join( - projectRootPath, - "**", - c.compilerLogPartialPath, - ), + // Only watch the root compiler log for each workspace folder. + // In monorepos, `**/lib/bs/.compiler.log` matches every package and dependency, + // causing a burst of events per save. 
+ globPattern: path.join(projectRootPath, c.compilerLogPartialPath), kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete, }, { From 0500f548d0670264060f85fa3fb778554d5626c0 Mon Sep 17 00:00:00 2001 From: Cristiano Calcagno Date: Wed, 14 Jan 2026 12:12:20 +0100 Subject: [PATCH 52/56] Update changelog for monorepo compiler.log watcher and code analysis root --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 543e509e0..d43b4b5e8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,7 +14,9 @@ #### :bug: Bug fix -- Fix Code Analyzer binary lookup for ReScript v12+ projects. +- Fix Code Analyzer cwd/binary lookup in monorepos (run from workspace root). +- Fix monorepo build detection by only watching the workspace root `.compiler.log`. +- Fix Start Build for ReScript v12+ projects by preferring `rescript.exe`. - Take namespace into account for incremental cleanup. https://github.com/rescript-lang/rescript-vscode/pull/1164 - Potential race condition in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167 - Fix extension crash triggered by incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1169 From 20cb2084b7d100e182c22817ca4e59a63027268b Mon Sep 17 00:00:00 2001 From: Cristiano Calcagno Date: Thu, 15 Jan 2026 17:48:32 +0100 Subject: [PATCH 53/56] Logic for finding binary: use shared logic between client and server. 
--- client/src/commands.ts | 2 - client/src/commands/code_analysis.ts | 58 +++++------- client/src/extension.ts | 12 +-- client/src/utils.ts | 31 ++----- client/tsconfig.json | 4 +- server/src/server.ts | 22 +---- server/src/utils.ts | 119 +++--------------------- server/tsconfig.json | 4 +- shared/src/findBinary.ts | 133 +++++++++++++++++++++++++++ shared/src/projectRoots.ts | 40 ++++++++ shared/tsconfig.json | 12 +++ tsconfig.json | 3 + 12 files changed, 240 insertions(+), 200 deletions(-) create mode 100644 shared/src/findBinary.ts create mode 100644 shared/src/projectRoots.ts create mode 100644 shared/tsconfig.json diff --git a/client/src/commands.ts b/client/src/commands.ts index 6a795c467..0bc131863 100644 --- a/client/src/commands.ts +++ b/client/src/commands.ts @@ -13,14 +13,12 @@ export { pasteAsRescriptJson } from "./commands/paste_as_rescript_json"; export { pasteAsRescriptJsx } from "./commands/paste_as_rescript_jsx"; export const codeAnalysisWithReanalyze = ( - targetDir: string | null, diagnosticsCollection: DiagnosticCollection, diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap, outputChannel: OutputChannel, codeAnalysisRunningStatusBarItem: StatusBarItem, ) => { runCodeAnalysisWithReanalyze( - targetDir, diagnosticsCollection, diagnosticsResultCodeActions, outputChannel, diff --git a/client/src/commands/code_analysis.ts b/client/src/commands/code_analysis.ts index ed9feafd0..241d16c98 100644 --- a/client/src/commands/code_analysis.ts +++ b/client/src/commands/code_analysis.ts @@ -15,11 +15,12 @@ import { OutputChannel, StatusBarItem, } from "vscode"; +import { getBinaryPath, NormalizedPath, normalizePath } from "../utils"; import { - findProjectRootOfFileInDir, - getBinaryPath, - NormalizedPath, -} from "../utils"; + findBinary, + findBinary as findSharedBinary, +} from "../../../shared/src/findBinary"; +import { findProjectRootOfFile } from "../../../shared/src/projectRoots"; export let statusBarItem = { setToStopText: 
(codeAnalysisRunningStatusBarItem: StatusBarItem) => { @@ -203,45 +204,26 @@ let resultsToDiagnostics = ( }; }; -export const runCodeAnalysisWithReanalyze = ( - targetDir: string | null, +export const runCodeAnalysisWithReanalyze = async ( diagnosticsCollection: DiagnosticCollection, diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap, outputChannel: OutputChannel, codeAnalysisRunningStatusBarItem: StatusBarItem, ) => { let currentDocument = window.activeTextEditor.document; - let cwd = targetDir ?? path.dirname(currentDocument.uri.fsPath); - // Resolve the project root from `cwd` (which is the workspace root when code analysis is started), - // rather than from the currently-open file (which may be in a subpackage). - let projectRootPath: NormalizedPath | null = findProjectRootOfFileInDir( - path.join(cwd, "bsconfig.json"), + let projectRootPath: NormalizedPath | null = normalizePath( + findProjectRootOfFile(currentDocument.uri.fsPath), ); - if (projectRootPath == null) { - projectRootPath = findProjectRootOfFileInDir(currentDocument.uri.fsPath); - } - - // Try v12+ path first: @rescript/{platform}-{arch}/bin/rescript-tools.exe - // Then fall back to legacy paths via getBinaryPath - let binaryPath: string | null = null; - if (projectRootPath != null) { - const v12Path = path.join( - projectRootPath, - "node_modules", - "@rescript", - `${process.platform}-${process.arch}`, - "bin", - "rescript-tools.exe", - ); - if (fs.existsSync(v12Path)) { - binaryPath = v12Path; - } - } + let binaryPath: string | null = await findBinary({ + projectRootPath, + binary: "rescript-tools.exe", + }); if (binaryPath == null) { - binaryPath = - getBinaryPath("rescript-tools.exe", projectRootPath) ?? 
- getBinaryPath("rescript-editor-analysis.exe", projectRootPath); + binaryPath = await findBinary({ + projectRootPath, + binary: "rescript-editor-analysis.exe", + }); } if (binaryPath === null) { @@ -249,12 +231,14 @@ export const runCodeAnalysisWithReanalyze = ( return; } + // Strip everything after the outermost node_modules segment to get the project root. + let cwd = + binaryPath.match(/^(.*?)[\\/]+node_modules([\\/]+|$)/)?.[1] ?? binaryPath; + statusBarItem.setToRunningText(codeAnalysisRunningStatusBarItem); let opts = ["reanalyze", "-json"]; - let p = cp.spawn(binaryPath, opts, { - cwd, - }); + let p = cp.spawn(binaryPath, opts, { cwd }); if (p.stdout == null) { statusBarItem.setToFailed(codeAnalysisRunningStatusBarItem); diff --git a/client/src/extension.ts b/client/src/extension.ts index 41e7ecc66..cefe346a5 100644 --- a/client/src/extension.ts +++ b/client/src/extension.ts @@ -155,7 +155,6 @@ export function activate(context: ExtensionContext) { client.onNotification("rescript/compilationFinished", () => { if (inCodeAnalysisState.active === true) { customCommands.codeAnalysisWithReanalyze( - inCodeAnalysisState.activatedFromDirectory, diagnosticsCollection, diagnosticsResultCodeActions, outputChannel, @@ -308,8 +307,7 @@ export function activate(context: ExtensionContext) { let inCodeAnalysisState: { active: boolean; - activatedFromDirectory: string | null; - } = { active: false, activatedFromDirectory: null }; + } = { active: false }; // This code actions provider yields the code actions potentially extracted // from the code analysis to the editor. @@ -442,19 +440,12 @@ export function activate(context: ExtensionContext) { inCodeAnalysisState.active = true; - // Run reanalyze from the workspace root (so monorepos consistently analyze the root project), - // instead of from whatever file happened to be active when analysis was started. 
- const wsFolder = workspace.getWorkspaceFolder(currentDocument.uri); - inCodeAnalysisState.activatedFromDirectory = - wsFolder?.uri.fsPath ?? path.dirname(currentDocument.uri.fsPath); - codeAnalysisRunningStatusBarItem.command = "rescript-vscode.stop_code_analysis"; codeAnalysisRunningStatusBarItem.show(); statusBarItem.setToStopText(codeAnalysisRunningStatusBarItem); customCommands.codeAnalysisWithReanalyze( - inCodeAnalysisState.activatedFromDirectory, diagnosticsCollection, diagnosticsResultCodeActions, outputChannel, @@ -464,7 +455,6 @@ export function activate(context: ExtensionContext) { commands.registerCommand("rescript-vscode.stop_code_analysis", () => { inCodeAnalysisState.active = false; - inCodeAnalysisState.activatedFromDirectory = null; diagnosticsCollection.clear(); diagnosticsResultCodeActions.clear(); diff --git a/client/src/utils.ts b/client/src/utils.ts index 17135d14a..c0c364ff3 100644 --- a/client/src/utils.ts +++ b/client/src/utils.ts @@ -2,6 +2,11 @@ import * as path from "path"; import * as fs from "fs"; import * as os from "os"; import { DocumentUri } from "vscode-languageclient"; +import { findBinary, type BinaryName } from "../../shared/src/findBinary"; +import { + findProjectRootOfFileInDir as findProjectRootOfFileInDirShared, + normalizePath as normalizePathShared, +} from "../../shared/src/projectRoots"; /* * Much of the code in here is duplicated from the server code. @@ -27,7 +32,7 @@ export type NormalizedPath = string & { __brand: "NormalizedPath" }; */ export function normalizePath(filePath: string | null): NormalizedPath | null { // `path.normalize` ensures we can assume string is now NormalizedPath - return filePath != null ? 
(path.normalize(filePath) as NormalizedPath) : null; + return normalizePathShared(filePath) as NormalizedPath | null; } type binaryName = "rescript-editor-analysis.exe" | "rescript-tools.exe"; @@ -83,25 +88,7 @@ export const createFileInTempDir = (prefix = "", extension = "") => { export let findProjectRootOfFileInDir = ( source: string, ): NormalizedPath | null => { - const normalizedSource = normalizePath(source); - if (normalizedSource == null) { - return null; - } - const dir = normalizePath(path.dirname(normalizedSource)); - if (dir == null) { - return null; - } - if ( - fs.existsSync(path.join(dir, "rescript.json")) || - fs.existsSync(path.join(dir, "bsconfig.json")) - ) { - return dir; - } else { - if (dir === normalizedSource) { - // reached top - return null; - } else { - return findProjectRootOfFileInDir(dir); - } - } + return normalizePath(findProjectRootOfFileInDirShared(source)); }; + +export { findBinary, BinaryName }; diff --git a/client/tsconfig.json b/client/tsconfig.json index b0924c856..1974b6b8d 100644 --- a/client/tsconfig.json +++ b/client/tsconfig.json @@ -4,9 +4,9 @@ "target": "es2019", "lib": ["ES2019"], "outDir": "out", - "rootDir": "src", + "rootDirs": ["src", "../shared/src"], "sourceMap": true }, - "include": ["src"], + "include": ["src", "../shared/src"], "exclude": ["node_modules"] } diff --git a/server/src/server.ts b/server/src/server.ts index 8dad29ea8..7459f1051 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -118,28 +118,16 @@ let findRescriptBinary = async ( ): Promise => { if ( config.extensionConfiguration.binaryPath != null && - (fs.existsSync( - path.join(config.extensionConfiguration.binaryPath, "rescript.exe"), - ) || - fs.existsSync( - path.join(config.extensionConfiguration.binaryPath, "rescript"), - )) + fs.existsSync( + path.join(config.extensionConfiguration.binaryPath, "rescript"), + ) ) { return utils.normalizePath( - fs.existsSync( - path.join(config.extensionConfiguration.binaryPath, 
"rescript.exe"), - ) - ? path.join(config.extensionConfiguration.binaryPath, "rescript.exe") - : path.join(config.extensionConfiguration.binaryPath, "rescript"), + path.join(config.extensionConfiguration.binaryPath, "rescript"), ); } - // Prefer the native rescript.exe (v12+) for spawning `build -w`. - // Fall back to the legacy/JS wrapper `rescript` path if needed. - return ( - (await utils.findRescriptExeBinary(projectRootPath)) ?? - (await utils.findRescriptBinary(projectRootPath)) - ); + return utils.findRescriptBinary(projectRootPath); }; let createInterfaceRequest = new v.RequestType< diff --git a/server/src/utils.ts b/server/src/utils.ts index a5662cf14..4502ebbd3 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -12,6 +12,11 @@ import * as os from "os"; import semver from "semver"; import { fileURLToPath, pathToFileURL } from "url"; +import { + findBinary as findSharedBinary, + type BinaryName, +} from "../../shared/src/findBinary"; +import { findProjectRootOfFileInDir as findProjectRootOfFileInDirShared } from "../../shared/src/projectRoots"; import * as codeActions from "./codeActions"; import * as c from "./constants"; import * as lookup from "./lookup"; @@ -85,23 +90,7 @@ export let createFileInTempDir = (extension = ""): NormalizedPath => { function findProjectRootOfFileInDir( source: NormalizedPath, ): NormalizedPath | null { - const dir = normalizePath(path.dirname(source)); - if (dir == null) { - return null; - } - if ( - fs.existsSync(path.join(dir, c.rescriptJsonPartialPath)) || - fs.existsSync(path.join(dir, c.bsconfigPartialPath)) - ) { - return dir; - } else { - if (dir === source) { - // reached top - return null; - } else { - return findProjectRootOfFileInDir(dir); - } - } + return normalizePath(findProjectRootOfFileInDirShared(source)); } /** @@ -216,98 +205,14 @@ export let getProjectFile = ( // We won't know which version is in the project root until we read and parse `{project_root}/node_modules/rescript/package.json` let 
findBinary = async ( projectRootPath: NormalizedPath | null, - binary: - | "bsc.exe" - | "rescript-editor-analysis.exe" - | "rescript" - | "rewatch.exe" - | "rescript.exe", + binary: BinaryName, ): Promise => { - if (config.extensionConfiguration.platformPath != null) { - const result = path.join( - config.extensionConfiguration.platformPath, - binary, - ); - return normalizePath(result); - } - - if (projectRootPath !== null) { - try { - const compilerInfo = path.resolve( - projectRootPath, - c.compilerInfoPartialPath, - ); - const contents = await fsAsync.readFile(compilerInfo, "utf8"); - const compileInfo = JSON.parse(contents); - if (compileInfo && compileInfo.bsc_path) { - const bsc_path = compileInfo.bsc_path; - if (binary === "bsc.exe") { - return normalizePath(bsc_path); - } else { - const binary_path = path.join(path.dirname(bsc_path), binary); - return normalizePath(binary_path); - } - } - } catch {} - } - - const rescriptDir = lookup.findFilePathFromProjectRoot( + const result = await findSharedBinary({ projectRootPath, - path.join("node_modules", "rescript"), - ); - if (rescriptDir == null) { - return null; - } - - let rescriptVersion = null; - let rescriptJSWrapperPath = null; - try { - const rescriptPackageJSONPath = path.join(rescriptDir, "package.json"); - const rescriptPackageJSON = JSON.parse( - await fsAsync.readFile(rescriptPackageJSONPath, "utf-8"), - ); - rescriptVersion = rescriptPackageJSON.version; - rescriptJSWrapperPath = rescriptPackageJSON.bin.rescript; - } catch (error) { - return null; - } - - let binaryPath: string | null = null; - if (binary == "rescript") { - // Can't use the native bsb/rescript since we might need the watcher -w - // flag, which is only in the JS wrapper - binaryPath = path.join(rescriptDir, rescriptJSWrapperPath); - } else if (semver.gte(rescriptVersion, "12.0.0-alpha.13")) { - // TODO: export `binPaths` from `rescript` package so that we don't need to - // copy the logic for figuring out `target`. 
- const target = `${process.platform}-${process.arch}`; - // Use realpathSync to resolve symlinks, which is necessary for package - // managers like Deno and pnpm that use symlinked node_modules structures. - const targetPackagePath = path.join( - fs.realpathSync(rescriptDir), - "..", - `@rescript/${target}/bin.js`, - ); - const { binPaths } = await import(targetPackagePath); - - if (binary == "bsc.exe") { - binaryPath = binPaths.bsc_exe; - } else if (binary == "rescript-editor-analysis.exe") { - binaryPath = binPaths.rescript_editor_analysis_exe; - } else if (binary == "rewatch.exe") { - binaryPath = binPaths.rewatch_exe; - } else if (binary == "rescript.exe") { - binaryPath = binPaths.rescript_exe; - } - } else { - binaryPath = path.join(rescriptDir, c.platformDir, binary); - } - - if (binaryPath != null && fs.existsSync(binaryPath)) { - return normalizePath(binaryPath); - } else { - return null; - } + binary, + platformPath: config.extensionConfiguration.platformPath ?? null, + }); + return normalizePath(result); }; export let findRescriptBinary = (projectRootPath: NormalizedPath | null) => diff --git a/server/tsconfig.json b/server/tsconfig.json index 5dab877e0..c74b81c04 100644 --- a/server/tsconfig.json +++ b/server/tsconfig.json @@ -7,9 +7,9 @@ "sourceMap": true, "strict": true, "outDir": "out", - "rootDir": "src", + "rootDirs": ["src", "../shared/src"], "esModuleInterop": true }, - "include": ["src"], + "include": ["src", "../shared/src"], "exclude": ["node_modules"] } diff --git a/shared/src/findBinary.ts b/shared/src/findBinary.ts new file mode 100644 index 000000000..09cd65d6a --- /dev/null +++ b/shared/src/findBinary.ts @@ -0,0 +1,133 @@ +import * as fs from "fs"; +import * as fsAsync from "fs/promises"; +import * as path from "path"; +import * as semver from "semver"; + +export type BinaryName = + | "bsc.exe" + | "rescript-editor-analysis.exe" + | "rescript-tools.exe" + | "rescript" + | "rewatch.exe" + | "rescript.exe"; + +type FindBinaryOptions = { + 
projectRootPath: string | null; + binary: BinaryName; + platformPath?: string | null; +}; + +const compilerInfoPartialPath = path.join("lib", "bs", "compiler-info.json"); +const platformDir = + process.arch === "arm64" ? process.platform + process.arch : process.platform; + +const normalizePath = (filePath: string | null): string | null => { + return filePath != null ? path.normalize(filePath) : null; +}; + +const findFilePathFromProjectRoot = ( + directory: string | null, + filePartialPath: string, +): string | null => { + if (directory == null) { + return null; + } + + const filePath = path.join(directory, filePartialPath); + if (fs.existsSync(filePath)) { + return normalizePath(filePath); + } + + const parentDirStr = path.dirname(directory); + if (parentDirStr === directory) { + return null; + } + + return findFilePathFromProjectRoot( + normalizePath(parentDirStr), + filePartialPath, + ); +}; + +export const findBinary = async ({ + projectRootPath, + binary, + platformPath, +}: FindBinaryOptions): Promise => { + if (platformPath != null) { + const result = path.join(platformPath, binary); + return normalizePath(result); + } + + if (projectRootPath !== null) { + try { + const compilerInfo = path.resolve( + projectRootPath, + compilerInfoPartialPath, + ); + const contents = await fsAsync.readFile(compilerInfo, "utf8"); + const compileInfo = JSON.parse(contents); + if (compileInfo && compileInfo.bsc_path) { + const bscPath = compileInfo.bsc_path; + if (binary === "bsc.exe") { + return normalizePath(bscPath); + } else { + const binaryPath = path.join(path.dirname(bscPath), binary); + return normalizePath(binaryPath); + } + } + } catch {} + } + + const rescriptDir = findFilePathFromProjectRoot( + projectRootPath, + path.join("node_modules", "rescript"), + ); + if (rescriptDir == null) { + return null; + } + + let rescriptVersion = null; + let rescriptJSWrapperPath = null; + try { + const rescriptPackageJSONPath = path.join(rescriptDir, "package.json"); + const 
rescriptPackageJSON = JSON.parse( + await fsAsync.readFile(rescriptPackageJSONPath, "utf-8"), + ); + rescriptVersion = rescriptPackageJSON.version; + rescriptJSWrapperPath = rescriptPackageJSON.bin.rescript; + } catch { + return null; + } + + let binaryPath: string | null = null; + if (binary === "rescript") { + binaryPath = path.join(rescriptDir, rescriptJSWrapperPath); + } else if (semver.gte(rescriptVersion, "12.0.0-alpha.13")) { + const target = `${process.platform}-${process.arch}`; + const targetPackagePath = path.join( + fs.realpathSync(rescriptDir), + "..", + `@rescript/${target}/bin.js`, + ); + const { binPaths } = await import(targetPackagePath); + + if (binary === "bsc.exe") { + binaryPath = binPaths.bsc_exe; + } else if (binary === "rescript-editor-analysis.exe") { + binaryPath = binPaths.rescript_editor_analysis_exe; + } else if (binary === "rewatch.exe") { + binaryPath = binPaths.rewatch_exe; + } else if (binary === "rescript.exe") { + binaryPath = binPaths.rescript_exe; + } + } else { + binaryPath = path.join(rescriptDir, platformDir, binary); + } + + if (binaryPath != null && fs.existsSync(binaryPath)) { + return normalizePath(binaryPath); + } + + return null; +}; diff --git a/shared/src/projectRoots.ts b/shared/src/projectRoots.ts new file mode 100644 index 000000000..2ddd8a08f --- /dev/null +++ b/shared/src/projectRoots.ts @@ -0,0 +1,40 @@ +import * as fs from "fs"; +import * as path from "path"; + +export const normalizePath = (filePath: string | null): string | null => { + return filePath != null ? 
path.normalize(filePath) : null; +}; + +export const findProjectRootOfFileInDir = (source: string): string | null => { + const normalizedSource = normalizePath(source); + if (normalizedSource == null) { + return null; + } + + const dir = normalizePath(path.dirname(normalizedSource)); + if (dir == null) { + return null; + } + + if ( + fs.existsSync(path.join(dir, "rescript.json")) || + fs.existsSync(path.join(dir, "bsconfig.json")) + ) { + return dir; + } + + if (dir === normalizedSource) { + return null; + } + + return findProjectRootOfFileInDir(dir); +}; + +export const findProjectRootOfFile = (source: string): string | null => { + const normalizedSource = normalizePath(source); + if (normalizedSource == null) { + return null; + } + + return findProjectRootOfFileInDir(normalizedSource); +}; diff --git a/shared/tsconfig.json b/shared/tsconfig.json new file mode 100644 index 000000000..b0924c856 --- /dev/null +++ b/shared/tsconfig.json @@ -0,0 +1,12 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "es2019", + "lib": ["ES2019"], + "outDir": "out", + "rootDir": "src", + "sourceMap": true + }, + "include": ["src"], + "exclude": ["node_modules"] +} diff --git a/tsconfig.json b/tsconfig.json index 0f5dff06a..7cdfe7c07 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -15,6 +15,9 @@ }, { "path": "./server" + }, + { + "path": "./shared" } ] } From 47d5dd34abf476368f3dcdef2b1b0818e81f2ec6 Mon Sep 17 00:00:00 2001 From: Cristiano Calcagno Date: Tue, 27 Jan 2026 11:22:12 +0100 Subject: [PATCH 54/56] Add reanalyze server support, monorepo build support, and comprehensive integration tests. ## Summary Add reanalyze server support, monorepo build support, and comprehensive integration tests. --- ## 1. 
Fixes ### ARM64 binary lookup fallback **File:** `shared/src/findBinary.ts` - For ARM64 architecture, try the arm64-specific directory first (e.g., `darwinarm64`), then fall back to the generic platform directory (e.g., `darwin`) for older ReScript versions that don't have arm64-specific directories ### Fixed "rescript" JS wrapper lookup **File:** `shared/src/findBinary.ts` - For the "rescript" JS wrapper (as opposed to native binaries like `bsc.exe`), don't use the `bsc_path` directory from `compiler-info.json` - fall through to find the JS wrapper in node_modules/.bin instead ### Build watcher process cleanup **File:** `server/src/utils.ts` - Added `killBuildWatcher()` function that properly kills the entire process group (not just the parent process) and cleans up lock files - Uses `process.kill(-proc.pid, "SIGTERM")` on Unix to kill the entire process group, ensuring child processes (like the native rescript binary) are also killed - The rescript compiler doesn't remove lock files on SIGTERM, so we now clean them up manually ### Build watcher stdin handling for older ReScript **File:** `server/src/utils.ts` - Use "pipe" for stdin instead of "ignore" because older ReScript versions (9.x, 10.x, 11.x) have a handler that exits when stdin closes: `process.stdin.on("close", exitProcess)` ### Server shutdown cleanup **File:** `server/src/server.ts` - Kill all build watchers and clean up lock files on LSP server shutdown --- ## 2. 
New Functionality ### Reanalyze Server Support (ReScript >= 12.1.0) **Files:** `client/src/commands/code_analysis.ts`, `client/src/commands.ts`, `client/src/extension.ts` - Added persistent reanalyze-server support for ReScript >= 12.1.0 (uses Unix socket for incremental analysis) - Server state management per monorepo root - Functions: `startReanalyzeServer()`, `stopReanalyzeServer()`, `stopAllReanalyzeServers()`, `showReanalyzeServerLog()` - Dedicated output channels for server logs per project - Automatic socket file cleanup - Servers are stopped when code analysis is stopped or extension deactivates ### New Command: Show Reanalyze Server Log **Files:** `client/src/extension.ts`, `package.json` - New command: `rescript-vscode.show_reanalyze_server_log` - Displays the reanalyze server output channel for debugging ### New Command: Start Build **Files:** `client/src/extension.ts`, `server/src/server.ts`, `package.json` - New command: `rescript-vscode.start_build` - Allows manually starting the build watcher without waiting for the prompt - LSP request handler: `rescript/startBuild` ### Monorepo Support **Files:** `shared/src/findBinary.ts`, `server/src/server.ts`, `server/src/utils.ts`, `client/src/commands/code_analysis.ts` - New function `getMonorepoRootFromBinaryPath()` to derive monorepo root from binary path - Build watcher now runs from monorepo root, not subpackage directory - Lock file detection at monorepo root level (checks both `lib/rescript.lock` and `.bsb.lock`) - Build prompt correctly uses monorepo root path when opening files from subpackages - Tracks `buildRootPath` separately from `projectRootPath` for proper cleanup ### ReScript Version-Aware Build Command **File:** `server/src/utils.ts` - Uses `rescript watch` for ReScript >= 12.0.0 - Uses `rescript build -w` for ReScript < 12.0.0 ### Improved Error Handling and Logging **Files:** Multiple - Better error messages when binaries are not found - Logging for build watcher process lifecycle (start, 
error, exit)
- Logging for file open/close events

---

## 3. Tests

### Test Infrastructure

**Files:** `client/package.json`, `client/.vscode-test.mjs`, `client/tsconfig.json`

- Added test dependencies: `@types/mocha`, `@vscode/test-cli`, `@vscode/test-electron`
- Test configuration for 4 test suites with different workspace folders
- Added `skipLibCheck: true` to tsconfig for test type compatibility

### Test Helpers (`client/src/test/suite/helpers.ts`)

Shared utilities for all test suites:

- `getWorkspaceRoot()` - Get workspace folder path
- `removeRescriptLockFile()`, `removeBsbLockFile()`, `removeMonorepoLockFiles()` - Lock file cleanup
- `removeReanalyzeSocketFile()` - Socket file cleanup
- `ensureExtensionActivated()` - Ensure extension is active
- `openFile()` - Open file in editor
- `findLspLogContent()` - Read LSP logs for verification
- `waitFor()`, `sleep()` - Async utilities
- `getCompilerLogPath()`, `getFileMtime()`, `waitForFileUpdate()` - File modification tracking
- `insertCommentAndSave()`, `restoreContentAndSave()` - Edit and restore files
- `startBuildWatcher()`, `startCodeAnalysis()`, `stopCodeAnalysis()` - Command execution
- `showReanalyzeServerLog()`, `findBuildPromptInLogs()` - Verification helpers

### Example Project Tests (`client/src/test/suite/exampleProject.test.ts`)

Tests against `analysis/examples/example-project/` (ReScript 12.1.0):

1. **Extension should be present** - Verify extension loads
2. **Commands should be registered** - Verify all new commands are registered
3. **Start Code Analysis should run** - Run code analysis on a ReScript file
4. **Start Build command should start build watcher** - Manual build start
5. **Build watcher recompiles on file save** - Edit file, verify compiler.log updates
6. **Code analysis with incremental updates** - Add dead code, verify new diagnostics appear
7.
**Should prompt to start build when no lock file exists** - Verify build prompt

### Monorepo Root Tests (`client/src/test/suite/monorepoRoot.test.ts`)

Tests against `analysis/examples/monorepo-project/` opened at root:

1. **Build watcher works when opening root package** - Build from root, verify compiler.log
2. **Build watcher works when opening subpackage file** - Open subpackage file, build still uses root compiler.log
3. **Code analysis works from subpackage** - Run reanalyze from lib package
4. **Should prompt to start build with monorepo root path** - Open subpackage, prompt uses root path

### Monorepo Subpackage Tests (`client/src/test/suite/monorepoSubpackage.test.ts`)

Tests against `analysis/examples/monorepo-project/packages/app/` (VSCode opened on subpackage):

1. **Should prompt to start build with monorepo root path** - Even when opened from subpackage, prompt uses monorepo root
2. **Lock file created at monorepo root, not subpackage** - Verify lock file location
3. **Code analysis works when opened from subpackage** - Diagnostics shown correctly

### ReScript 9 Tests (`client/src/test/suite/rescript9.test.ts`)

Tests against `analysis/examples/rescript9-project/` (ReScript 9.x):

1. **Extension should be present** - Extension loads with older ReScript
2. **Build watcher should start with 'rescript build -w'** - Not 'rescript watch'
3. **Build watcher recompiles on file save** - Verify incremental compilation
4. **Should prompt to start build when no lock file exists** - Uses `.bsb.lock`
5.
**Lock file should be cleaned up on language server restart** - Verify cleanup ### Test Projects Added - `analysis/examples/monorepo-project/` - Monorepo with root package + packages/app + packages/lib - `analysis/examples/rescript9-project/` - ReScript 9.x project with bsconfig.json - Updated `analysis/examples/example-project/` to use ReScript 12.1.0 --- ## Other Changes ### .gitignore - Added `.vscode-test/` directory (VSCode test downloads) ### package.json (root) - Added two new commands to the extension manifest: - `rescript-vscode.show_reanalyze_server_log` - "ReScript: Show Code Analyzer Server Log" - `rescript-vscode.start_build` - "ReScript: Start Build" --- .gitignore | 5 +- .../example-project/.vscode/settings.json | 3 + .../examples/example-project/package.json | 8 +- analysis/examples/monorepo-project/.gitignore | 2 + .../monorepo-project/.vscode/settings.json | 3 + .../monorepo-project/package-lock.json | 152 ++++++++ .../examples/monorepo-project/package.json | 16 + .../packages/app/.vscode/settings.json | 3 + .../packages/app/package.json | 12 + .../packages/app/rescript.json | 13 + .../monorepo-project/packages/app/src/App.mjs | 22 ++ .../monorepo-project/packages/app/src/App.res | 16 + .../examples/monorepo-project/rescript.json | 16 + .../examples/monorepo-project/src/Root.mjs | 15 + .../examples/monorepo-project/src/Root.res | 11 + .../examples/rescript9-project/.gitignore | 2 + .../rescript9-project/.vscode/settings.json | 3 + .../examples/rescript9-project/bsconfig.json | 9 + .../rescript9-project/package-lock.json | 28 ++ .../examples/rescript9-project/package.json | 12 + .../rescript9-project/src/Hello.bs.js | 18 + .../examples/rescript9-project/src/Hello.res | 6 + client/.vscode-test.mjs | 65 ++++ client/package.json | 8 + client/src/commands.ts | 16 +- client/src/commands/code_analysis.ts | 330 ++++++++++++++++-- client/src/extension.ts | 66 +++- client/src/test/suite/exampleProject.test.ts | 328 +++++++++++++++++ 
client/src/test/suite/helpers.ts | 324 +++++++++++++++++ client/src/test/suite/monorepoRoot.test.ts | 274 +++++++++++++++ .../src/test/suite/monorepoSubpackage.test.ts | 153 ++++++++ client/src/test/suite/rescript9.test.ts | 241 +++++++++++++ client/tsconfig.json | 3 +- package.json | 8 + server/src/projectFiles.ts | 3 + server/src/server.ts | 183 +++++++++- server/src/utils.ts | 101 +++++- shared/src/findBinary.ts | 39 ++- 38 files changed, 2460 insertions(+), 57 deletions(-) create mode 100644 analysis/examples/example-project/.vscode/settings.json create mode 100644 analysis/examples/monorepo-project/.gitignore create mode 100644 analysis/examples/monorepo-project/.vscode/settings.json create mode 100644 analysis/examples/monorepo-project/package-lock.json create mode 100644 analysis/examples/monorepo-project/package.json create mode 100644 analysis/examples/monorepo-project/packages/app/.vscode/settings.json create mode 100644 analysis/examples/monorepo-project/packages/app/package.json create mode 100644 analysis/examples/monorepo-project/packages/app/rescript.json create mode 100644 analysis/examples/monorepo-project/packages/app/src/App.mjs create mode 100644 analysis/examples/monorepo-project/packages/app/src/App.res create mode 100644 analysis/examples/monorepo-project/rescript.json create mode 100644 analysis/examples/monorepo-project/src/Root.mjs create mode 100644 analysis/examples/monorepo-project/src/Root.res create mode 100644 analysis/examples/rescript9-project/.gitignore create mode 100644 analysis/examples/rescript9-project/.vscode/settings.json create mode 100644 analysis/examples/rescript9-project/bsconfig.json create mode 100644 analysis/examples/rescript9-project/package-lock.json create mode 100644 analysis/examples/rescript9-project/package.json create mode 100644 analysis/examples/rescript9-project/src/Hello.bs.js create mode 100644 analysis/examples/rescript9-project/src/Hello.res create mode 100644 client/.vscode-test.mjs create mode 
100644 client/src/test/suite/exampleProject.test.ts create mode 100644 client/src/test/suite/helpers.ts create mode 100644 client/src/test/suite/monorepoRoot.test.ts create mode 100644 client/src/test/suite/monorepoSubpackage.test.ts create mode 100644 client/src/test/suite/rescript9.test.ts diff --git a/.gitignore b/.gitignore index 5dc814def..77951f065 100644 --- a/.gitignore +++ b/.gitignore @@ -24,4 +24,7 @@ rescript-tools.exe _opam/ _build/ -*.tsbuildinfo \ No newline at end of file +*.tsbuildinfo + +# VSCode test downloads +.vscode-test/ \ No newline at end of file diff --git a/analysis/examples/example-project/.vscode/settings.json b/analysis/examples/example-project/.vscode/settings.json new file mode 100644 index 000000000..6c4df7cf5 --- /dev/null +++ b/analysis/examples/example-project/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "rescript.settings.logLevel": "log" +} diff --git a/analysis/examples/example-project/package.json b/analysis/examples/example-project/package.json index 7f60ced76..39e890488 100644 --- a/analysis/examples/example-project/package.json +++ b/analysis/examples/example-project/package.json @@ -2,12 +2,12 @@ "name": "tryit", "dependencies": { "@rescript/react": "^0.14.0", - "rescript": "12.0.2" + "rescript": "12.1.0" }, "scripts": { - "build": "rescript-legacy", - "start": "rescript-legacy build -w", - "clean": "rescript-legacy clean -with-deps", + "build": "rescript", + "start": "rescript build -w", + "clean": "rescript clean", "format": "rescript format" } } diff --git a/analysis/examples/monorepo-project/.gitignore b/analysis/examples/monorepo-project/.gitignore new file mode 100644 index 000000000..d66a081c5 --- /dev/null +++ b/analysis/examples/monorepo-project/.gitignore @@ -0,0 +1,2 @@ +lib +.merlin diff --git a/analysis/examples/monorepo-project/.vscode/settings.json b/analysis/examples/monorepo-project/.vscode/settings.json new file mode 100644 index 000000000..6c4df7cf5 --- /dev/null +++ 
b/analysis/examples/monorepo-project/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "rescript.settings.logLevel": "log" +} diff --git a/analysis/examples/monorepo-project/package-lock.json b/analysis/examples/monorepo-project/package-lock.json new file mode 100644 index 000000000..c79b0d66a --- /dev/null +++ b/analysis/examples/monorepo-project/package-lock.json @@ -0,0 +1,152 @@ +{ + "name": "monorepo-project", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "monorepo-project", + "workspaces": [ + "packages/app", + "packages/lib" + ], + "dependencies": { + "rescript": "12.1.0" + } + }, + "node_modules/@monorepo/app": { + "resolved": "packages/app", + "link": true + }, + "node_modules/@monorepo/lib": { + "resolved": "packages/lib", + "link": true + }, + "node_modules/@rescript/darwin-arm64": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/darwin-arm64/-/darwin-arm64-12.1.0.tgz", + "integrity": "sha512-OuJMT+2h2Lp60n8ONFx1oBAAePSVkM9zl7E/EX4VD2xkQoVTPklz0BpHYOICnFJSCOOdbOhbsTBXdLpo3yvllg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/darwin-x64": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/darwin-x64/-/darwin-x64-12.1.0.tgz", + "integrity": "sha512-r5Iv4ga+LaNq+6g9LODwZG4bwydd9UDXACP/HKxOfrP9XQCITlF/XqB1ZDJWyJOgJLZSJCd7erlG38YtB0VZKA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/linux-arm64": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/linux-arm64/-/linux-arm64-12.1.0.tgz", + "integrity": "sha512-UTZv4GTjbyQ/T5LQDfQiGcumK3SzE1K7+ug6gWpDcGZ7ALc7hCS6BVEFL/LDs8iWVwAwkK/6r456s2zRnvS7wQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/linux-x64": { + 
"version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/linux-x64/-/linux-x64-12.1.0.tgz", + "integrity": "sha512-c0PXuBL09JRSA4nQusYbR4mW5QJrBPqxDrqvIX+M79fk3d6jQmj5x4NsBwk5BavxvmbR/JU1JjYBlSAa3h22Vg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/@rescript/runtime": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/runtime/-/runtime-12.1.0.tgz", + "integrity": "sha512-bvr9RfvBD+JS/6foWCA4l2fLXmUXN0KGqylXQPHt09QxUghqgoCiaWVHaHSx5dOIk/jAPlGQ7zB5yVeMas/EFQ==" + }, + "node_modules/@rescript/win32-x64": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/@rescript/win32-x64/-/win32-x64-12.1.0.tgz", + "integrity": "sha512-nQC42QByyAbryfkbyK67iskipUqXVwTPCFrqissY4jJoP0128gg0yG6DydJnV1stXphtFdMFHtmyYE1ffG7UBg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=20.11.0" + } + }, + "node_modules/rescript": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/rescript/-/rescript-12.1.0.tgz", + "integrity": "sha512-n/B43wzIEKV4OmlrWbrlQOL4zZaz0RM/Cc8PG2YvhQvQDW7nscHJliDq1AGeVwHoMX68MeaKKzLDOMOMU9Z6FA==", + "license": "SEE LICENSE IN LICENSE", + "workspaces": [ + "packages/playground", + "packages/@rescript/*", + "tests/dependencies/**", + "tests/analysis_tests/**", + "tests/docstring_tests", + "tests/gentype_tests/**", + "tests/tools_tests", + "scripts/res" + ], + "dependencies": { + "@rescript/runtime": "12.1.0" + }, + "bin": { + "bsc": "cli/bsc.js", + "bstracing": "cli/bstracing.js", + "rescript": "cli/rescript.js", + "rescript-legacy": "cli/rescript-legacy.js", + "rescript-tools": "cli/rescript-tools.js" + }, + "engines": { + "node": ">=20.11.0" + }, + "optionalDependencies": { + "@rescript/darwin-arm64": "12.1.0", + "@rescript/darwin-x64": "12.1.0", + "@rescript/linux-arm64": "12.1.0", + "@rescript/linux-x64": "12.1.0", + "@rescript/win32-x64": "12.1.0" + } + }, + 
"packages/app": { + "name": "@monorepo/app", + "version": "0.0.1", + "dependencies": { + "@monorepo/lib": "*" + } + }, + "packages/lib": { + "name": "@monorepo/lib", + "version": "0.0.1" + } + } +} diff --git a/analysis/examples/monorepo-project/package.json b/analysis/examples/monorepo-project/package.json new file mode 100644 index 000000000..b516ef9fa --- /dev/null +++ b/analysis/examples/monorepo-project/package.json @@ -0,0 +1,16 @@ +{ + "name": "monorepo-project", + "private": true, + "workspaces": [ + "packages/app", + "packages/lib" + ], + "dependencies": { + "rescript": "12.1.0" + }, + "scripts": { + "build": "rescript build", + "watch": "rescript watch", + "clean": "rescript clean" + } +} diff --git a/analysis/examples/monorepo-project/packages/app/.vscode/settings.json b/analysis/examples/monorepo-project/packages/app/.vscode/settings.json new file mode 100644 index 000000000..6c4df7cf5 --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "rescript.settings.logLevel": "log" +} diff --git a/analysis/examples/monorepo-project/packages/app/package.json b/analysis/examples/monorepo-project/packages/app/package.json new file mode 100644 index 000000000..ce8a9af6d --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/package.json @@ -0,0 +1,12 @@ +{ + "name": "@monorepo/app", + "version": "0.0.1", + "scripts": { + "build": "rescript build", + "clean": "rescript clean", + "watch": "rescript watch" + }, + "dependencies": { + "@monorepo/lib": "*" + } +} diff --git a/analysis/examples/monorepo-project/packages/app/rescript.json b/analysis/examples/monorepo-project/packages/app/rescript.json new file mode 100644 index 000000000..81c0e2bd7 --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/rescript.json @@ -0,0 +1,13 @@ +{ + "name": "@monorepo/app", + "sources": { + "dir": "src", + "subdirs": true + }, + "package-specs": { + "module": "esmodule", + "in-source": true + }, + 
"suffix": ".mjs", + "dependencies": ["@monorepo/lib"] +} diff --git a/analysis/examples/monorepo-project/packages/app/src/App.mjs b/analysis/examples/monorepo-project/packages/app/src/App.mjs new file mode 100644 index 000000000..e91955003 --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/src/App.mjs @@ -0,0 +1,22 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as Lib from "@monorepo/lib/src/Lib.mjs"; + +function main() { + let greeting = Lib.greet("World"); + console.log(greeting); + let sum = Lib.add(1, 2); + console.log("Sum: " + sum.toString()); +} + +function unusedAppFunction() { + return "Unused in app"; +} + +main(); + +export { + main, + unusedAppFunction, +} +/* Not a pure module */ diff --git a/analysis/examples/monorepo-project/packages/app/src/App.res b/analysis/examples/monorepo-project/packages/app/src/App.res new file mode 100644 index 000000000..22fb2dd11 --- /dev/null +++ b/analysis/examples/monorepo-project/packages/app/src/App.res @@ -0,0 +1,16 @@ +/* monorepo subpackage test */ +// App module - main application + +let main = () => { + let greeting = Lib.greet("World") + Console.log(greeting) + + let sum = Lib.add(1, 2) + Console.log("Sum: " ++ Int.toString(sum)) +} + +// This function is never used (dead code) +let unusedAppFunction = () => "Unused in app" + +// Run main +let _ = main() diff --git a/analysis/examples/monorepo-project/rescript.json b/analysis/examples/monorepo-project/rescript.json new file mode 100644 index 000000000..b22b4976e --- /dev/null +++ b/analysis/examples/monorepo-project/rescript.json @@ -0,0 +1,16 @@ +{ + "name": "monorepo-project", + "sources": { + "dir": "src", + "subdirs": true + }, + "package-specs": { + "module": "esmodule", + "in-source": true + }, + "suffix": ".mjs", + "dependencies": [ + "@monorepo/app", + "@monorepo/lib" + ] +} diff --git a/analysis/examples/monorepo-project/src/Root.mjs b/analysis/examples/monorepo-project/src/Root.mjs new file mode 100644 index 
000000000..201f61b4b --- /dev/null +++ b/analysis/examples/monorepo-project/src/Root.mjs @@ -0,0 +1,15 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + +import * as App from "@monorepo/app/src/App.mjs"; + +App.main(); + +let rootValue = "Root package"; + +let unusedRootValue = 123; + +export { + rootValue, + unusedRootValue, +} +/* Not a pure module */ diff --git a/analysis/examples/monorepo-project/src/Root.res b/analysis/examples/monorepo-project/src/Root.res new file mode 100644 index 000000000..eb3da731e --- /dev/null +++ b/analysis/examples/monorepo-project/src/Root.res @@ -0,0 +1,11 @@ +// Root module - monorepo root + +let rootValue = "Root package" + +// Use something from the app package +let _ = App.main() + +// This is unused (dead code) +let unusedRootValue = 123 + +// let _ = App.unusedAppFunction \ No newline at end of file diff --git a/analysis/examples/rescript9-project/.gitignore b/analysis/examples/rescript9-project/.gitignore new file mode 100644 index 000000000..d66a081c5 --- /dev/null +++ b/analysis/examples/rescript9-project/.gitignore @@ -0,0 +1,2 @@ +lib +.merlin diff --git a/analysis/examples/rescript9-project/.vscode/settings.json b/analysis/examples/rescript9-project/.vscode/settings.json new file mode 100644 index 000000000..6c4df7cf5 --- /dev/null +++ b/analysis/examples/rescript9-project/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "rescript.settings.logLevel": "log" +} diff --git a/analysis/examples/rescript9-project/bsconfig.json b/analysis/examples/rescript9-project/bsconfig.json new file mode 100644 index 000000000..d594590e1 --- /dev/null +++ b/analysis/examples/rescript9-project/bsconfig.json @@ -0,0 +1,9 @@ +{ + "name": "rescript9-project", + "sources": ["src"], + "package-specs": { + "module": "es6", + "in-source": true + }, + "suffix": ".bs.js" +} diff --git a/analysis/examples/rescript9-project/package-lock.json b/analysis/examples/rescript9-project/package-lock.json new file mode 100644 index 000000000..7d9d5c10a --- 
/dev/null +++ b/analysis/examples/rescript9-project/package-lock.json @@ -0,0 +1,28 @@ +{ + "name": "rescript9-project", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "rescript9-project", + "version": "1.0.0", + "dependencies": { + "rescript": "9.1.4" + } + }, + "node_modules/rescript": { + "version": "9.1.4", + "resolved": "https://registry.npmjs.org/rescript/-/rescript-9.1.4.tgz", + "integrity": "sha512-aXANK4IqecJzdnDpJUsU6pxMViCR5ogAxzuqS0mOr8TloMnzAjJFu63fjD6LCkWrKAhlMkFFzQvVQYaAaVkFXw==", + "hasInstallScript": true, + "license": "SEE LICENSE IN LICENSE", + "bin": { + "bsc": "bsc", + "bsrefmt": "bsrefmt", + "bstracing": "lib/bstracing", + "rescript": "rescript" + } + } + } +} diff --git a/analysis/examples/rescript9-project/package.json b/analysis/examples/rescript9-project/package.json new file mode 100644 index 000000000..2571ffabe --- /dev/null +++ b/analysis/examples/rescript9-project/package.json @@ -0,0 +1,12 @@ +{ + "name": "rescript9-project", + "version": "1.0.0", + "scripts": { + "build": "rescript build", + "watch": "rescript build -w", + "clean": "rescript clean" + }, + "dependencies": { + "rescript": "9.1.4" + } +} diff --git a/analysis/examples/rescript9-project/src/Hello.bs.js b/analysis/examples/rescript9-project/src/Hello.bs.js new file mode 100644 index 000000000..0edc63d78 --- /dev/null +++ b/analysis/examples/rescript9-project/src/Hello.bs.js @@ -0,0 +1,18 @@ +// Generated by ReScript, PLEASE EDIT WITH CARE + + +function add(a, b) { + return a + b | 0; +} + +var greeting = "Hello from ReScript 9!"; + +var result = 3; + +export { + greeting , + add , + result , + +} +/* No side effect */ diff --git a/analysis/examples/rescript9-project/src/Hello.res b/analysis/examples/rescript9-project/src/Hello.res new file mode 100644 index 000000000..8dace8fd7 --- /dev/null +++ b/analysis/examples/rescript9-project/src/Hello.res @@ -0,0 +1,6 @@ +// Simple ReScript 9 test file +let greeting = "Hello 
from ReScript 9!" + +let add = (a, b) => a + b + +let result = add(1, 2) diff --git a/client/.vscode-test.mjs b/client/.vscode-test.mjs new file mode 100644 index 000000000..4bdfbda6b --- /dev/null +++ b/client/.vscode-test.mjs @@ -0,0 +1,65 @@ +import { defineConfig } from "@vscode/test-cli"; +import * as path from "path"; + +export default defineConfig([ + { + label: "example-project", + files: "out/client/src/test/suite/exampleProject.test.js", + version: "stable", + extensionDevelopmentPath: path.resolve(import.meta.dirname, ".."), + workspaceFolder: path.resolve( + import.meta.dirname, + "../analysis/examples/example-project", + ), + mocha: { + ui: "tdd", + timeout: 60000, + }, + launchArgs: ["--disable-extensions"], + }, + { + label: "monorepo-root", + files: "out/client/src/test/suite/monorepoRoot.test.js", + version: "stable", + extensionDevelopmentPath: path.resolve(import.meta.dirname, ".."), + workspaceFolder: path.resolve( + import.meta.dirname, + "../analysis/examples/monorepo-project", + ), + mocha: { + ui: "tdd", + timeout: 60000, + }, + launchArgs: ["--disable-extensions"], + }, + { + label: "monorepo-subpackage", + files: "out/client/src/test/suite/monorepoSubpackage.test.js", + version: "stable", + extensionDevelopmentPath: path.resolve(import.meta.dirname, ".."), + workspaceFolder: path.resolve( + import.meta.dirname, + "../analysis/examples/monorepo-project/packages/app", + ), + mocha: { + ui: "tdd", + timeout: 60000, + }, + launchArgs: ["--disable-extensions"], + }, + { + label: "rescript9-project", + files: "out/client/src/test/suite/rescript9.test.js", + version: "stable", + extensionDevelopmentPath: path.resolve(import.meta.dirname, ".."), + workspaceFolder: path.resolve( + import.meta.dirname, + "../analysis/examples/rescript9-project", + ), + mocha: { + ui: "tdd", + timeout: 60000, + }, + launchArgs: ["--disable-extensions"], + }, +]); diff --git a/client/package.json b/client/package.json index 6f5dc88b8..3a74fdb60 100644 --- 
a/client/package.json +++ b/client/package.json @@ -6,7 +6,15 @@ "keywords": [], "author": "ReScript Team", "license": "MIT", + "scripts": { + "test": "vscode-test" + }, "dependencies": { "vscode-languageclient": "8.1.0-next.5" + }, + "devDependencies": { + "@types/mocha": "^10.0.10", + "@vscode/test-cli": "^0.0.12", + "@vscode/test-electron": "^2.5.2" } } diff --git a/client/src/commands.ts b/client/src/commands.ts index 0bc131863..f6e97235b 100644 --- a/client/src/commands.ts +++ b/client/src/commands.ts @@ -3,8 +3,19 @@ import { DiagnosticCollection, OutputChannel, StatusBarItem } from "vscode"; import { DiagnosticsResultCodeActionsMap, runCodeAnalysisWithReanalyze, + reanalyzeServers, + stopReanalyzeServer, + stopAllReanalyzeServers, + showReanalyzeServerLog, } from "./commands/code_analysis"; +export { + reanalyzeServers, + stopReanalyzeServer, + stopAllReanalyzeServers, + showReanalyzeServerLog, +}; + export { createInterface } from "./commands/create_interface"; export { openCompiled } from "./commands/open_compiled"; export { switchImplIntf } from "./commands/switch_impl_intf"; @@ -12,13 +23,14 @@ export { dumpDebug, dumpDebugRetrigger } from "./commands/dump_debug"; export { pasteAsRescriptJson } from "./commands/paste_as_rescript_json"; export { pasteAsRescriptJsx } from "./commands/paste_as_rescript_jsx"; +// Returns the monorepo root path if a reanalyze server was started, null otherwise. 
export const codeAnalysisWithReanalyze = ( diagnosticsCollection: DiagnosticCollection, diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap, outputChannel: OutputChannel, codeAnalysisRunningStatusBarItem: StatusBarItem, -) => { - runCodeAnalysisWithReanalyze( +): Promise => { + return runCodeAnalysisWithReanalyze( diagnosticsCollection, diagnosticsResultCodeActions, outputChannel, diff --git a/client/src/commands/code_analysis.ts b/client/src/commands/code_analysis.ts index 241d16c98..1fe197af2 100644 --- a/client/src/commands/code_analysis.ts +++ b/client/src/commands/code_analysis.ts @@ -1,6 +1,7 @@ import * as cp from "child_process"; import * as fs from "fs"; import * as path from "path"; +import * as semver from "semver"; import { window, DiagnosticCollection, @@ -15,13 +16,260 @@ import { OutputChannel, StatusBarItem, } from "vscode"; -import { getBinaryPath, NormalizedPath, normalizePath } from "../utils"; +import { NormalizedPath, normalizePath } from "../utils"; import { findBinary, - findBinary as findSharedBinary, + getMonorepoRootFromBinaryPath, } from "../../../shared/src/findBinary"; import { findProjectRootOfFile } from "../../../shared/src/projectRoots"; +// Reanalyze server constants (matches rescript monorepo) +const REANALYZE_SOCKET_FILENAME = ".rescript-reanalyze.sock"; +const REANALYZE_SERVER_MIN_VERSION = "12.1.0"; + +// Server state per monorepo root +export interface ReanalyzeServerState { + process: cp.ChildProcess | null; + monorepoRoot: string; + socketPath: string; + startedByUs: boolean; + outputChannel: OutputChannel | null; +} + +// Map from monorepo root to server state +export const reanalyzeServers: Map = new Map(); + +// Check if ReScript version supports reanalyze-server +const supportsReanalyzeServer = async ( + monorepoRootPath: string | null, +): Promise => { + if (monorepoRootPath === null) return false; + + try { + const rescriptDir = path.join(monorepoRootPath, "node_modules", "rescript"); + const packageJsonPath 
= path.join(rescriptDir, "package.json"); + const packageJson = JSON.parse( + await fs.promises.readFile(packageJsonPath, "utf-8"), + ); + const version = packageJson.version; + + return ( + semver.valid(version) != null && + semver.gte(version, REANALYZE_SERVER_MIN_VERSION) + ); + } catch { + return false; + } +}; + +// Get socket path for a monorepo root +const getSocketPath = (monorepoRoot: string): string => { + return path.join(monorepoRoot, REANALYZE_SOCKET_FILENAME); +}; + +// Check if server is running (socket file exists) +const isServerRunning = (monorepoRoot: string): boolean => { + const socketPath = getSocketPath(monorepoRoot); + return fs.existsSync(socketPath); +}; + +// Start reanalyze server for a monorepo. +// Note: This should only be called after supportsReanalyzeServer() returns true, +// which ensures ReScript >= 12.1.0 where the reanalyze-server subcommand exists. +export const startReanalyzeServer = async ( + monorepoRoot: string, + binaryPath: string, + clientOutputChannel?: OutputChannel, +): Promise => { + // Check if already running (either by us or externally) + if (isServerRunning(monorepoRoot)) { + // Check if we have a record of starting it + const existing = reanalyzeServers.get(monorepoRoot); + if (existing) { + existing.outputChannel?.appendLine( + "[info] Server already running (started by us)", + ); + return existing; + } + // Server running but not started by us - just record it + clientOutputChannel?.appendLine( + `[info] Found existing reanalyze-server for ${path.basename(monorepoRoot)} (not started by extension)`, + ); + const state: ReanalyzeServerState = { + process: null, + monorepoRoot, + socketPath: getSocketPath(monorepoRoot), + startedByUs: false, + outputChannel: null, + }; + reanalyzeServers.set(monorepoRoot, state); + return state; + } + + // Create output channel for server logs + const outputChannel = window.createOutputChannel( + `ReScript Reanalyze Server (${path.basename(monorepoRoot)})`, + ); + + 
outputChannel.appendLine( + `[info] Starting reanalyze-server in ${monorepoRoot}`, + ); + + // Start the server + const serverProcess = cp.spawn(binaryPath, ["reanalyze-server"], { + cwd: monorepoRoot, + stdio: ["ignore", "pipe", "pipe"], + }); + + if (serverProcess.pid == null) { + outputChannel.appendLine("[error] Failed to start reanalyze-server"); + return null; + } + + const state: ReanalyzeServerState = { + process: serverProcess, + monorepoRoot, + socketPath: getSocketPath(monorepoRoot), + startedByUs: true, + outputChannel, + }; + + // Log stdout and stderr to output channel + serverProcess.stdout?.on("data", (data) => { + outputChannel.appendLine(`[stdout] ${data.toString().trim()}`); + }); + + serverProcess.stderr?.on("data", (data) => { + outputChannel.appendLine(`[stderr] ${data.toString().trim()}`); + }); + + serverProcess.on("error", (err) => { + outputChannel.appendLine(`[error] Server error: ${err.message}`); + }); + + serverProcess.on("exit", (code, signal) => { + outputChannel.appendLine( + `[info] Server exited with code ${code}, signal ${signal}`, + ); + reanalyzeServers.delete(monorepoRoot); + }); + + reanalyzeServers.set(monorepoRoot, state); + + // Wait briefly for socket file to be created (up to 3 seconds) + for (let i = 0; i < 30; i++) { + if (isServerRunning(monorepoRoot)) { + outputChannel.appendLine(`[info] Server socket ready`); + return state; + } + await new Promise((resolve) => setTimeout(resolve, 100)); + } + + outputChannel.appendLine( + "[warn] Server started but socket not found after 3 seconds", + ); + return state; +}; + +// Clean up socket file if it exists +const cleanupSocketFile = (socketPath: string): void => { + try { + if (fs.existsSync(socketPath)) { + fs.unlinkSync(socketPath); + } + } catch { + // Ignore errors during cleanup + } +}; + +// Stop reanalyze server for a monorepo (only if we started it) +export const stopReanalyzeServer = ( + monorepoRoot: string | null, + clientOutputChannel?: OutputChannel, +): void => 
{ + if (monorepoRoot == null) return; + + const state = reanalyzeServers.get(monorepoRoot); + if (!state) return; + + // Only kill the process if we started it + if (state.startedByUs && state.process != null) { + state.process.kill(); + state.outputChannel?.appendLine("[info] Server stopped by extension"); + // Clean up socket file to prevent stale socket issues + cleanupSocketFile(state.socketPath); + } else if (!state.startedByUs) { + clientOutputChannel?.appendLine( + `[info] Leaving external reanalyze-server running for ${path.basename(monorepoRoot)}`, + ); + } + + reanalyzeServers.delete(monorepoRoot); +}; + +// Stop all servers we started +export const stopAllReanalyzeServers = (): void => { + for (const [_monorepoRoot, state] of reanalyzeServers) { + if (state.startedByUs && state.process != null) { + state.process.kill(); + state.outputChannel?.appendLine("[info] Server stopped by extension"); + // Clean up socket file to prevent stale socket issues + cleanupSocketFile(state.socketPath); + } + } + reanalyzeServers.clear(); +}; + +// Show server log for a monorepo +// Returns true if the output channel was shown, false otherwise +// This is an async function because it may need to find the binary to derive monorepo root +export const showReanalyzeServerLog = async ( + monorepoRoot: string | null, +): Promise => { + if (monorepoRoot == null) { + // Try to find any running server + const firstServer = reanalyzeServers.values().next().value; + if (firstServer?.outputChannel) { + firstServer.outputChannel.show(); + return true; + } else { + window.showInformationMessage( + "No reanalyze server is currently running.", + ); + return false; + } + } + + // First try direct lookup + let state = reanalyzeServers.get(monorepoRoot); + if (state?.outputChannel) { + state.outputChannel.show(); + return true; + } + + // If not found, try to derive monorepo root from binary path + // (the server is registered under monorepo root, not subpackage root) + const binaryPath = 
await findBinary({ + projectRootPath: monorepoRoot, + binary: "rescript-tools.exe", + }); + if (binaryPath != null) { + const derivedMonorepoRoot = getMonorepoRootFromBinaryPath(binaryPath); + if (derivedMonorepoRoot != null && derivedMonorepoRoot !== monorepoRoot) { + state = reanalyzeServers.get(derivedMonorepoRoot); + if (state?.outputChannel) { + state.outputChannel.show(); + return true; + } + } + } + + window.showInformationMessage( + `No reanalyze server log available for ${path.basename(monorepoRoot)}`, + ); + return false; +}; + export let statusBarItem = { setToStopText: (codeAnalysisRunningStatusBarItem: StatusBarItem) => { codeAnalysisRunningStatusBarItem.text = "$(debug-stop) Stop Code Analyzer"; @@ -204,46 +452,85 @@ let resultsToDiagnostics = ( }; }; +// Returns the monorepo root path if a reanalyze server was started, null otherwise. +// This allows the caller to track which server to stop later. export const runCodeAnalysisWithReanalyze = async ( diagnosticsCollection: DiagnosticCollection, diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap, outputChannel: OutputChannel, codeAnalysisRunningStatusBarItem: StatusBarItem, -) => { - let currentDocument = window.activeTextEditor.document; +): Promise => { + let currentDocument = window.activeTextEditor?.document; + if (!currentDocument) { + window.showErrorMessage("No active document found."); + return null; + } let projectRootPath: NormalizedPath | null = normalizePath( findProjectRootOfFile(currentDocument.uri.fsPath), ); - let binaryPath: string | null = await findBinary({ + + // findBinary walks up the directory tree to find node_modules/rescript, + // so it works correctly for monorepos (finds the workspace root's binary) + // Note: rescript-tools.exe (with reanalyze command) is only available in ReScript 12+ + const binaryPath: string | null = await findBinary({ projectRootPath, binary: "rescript-tools.exe", }); - if (binaryPath == null) { - binaryPath = await findBinary({ - 
projectRootPath, - binary: "rescript-editor-analysis.exe", - }); - } if (binaryPath === null) { - window.showErrorMessage("Binary executable not found."); - return; + outputChannel.appendLine( + `[error] rescript-tools.exe not found for project root: ${projectRootPath}. Code analysis requires ReScript 12 or later.`, + ); + window.showErrorMessage( + "Code analysis requires ReScript 12 or later (rescript-tools.exe not found).", + ); + return null; } - // Strip everything after the outermost node_modules segment to get the project root. - let cwd = - binaryPath.match(/^(.*?)[\\/]+node_modules([\\/]+|$)/)?.[1] ?? binaryPath; + // Derive monorepo root from binary path - the directory containing node_modules + // This handles monorepos correctly since findBinary walks up to find the binary + const monorepoRootPath: NormalizedPath | null = normalizePath( + getMonorepoRootFromBinaryPath(binaryPath), + ); + + if (monorepoRootPath === null) { + outputChannel.appendLine( + `[error] Could not determine workspace root from binary path: ${binaryPath}`, + ); + window.showErrorMessage("Could not determine workspace root."); + return null; + } + + // Check if we should use reanalyze-server (ReScript >= 12.1.0) + const useServer = await supportsReanalyzeServer(monorepoRootPath); + + if (useServer && monorepoRootPath) { + // Ensure server is running from workspace root + const serverState = await startReanalyzeServer( + monorepoRootPath, + binaryPath, + outputChannel, + ); + if (serverState) { + outputChannel.appendLine( + `[info] Using reanalyze-server for ${path.basename(monorepoRootPath)}`, + ); + } + } statusBarItem.setToRunningText(codeAnalysisRunningStatusBarItem); let opts = ["reanalyze", "-json"]; - let p = cp.spawn(binaryPath, opts, { cwd }); + let p = cp.spawn(binaryPath, opts, { cwd: monorepoRootPath }); if (p.stdout == null) { + outputChannel.appendLine( + `[error] Failed to spawn reanalyze process: stdout is null. 
Binary: ${binaryPath}, cwd: ${monorepoRootPath}`, + ); statusBarItem.setToFailed(codeAnalysisRunningStatusBarItem); - window.showErrorMessage("Something went wrong."); - return; + window.showErrorMessage("Failed to start code analysis process."); + return null; } let data = ""; @@ -292,7 +579,7 @@ export const runCodeAnalysisWithReanalyze = async ( outputChannel.appendLine( `> To reproduce, run "${binaryPath} ${opts.join( " ", - )}" in directory: "${cwd}"`, + )}" in directory: "${monorepoRootPath}"`, ); outputChannel.appendLine("\n"); } @@ -324,4 +611,7 @@ export const runCodeAnalysisWithReanalyze = async ( statusBarItem.setToStopText(codeAnalysisRunningStatusBarItem); }); + + // Return the monorepo root so the caller can track which server to stop + return monorepoRootPath; }; diff --git a/client/src/extension.ts b/client/src/extension.ts index cefe346a5..dd24726f7 100644 --- a/client/src/extension.ts +++ b/client/src/extension.ts @@ -32,6 +32,7 @@ import { } from "./commands/code_analysis"; import { pasteAsRescriptJson } from "./commands/paste_as_rescript_json"; import { pasteAsRescriptJsx } from "./commands/paste_as_rescript_jsx"; +import { findProjectRootOfFile } from "../../shared/src/projectRoots"; let client: LanguageClient; @@ -307,7 +308,8 @@ export function activate(context: ExtensionContext) { let inCodeAnalysisState: { active: boolean; - } = { active: false }; + currentMonorepoRoot: string | null; + } = { active: false, currentMonorepoRoot: null }; // This code actions provider yields the code actions potentially extracted // from the code analysis to the editor. @@ -431,13 +433,7 @@ export function activate(context: ExtensionContext) { ); // Starts the code analysis mode. - commands.registerCommand("rescript-vscode.start_code_analysis", () => { - // Save the directory this first ran from, and re-use that when continuously - // running the analysis. 
This is so that the target of the analysis does not - // change on subsequent runs, if there are multiple ReScript projects open - // in the editor. - let currentDocument = window.activeTextEditor.document; - + commands.registerCommand("rescript-vscode.start_code_analysis", async () => { inCodeAnalysisState.active = true; codeAnalysisRunningStatusBarItem.command = @@ -445,27 +441,76 @@ export function activate(context: ExtensionContext) { codeAnalysisRunningStatusBarItem.show(); statusBarItem.setToStopText(codeAnalysisRunningStatusBarItem); - customCommands.codeAnalysisWithReanalyze( + // Start code analysis and capture the monorepo root for server management + const monorepoRoot = await customCommands.codeAnalysisWithReanalyze( diagnosticsCollection, diagnosticsResultCodeActions, outputChannel, codeAnalysisRunningStatusBarItem, ); + inCodeAnalysisState.currentMonorepoRoot = monorepoRoot; }); commands.registerCommand("rescript-vscode.stop_code_analysis", () => { inCodeAnalysisState.active = false; + // Stop server if we started it for this project + customCommands.stopReanalyzeServer( + inCodeAnalysisState.currentMonorepoRoot, + outputChannel, + ); + inCodeAnalysisState.currentMonorepoRoot = null; + diagnosticsCollection.clear(); diagnosticsResultCodeActions.clear(); codeAnalysisRunningStatusBarItem.hide(); }); + // Show reanalyze server log + commands.registerCommand( + "rescript-vscode.show_reanalyze_server_log", + async () => { + let currentDocument = window.activeTextEditor?.document; + let projectRootPath: string | null = null; + + if (currentDocument) { + projectRootPath = findProjectRootOfFile(currentDocument.uri.fsPath); + } + + return await customCommands.showReanalyzeServerLog(projectRootPath); + }, + ); + commands.registerCommand("rescript-vscode.switch-impl-intf", () => { customCommands.switchImplIntf(client); }); + // Start build command + commands.registerCommand("rescript-vscode.start_build", async () => { + let currentDocument = 
window.activeTextEditor?.document; + if (!currentDocument) { + window.showErrorMessage("No active document found."); + return; + } + + try { + const result = (await client.sendRequest("rescript/startBuild", { + uri: currentDocument.uri.toString(), + })) as { success: boolean }; + + if (result.success) { + window.showInformationMessage("Build watcher started."); + } else { + window.showErrorMessage( + "Failed to start build. Check that a ReScript project is open.", + ); + } + } catch (e) { + window.showErrorMessage(`Failed to start build: ${String(e)}`); + } + }); + commands.registerCommand("rescript-vscode.restart_language_server", () => { client.stop().then(() => { client = createLanguageClient(); @@ -514,6 +559,9 @@ export function activate(context: ExtensionContext) { } export function deactivate(): Thenable | undefined { + // Stop all reanalyze servers we started + customCommands.stopAllReanalyzeServers(); + if (!client) { return undefined; } diff --git a/client/src/test/suite/exampleProject.test.ts b/client/src/test/suite/exampleProject.test.ts new file mode 100644 index 000000000..e3acc7a72 --- /dev/null +++ b/client/src/test/suite/exampleProject.test.ts @@ -0,0 +1,328 @@ +/** + * Example Project Tests (Code Analysis Server Test Suite) + * + * Run these tests: + * cd client && npm run test -- --label example-project + */ +import * as assert from "assert"; +import * as path from "path"; +import * as vscode from "vscode"; +import { + getWorkspaceRoot, + removeRescriptLockFile, + removeReanalyzeSocketFile, + ensureExtensionActivated, + sleep, + getCompilerLogPath, + getFileMtime, + waitForFileUpdate, + insertCommentAndSave, + restoreContentAndSave, + startBuildWatcher, + startCodeAnalysis, + stopCodeAnalysis, + showReanalyzeServerLog, + findBuildPromptInLogs, +} from "./helpers"; + +suite("Code Analysis Server Test Suite", () => { + test("Extension should be present", async () => { + const extension = vscode.extensions.getExtension( + 
"chenglou92.rescript-vscode", + ); + assert.ok(extension, "ReScript extension should be present"); + console.log("Extension found:", extension.id); + }); + + test("Commands should be registered after activation", async () => { + const extension = vscode.extensions.getExtension( + "chenglou92.rescript-vscode", + ); + if (!extension) { + console.log("Extension not found, skipping command test"); + return; + } + + if (!extension.isActive) { + console.log("Activating extension..."); + await extension.activate(); + } + + const commands = await vscode.commands.getCommands(true); + + const expectedCommands = [ + "rescript-vscode.start_code_analysis", + "rescript-vscode.stop_code_analysis", + "rescript-vscode.show_reanalyze_server_log", + "rescript-vscode.start_build", + ]; + + for (const cmd of expectedCommands) { + assert.ok(commands.includes(cmd), `Command ${cmd} should be registered`); + } + console.log("All commands registered successfully!"); + }); + + test("Start Code Analysis should run on a ReScript file", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + console.log("Running start_code_analysis command..."); + await startCodeAnalysis(); + console.log("Code analysis command completed"); + + console.log("Running stop_code_analysis command..."); + await stopCodeAnalysis(); + console.log("Test completed successfully"); + }); + + test("Start Build command should start build watcher", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder 
found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + console.log("Running start_build command..."); + await startBuildWatcher(); + + console.log( + "Test completed - check Language Server log for 'Starting build watcher' or 'Build watcher already running' message", + ); + }); + + test("Build watcher recompiles on file save", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + const editor = await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + console.log("Starting build watcher..."); + await startBuildWatcher(); + console.log("Build watcher started"); + + const compilerLogPath = getCompilerLogPath(workspaceRoot); + const mtimeBefore = getFileMtime(compilerLogPath); + if (mtimeBefore) { + console.log(`compiler.log mtime before: ${mtimeBefore.toISOString()}`); + } else { + console.log("compiler.log does not exist yet"); + } + + console.log("Editing file..."); + const originalContent = document.getText(); + await insertCommentAndSave(editor, "/* test comment */\n"); + console.log("File saved with edit"); + + console.log("Waiting for compilation..."); + const mtimeAfter = await waitForFileUpdate(compilerLogPath, mtimeBefore); + if (mtimeAfter) { + console.log(`compiler.log mtime after: 
${mtimeAfter.toISOString()}`); + } else { + console.log("compiler.log still does not exist"); + } + + assert.ok(mtimeAfter, "compiler.log should exist after file save"); + if (mtimeBefore && mtimeAfter) { + assert.ok( + mtimeAfter > mtimeBefore, + "compiler.log should be updated after file save", + ); + console.log("SUCCESS: compiler.log was updated after file save"); + } else if (!mtimeBefore && mtimeAfter) { + console.log("SUCCESS: compiler.log was created after file save"); + } + + console.log("Restoring original content..."); + await restoreContentAndSave(editor, originalContent); + console.log("Original content restored"); + + await sleep(1000); + console.log("Test completed"); + }); + + test("Code analysis with incremental updates", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + // Remove stale socket file to ensure a fresh server is started + // Note: Only remove the socket file, not the lock file, to keep the build watcher running + removeReanalyzeSocketFile(workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Step 1: Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + const editor = await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + console.log("Step 2: Starting build..."); + await startBuildWatcher(); + console.log("Build started"); + + console.log("Step 3: Starting code analysis..."); + await startCodeAnalysis(); + console.log("Code analysis started"); + + console.log("Step 3b: Opening reanalyze server log..."); + await showReanalyzeServerLog(); + + console.log("Step 4: Checking for diagnostics..."); + const diagnostics = vscode.languages.getDiagnostics(document.uri); + console.log(`Found ${diagnostics.length} 
diagnostics in Hello.res`); + assert.ok(diagnostics.length > 0, "Should have diagnostics for dead code"); + for (const diag of diagnostics.slice(0, 5)) { + console.log( + ` - Line ${diag.range.start.line + 1}: ${diag.message.substring(0, 80)}...`, + ); + } + if (diagnostics.length > 5) { + console.log(` ... and ${diagnostics.length - 5} more`); + } + const initialDiagnosticsCount = diagnostics.length; + + console.log("Step 5: Adding dead code..."); + const originalContent = document.getText(); + const deadCode = "let testDeadVariable12345 = 999\n"; + + const compilerLogPath = getCompilerLogPath(workspaceRoot); + const compilerLogMtimeBefore = getFileMtime(compilerLogPath); + if (compilerLogMtimeBefore) { + console.log( + `compiler.log mtime before: ${compilerLogMtimeBefore.toISOString()}`, + ); + } else { + console.log("compiler.log does not exist before edit"); + } + + await insertCommentAndSave(editor, deadCode); + console.log("Dead code added and saved"); + + console.log("Step 5a: Waiting for compilation..."); + const mtimeAfter = await waitForFileUpdate( + compilerLogPath, + compilerLogMtimeBefore, + ); + if (mtimeAfter) { + console.log(`compiler.log updated: ${mtimeAfter.toISOString()}`); + } else { + console.log("Warning: compilation may not have completed"); + } + + console.log("Step 5b: Re-running code analysis..."); + await vscode.window.showTextDocument(document); + await startCodeAnalysis(); + console.log("Code analysis re-run complete"); + + console.log("Step 6: Checking for updated diagnostics..."); + const updatedDiagnostics = vscode.languages.getDiagnostics(document.uri); + console.log( + `Found ${updatedDiagnostics.length} diagnostics after edit (was ${initialDiagnosticsCount})`, + ); + + assert.ok( + updatedDiagnostics.length > initialDiagnosticsCount, + `Diagnostics count should increase after adding dead code (was ${initialDiagnosticsCount}, now ${updatedDiagnostics.length})`, + ); + + const deadVarDiagnostic = updatedDiagnostics.find((d) => + 
d.message.includes("testDeadVariable12345"), + ); + assert.ok( + deadVarDiagnostic, + "Should find diagnostic for testDeadVariable12345", + ); + console.log( + `Found diagnostic for testDeadVariable12345: ${deadVarDiagnostic.message}`, + ); + + console.log("Step 7: Undoing change..."); + await restoreContentAndSave(editor, originalContent); + console.log("Change undone and saved"); + + await sleep(1000); + + console.log("Step 8: Stopping code analysis..."); + await stopCodeAnalysis(); + console.log("Code analysis stopped"); + + console.log("Step 9: Test completed - check Reanalyze Server log for:"); + console.log(" - [request #1] with 'files: X processed, 0 cached'"); + console.log( + " - [request #2] with 'files: X processed, Y cached' where Y > 0 (incremental)", + ); + }); + + test("Should prompt to start build when no lock file exists", async () => { + const workspaceRoot = getWorkspaceRoot(); + if (!workspaceRoot) { + console.log("No workspace folder found, skipping test"); + return; + } + console.log("Workspace root:", workspaceRoot); + + removeRescriptLockFile(workspaceRoot); + + const resFilePath = path.join(workspaceRoot, "src", "More.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await sleep(1000); + + const promptResult = findBuildPromptInLogs(); + if (promptResult.found) { + console.log( + `Found prompt message: "Prompting to start build for ${promptResult.path}"`, + ); + } + + assert.ok( + promptResult.found, + "Should find 'Prompting to start build' message in Language Server log", + ); + console.log("SUCCESS: Build prompt was shown"); + }); +}); diff --git a/client/src/test/suite/helpers.ts b/client/src/test/suite/helpers.ts new file mode 100644 index 000000000..32196b9ef --- /dev/null +++ b/client/src/test/suite/helpers.ts @@ -0,0 +1,324 @@ +/** + * Shared test helpers for 
ReScript VSCode extension tests + */ +import * as assert from "assert"; +import * as fs from "fs"; +import * as path from "path"; +import * as vscode from "vscode"; + +/** + * Get the workspace root folder path + */ +export function getWorkspaceRoot(): string { + const workspaceFolders = vscode.workspace.workspaceFolders; + if (workspaceFolders && workspaceFolders.length > 0) { + return workspaceFolders[0].uri.fsPath; + } + return ""; +} + +/** + * Remove ReScript 12+ lock file (lib/rescript.lock) and reanalyze socket file + */ +export function removeRescriptLockFile(workspaceRoot: string): void { + const filesToRemove = [ + path.join(workspaceRoot, "lib", "rescript.lock"), + // Also remove reanalyze socket file to ensure a fresh server is started + path.join(workspaceRoot, ".rescript-reanalyze.sock"), + ]; + + for (const file of filesToRemove) { + try { + if (fs.existsSync(file)) { + fs.unlinkSync(file); + console.log(`Removed ${path.basename(file)}`); + } + } catch (e) { + console.log(`Could not remove ${path.basename(file)}:`, e); + } + } +} + +/** + * Remove only the reanalyze socket file (not the lock file) + * Use this when you need a fresh reanalyze server but want to keep the build watcher running + */ +export function removeReanalyzeSocketFile(workspaceRoot: string): void { + const socketPath = path.join(workspaceRoot, ".rescript-reanalyze.sock"); + try { + if (fs.existsSync(socketPath)) { + fs.unlinkSync(socketPath); + console.log("Removed .rescript-reanalyze.sock"); + } + } catch (e) { + console.log("Could not remove .rescript-reanalyze.sock:", e); + } +} + +/** + * Remove ReScript 9/10/11 lock file (.bsb.lock) + */ +export function removeBsbLockFile(workspaceRoot: string): void { + const lockPath = path.join(workspaceRoot, ".bsb.lock"); + try { + if (fs.existsSync(lockPath)) { + fs.unlinkSync(lockPath); + console.log("Removed .bsb.lock"); + } + } catch (e) { + console.log("Could not remove .bsb.lock:", e); + } +} + +/** + * Remove monorepo lock files 
(both rewatch.lock and rescript.lock) + */ +export function removeMonorepoLockFiles(monorepoRoot: string): void { + const filesToRemove = [ + path.join(monorepoRoot, "lib", "rewatch.lock"), + path.join(monorepoRoot, "lib", "rescript.lock"), + // Also remove reanalyze socket file to ensure a fresh server is started + path.join(monorepoRoot, ".rescript-reanalyze.sock"), + ]; + + for (const file of filesToRemove) { + try { + if (fs.existsSync(file)) { + fs.unlinkSync(file); + console.log(`Removed ${path.basename(file)}`); + } + } catch (e) { + console.log(`Could not remove ${path.basename(file)}:`, e); + } + } +} + +/** + * Ensure the ReScript extension is activated + */ +export async function ensureExtensionActivated(): Promise< + vscode.Extension | undefined +> { + const extension = vscode.extensions.getExtension( + "chenglou92.rescript-vscode", + ); + if (extension && !extension.isActive) { + await extension.activate(); + } + return extension; +} + +/** + * Open a file in the editor and return the document + */ +export async function openFile(filePath: string): Promise { + const document = await vscode.workspace.openTextDocument(filePath); + await vscode.window.showTextDocument(document); + return document; +} + +/** + * Find the LSP log file in the most recent test logs directory + */ +export function findLspLogContent(): string | null { + // __dirname is client/out/client/src/test/suite when running tests + const vscodeTestDir = path.resolve(__dirname, "../../../../../.vscode-test"); + const logsBaseDir = path.join(vscodeTestDir, "user-data", "logs"); + + try { + // Find the most recent log directory (format: YYYYMMDDTHHMMSS) + const logDirs = fs + .readdirSync(logsBaseDir) + .filter((d) => /^\d{8}T\d{6}$/.test(d)) + .sort() + .reverse(); + + for (const logDir of logDirs) { + const outputLoggingDir = path.join( + logsBaseDir, + logDir, + "window1", + "exthost", + ); + if (!fs.existsSync(outputLoggingDir)) continue; + + const outputDirs = fs + 
.readdirSync(outputLoggingDir) + .filter((d) => d.startsWith("output_logging_")); + + for (const outputDir of outputDirs) { + const lspLogPath = path.join( + outputLoggingDir, + outputDir, + "1-ReScript Language Server.log", + ); + if (fs.existsSync(lspLogPath)) { + console.log("Checking log file:", lspLogPath); + return fs.readFileSync(lspLogPath, "utf-8"); + } + } + } + } catch (e) { + console.log("Error reading logs:", e); + } + + return null; +} + +/** + * Wait for a condition to become true, polling at intervals + */ +export async function waitFor( + condition: () => boolean, + options: { timeout?: number; interval?: number; message?: string } = {}, +): Promise { + const { timeout = 5000, interval = 500, message = "condition" } = options; + const maxAttempts = Math.ceil(timeout / interval); + + for (let i = 0; i < maxAttempts; i++) { + await new Promise((resolve) => setTimeout(resolve, interval)); + const result = condition(); + console.log(`Checking ${message} (attempt ${i + 1}): ${result}`); + if (result) return true; + } + return false; +} + +/** + * Sleep for a given number of milliseconds + */ +export function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +/** + * Get the path to the compiler.log file + */ +export function getCompilerLogPath(workspaceRoot: string): string { + return path.join(workspaceRoot, "lib", "bs", ".compiler.log"); +} + +/** + * Get the mtime of a file, or null if it doesn't exist + */ +export function getFileMtime(filePath: string): Date | null { + try { + return fs.statSync(filePath).mtime; + } catch { + return null; + } +} + +/** + * Wait for a file's mtime to be updated (newer than the given mtime) + */ +export async function waitForFileUpdate( + filePath: string, + mtimeBefore: Date | null, + options: { timeout?: number; interval?: number } = {}, +): Promise { + const { timeout = 5000, interval = 500 } = options; + const maxAttempts = Math.ceil(timeout / interval); + + for (let i 
= 0; i < maxAttempts; i++) { + await sleep(interval); + const mtimeAfter = getFileMtime(filePath); + if (mtimeAfter && (!mtimeBefore || mtimeAfter > mtimeBefore)) { + return mtimeAfter; + } + } + return getFileMtime(filePath); +} + +/** + * Insert a comment at the beginning of a document and save + */ +export async function insertCommentAndSave( + editor: vscode.TextEditor, + comment: string, +): Promise { + await editor.edit((editBuilder) => { + editBuilder.insert(new vscode.Position(0, 0), comment); + }); + await editor.document.save(); +} + +/** + * Restore original content to a document and save + */ +export async function restoreContentAndSave( + editor: vscode.TextEditor, + originalContent: string, +): Promise { + const document = editor.document; + await editor.edit((editBuilder) => { + const fullRange = new vscode.Range( + new vscode.Position(0, 0), + document.lineAt(document.lineCount - 1).range.end, + ); + editBuilder.replace(fullRange, originalContent); + }); + await document.save(); +} + +/** + * Start the build watcher and wait for it to initialize + */ +export async function startBuildWatcher(waitMs: number = 1000): Promise { + await vscode.commands.executeCommand("rescript-vscode.start_build"); + await sleep(waitMs); +} + +/** + * Start code analysis and wait for it to initialize + */ +export async function startCodeAnalysis(waitMs: number = 1000): Promise { + await vscode.commands.executeCommand("rescript-vscode.start_code_analysis"); + await sleep(waitMs); +} + +/** + * Stop code analysis + */ +export async function stopCodeAnalysis(): Promise { + await vscode.commands.executeCommand("rescript-vscode.stop_code_analysis"); +} + +/** + * Show the reanalyze server log and assert it returns true + */ +export async function showReanalyzeServerLog(): Promise { + const result = await vscode.commands.executeCommand( + "rescript-vscode.show_reanalyze_server_log", + ); + console.log(`Show reanalyze server log result: ${result}`); + assert.strictEqual( + 
result, + true, + "Show reanalyze server log should return true when output channel is shown", + ); +} + +/** + * Result of searching for build prompt in logs + */ +export interface BuildPromptResult { + found: boolean; + path: string; +} + +/** + * Find "Prompting to start build" message in LSP logs + */ +export function findBuildPromptInLogs(): BuildPromptResult { + const logContent = findLspLogContent(); + if (logContent) { + const promptMatch = logContent.match( + /\[Info.*\] Prompting to start build for (.+)/, + ); + if (promptMatch) { + return { found: true, path: promptMatch[1] }; + } + } + return { found: false, path: "" }; +} diff --git a/client/src/test/suite/monorepoRoot.test.ts b/client/src/test/suite/monorepoRoot.test.ts new file mode 100644 index 000000000..aafc52cba --- /dev/null +++ b/client/src/test/suite/monorepoRoot.test.ts @@ -0,0 +1,274 @@ +/** + * Monorepo Root Tests (Monorepo Code Analysis Test Suite) + * + * Run these tests: + * cd client && npm run test -- --label monorepo-root + */ +import * as assert from "assert"; +import * as fs from "fs"; +import * as path from "path"; +import * as vscode from "vscode"; +import { + getWorkspaceRoot, + removeMonorepoLockFiles, + ensureExtensionActivated, + sleep, + getCompilerLogPath, + getFileMtime, + waitForFileUpdate, + insertCommentAndSave, + restoreContentAndSave, + startBuildWatcher, + startCodeAnalysis, + stopCodeAnalysis, + showReanalyzeServerLog, + findBuildPromptInLogs, +} from "./helpers"; + +suite("Monorepo Code Analysis Test Suite", () => { + test("Monorepo: Build watcher works when opening root package", async () => { + const monorepoRoot = getWorkspaceRoot(); + console.log("Monorepo root:", monorepoRoot); + + const rootResPath = path.join(monorepoRoot, "src", "Root.res"); + if (!fs.existsSync(rootResPath)) { + console.log("Monorepo project not found, skipping test"); + return; + } + + removeMonorepoLockFiles(monorepoRoot); + + console.log("Opening root file:", rootResPath); + const 
document = await vscode.workspace.openTextDocument(rootResPath); + const editor = await vscode.window.showTextDocument(document); + console.log("Root file opened successfully"); + + await ensureExtensionActivated(); + + console.log("Starting build watcher from root..."); + await startBuildWatcher(); + console.log("Build watcher started"); + + const compilerLogPath = getCompilerLogPath(monorepoRoot); + const mtimeBefore = getFileMtime(compilerLogPath); + if (mtimeBefore) { + console.log( + `Root compiler.log mtime before: ${mtimeBefore.toISOString()}`, + ); + } else { + console.log("Root compiler.log does not exist yet"); + } + + console.log("Editing root file..."); + const originalContent = document.getText(); + await insertCommentAndSave(editor, "/* monorepo root test */\n"); + console.log("Root file saved with edit"); + + const mtimeAfter = await waitForFileUpdate(compilerLogPath, mtimeBefore); + if (mtimeAfter) { + console.log(`Root compiler.log mtime after: ${mtimeAfter.toISOString()}`); + } else { + console.log("Root compiler.log still does not exist"); + } + + assert.ok(mtimeAfter, "Root compiler.log should exist after file save"); + if (mtimeBefore && mtimeAfter) { + assert.ok( + mtimeAfter > mtimeBefore, + "Root compiler.log should be updated after file save", + ); + console.log("SUCCESS: Root compiler.log was updated"); + } + + await restoreContentAndSave(editor, originalContent); + console.log("Original content restored"); + + await sleep(1000); + console.log("Monorepo root test completed"); + }); + + test("Monorepo: Build watcher works when opening subpackage file", async () => { + const monorepoRoot = getWorkspaceRoot(); + console.log("Monorepo root:", monorepoRoot); + + const appResPath = path.join( + monorepoRoot, + "packages", + "app", + "src", + "App.res", + ); + if (!fs.existsSync(appResPath)) { + console.log("Monorepo app package not found, skipping test"); + return; + } + + console.log("Opening subpackage file:", appResPath); + const document = 
await vscode.workspace.openTextDocument(appResPath); + const editor = await vscode.window.showTextDocument(document); + console.log("Subpackage file opened successfully"); + + await ensureExtensionActivated(); + + console.log("Starting build watcher from subpackage..."); + await startBuildWatcher(); + console.log("Build watcher started"); + + const rootCompilerLogPath = getCompilerLogPath(monorepoRoot); + const mtimeBefore = getFileMtime(rootCompilerLogPath); + if (mtimeBefore) { + console.log( + `Root compiler.log mtime before: ${mtimeBefore.toISOString()}`, + ); + } else { + console.log( + "Root compiler.log does not exist yet (expected for monorepo subpackage)", + ); + } + + console.log("Editing subpackage file..."); + const originalContent = document.getText(); + await insertCommentAndSave(editor, "/* monorepo subpackage test */\n"); + console.log("Subpackage file saved with edit"); + + const mtimeAfter = await waitForFileUpdate( + rootCompilerLogPath, + mtimeBefore, + ); + if (mtimeAfter) { + console.log(`Root compiler.log mtime after: ${mtimeAfter.toISOString()}`); + } else { + console.log("Root compiler.log still does not exist"); + } + + assert.ok( + mtimeAfter, + "Root compiler.log should exist after subpackage file save", + ); + if (mtimeBefore && mtimeAfter) { + assert.ok( + mtimeAfter > mtimeBefore, + "Root compiler.log should be updated after subpackage file save", + ); + console.log( + "SUCCESS: Root compiler.log was updated from subpackage edit", + ); + } + + await restoreContentAndSave(editor, originalContent); + console.log("Original content restored"); + + await sleep(1000); + console.log("Monorepo subpackage test completed"); + }); + + test("Monorepo: Code analysis works from subpackage", async () => { + const monorepoRoot = getWorkspaceRoot(); + console.log("Monorepo root:", monorepoRoot); + + const libResPath = path.join( + monorepoRoot, + "packages", + "lib", + "src", + "Lib.res", + ); + if (!fs.existsSync(libResPath)) { + 
console.log("Monorepo lib package not found, skipping test"); + return; + } + + console.log("Opening lib file:", libResPath); + const document = await vscode.workspace.openTextDocument(libResPath); + await vscode.window.showTextDocument(document); + console.log("Lib file opened successfully"); + + await ensureExtensionActivated(); + + console.log("Starting build..."); + await startBuildWatcher(); + + console.log("Starting code analysis..."); + await startCodeAnalysis(); + console.log("Code analysis started"); + + console.log("Opening reanalyze server log..."); + await showReanalyzeServerLog(); + + const diagnostics = vscode.languages.getDiagnostics(document.uri); + console.log(`Found ${diagnostics.length} diagnostics in Lib.res`); + for (const diag of diagnostics.slice(0, 5)) { + console.log( + ` - Line ${diag.range.start.line + 1}: ${diag.message.substring(0, 80)}...`, + ); + } + + assert.ok( + diagnostics.length > 0, + "Should have diagnostics for dead code in Lib.res", + ); + + const deadFuncDiagnostic = diagnostics.find((d) => + d.message.includes("unusedLibFunction"), + ); + assert.ok( + deadFuncDiagnostic, + "Should find diagnostic for unusedLibFunction in monorepo lib", + ); + console.log( + `Found diagnostic for unusedLibFunction: ${deadFuncDiagnostic?.message}`, + ); + + console.log("Stopping code analysis..."); + await stopCodeAnalysis(); + console.log("Code analysis stopped"); + + console.log("Monorepo code analysis test completed"); + }); + + test("Monorepo: Should prompt to start build when opening subpackage without lock file", async () => { + const monorepoRoot = getWorkspaceRoot(); + console.log("Monorepo root:", monorepoRoot); + + const appResPath = path.join( + monorepoRoot, + "packages", + "app", + "src", + "App.res", + ); + if (!fs.existsSync(appResPath)) { + console.log("Monorepo app package not found, skipping test"); + return; + } + + console.log("Removing lock file from monorepo root..."); + removeMonorepoLockFiles(monorepoRoot); + + 
console.log("Opening subpackage file:", appResPath); + const document = await vscode.workspace.openTextDocument(appResPath); + await vscode.window.showTextDocument(document); + console.log("Subpackage file opened successfully"); + + await sleep(1000); + + const promptResult = findBuildPromptInLogs(); + if (promptResult.found) { + console.log( + `Found prompt message: "Prompting to start build for ${promptResult.path}"`, + ); + } + + assert.ok( + promptResult.found, + "Should find 'Prompting to start build' message in Language Server log", + ); + + assert.ok( + promptResult.path.includes("monorepo-project") && + !promptResult.path.includes("packages"), + `Prompt path should be monorepo root, not subpackage. Got: ${promptResult.path}`, + ); + console.log("SUCCESS: Build prompt was shown for monorepo root path"); + }); +}); diff --git a/client/src/test/suite/monorepoSubpackage.test.ts b/client/src/test/suite/monorepoSubpackage.test.ts new file mode 100644 index 000000000..f18a4849a --- /dev/null +++ b/client/src/test/suite/monorepoSubpackage.test.ts @@ -0,0 +1,153 @@ +/** + * Monorepo Subpackage Tests (Monorepo Subpackage Test Suite) + * + * This test suite runs with VSCode opened on a subpackage (packages/app), + * not the monorepo root. It tests that the extension correctly detects + * monorepo structure even when opened from a subpackage. 
+ * + * Run these tests: + * cd client && npm run test -- --label monorepo-subpackage + */ +import * as assert from "assert"; +import * as fs from "fs"; +import * as path from "path"; +import * as vscode from "vscode"; +import { + getWorkspaceRoot, + removeMonorepoLockFiles, + sleep, + startBuildWatcher, + startCodeAnalysis, + stopCodeAnalysis, + showReanalyzeServerLog, + findBuildPromptInLogs, +} from "./helpers"; + +suite("Monorepo Subpackage Test Suite", () => { + test("Subpackage workspace: Should prompt to start build with monorepo root path", async () => { + // In this test, workspaceRoot is packages/app (the subpackage) + const workspaceRoot = getWorkspaceRoot(); + console.log("Workspace root (subpackage):", workspaceRoot); + + // The monorepo root is 2 levels up from packages/app + const monorepoRoot = path.resolve(workspaceRoot, "../.."); + console.log("Monorepo root:", monorepoRoot); + + // Verify we're in the right setup - workspace should be a subpackage + assert.ok( + workspaceRoot.includes("packages"), + `Workspace should be in packages folder, got: ${workspaceRoot}`, + ); + + // Check if the subpackage has a rescript.json + const rescriptJsonPath = path.join(workspaceRoot, "rescript.json"); + if (!fs.existsSync(rescriptJsonPath)) { + console.log("Subpackage rescript.json not found, skipping test"); + return; + } + + // Remove lock file from MONOREPO ROOT to trigger the "Start Build" prompt + console.log("Removing lock file from monorepo root..."); + removeMonorepoLockFiles(monorepoRoot); + + // Open a .res file from the subpackage workspace + const resFilePath = path.join(workspaceRoot, "src", "App.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + // Wait for LSP to process - since no lock file exists, it should prompt for build + await sleep(1000); + + // Read the Language 
Server log to verify the prompt was shown + const promptResult = findBuildPromptInLogs(); + if (promptResult.found) { + console.log( + `Found prompt message: "Prompting to start build for ${promptResult.path}"`, + ); + } + + // Assert that the prompt was shown + assert.ok( + promptResult.found, + "Should find 'Prompting to start build' message in Language Server log", + ); + + // Assert that the prompt path is the monorepo root, not the subpackage + // Even though we opened from packages/app, the build prompt should use monorepo root + assert.ok( + promptResult.path.includes("monorepo-project") && + !promptResult.path.includes("packages"), + `Prompt path should be monorepo root, not subpackage. Got: ${promptResult.path}`, + ); + console.log( + "SUCCESS: Build prompt correctly uses monorepo root path when opened from subpackage", + ); + + // Now start the build and verify the lock file is created at the monorepo root + console.log("Starting build from subpackage workspace..."); + await startBuildWatcher(1500); + console.log("Build started"); + + // Check that the lock file exists at the MONOREPO ROOT, not the subpackage + const monorepoLockPath = path.join(monorepoRoot, "lib", "rescript.lock"); + const subpackageLockPath = path.join(workspaceRoot, "lib", "rescript.lock"); + + const monorepoLockExists = fs.existsSync(monorepoLockPath); + const subpackageLockExists = fs.existsSync(subpackageLockPath); + + console.log( + `Monorepo lock file (${monorepoLockPath}): ${monorepoLockExists ? "EXISTS" : "NOT FOUND"}`, + ); + console.log( + `Subpackage lock file (${subpackageLockPath}): ${subpackageLockExists ? 
"EXISTS" : "NOT FOUND"}`, + ); + + // The lock file should exist at the monorepo root + assert.ok( + monorepoLockExists, + `Lock file should exist at monorepo root: ${monorepoLockPath}`, + ); + + // The lock file should NOT exist at the subpackage level + assert.ok( + !subpackageLockExists, + `Lock file should NOT exist at subpackage: ${subpackageLockPath}`, + ); + + console.log("SUCCESS: Lock file created at monorepo root, not subpackage"); + + // Remove any stale reanalyze socket file from a previous test run + const socketPath = path.join(monorepoRoot, ".rescript-reanalyze.sock"); + if (fs.existsSync(socketPath)) { + fs.unlinkSync(socketPath); + console.log("Removed stale socket file"); + } + + // Start code analysis + console.log("Starting code analysis..."); + await startCodeAnalysis(); + console.log("Code analysis started"); + + // Open the reanalyze server log - verify it returns true (output channel shown) + console.log("Opening reanalyze server log..."); + await showReanalyzeServerLog(); + + // Verify diagnostics are shown (code analysis is working from subpackage) + const diagnostics = vscode.languages.getDiagnostics(document.uri); + console.log(`Found ${diagnostics.length} diagnostics in App.res`); + assert.ok( + diagnostics.length > 0, + "Code analysis should find diagnostics in App.res when run from subpackage", + ); + + // Stop code analysis + console.log("Stopping code analysis..."); + await stopCodeAnalysis(); + console.log("Code analysis stopped"); + + console.log("Test complete - lock file will be cleaned up on LSP shutdown"); + }); +}); diff --git a/client/src/test/suite/rescript9.test.ts b/client/src/test/suite/rescript9.test.ts new file mode 100644 index 000000000..b2d1dd5ad --- /dev/null +++ b/client/src/test/suite/rescript9.test.ts @@ -0,0 +1,241 @@ +/** + * ReScript 9 Tests (ReScript 9 Build Test Suite) + * + * This tests that the build watcher works with older ReScript versions + * that use "rescript build -w" instead of "rescript watch". 
+ * + * Run these tests: + * cd client && npm run test -- --label rescript9-project + */ +import * as assert from "assert"; +import * as fs from "fs"; +import * as path from "path"; +import * as vscode from "vscode"; +import { + getWorkspaceRoot, + removeBsbLockFile, + ensureExtensionActivated, + waitFor, + sleep, + getCompilerLogPath, + getFileMtime, + waitForFileUpdate, + insertCommentAndSave, + restoreContentAndSave, + startBuildWatcher, + findBuildPromptInLogs, +} from "./helpers"; + +suite("ReScript 9 Build Test Suite", () => { + test("ReScript 9: Extension should be present", async () => { + const extension = vscode.extensions.getExtension( + "chenglou92.rescript-vscode", + ); + assert.ok(extension, "ReScript extension should be present"); + console.log("Extension found:", extension.id); + }); + + test("ReScript 9: Build watcher should start with 'rescript build -w'", async () => { + const workspaceRoot = getWorkspaceRoot(); + console.log("Workspace root:", workspaceRoot); + + // Verify we're in the ReScript 9 project + const bsconfigPath = path.join(workspaceRoot, "bsconfig.json"); + if (!fs.existsSync(bsconfigPath)) { + console.log("bsconfig.json not found, skipping test"); + return; + } + + // Check ReScript version + const packageJsonPath = path.join( + workspaceRoot, + "node_modules", + "rescript", + "package.json", + ); + if (fs.existsSync(packageJsonPath)) { + const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8")); + console.log("ReScript version:", packageJson.version); + assert.ok( + packageJson.version.startsWith("9."), + `Expected ReScript 9.x, got ${packageJson.version}`, + ); + } + + // Open a ReScript file + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + // 
Start the build watcher + console.log("Starting build watcher (should use 'rescript build -w')..."); + await startBuildWatcher(1500); + console.log("Build watcher started"); + + // Check if the lock file was created (.bsb.lock for ReScript 9) + const lockPath = path.join(workspaceRoot, ".bsb.lock"); + const lockExists = fs.existsSync(lockPath); + console.log(`.bsb.lock exists: ${lockExists}`); + assert.ok( + lockExists, + ".bsb.lock should exist after starting build watcher", + ); + + console.log("ReScript 9 build watcher test completed"); + }); + + test("ReScript 9: Build watcher recompiles on file save", async () => { + const workspaceRoot = getWorkspaceRoot(); + console.log("Workspace root:", workspaceRoot); + + // Open a ReScript file + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + const editor = await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + await ensureExtensionActivated(); + + // Start the build watcher + console.log("Starting build watcher..."); + await startBuildWatcher(); + console.log("Build watcher started"); + + // Check compiler.log modification time before edit + const compilerLogPath = getCompilerLogPath(workspaceRoot); + const mtimeBefore = getFileMtime(compilerLogPath); + if (mtimeBefore) { + console.log(`compiler.log mtime before: ${mtimeBefore.toISOString()}`); + } else { + console.log("compiler.log does not exist yet"); + } + + // Edit the file and save + console.log("Editing file..."); + const originalContent = document.getText(); + await insertCommentAndSave(editor, "/* rescript 9 test */\n"); + console.log("File saved with edit"); + + // Wait for compilation + console.log("Waiting for compilation..."); + const mtimeAfter = await waitForFileUpdate(compilerLogPath, mtimeBefore, { + timeout: 3000, + }); + if (mtimeAfter) { + console.log(`compiler.log mtime 
after: ${mtimeAfter.toISOString()}`); + } else { + console.log("compiler.log still does not exist"); + } + + // Assert that compiler.log was updated + assert.ok(mtimeAfter, "compiler.log should exist after file save"); + if (mtimeBefore && mtimeAfter) { + assert.ok( + mtimeAfter > mtimeBefore, + "compiler.log should be updated after file save", + ); + console.log("SUCCESS: compiler.log was updated after file save"); + } else if (!mtimeBefore && mtimeAfter) { + console.log("SUCCESS: compiler.log was created after file save"); + } + + // Restore original content + console.log("Restoring original content..."); + await restoreContentAndSave(editor, originalContent); + console.log("Original content restored"); + + await sleep(1000); + console.log("ReScript 9 recompilation test completed"); + }); + + test("ReScript 9: Should prompt to start build when no lock file exists", async () => { + const workspaceRoot = getWorkspaceRoot(); + console.log("Workspace root:", workspaceRoot); + + // Remove lock file to trigger the "Start Build" prompt + removeBsbLockFile(workspaceRoot); + + // Open a .res file to trigger the LSP + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + // Wait for LSP to process and potentially show the prompt + await sleep(1000); + + // Read the Language Server log to verify the prompt was shown + const promptResult = findBuildPromptInLogs(); + if (promptResult.found) { + console.log("Found prompt message in logs"); + } + + assert.ok( + promptResult.found, + "Should find 'Prompting to start build' message in Language Server log", + ); + console.log("SUCCESS: Build prompt was shown for ReScript 9 project"); + }); + + test("ReScript 9: Lock file should be cleaned up on language server restart", async () => { + const workspaceRoot 
= getWorkspaceRoot(); + console.log("Workspace root:", workspaceRoot); + + // First restart language server to ensure clean state + console.log("Restarting language server to ensure clean state..."); + await vscode.commands.executeCommand( + "rescript-vscode.restart_language_server", + ); + await sleep(2000); + + // Open a ReScript file + const resFilePath = path.join(workspaceRoot, "src", "Hello.res"); + console.log("Opening file:", resFilePath); + + const document = await vscode.workspace.openTextDocument(resFilePath); + await vscode.window.showTextDocument(document); + console.log("File opened successfully"); + + // Start the build watcher + console.log("Starting build watcher..."); + await vscode.commands.executeCommand("rescript-vscode.start_build"); + + // Wait for lock file to appear (poll up to 5 seconds) + await sleep(500); + const lockPath = path.join(workspaceRoot, ".bsb.lock"); + const lockExistsBefore = await waitFor(() => fs.existsSync(lockPath), { + timeout: 5000, + interval: 500, + message: ".bsb.lock", + }); + assert.ok(lockExistsBefore, ".bsb.lock should exist before restart"); + + // Restart language server (this should kill the build watcher and clean up lock file) + console.log("Restarting language server..."); + await vscode.commands.executeCommand( + "rescript-vscode.restart_language_server", + ); + + // Wait for restart to complete + await sleep(2000); + + // Verify lock file is cleaned up + const lockExistsAfter = fs.existsSync(lockPath); + console.log(`.bsb.lock exists after restart: ${lockExistsAfter}`); + assert.ok( + !lockExistsAfter, + ".bsb.lock should be cleaned up after language server restart", + ); + + console.log("SUCCESS: Lock file was cleaned up on language server restart"); + }); +}); diff --git a/client/tsconfig.json b/client/tsconfig.json index 1974b6b8d..04ed11eee 100644 --- a/client/tsconfig.json +++ b/client/tsconfig.json @@ -5,7 +5,8 @@ "lib": ["ES2019"], "outDir": "out", "rootDirs": ["src", "../shared/src"], - 
"sourceMap": true + "sourceMap": true, + "skipLibCheck": true }, "include": ["src", "../shared/src"], "exclude": ["node_modules"] diff --git a/package.json b/package.json index fa51db7df..9eea5f2a9 100644 --- a/package.json +++ b/package.json @@ -73,10 +73,18 @@ "command": "rescript-vscode.stop_code_analysis", "title": "ReScript: Stop Code Analyzer" }, + { + "command": "rescript-vscode.show_reanalyze_server_log", + "title": "ReScript: Show Code Analyzer Server Log" + }, { "command": "rescript-vscode.restart_language_server", "title": "ReScript: Restart Language Server" }, + { + "command": "rescript-vscode.start_build", + "title": "ReScript: Start Build" + }, { "command": "rescript-vscode.switch-impl-intf", "title": "ReScript: Switch implementation/interface", diff --git a/server/src/projectFiles.ts b/server/src/projectFiles.ts index a8684360a..f39edd2fc 100644 --- a/server/src/projectFiles.ts +++ b/server/src/projectFiles.ts @@ -16,6 +16,9 @@ export interface projectFiles { namespaceName: string | null; bsbWatcherByEditor: null | cp.ChildProcess; + // The root path where the build watcher runs (could be monorepo root) + // Used for lock file cleanup when killing the watcher + buildRootPath: NormalizedPath | null; // This keeps track of whether we've prompted the user to start a build // automatically, if there's no build currently running for the project. 
We diff --git a/server/src/server.ts b/server/src/server.ts index 7459f1051..6bd4bfd56 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -142,6 +142,13 @@ let openCompiledFileRequest = new v.RequestType< void >("textDocument/openCompiled"); +// Request to start the build watcher for a project +let startBuildRequest = new v.RequestType< + p.TextDocumentIdentifier, + { success: boolean }, + void +>("rescript/startBuild"); + export let getCurrentCompilerDiagnosticsForFile = ( fileUri: utils.FileURI, ): p.Diagnostic[] => { @@ -436,6 +443,7 @@ let openedFile = async (fileUri: utils.FileURI, fileContent: string) => { rescriptVersion: await utils.findReScriptVersionForProjectRoot(projectRootPath), bsbWatcherByEditor: null, + buildRootPath: null, bscBinaryLocation: await utils.findBscExeBinary(projectRootPath), editorAnalysisLocation: await utils.findEditorAnalysisBinary(projectRootPath), @@ -452,21 +460,37 @@ let openedFile = async (fileUri: utils.FileURI, fileContent: string) => { } let root = projectsFiles.get(projectRootPath)!; root.openFiles.add(filePath); - // check if .bsb.lock is still there. If not, start a bsb -w ourselves + // check if a lock file exists. 
If not, start a build watcher ourselves // because otherwise the diagnostics info we'll display might be stale - let bsbLockPath = path.join(projectRootPath, c.bsbLock); + // ReScript < 12: .bsb.lock in project root + // ReScript >= 12: lib/rescript.lock + // For monorepos, the lock file is at the monorepo root, not the subpackage + let rescriptBinaryPath = await findRescriptBinary(projectRootPath); + let buildRootPath = projectRootPath; + if (rescriptBinaryPath != null) { + const monorepoRootPath = + utils.getMonorepoRootFromBinaryPath(rescriptBinaryPath); + if (monorepoRootPath != null) { + buildRootPath = monorepoRootPath; + } + } + let bsbLockPath = path.join(buildRootPath, c.bsbLock); + let rescriptLockPath = path.join(buildRootPath, c.rescriptLockPartialPath); + let hasLockFile = + fs.existsSync(bsbLockPath) || fs.existsSync(rescriptLockPath); if ( projectRootState.hasPromptedToStartBuild === false && config.extensionConfiguration.askToStartBuild === true && - !fs.existsSync(bsbLockPath) + !hasLockFile ) { // TODO: sometime stale .bsb.lock dangling. bsb -w knows .bsb.lock is // stale. 
Use that logic // TODO: close watcher when lang-server shuts down - if ((await findRescriptBinary(projectRootPath)) != null) { + if (rescriptBinaryPath != null) { + getLogger().info(`Prompting to start build for ${buildRootPath}`); let payload: clientSentBuildAction = { title: c.startBuildAction, - projectRootPath: projectRootPath, + projectRootPath: buildRootPath, }; let params = { type: p.MessageType.Info, @@ -510,6 +534,7 @@ let openedFile = async (fileUri: utils.FileURI, fileContent: string) => { let closedFile = async (fileUri: utils.FileURI) => { let filePath = utils.uriToNormalizedPath(fileUri); + getLogger().log(`Closing file ${filePath}`); if (config.extensionConfiguration.incrementalTypechecking?.enable) { ic.handleClosedFile(filePath); @@ -522,15 +547,27 @@ let closedFile = async (fileUri: utils.FileURI) => { let root = projectsFiles.get(projectRootPath); if (root != null) { root.openFiles.delete(filePath); + getLogger().log( + `Open files remaining for ${projectRootPath}: ${root.openFiles.size}`, + ); // clear diagnostics too if no open files open in said project if (root.openFiles.size === 0) { await deleteProjectConfigCache(projectRootPath); deleteProjectDiagnostics(projectRootPath); if (root.bsbWatcherByEditor !== null) { - root.bsbWatcherByEditor.kill(); + getLogger().info( + `Killing build watcher for ${projectRootPath} (all files closed)`, + ); + utils.killBuildWatcher( + root.bsbWatcherByEditor, + root.buildRootPath ?? 
undefined, + ); root.bsbWatcherByEditor = null; + root.buildRootPath = null; } } + } else { + getLogger().log(`No project state found for ${projectRootPath}`); } } }; @@ -1229,6 +1266,106 @@ async function createInterface(msg: p.RequestMessage): Promise { } } +// Shared function to start build watcher for a project +// Returns true if watcher was started or already running, false on failure +async function startBuildWatcher( + projectRootPath: utils.NormalizedPath, + rescriptBinaryPath: utils.NormalizedPath, + options?: { + createProjectStateIfMissing?: boolean; + monorepoRootPath?: utils.NormalizedPath | null; + }, +): Promise { + let root = projectsFiles.get(projectRootPath); + + // Create project state if missing and option is set + if (root == null && options?.createProjectStateIfMissing) { + const namespaceName = utils.getNamespaceNameFromConfigFile(projectRootPath); + root = { + openFiles: new Set(), + filesWithDiagnostics: new Set(), + filesDiagnostics: {}, + namespaceName: + namespaceName.kind === "success" ? namespaceName.result : null, + rescriptVersion: + await utils.findReScriptVersionForProjectRoot(projectRootPath), + bsbWatcherByEditor: null, + buildRootPath: null, + bscBinaryLocation: await utils.findBscExeBinary(projectRootPath), + editorAnalysisLocation: + await utils.findEditorAnalysisBinary(projectRootPath), + hasPromptedToStartBuild: true, // Don't prompt since we're starting the build + }; + projectsFiles.set(projectRootPath, root); + } + + if (root == null) { + return false; + } + + // If a build watcher is already running, return success + if (root.bsbWatcherByEditor != null) { + getLogger().info(`Build watcher already running for ${projectRootPath}`); + return true; + } + + // Use monorepo root for cwd (monorepo support), fall back to project root + const buildCwd = options?.monorepoRootPath ?? projectRootPath; + + getLogger().info( + `Starting build watcher for ${projectRootPath} in ${buildCwd} (ReScript ${root.rescriptVersion ?? 
"unknown"})`, + ); + let bsbProcess = utils.runBuildWatcherUsingValidBuildPath( + rescriptBinaryPath, + buildCwd, + root.rescriptVersion, + ); + root.bsbWatcherByEditor = bsbProcess; + root.buildRootPath = buildCwd; + + return true; +} + +async function handleStartBuildRequest( + msg: p.RequestMessage, +): Promise { + let params = msg.params as p.TextDocumentIdentifier; + let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI); + let projectRootPath = utils.findProjectRootOfFile(filePath); + + if (projectRootPath == null) { + return { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + result: { success: false }, + }; + } + + let rescriptBinaryPath = await findRescriptBinary(projectRootPath); + if (rescriptBinaryPath == null) { + return { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + result: { success: false }, + }; + } + + // Derive monorepo root from binary path for monorepo support + const monorepoRootPath = + utils.getMonorepoRootFromBinaryPath(rescriptBinaryPath); + + const success = await startBuildWatcher(projectRootPath, rescriptBinaryPath, { + createProjectStateIfMissing: true, + monorepoRootPath, + }); + + return { + jsonrpc: c.jsonrpcVersion, + id: msg.id, + result: { success }, + }; +} + function openCompiledFile(msg: p.RequestMessage): p.Message { let params = msg.params as p.TextDocumentIdentifier; let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI); @@ -1622,6 +1759,19 @@ async function onMessage(msg: p.Message) { clearInterval(pullConfigurationPeriodically); } + // Kill all build watchers on shutdown + for (const [projectPath, projectState] of projectsFiles) { + if (projectState.bsbWatcherByEditor != null) { + getLogger().info(`Killing build watcher for ${projectPath}`); + utils.killBuildWatcher( + projectState.bsbWatcherByEditor, + projectState.buildRootPath ?? 
undefined, + ); + projectState.bsbWatcherByEditor = null; + projectState.buildRootPath = null; + } + } + let response: p.ResponseMessage = { jsonrpc: c.jsonrpcVersion, id: msg.id, @@ -1658,6 +1808,8 @@ async function onMessage(msg: p.Message) { send(await createInterface(msg)); } else if (msg.method === openCompiledFileRequest.method) { send(openCompiledFile(msg)); + } else if (msg.method === startBuildRequest.method) { + send(await handleStartBuildRequest(msg)); } else if (msg.method === p.InlayHintRequest.method) { let params = msg.params as InlayHintParams; let extName = path.extname(params.textDocument.uri); @@ -1739,19 +1891,22 @@ async function onMessage(msg: p.Message) { ); return; } - // TODO: sometime stale .bsb.lock dangling + // TODO: sometime stale lock file dangling // TODO: close watcher when lang-server shuts down. However, by Node's // default, these subprocesses are automatically killed when this // language-server process exits let rescriptBinaryPath = await findRescriptBinary(projectRootPath); if (rescriptBinaryPath != null) { - let bsbProcess = utils.runBuildWatcherUsingValidBuildPath( - rescriptBinaryPath, - projectRootPath, - ); - let root = projectsFiles.get(projectRootPath)!; - root.bsbWatcherByEditor = bsbProcess; - // bsbProcess.on("message", (a) => console.log(a)); + // Derive monorepo root from binary path for monorepo support + const monorepoRootPath = + utils.getMonorepoRootFromBinaryPath(rescriptBinaryPath); + // Note: projectRootPath here might be the monorepo root (buildRootPath from prompt), + // which may not have project state if the file was opened from a subpackage. + // Use createProjectStateIfMissing to handle this case. 
+ await startBuildWatcher(projectRootPath, rescriptBinaryPath, { + createProjectStateIfMissing: true, + monorepoRootPath, + }); } } } diff --git a/server/src/utils.ts b/server/src/utils.ts index 4502ebbd3..e249f3398 100644 --- a/server/src/utils.ts +++ b/server/src/utils.ts @@ -11,9 +11,11 @@ import fsAsync from "fs/promises"; import * as os from "os"; import semver from "semver"; import { fileURLToPath, pathToFileURL } from "url"; +import { getLogger } from "./logger"; import { findBinary as findSharedBinary, + getMonorepoRootFromBinaryPath as getMonorepoRootFromBinaryPathShared, type BinaryName, } from "../../shared/src/findBinary"; import { findProjectRootOfFileInDir as findProjectRootOfFileInDirShared } from "../../shared/src/projectRoots"; @@ -218,6 +220,11 @@ let findBinary = async ( export let findRescriptBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "rescript"); +export let getMonorepoRootFromBinaryPath = ( + binaryPath: string | null, +): NormalizedPath | null => + normalizePath(getMonorepoRootFromBinaryPathShared(binaryPath)); + export let findBscExeBinary = (projectRootPath: NormalizedPath | null) => findBinary(projectRootPath, "bsc.exe"); @@ -609,10 +616,23 @@ export let getCompiledFilePath = ( export let runBuildWatcherUsingValidBuildPath = ( buildPath: p.DocumentUri, projectRootPath: p.DocumentUri, + rescriptVersion?: string | null, ) => { let cwdEnv = { cwd: projectRootPath, }; + // ReScript >= 12.0.0 uses "rescript watch" instead of "rescript build -w" + let useWatchCommand = + rescriptVersion != null && + semver.valid(rescriptVersion) != null && + semver.gte(rescriptVersion, "12.0.0"); + let args = useWatchCommand ? 
["watch"] : ["build", "-w"]; + + getLogger().info( + `Running build watcher: ${buildPath} ${args.join(" ")} in ${projectRootPath}`, + ); + + let proc: childProcess.ChildProcess; if (process.platform === "win32") { /* - a node.js script in node_modules/.bin on windows is wrapped in a @@ -626,9 +646,86 @@ export let runBuildWatcherUsingValidBuildPath = ( (since the path might have spaces), which `execFile` would have done for you under the hood */ - return childProcess.exec(`"${buildPath}".cmd build -w`, cwdEnv); + proc = childProcess.exec(`"${buildPath}".cmd ${args.join(" ")}`, cwdEnv); } else { - return childProcess.execFile(buildPath, ["build", "-w"], cwdEnv); + // Use spawn with detached:true so we can kill the entire process group later + // This ensures child processes (like native rescript binary) are also killed + // Use "pipe" for stdin instead of "ignore" because older ReScript versions (9.x, 10.x, 11.x) + // have a handler that exits when stdin closes: `process.stdin.on("close", exitProcess)` + proc = childProcess.spawn(buildPath, args, { + ...cwdEnv, + detached: true, + stdio: ["pipe", "pipe", "pipe"], + }); + } + + proc.on("error", (err) => { + getLogger().error(`Build watcher error: ${err.message}`); + }); + + proc.on("exit", (code, signal) => { + getLogger().info( + `Build watcher exited with code ${code}, signal ${signal}`, + ); + }); + + if (proc.stdout) { + proc.stdout.on("data", (data) => { + getLogger().log(`[build stdout] ${data.toString().trim()}`); + }); + } + + if (proc.stderr) { + proc.stderr.on("data", (data) => { + getLogger().log(`[build stderr] ${data.toString().trim()}`); + }); + } + + return proc; +}; + +/** + * Kill a build watcher process and all its children, and clean up the lock file. + * On Unix, kills the entire process group if the process was started with detached:true. + * Also removes the lock file since the rescript compiler doesn't clean it up on SIGTERM. 
+ */ +export let killBuildWatcher = ( + proc: childProcess.ChildProcess, + buildRootPath?: string, +): void => { + if (proc.pid == null) { + return; + } + try { + if (process.platform !== "win32") { + // Kill the entire process group (negative PID) + // This ensures child processes spawned by the JS wrapper are also killed + process.kill(-proc.pid, "SIGTERM"); + } else { + proc.kill(); + } + } catch (e) { + // Process might already be dead + getLogger().log(`Error killing build watcher: ${e}`); + } + + // Clean up lock files since the rescript compiler doesn't remove them on SIGTERM + // ReScript >= 12 uses lib/rescript.lock, older versions use .bsb.lock + if (buildRootPath != null) { + const lockFiles = [ + path.join(buildRootPath, "lib", "rescript.lock"), + path.join(buildRootPath, ".bsb.lock"), + ]; + for (const lockFilePath of lockFiles) { + try { + if (fs.existsSync(lockFilePath)) { + fs.unlinkSync(lockFilePath); + getLogger().log(`Removed lock file: ${lockFilePath}`); + } + } catch (e) { + getLogger().log(`Error removing lock file: ${e}`); + } + } } }; diff --git a/shared/src/findBinary.ts b/shared/src/findBinary.ts index 09cd65d6a..6c89efecf 100644 --- a/shared/src/findBinary.ts +++ b/shared/src/findBinary.ts @@ -18,8 +18,11 @@ type FindBinaryOptions = { }; const compilerInfoPartialPath = path.join("lib", "bs", "compiler-info.json"); -const platformDir = - process.arch === "arm64" ? process.platform + process.arch : process.platform; +// For arm64, try the arm64-specific directory first (e.g., darwinarm64), +// then fall back to the generic platform directory (e.g., darwin) for older ReScript versions +const platformDirArm64 = + process.arch === "arm64" ? process.platform + process.arch : null; +const platformDirGeneric = process.platform; const normalizePath = (filePath: string | null): string | null => { return filePath != null ? 
path.normalize(filePath) : null; @@ -71,10 +74,12 @@ export const findBinary = async ({ const bscPath = compileInfo.bsc_path; if (binary === "bsc.exe") { return normalizePath(bscPath); - } else { + } else if (binary !== "rescript") { + // For native binaries (not "rescript" JS wrapper), use the bsc_path directory const binaryPath = path.join(path.dirname(bscPath), binary); return normalizePath(binaryPath); } + // For "rescript", fall through to find the JS wrapper below } } catch {} } @@ -122,7 +127,17 @@ export const findBinary = async ({ binaryPath = binPaths.rescript_exe; } } else { - binaryPath = path.join(rescriptDir, platformDir, binary); + // For older ReScript versions (< 12.0.0-alpha.13), try arm64-specific directory first, + // then fall back to generic platform directory (older versions don't have arm64 directories) + if (platformDirArm64 != null) { + const arm64Path = path.join(rescriptDir, platformDirArm64, binary); + if (fs.existsSync(arm64Path)) { + binaryPath = arm64Path; + } + } + if (binaryPath == null) { + binaryPath = path.join(rescriptDir, platformDirGeneric, binary); + } } if (binaryPath != null && fs.existsSync(binaryPath)) { @@ -131,3 +146,19 @@ export const findBinary = async ({ return null; }; + +/** + * Derives the monorepo root directory from a binary path. + * For a path like `/monorepo/node_modules/.bin/rescript`, returns `/monorepo`. + * This is useful for monorepo support where the binary is in the monorepo root's + * node_modules, but the project root (nearest rescript.json) might be a subpackage. + */ +export const getMonorepoRootFromBinaryPath = ( + binaryPath: string | null, +): string | null => { + if (binaryPath == null) { + return null; + } + const match = binaryPath.match(/^(.*?)[\\/]+node_modules[\\/]+/); + return match ? 
normalizePath(match[1]) : null; +}; From 8b052818fb479df5dc3f86affb1f0e146ac4a8ac Mon Sep 17 00:00:00 2001 From: jderochervlk Date: Mon, 2 Feb 2026 09:01:19 -0500 Subject: [PATCH 55/56] fix: wrap binary path resolution with fileURLToPath --- shared/src/findBinary.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/shared/src/findBinary.ts b/shared/src/findBinary.ts index 6c89efecf..090157715 100644 --- a/shared/src/findBinary.ts +++ b/shared/src/findBinary.ts @@ -2,6 +2,7 @@ import * as fs from "fs"; import * as fsAsync from "fs/promises"; import * as path from "path"; import * as semver from "semver"; +import * as url from "url"; export type BinaryName = | "bsc.exe" @@ -115,7 +116,7 @@ export const findBinary = async ({ "..", `@rescript/${target}/bin.js`, ); - const { binPaths } = await import(targetPackagePath); + const { binPaths } = await import(url.fileURLToPath(targetPackagePath)); if (binary === "bsc.exe") { binaryPath = binPaths.bsc_exe; From 536ac75daa1f2ae58eb6a44f487c7b102fa58f54 Mon Sep 17 00:00:00 2001 From: Joshua Derocher Date: Tue, 3 Feb 2026 19:28:53 -0500 Subject: [PATCH 56/56] fix: update globs to work on windows with WSL --- CHANGELOG.md | 1 + server/src/server.ts | 23 +++++++++++++++-------- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d43b4b5e8..898393cbc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ - Take namespace into account for incremental cleanup. https://github.com/rescript-lang/rescript-vscode/pull/1164 - Potential race condition in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167 - Fix extension crash triggered by incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1169 +- Fix file watchers on Windows when using WSL. 
https://github.com/rescript-lang/rescript-vscode/pull/1178 #### :nail_care: Polish diff --git a/server/src/server.ts b/server/src/server.ts index 6bd4bfd56..f4829dd0e 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1528,20 +1528,27 @@ async function onMessage(msg: p.Message) { // Only watch the root compiler log for each workspace folder. // In monorepos, `**/lib/bs/.compiler.log` matches every package and dependency, // causing a burst of events per save. - globPattern: path.join(projectRootPath, c.compilerLogPartialPath), + globPattern: { + baseUri: utils.pathToURI(projectRootPath), + pattern: c.compilerLogPartialPath, + }, kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete, }, { - globPattern: path.join( - projectRootPath, - "**", - c.buildNinjaPartialPath, - ), + // Watch ninja output + globPattern: { + baseUri: utils.pathToURI(projectRootPath), + pattern: path.join("**", c.buildNinjaPartialPath), + }, kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete, }, { - globPattern: `${path.join(projectRootPath, "**", c.compilerDirPartialPath)}/**/*.{cmt,cmi}`, - kind: p.WatchKind.Change | p.WatchKind.Delete, + // Watch build artifacts + globPattern: { + baseUri: utils.pathToURI(projectRootPath), + pattern: path.join(c.compilerDirPartialPath, "**/*.{cmi,cmt}"), + }, + kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete, }, ], );