diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml
index cbbb9242..90ac7a19 100644
--- a/.github/workflows/formatting.yml
+++ b/.github/workflows/formatting.yml
@@ -13,4 +13,5 @@ jobs:
         submodules: true
     - run: |
         npm ci
+        npx lix install haxelib:formatter
         npx lix run formatter -s . --check
diff --git a/.haxerc b/.haxerc
index 5b42a905..b56acc52 100644
--- a/.haxerc
+++ b/.haxerc
@@ -1,4 +1,4 @@
 {
-	"version": "b537e99",
+	"version": "2dc801f",
 	"resolveLibs": "scoped"
-}
+}
\ No newline at end of file
diff --git a/haxe_libraries/formatter.hxml b/haxe_libraries/formatter.hxml
index fec17cc9..7b187a81 100644
--- a/haxe_libraries/formatter.hxml
+++ b/haxe_libraries/formatter.hxml
@@ -1,4 +1,4 @@
-# @install: lix --silent download "haxelib:/formatter#1.17.1" into formatter/1.17.1/haxelib
-# @run: haxelib run-dir formatter "${HAXE_LIBCACHE}/formatter/1.17.1/haxelib"
--cp ${HAXE_LIBCACHE}/formatter/1.17.1/haxelib/src
--D formatter=1.17.1
\ No newline at end of file
+# @install: lix --silent download "haxelib:/formatter#1.18.0" into formatter/1.18.0/haxelib
+# @run: haxelib run-dir formatter "${HAXE_LIBCACHE}/formatter/1.18.0/haxelib"
+-cp ${HAXE_LIBCACHE}/formatter/1.18.0/haxelib/src
+-D formatter=1.18.0
\ No newline at end of file
diff --git a/haxe_libraries/json2object.hxml b/haxe_libraries/json2object.hxml
index fdbe6ddb..5ed25d9c 100644
--- a/haxe_libraries/json2object.hxml
+++ b/haxe_libraries/json2object.hxml
@@ -1,4 +1,4 @@
-# @install: lix --silent download "gh://github.com/elnabo/json2object#429986134031cbb1980f09d0d3d642b4b4cbcd6a" into json2object/3.11.0/github/429986134031cbb1980f09d0d3d642b4b4cbcd6a
+# @install: lix --silent download "gh://github.com/elnabo/json2object#a75859de1e966c09e73591b6c9186086c143fe60" into json2object/3.11.0/github/a75859de1e966c09e73591b6c9186086c143fe60
 -lib hxjsonast
--cp ${HAXE_LIBCACHE}/json2object/3.11.0/github/429986134031cbb1980f09d0d3d642b4b4cbcd6a/src
+-cp ${HAXE_LIBCACHE}/json2object/3.11.0/github/a75859de1e966c09e73591b6c9186086c143fe60/src
 -D json2object=3.11.0
\ No newline at end of file
diff --git a/haxe_libraries/rename.hxml b/haxe_libraries/rename.hxml
index 55ebc6da..f01071d3 100644
--- a/haxe_libraries/rename.hxml
+++ b/haxe_libraries/rename.hxml
@@ -1,3 +1,3 @@
-# @install: lix --silent download "haxelib:/rename#2.3.0" into rename/2.3.0/haxelib
--cp ${HAXE_LIBCACHE}/rename/2.3.0/haxelib/src
--D rename=2.3.0
\ No newline at end of file
+# @install: lix --silent download "haxelib:/rename#3.0.0" into rename/3.0.0/haxelib
+-cp ${HAXE_LIBCACHE}/rename/3.0.0/haxelib/src
+-D rename=3.0.0
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index 2cf41ab3..77c0c691 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
 	"name": "@vshaxe/haxe-language-server",
-	"version": "2.32.2",
+	"version": "2.33.0",
 	"lockfileVersion": 2,
 	"requires": true,
 	"packages": {
 		"": {
 			"name": "@vshaxe/haxe-language-server",
-			"version": "2.32.2",
+			"version": "2.33.0",
 			"hasInstallScript": true,
 			"devDependencies": {
 				"lix": "^15.12.0",
diff --git a/package.json b/package.json
index a994d4e8..f90a1ff1 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "@vshaxe/haxe-language-server",
-	"version": "2.32.2",
+	"version": "2.33.0",
 	"devDependencies": {
 		"lix": "^15.12.0",
 		"terser": "^5.15.0"
diff --git a/src/haxeLanguageServer/Configuration.hx b/src/haxeLanguageServer/Configuration.hx
index c2e8b195..09fb9632 100644
--- a/src/haxeLanguageServer/Configuration.hx
+++ b/src/haxeLanguageServer/Configuration.hx
@@ -102,6 +102,7 @@ typedef UserConfig = {
 	var importsSortOrder:ImportsSortOrderConfig;
 	var maxCompletionItems:Int;
 	var renameSourceFolders:Array;
+	var disableRefactorCache:Bool;
 	var inlayHints:InlayHintConfig;
 	var serverRecording:ServerRecordingConfig;
 }
@@ -202,6 +203,7 @@ class Configuration {
 			importsSortOrder: AllAlphabetical,
 			maxCompletionItems: 1000,
 			renameSourceFolders: ["src", "source", "Source", "test", "tests"],
+			disableRefactorCache: false,
 			inlayHints: {
 				variableTypes: false,
 				parameterNames: false,
diff --git a/src/haxeLanguageServer/Context.hx b/src/haxeLanguageServer/Context.hx
index b7f9b569..7572ef02 100644
--- a/src/haxeLanguageServer/Context.hx
+++ b/src/haxeLanguageServer/Context.hx
@@ -22,12 +22,14 @@ import haxeLanguageServer.features.haxe.GotoDefinitionFeature;
 import haxeLanguageServer.features.haxe.GotoImplementationFeature;
 import haxeLanguageServer.features.haxe.GotoTypeDefinitionFeature;
 import haxeLanguageServer.features.haxe.InlayHintFeature;
+import haxeLanguageServer.features.haxe.RefactorFeature;
 import haxeLanguageServer.features.haxe.RenameFeature;
 import haxeLanguageServer.features.haxe.SignatureHelpFeature;
 import haxeLanguageServer.features.haxe.WorkspaceSymbolsFeature;
 import haxeLanguageServer.features.haxe.codeAction.CodeActionFeature;
 import haxeLanguageServer.features.haxe.documentSymbols.DocumentSymbolsFeature;
 import haxeLanguageServer.features.haxe.foldingRange.FoldingRangeFeature;
+import haxeLanguageServer.features.haxe.refactoring.RefactorCache;
 import haxeLanguageServer.server.DisplayResult;
 import haxeLanguageServer.server.HaxeServer;
 import haxeLanguageServer.server.ServerRecording;
@@ -64,6 +66,7 @@ class Context {
 	@:nullSafety(Off) public var findReferences(default, null):FindReferencesFeature;
 	@:nullSafety(Off) public var determinePackage(default, null):DeterminePackageFeature;
 	@:nullSafety(Off) public var diagnostics(default, null):DiagnosticsFeature;
+	@:nullSafety(Off) public var refactorCache(default, null):RefactorCache;
 	public var experimental(default, null):Null;
 	var activeEditor:Null;
@@ -376,7 +379,8 @@
 		new GotoTypeDefinitionFeature(this);
 		findReferences = new FindReferencesFeature(this);
 		determinePackage = new DeterminePackageFeature(this);
-		new RenameFeature(this);
+		refactorCache = new RefactorCache(this);
+		new RenameFeature(this, refactorCache);
 		diagnostics = new DiagnosticsFeature(this);
 		new CodeActionFeature(this);
 		new CodeLensFeature(this);
@@ -390,6 +394,7 @@
 		} else {
 			haxeServer.restart(reason, function() {
 				onServerStarted();
+				refactorCache.initClassPaths();
 				if (activeEditor != null) {
 					publishDiagnostics(activeEditor);
 				}
@@ -416,6 +421,7 @@
 			serverRecording.onDidChangeTextDocument(event);
 			invalidateFile(uri);
 			documents.onDidChangeTextDocument(event);
+			refactorCache.invalidateFile(uri.toFsPath().toString());
 		}
 	}
@@ -432,6 +438,7 @@
 		if (isUriSupported(uri)) {
 			publishDiagnostics(uri);
 			invalidated.remove(uri.toString());
+			refactorCache.invalidateFile(uri.toFsPath().toString());
 		}
 	}
@@ -447,6 +454,9 @@
 					invalidateFile(change.uri);
 				case _:
 			}
+			if (change.uri.isHaxeFile()) {
+				refactorCache.invalidateFile(change.uri.toFsPath().toString());
+			}
 		}
 	}
@@ -466,6 +476,9 @@
 	function onDidChangeActiveTextEditor(params:{uri:DocumentUri}) {
+		if (!params.uri.isFile() || !params.uri.isHaxeFile()) {
+			return;
+		}
 		activeEditor = params.uri;
 		final document = documents.getHaxe(params.uri);
 		if (document == null) {
diff --git a/src/haxeLanguageServer/features/haxe/DeterminePackageFeature.hx b/src/haxeLanguageServer/features/haxe/DeterminePackageFeature.hx
index fa9c0d3e..ef7c636c 100644
--- a/src/haxeLanguageServer/features/haxe/DeterminePackageFeature.hx
+++ b/src/haxeLanguageServer/features/haxe/DeterminePackageFeature.hx
@@ -20,6 +20,10 @@ class DeterminePackageFeature {
 	function handleJsonRpc(path:FsPath, token:Null, resolve:{pack:String}->Void, reject:ResponseError->Void) {
 		context.callHaxeMethod(DisplayMethods.DeterminePackage, {file: path}, token, function(result) {
+			if (result == null) {
+				reject(ResponseError.internalError("unable to determine package of module"));
+				return null;
+			}
 			resolve({pack: result.join(".")});
 			return null;
 		}, reject.handler());
diff --git a/src/haxeLanguageServer/features/haxe/HoverFeature.hx b/src/haxeLanguageServer/features/haxe/HoverFeature.hx
index ce17623a..56297891 100644
--- a/src/haxeLanguageServer/features/haxe/HoverFeature.hx
+++ b/src/haxeLanguageServer/features/haxe/HoverFeature.hx
@@ -33,7 +33,7 @@ class HoverFeature {
 	function handleJsonRpc(params:TextDocumentPositionParams, token:CancellationToken, resolve:Null->Void, reject:ResponseError->Void, doc:HxTextDocument, offset:Int) {
 		context.callHaxeMethod(DisplayMethods.Hover, {file: doc.uri.toFsPath(), contents: doc.content, offset: offset}, token, function(hover) {
-			if (hover == null) {
+			if (hover == null || hover.range == null) {
 				resolve(null);
 			} else {
 				final range = context.displayOffsetConverter.byteRangeToCharacterRange(hover.range, doc);
diff --git a/src/haxeLanguageServer/features/haxe/InlayHintFeature.hx b/src/haxeLanguageServer/features/haxe/InlayHintFeature.hx
index 329dfc87..b2123f69 100644
--- a/src/haxeLanguageServer/features/haxe/InlayHintFeature.hx
+++ b/src/haxeLanguageServer/features/haxe/InlayHintFeature.hx
@@ -62,7 +62,7 @@ class InlayHintFeature {
 		if (root == null) {
 			return reject.noFittingDocument(uri);
 		}
-		#if debug
+		#if debug_inlayhints
 		trace('[inlayHints] requesting inlay hints for $fileName lines ${params.range.start.line}-${params.range.end.line}');
 		#end
 		removeCancelledRequests();
diff --git a/src/haxeLanguageServer/features/haxe/RefactorFeature.hx b/src/haxeLanguageServer/features/haxe/RefactorFeature.hx
new file mode 100644
index 00000000..79214ee4
--- /dev/null
+++ b/src/haxeLanguageServer/features/haxe/RefactorFeature.hx
@@ -0,0 +1,213 @@
+package haxeLanguageServer.features.haxe;
+
+import haxeLanguageServer.features.haxe.codeAction.CodeActionFeature.CodeActionContributor;
+import haxeLanguageServer.features.haxe.codeAction.CodeActionFeature.CodeActionResolveType;
+import haxeLanguageServer.features.haxe.refactoring.EditList;
+import haxeLanguageServer.features.haxe.refactoring.RefactorCache;
+import js.lib.Promise;
+import jsonrpc.ResponseError;
+import languageServerProtocol.Types.CodeAction;
+import languageServerProtocol.Types.CodeActionKind;
+import languageServerProtocol.Types.WorkspaceEdit;
+import refactor.RefactorResult;
+import refactor.Refactoring;
+import refactor.refactor.RefactorHelper;
+import refactor.refactor.RefactorType;
+import refactor.refactor.RewriteWrapWithTryCatch;
+
+class RefactorFeature implements CodeActionContributor {
+	final context:Context;
+	final refactorCache:RefactorCache;
+
+	public function new(context:Context) {
+		this.context = context;
+		this.refactorCache = context.refactorCache;
+	}
+
+	public function createCodeActions(params:CodeActionParams):Array {
+		if (context.config.user.disableRefactorCache) {
return []; + } + + var actions:Array = []; + if (params.context.only != null) { + actions = actions.concat(findMatchingRefactors(params, i -> { + if (i == null) { + return false; + } + return params.context.only.contains(i.codeActionKind); + })); + } else { + actions = actions.concat(findMatchingRefactors(params, i -> true)); + } + + return actions; + } + + function findMatchingRefactors(params:CodeActionParams, filterType:FilterRefactorModuleCB):Array { + var actions:Array = []; + final canRefactorContext = refactorCache.makeCanRefactorContext(context.documents.getHaxe(params.textDocument.uri), params.range); + if (canRefactorContext == null) { + return actions; + } + var allRefactorInfos:Array> = [ + getRefactorInfo(ExtractConstructorParamsAsFinals), + getRefactorInfo(ExtractConstructorParamsAsVars), + getRefactorInfo(ExtractInterface), + getRefactorInfo(ExtractMethod), + getRefactorInfo(ExtractType), + getRefactorInfo(RewriteFinalsToVars), + getRefactorInfo(RewriteVarsToFinals), + getRefactorInfo(RewriteWrapWithTryCatch), + ]; + final refactorInfo = allRefactorInfos.filter(filterType); + if (refactorInfo.length <= 0) { + return actions; + } + final isRangeSameScope = RefactorHelper.rangeInSameScope(canRefactorContext); + refactorCache.updateSingleFileCache(canRefactorContext.what.fileName); + for (refactor in refactorInfo) { + if (refactor == null) { + continue; + } + switch (Refactoring.canRefactor(refactor.refactorType, canRefactorContext, isRangeSameScope)) { + case Unsupported: + case Supported(title): + actions.push(makeEmptyCodeAction(title, refactor.codeActionKind, params, refactor.type)); + } + } + return actions; + } + + function getRefactorInfo(type:CodeActionResolveType):Null { + switch (type) { + case MissingArg | ChangeFinalToVar | AddTypeHint: + return null; + case ExtractInterface: + return { + refactorType: RefactorExtractInterface, + type: type, + codeActionKind: RefactorExtract, + title: "extractInterface", + prefix: "[ExtractInterface]" + } + case ExtractMethod: + return { + refactorType: RefactorExtractMethod, + type: type, + codeActionKind: RefactorExtract, + title: "extractMethod", + prefix: "[ExtractMethod]" + } + case ExtractType: + return { + refactorType: RefactorExtractType, + type: type, + codeActionKind: RefactorExtract, + title: "extractType", + prefix: "[ExtractType]" + } + case ExtractConstructorParamsAsVars: + return { + refactorType: RefactorExtractConstructorParams(false), + type: type, + codeActionKind: RefactorExtract, + title: "extractConstructorParamsAsVars", + prefix: "[ExtractConstructorParams as Vars]" + } + case ExtractConstructorParamsAsFinals: + return { + refactorType: RefactorExtractConstructorParams(true), + type: type, + codeActionKind: RefactorExtract, + title: "extractConstructorParamsAsFinals", + prefix: "[ExtractConstructorParams as Finals]" + } + case RewriteVarsToFinals: + return { + refactorType: RefactorRewriteVarsToFinals(true), + type: type, + codeActionKind: RefactorRewrite, + title: "rewriteVarsToFinals", + prefix: "[RefactorRewriteVarsToFinals]" + } + case RewriteFinalsToVars: + return { + refactorType: RefactorRewriteVarsToFinals(false), + type: type, + codeActionKind: RefactorRewrite, + title: "rewriteFinalsToVars", + prefix: "[RewriteFinalsToVars]" + } + case RewriteWrapWithTryCatch: + return { + refactorType: RefactorRewriteWrapWithTryCatch, + type: type, + codeActionKind: RefactorRewrite, + title: "rewriteWrapInException", + prefix: "[RewriteWrapInException]" + } + } + } + + function makeEmptyCodeAction(title:String, 
kind:CodeActionKind, params:CodeActionParams, type:CodeActionResolveType):CodeAction { + return { + title: title, + kind: kind, + data: {params: params, type: type} + } + } + + public function createCodeActionEdits(context:Context, type:CodeActionResolveType, action:CodeAction, params:CodeActionParams):Promise { + var endProgress = context.startProgress("Performing Refactor Operation…"); + var actions:Array = []; + final editList:EditList = new EditList(); + final refactorContext = refactorCache.makeRefactorContext(context.documents.getHaxe(params.textDocument.uri), params.range, editList); + if (refactorContext == null) { + return Promise.reject("failed to make refactor context"); + } + var info = getRefactorInfo(type); + if (info == null) { + return Promise.reject("failed to make refactor context"); + } + final onResolve:(?result:Null, ?debugInfo:Null) -> Void = context.startTimer("refactor/" + info.title); + return Refactoring.doRefactor(info.refactorType, refactorContext).then((result:RefactorResult) -> { + var promise = switch (result) { + case NoChange: + trace(info.prefix + " no change"); + Promise.reject(ResponseError.internalError("no change")); + case NotFound: + var msg = 'could not find identifier at "${refactorContext.what.fileName}@${refactorContext.what.posStart}-${refactorContext.what.posEnd}"'; + trace('${info.prefix} $msg'); + Promise.reject(ResponseError.internalError(msg)); + case Unsupported(name): + trace('${info.prefix} refactoring not supported for "$name"'); + Promise.reject(ResponseError.internalError('refactoring not supported for "$name"')); + case DryRun: + trace(info.prefix + " dry run"); + Promise.reject(ResponseError.internalError("dry run")); + case Done: + var edit:WorkspaceEdit = {documentChanges: editList.documentChanges}; + Promise.resolve(edit); + } + endProgress(); + onResolve(null, editList.documentChanges.length + " changes"); + return promise; + }).catchError((msg) -> { + trace('${info.prefix} error: $msg'); + endProgress(); + onResolve(null, "error"); + Promise.reject(ResponseError.internalError('$msg')); + }); + } +} + +typedef RefactorInfo = { + var refactorType:RefactorType; + var type:CodeActionResolveType; + var codeActionKind:CodeActionKind; + var title:String; + var prefix:String; +} + +typedef FilterRefactorModuleCB = (info:Null) -> Bool; diff --git a/src/haxeLanguageServer/features/haxe/RenameFeature.hx b/src/haxeLanguageServer/features/haxe/RenameFeature.hx index 51f9e567..53927307 100644 --- a/src/haxeLanguageServer/features/haxe/RenameFeature.hx +++ b/src/haxeLanguageServer/features/haxe/RenameFeature.hx @@ -2,27 +2,19 @@ package haxeLanguageServer.features.haxe; import byte.ByteData; import haxe.PosInfos; -import haxe.display.Display.DisplayMethods; -import haxe.display.Display.HoverDisplayItemOccurence; -import haxe.extern.EitherType; import haxe.io.Path; -import haxeLanguageServer.protocol.DotPath.getDotPath; -import js.lib.Promise; +import haxeLanguageServer.features.haxe.refactoring.EditDoc; +import haxeLanguageServer.features.haxe.refactoring.EditList; +import haxeLanguageServer.features.haxe.refactoring.LanguageServerTyper; +import haxeLanguageServer.features.haxe.refactoring.RefactorCache; import jsonrpc.CancellationToken; import jsonrpc.ResponseError; import jsonrpc.Types.NoData; -import languageServerProtocol.Types.CreateFile; -import languageServerProtocol.Types.DeleteFile; -import languageServerProtocol.Types.RenameFile; -import languageServerProtocol.Types.RenameFileKind; -import 
languageServerProtocol.Types.TextDocumentEdit; import languageServerProtocol.Types.WorkspaceEdit; -import refactor.CanRefactorResult; -import refactor.ITypeList; -import refactor.ITyper; +import refactor.RefactorResult; import refactor.discover.FileContentType; import refactor.discover.TraverseSources.simpleFileReader; -import refactor.rename.RenameHelper.TypeHintType; +import refactor.rename.CanRenameResult; import tokentree.TokenTree; using Lambda; @@ -30,64 +22,43 @@ using haxeLanguageServer.helper.PathHelper; class RenameFeature { final context:Context; - final converter:Haxe3DisplayOffsetConverter; - final cache:refactor.cache.IFileCache; - final typer:LanguageServerTyper; + final refactorCache:RefactorCache; static final HINT_SETTINGS = " - check `haxe.renameSourceFolders` setting (see https://github.com/vshaxe/vshaxe/wiki/Rename-Symbol)"; - public function new(context:Context) { + public function new(context:Context, refactorCache:RefactorCache) { this.context = context; - cache = new refactor.cache.MemCache(); - typer = new LanguageServerTyper(context); - - converter = new Haxe3DisplayOffsetConverter(); + this.refactorCache = refactorCache; context.languageServerProtocol.onRequest(PrepareRenameRequest.type, onPrepareRename); context.languageServerProtocol.onRequest(RenameRequest.type, onRename); } function onPrepareRename(params:PrepareRenameParams, token:CancellationToken, resolve:PrepareRenameResult->Void, reject:ResponseError->Void) { + if (context.config.user.disableRefactorCache) { + return reject.handler()("rename feature disabled"); + } + final onResolve:(?result:Null, ?debugInfo:Null) -> Void = context.startTimer("textDocument/prepareRename"); final uri = params.textDocument.uri; - final doc = context.documents.getHaxe(uri); + final doc:Null = context.documents.getHaxe(uri); if (doc == null || !uri.isFile()) { return reject.noFittingDocument(uri); } final filePath:FsPath = uri.toFsPath(); - final usageContext:refactor.discover.UsageContext = makeUsageContext(); + final usageContext:refactor.discover.UsageContext = refactorCache.makeUsageContext(); usageContext.fileName = filePath.toString(); - var root:Null = doc?.tokens?.tree; - if (root == null) { - usageContext.usageCollector.parseFile(ByteData.ofString(doc.content), usageContext); - } else { - usageContext.usageCollector.parseFileWithTokens(root, usageContext); - } + refactorCache.updateSingleFileCache(filePath.toString()); final editList:EditList = new EditList(); - refactor.Refactor.canRename({ - nameMap: usageContext.nameMap, - fileList: usageContext.fileList, - typeList: usageContext.typeList, - what: { - fileName: filePath.toString(), - toName: "", - pos: converter.characterOffsetToByteOffset(doc.content, doc.offsetAt(params.position)) - }, - verboseLog: function(text:String, ?pos:PosInfos) { - #if debug - trace('[canRename] $text'); - #end - }, - typer: typer - }).then((result:CanRefactorResult) -> { + refactor.Rename.canRename(refactorCache.makeCanRenameContext(doc, filePath, params.position)).then((result:CanRenameResult) -> { if (result == null) { reject(ResponseError.internalError("cannot rename identifier")); } - var editDoc = new EditDoc(filePath, editList, context, converter); + var editDoc = new EditDoc(filePath, editList, context, refactorCache.converter); resolve({ range: editDoc.posToRange(result.pos), placeholder: result.name @@ -102,48 +73,18 @@ class RenameFeature { function onRename(params:RenameParams, token:CancellationToken, resolve:WorkspaceEdit->Void, reject:ResponseError->Void) { final 
onResolve:(?result:Null, ?debugInfo:Null) -> Void = context.startTimer("textDocument/rename"); final uri = params.textDocument.uri; - final doc = context.documents.getHaxe(uri); + final doc:Null = context.documents.getHaxe(uri); if (doc == null || !uri.isFile()) { return reject.noFittingDocument(uri); } - final filePath:FsPath = uri.toFsPath(); - - final usageContext:refactor.discover.UsageContext = makeUsageContext(); - typer.typeList = usageContext.typeList; - - // TODO abort if there are unsaved documents (rename operates on fs, so positions might be off) - - // TODO use workspace / compilation server source folders - var srcFolders:Array = ["src", "source", "Source", "test", "tests"]; - if (context.config.user.renameSourceFolders != null) { - srcFolders = context.config.user.renameSourceFolders; - } - final workspacePath = context.workspacePath.normalize(); - srcFolders = srcFolders.map(f -> Path.join([workspacePath.toString(), f])); + var endProgress = context.startProgress("Performing Rename Operation…"); - refactor.discover.TraverseSources.traverseSources(srcFolders, usageContext); - usageContext.usageCollector.updateImportHx(usageContext); + final filePath:FsPath = uri.toFsPath(); final editList:EditList = new EditList(); - refactor.Refactor.rename({ - nameMap: usageContext.nameMap, - fileList: usageContext.fileList, - typeList: usageContext.typeList, - what: { - fileName: filePath.toString(), - toName: params.newName, - pos: converter.characterOffsetToByteOffset(doc.content, doc.offsetAt(params.position)) - }, - forRealExecute: true, - docFactory: (filePath:String) -> new EditDoc(new FsPath(filePath), editList, context, converter), - verboseLog: function(text:String, ?pos:PosInfos) { - #if debug - trace('[rename] $text'); - #end - }, - typer: typer - }).then((result:refactor.RefactorResult) -> { + refactor.Rename.rename(refactorCache.makeRenameContext(doc, filePath, params.position, params.newName, editList)).then((result:RefactorResult) -> { + endProgress(); switch (result) { case NoChange: trace("[rename] no change"); @@ -162,238 +103,10 @@ class RenameFeature { } onResolve(null, editList.documentChanges.length + " changes"); }).catchError((msg) -> { + endProgress(); trace('[rename] error: $msg$HINT_SETTINGS'); + onResolve(null, "error"); reject(ResponseError.internalError('$msg$HINT_SETTINGS')); }); } - - function makeUsageContext():refactor.discover.UsageContext { - return { - fileReader: readFile, - fileName: "", - file: null, - usageCollector: new refactor.discover.UsageCollector(), - nameMap: new refactor.discover.NameMap(), - fileList: new refactor.discover.FileList(), - typeList: new refactor.discover.TypeList(), - type: null, - cache: cache - }; - } - - function readFile(path:String):FileContentType { - var fsPath = new FsPath(path); - var doc:Null = context.documents.getHaxe(fsPath.toUri()); - if (doc == null) { - return simpleFileReader(path); - } - var root:Null = doc?.tokens?.tree; - if (root != null) { - return Token(root); - } - return Text(doc.content); - } -} - -class EditList { - public var documentChanges:Array>>>; - - public function new() { - documentChanges = []; - } - - public function addEdit(edit:EitherType>>) { - documentChanges.push(edit); - } -} - -class EditDoc implements refactor.edits.IEditableDocument { - var list:EditList; - var filePath:FsPath; - var edits:Array; - var renames:Array; - final context:Context; - final converter:Haxe3DisplayOffsetConverter; - - public function new(filePath:FsPath, list:EditList, context:Context, 
converter:Haxe3DisplayOffsetConverter) { - this.filePath = filePath; - this.list = list; - this.context = context; - this.converter = converter; - edits = []; - renames = []; - } - - public function addChange(edit:refactor.edits.FileEdit) { - switch (edit) { - case Move(newFilePath): - renames.push({ - kind: RenameFileKind.Kind, - oldUri: filePath.toUri(), - newUri: new FsPath(newFilePath).toUri(), - options: { - overwrite: false, - ignoreIfExists: false - } - }); - case ReplaceText(text, pos): - edits.push({range: posToRange(pos), newText: text}); - case InsertText(text, pos): - edits.push({range: posToRange(pos), newText: text}); - case RemoveText(pos): - edits.push({range: posToRange(pos), newText: ""}); - } - } - - public function posToRange(pos:refactor.discover.IdentifierPos):Range { - var doc = context.documents.getHaxe(filePath.toUri()); - if (doc == null) { - // document currently not loaded -> load and find line number and character pos to build edit Range - var content:String = sys.io.File.getContent(filePath.toString()); - var lineSeparator:String = detectLineSeparator(content); - var separatorLength:Int = lineSeparator.length; - var lines:Array = content.split(lineSeparator); - var startPos:Null = null; - var endPos:Null = null; - var curLineStart:Int = 0; - var curLine:Int = 0; - - var startOffset:Int = converter.byteOffsetToCharacterOffset(content, pos.start); - var endOffset:Int = converter.byteOffsetToCharacterOffset(content, pos.end); - - for (line in lines) { - var length:Int = line.length + separatorLength; - if (startOffset > curLineStart + length) { - curLineStart += length; - curLine++; - continue; - } - if (startOffset >= curLineStart && startOffset < curLineStart + length) { - startPos = {line: curLine, character: startOffset - curLineStart}; - } - if (endOffset >= curLineStart && endOffset < curLineStart + length) { - endPos = {line: curLine, character: endOffset - curLineStart}; - break; - } - curLineStart += length; - curLine++; - } - if ((startPos == null) || (endPos == null)) { - throw '$filePath not found'; - } - return {start: cast startPos, end: cast endPos}; - } - return doc.rangeAt(converter.byteOffsetToCharacterOffset(doc.content, pos.start), converter.byteOffsetToCharacterOffset(doc.content, pos.end)); - } - - function detectLineSeparator(code:String):String { - var lineSeparator:String; - for (i in 0...code.length) { - var char = code.charAt(i); - if ((char == "\r") || (char == "\n")) { - lineSeparator = char; - if ((char == "\r") && (i + 1 < code.length)) { - char = code.charAt(i + 1); - if (char == "\n") { - lineSeparator += char; - } - } - return lineSeparator; - } - } - return "\n"; - } - - public function endEdits() { - list.addEdit({ - textDocument: { - uri: filePath.toUri(), - version: null - }, - edits: edits - }); - for (rename in renames) { - list.addEdit(rename); - } - } -} - -class LanguageServerTyper implements ITyper { - final context:Context; - - public var typeList:Null; - - public function new(context:Context) { - this.context = context; - } - - public function resolveType(filePath:String, pos:Int):Promise> { - final params = { - file: new FsPath(filePath), - offset: pos, - wasAutoTriggered: true - }; - #if debug - trace('[rename] requesting type info for $filePath@$pos'); - #end - var promise = new Promise(function(resolve:(value:Null) -> Void, reject) { - context.callHaxeMethod(DisplayMethods.Hover, params, null, function(hover) { - if (hover == null) { - #if debug - trace('[rename] received no type info for $filePath@$pos'); - #end 
- resolve(null); - } else { - resolve(buildTypeHint(hover, '$filePath@$pos')); - } - return null; - }, reject.handler()); - }); - return promise; - } - - function buildTypeHint(item:HoverDisplayItemOccurence, location:String):Null { - if (typeList == null) { - return null; - } - var reg = ~/Class<(.*)>/; - - var type = item?.item?.type; - if (type == null) { - return null; - } - var path = type?.args?.path; - if (path == null) { - return null; - } - if (path.moduleName == "StdTypes" && path.typeName == "Null") { - var params = type?.args?.params; - if (params == null) { - return null; - } - type = params[0]; - if (type == null) { - return null; - } - path = type?.args?.path; - if (path == null) { - return null; - } - } - if (reg.match(path.typeName)) { - var fullPath = reg.matched(1); - var parts = fullPath.split("."); - if (parts.length <= 0) { - return null; - } - @:nullSafety(Off) - path.typeName = parts.pop(); - path.pack = parts; - } - var fullPath = '${getDotPath(type)}'; - #if debug - trace('[rename] received type $fullPath for $location'); - #end - return typeList.makeTypeHintType(fullPath); - } } diff --git a/src/haxeLanguageServer/features/haxe/codeAction/CodeActionFeature.hx b/src/haxeLanguageServer/features/haxe/codeAction/CodeActionFeature.hx index 9cec30e3..a449ecb6 100644 --- a/src/haxeLanguageServer/features/haxe/codeAction/CodeActionFeature.hx +++ b/src/haxeLanguageServer/features/haxe/codeAction/CodeActionFeature.hx @@ -17,6 +17,14 @@ enum CodeActionResolveType { MissingArg; ChangeFinalToVar; AddTypeHint; + ExtractInterface; + ExtractMethod; + ExtractType; + ExtractConstructorParamsAsVars; + ExtractConstructorParamsAsFinals; + RewriteVarsToFinals; + RewriteFinalsToVars; + RewriteWrapWithTryCatch; } typedef CodeActionResolveData = { @@ -32,9 +40,11 @@ class CodeActionFeature { final context:Context; final contributors:Array = []; final hasCommandResolveSupport:Bool; + final refactorFeature:RefactorFeature; public function new(context) { this.context = context; + refactorFeature = new RefactorFeature(context); context.registerCapability(CodeActionRequest.type, { documentSelector: Context.haxeSelector, @@ -58,10 +68,7 @@ class CodeActionFeature { registerContributor(new ExtractVarFeature(context)); registerContributor(new ExtractConstantFeature(context)); registerContributor(new DiagnosticsCodeActionFeature(context)); - #if debug - registerContributor(new ExtractTypeFeature(context)); - registerContributor(new ExtractFunctionFeature(context)); - #end + registerContributor(refactorFeature); } public function registerContributor(contributor:CodeActionContributor) { @@ -93,6 +100,8 @@ class CodeActionFeature { return; } case AddTypeHint: + case ExtractInterface | ExtractMethod | ExtractType | ExtractConstructorParamsAsVars | ExtractConstructorParamsAsFinals | RewriteVarsToFinals | + RewriteFinalsToVars | RewriteWrapWithTryCatch: } switch (type) { case MissingArg, ChangeFinalToVar, AddTypeHint: @@ -119,6 +128,12 @@ class CodeActionFeature { arguments: command.arguments ?? 
[] }); }).catchError((e) -> reject(e)); + case ExtractInterface | ExtractMethod | ExtractType | ExtractConstructorParamsAsVars | ExtractConstructorParamsAsFinals | RewriteVarsToFinals | + RewriteFinalsToVars | RewriteWrapWithTryCatch: + refactorFeature.createCodeActionEdits(context, type, action, params).then(workspaceEdit -> { + action.edit = workspaceEdit; + resolve(action); + }).catchError((e) -> reject(e)); } } } diff --git a/src/haxeLanguageServer/features/haxe/codeAction/ExtractFunctionFeature.hx b/src/haxeLanguageServer/features/haxe/codeAction/ExtractFunctionFeature.hx deleted file mode 100644 index 6286cc41..00000000 --- a/src/haxeLanguageServer/features/haxe/codeAction/ExtractFunctionFeature.hx +++ /dev/null @@ -1,307 +0,0 @@ -package haxeLanguageServer.features.haxe.codeAction; - -import haxeLanguageServer.features.haxe.codeAction.CodeActionFeature.CodeActionContributor; -import haxeLanguageServer.helper.FormatterHelper; -import haxeLanguageServer.helper.WorkspaceEditHelper; -import languageServerProtocol.Types.CodeAction; -import languageServerProtocol.Types.TextDocumentEdit; -import languageServerProtocol.Types.WorkspaceEdit; -import tokentree.TokenTree; -import tokentree.TokenTreeBuilder; -import tokentree.utils.TokenTreeCheckUtils; - -using tokentree.TokenTreeAccessHelper; - -class ExtractFunctionFeature implements CodeActionContributor { - final context:Context; - - public function new(context:Context) { - this.context = context; - } - - public function createCodeActions(params:CodeActionParams):Array { - if ((params.context.only != null) && (!params.context.only.contains(RefactorExtract))) { - return []; - } - final doc = context.documents.getHaxe(params.textDocument.uri); - if (doc == null) { - return []; - } - final tokens = doc.tokens; - if (tokens == null) { - return []; - } - return try { - var text:String = doc.getText(params.range); - final leftOffset:Int = text.length - text.ltrim().length; - final rightOffset:Int = text.length - text.rtrim().length; - text = text.trim(); - - final tokenStart:Null = tokens.getTokenAtOffset(doc.offsetAt(params.range.start) + leftOffset); - final tokenEnd:Null = tokens.getTokenAtOffset(doc.offsetAt(params.range.end) - rightOffset); - if (tokenStart == null || tokenEnd == null) - return []; - if (tokenStart.index == tokenEnd.index) - return []; - // TODO is a minimum of 10 tokens between start and end enough / too much? 
is there a better solution - if (tokenStart.index + 10 > tokenEnd.index) - return []; - - final parentOfStart:Null = findParentFunction(tokenStart); - final parentOfEnd:Null = findParentFunction(tokenEnd); - if (parentOfStart == null || parentOfEnd == null) - return []; - if (parentOfStart.index != parentOfEnd.index) - return []; - final lastToken:Null = TokenTreeCheckUtils.getLastToken(parentOfStart); - - final rangeIdents:Array = []; - final varTokens:Array = []; - var hasReturn:Bool = false; - parentOfStart.filterCallback(function(token:TokenTree, index:Int):FilterResult { - if (lastToken == null || token.index > lastToken.index) - return SkipSubtree; - switch token.tok { - case Const(CIdent(s)): - if (token.index >= tokenStart.index && token.index <= tokenEnd.index && !rangeIdents.contains(s)) - rangeIdents.push(s); - case Dollar(s): - if (token.index >= tokenStart.index && token.index <= tokenEnd.index && !rangeIdents.contains(s)) - rangeIdents.push("$" + s); - case Kwd(KwdReturn): - if (token.index >= tokenStart.index && token.index <= tokenEnd.index) - hasReturn = true; - case Kwd(KwdVar): - if (token.index >= tokenStart.index) - return GoDeeper; - if (token.index >= parentOfStart.index && token.index <= lastToken.index) - varTokens.push(token); - default: - } - return GoDeeper; - }); - - var returnSpec:String = ""; - if (hasReturn) { - returnSpec = makeReturnSpec(parentOfStart); - } - final isStatic:Bool = isStaticFunction(parentOfStart); - final indent:String = detectIndent(doc, parentOfStart); - - var newParams:Array = copyParentFunctionParameters(parentOfStart, text, rangeIdents); - newParams = newParams.concat(localVarsToParameter(varTokens, text, rangeIdents)); - - final action:Null = makeExtractFunctionChanges(doc, doc.uri, params, text, isStatic, newParams, returnSpec, indent, - doc.positionAt(lastToken.pos.max + 1, Utf8)); - if (action == null) - return []; - [action]; - } catch (e) { - []; - } - } - - function makeExtractFunctionChanges(doc:HaxeDocument, uri:DocumentUri, params:CodeActionParams, text:String, isStatic:Bool, - newParams:Array, returnSpec:String, indent:String, newFuncPos:Position):CodeAction { - final callParams:String = newParams.map(s -> s.call).join(", "); - final funcParams:String = newParams.map(s -> s.param).join(", "); - - final funcName:String = "newFunction"; - - var call:String = '$funcName($callParams);\n'; - if (returnSpec.length > 0) { - call = 'return $call'; - } - - var func:String = 'function $funcName($funcParams)$returnSpec {\n$text\n}\n'; - if (isStatic) { - func = 'static $func'; - } - - call = FormatterHelper.formatText(doc, context, call, TokenTreeEntryPoint.FieldLevel); - func = FormatterHelper.formatText(doc, context, func, TokenTreeEntryPoint.FieldLevel); - func = func.split("\n").map(s -> indent + s).join("\n"); - final edits:Array = []; - - edits.push(WorkspaceEditHelper.insertText(newFuncPos, func)); - edits.push(WorkspaceEditHelper.replaceText(params.range, call)); - - final textEdit:TextDocumentEdit = WorkspaceEditHelper.textDocumentEdit(uri, edits); - final edit:WorkspaceEdit = { - documentChanges: [textEdit] - }; - return { - title: "Extract function", - kind: RefactorExtract, - edit: edit - } - } - - function findParentFunction(token:TokenTree):Null { - var parent:Null = token.parent; - while (parent != null && parent.tok != null) { - switch parent.tok { - case Kwd(KwdFunction): - return parent; - default: - } - parent = parent.parent; - } - return null; - } - - function makeReturnSpec(functionToken:TokenTree):String { - var 
returnHint:Null = functionToken.access().firstChild().isCIdent().firstOf(DblDot).token; - // anon function - if (returnHint == null) - returnHint = functionToken.access().firstOf(DblDot).token; - if (returnHint == null || returnHint.children == null) - return ""; - return varToString(returnHint); - } - - function isStaticFunction(functionToken:TokenTree):Bool { - if (functionToken.access().firstChild().isCIdent().firstOf(Kwd(KwdStatic)).exists()) - return true; - return false; - } - - function detectIndent(doc:HaxeDocument, functionToken:TokenTree):String { - final functionRange:Range = doc.rangeAt(functionToken.pos, Utf8); - functionRange.start.character = 0; - - final text:String = doc.getText(functionRange); - final whitespace:EReg = ~/^([ \t]+)/; - if (!whitespace.match(text)) - return ""; - - return whitespace.matched(1); - } - - function copyParentFunctionParameters(functionToken:TokenTree, text:String, rangeIdents:Array):Array { - var paramterList:Null = functionToken.access().firstChild().isCIdent().firstOf(POpen).token; - // anon function - if (paramterList == null) - paramterList = functionToken.access().firstOf(POpen).token; - - if (paramterList == null || paramterList.children == null) - return []; - - final newFuncParameter:Array = []; - for (child in paramterList.children) { - switch child.tok { - case Const(CIdent(s)): - checkAndAddIdentifier(child, s, text, rangeIdents, newFuncParameter); - case Question: - final firstChild:Null = child.getFirstChild(); - if (firstChild == null) - continue; - switch firstChild.tok { - case Const(CString(s)): - checkAndAddIdentifier(child, s, text, rangeIdents, newFuncParameter); - default: - } - case Dollar(s): - if (!rangeIdents.contains("$" + s)) - continue; - newFuncParameter.push({ - call: s, - param: varToString(child) - }); - case PClose: - return newFuncParameter; - default: - } - } - - return newFuncParameter; - } - - function checkAndAddIdentifier(token:TokenTree, identifier:String, text:String, rangeIdents:Array, newFuncParameter:Array) { - if (rangeIdents.contains(identifier)) - newFuncParameter.push({ - call: identifier, - param: varToString(token) - }); - if (text.contains("$" + identifier)) - newFuncParameter.push({ - call: identifier, - param: varToString(token) - }); - } - - function localVarsToParameter(varTokens:Array, text:String, rangeIdents:Array):Array { - final newFuncParameter:Array = []; - - for (varToken in varTokens) { - if (varToken.children == null) { - continue; - } - // TODO handle multiple vars - for (child in varToken.children) { - switch child.tok { - case Const(CIdent(s)): - checkAndAddIdentifier(child, s, text, rangeIdents, newFuncParameter); - case Dollar(s): - if (!rangeIdents.contains("$" + s)) - continue; - newFuncParameter.push({ - call: s, - param: varToString(child) - }); - default: - continue; - } - } - } - return newFuncParameter; - } - - function varToString(token:TokenTree):String { - var result:String = token.toString(); - if (token.children == null) - return result; - for (child in token.children) { - switch child.tok { - case Kwd(_): - result += varToString(child); - case Const(_): - result += varToString(child); - case Dot: - result += varToString(child); - case DblDot: - result += varToString(child); - case Arrow: - result += varToString(child); - case Dollar(_): - result += varToString(child); - case Binop(OpLt): - result += ltGtToString(child); - default: - return result; - } - } - return result; - } - - function ltGtToString(token:TokenTree):String { - var result:String = 
token.toString(); - if (token.children == null) - return result; - for (child in token.children) { - switch child.tok { - case Binop(OpGt): - result += child.toString(); - break; - default: - result += ltGtToString(child); - } - } - return result; - } -} - -private typedef NewFunctionParameter = { - final call:String; - final param:String; -} diff --git a/src/haxeLanguageServer/features/haxe/codeAction/ExtractTypeFeature.hx b/src/haxeLanguageServer/features/haxe/codeAction/ExtractTypeFeature.hx deleted file mode 100644 index 8387e45c..00000000 --- a/src/haxeLanguageServer/features/haxe/codeAction/ExtractTypeFeature.hx +++ /dev/null @@ -1,181 +0,0 @@ -package haxeLanguageServer.features.haxe.codeAction; - -import haxe.io.Path; -import haxeLanguageServer.features.haxe.codeAction.CodeActionFeature.CodeActionContributor; -import haxeLanguageServer.helper.WorkspaceEditHelper; -import haxeLanguageServer.tokentree.TokenTreeManager; -import languageServerProtocol.Types.CodeAction; -import languageServerProtocol.Types.CodeActionKind; -import languageServerProtocol.Types.CreateFile; -import languageServerProtocol.Types.TextDocumentEdit; -import languageServerProtocol.Types.WorkspaceEdit; -import sys.FileSystem; -import tokentree.TokenTree; -import tokentree.utils.TokenTreeCheckUtils; - -using tokentree.TokenTreeAccessHelper; - -class ExtractTypeFeature implements CodeActionContributor { - final context:Context; - - public function new(context:Context) { - this.context = context; - } - - public function createCodeActions(params:CodeActionParams):Array { - if ((params.context.only != null) && (!params.context.only.contains(RefactorExtract))) { - return []; - } - final uri = params.textDocument.uri; - final doc = context.documents.getHaxe(uri); - if (doc == null) { - return []; - } - final tokens = doc.tokens; - if (tokens == null) { - return []; - } - return try { - final fsPath:FsPath = uri.toFsPath(); - final path = new Path(fsPath.toString()); - - final types:Array = tokens.tree.filterCallback(function(token:TokenTree, index:Int):FilterResult { - switch token.tok { - case Kwd(KwdClass), Kwd(KwdInterface), Kwd(KwdEnum), Kwd(KwdAbstract), Kwd(KwdTypedef): - return FoundSkipSubtree; - default: - } - return GoDeeper; - }); - final lastImport:Null = getLastImportToken(tokens.tree); - if (isInsideConditional(lastImport)) - return []; - - // copy all imports from current file - // TODO reduce imports - final fileHeader = copyImports(doc, tokens, path.file, lastImport); - - final actions = []; - for (type in types) { - if (isInsideConditional(type)) { - // TODO support types inside conditionals - continue; - } - final nameTok:Null = type.access().firstChild().isCIdent().token; - if (nameTok == null) - continue; - - final name:String = nameTok.toString(); - if (name == path.file || path.dir == null) - continue; - - final newFileName:String = Path.join([path.dir, name + ".hx"]); - if (FileSystem.exists(newFileName)) - continue; - - final pos = tokens.getTreePos(type); - final docComment:Null = TokenTreeCheckUtils.getDocComment(type); - if (docComment != null) { - // expand pos.min to capture doc comment - pos.min = tokens.getPos(docComment).min; - } - final typeRange = doc.rangeAt(pos, Utf8); - if (params.range.intersection(typeRange) == null) { - // no overlap between selection / cursor pos and Haxe type - continue; - } - - // remove code from current file - final removeOld:TextDocumentEdit = WorkspaceEditHelper.textDocumentEdit(uri, [WorkspaceEditHelper.removeText(typeRange)]); - - // create new file - 
final newUri:DocumentUri = new FsPath(newFileName).toUri(); - final createFile:CreateFile = WorkspaceEditHelper.createNewFile(newUri, false, true); - - // copy file header, type and doc comment into new file - final addNewType:TextDocumentEdit = WorkspaceEditHelper.textDocumentEdit(newUri, [ - WorkspaceEditHelper.insertText(doc.positionAt(0), fileHeader + doc.getText(typeRange)) - ]); - - // TODO edits in files that use type - - final edit:WorkspaceEdit = { - documentChanges: [removeOld, createFile, addNewType] - }; - - actions.push({ - title: 'Extract $name to a new file', - kind: RefactorExtract, - edit: edit - }); - } - actions; - } catch (e) { - []; - } - } - - function copyImports(doc:HaxeDocument, tokens:TokenTreeManager, fileName:String, lastImport:Null):String { - if (lastImport == null) - return ""; - - final pos = tokens.getTreePos(lastImport); - pos.min = 0; - - final range = doc.rangeAt(pos, Utf8); - range.end.line++; - range.end.character = 0; - final fileHeader:String = doc.getText(range); - - var pack:Null = null; - tokens.tree.filterCallback(function(token:TokenTree, index:Int):FilterResult { - switch token.tok { - case Kwd(KwdPackage): - pack = token; - return SkipSubtree; - default: - return SkipSubtree; - } - }); - if (pack == null) - return fileHeader + "\n"; - - var packText:String = doc.getText(doc.rangeAt(tokens.getTreePos(pack), Utf8)); - packText = packText.replace("package ", ""); - packText = packText.replace(";", "").trim(); - if (packText.length <= 0) - packText = '${fileName}'; - else - packText += '.${fileName}'; - - return fileHeader + 'import $packText;\n\n'; - } - - function getLastImportToken(tree:TokenTree):Null { - final imports:Array = tree.filterCallback(function(token:TokenTree, index:Int):FilterResult { - switch token.tok { - case Kwd(KwdImport), Kwd(KwdUsing): - return FoundSkipSubtree; - default: - } - return GoDeeper; - }); - return imports.pop(); - } - - function isInsideConditional(token:Null):Bool { - if (token == null) - return false; - - var parent:Null = token.parent; - while (parent != null && parent.tok != null) { - switch parent.tok { - case Sharp(_): - return true; - default: - } - parent = parent.parent; - } - return false; - } -} diff --git a/src/haxeLanguageServer/features/haxe/refactoring/EditDoc.hx b/src/haxeLanguageServer/features/haxe/refactoring/EditDoc.hx new file mode 100644 index 00000000..f12d7fdb --- /dev/null +++ b/src/haxeLanguageServer/features/haxe/refactoring/EditDoc.hx @@ -0,0 +1,196 @@ +package haxeLanguageServer.features.haxe.refactoring; + +import haxe.extern.EitherType; +import haxeLanguageServer.helper.FormatterHelper; +import languageServerProtocol.Types.CreateFile; +import languageServerProtocol.Types.CreateFileKind; +import languageServerProtocol.Types.DeleteFile; +import languageServerProtocol.Types.DeleteFileKind; +import languageServerProtocol.Types.RenameFile; +import languageServerProtocol.Types.RenameFileKind; +import refactor.edits.IEditableDocument; +import sys.FileSystem; +import tokentree.TokenTreeBuilder; + +using Lambda; +using haxeLanguageServer.helper.PathHelper; + +class EditDoc implements IEditableDocument { + var list:EditList; + var filePath:FsPath; + var edits:Array; + var creates:Array; + var renames:Array; + var deletes:Array; + final context:Context; + final converter:Haxe3DisplayOffsetConverter; + + public function new(filePath:FsPath, list:EditList, context:Context, converter:Haxe3DisplayOffsetConverter) { + this.filePath = filePath; + this.list = list; + this.context = context; + 
this.converter = converter; + edits = []; + creates = []; + renames = []; + deletes = []; + } + + public function addChange(edit:refactor.edits.FileEdit) { + switch (edit) { + case CreateFile(newFilePath): + creates.push({ + kind: CreateFileKind.Create, + uri: new FsPath(newFilePath).toUri(), + options: { + overwrite: false, + ignoreIfExists: false + } + }); + case Move(newFilePath): + renames.push({ + kind: RenameFileKind.Kind, + oldUri: filePath.toUri(), + newUri: new FsPath(newFilePath).toUri(), + options: { + overwrite: false, + ignoreIfExists: false + } + }); + case DeleteFile(oldFilePath): + deletes.push({ + kind: DeleteFileKind.Delete, + uri: new FsPath(oldFilePath).toUri(), + options: { + recursive: false, + ignoreIfNotExists: false + } + }); + case ReplaceText(text, pos, f): + final range = posToRange(pos); + text = correctFirstLineIndent(f, text, range); + edits.push({range: range, newText: text}); + case InsertText(text, pos, f): + final range = posToRange(pos); + text = correctFirstLineIndent(f, text, range); + edits.push({range: posToRange(pos), newText: text}); + case RemoveText(pos): + edits.push({range: posToRange(pos), newText: ""}); + } + } + + function correctFirstLineIndent(f:refactor.edits.FormatType, text:String, range:Range):String { + switch (f) { + case NoFormat: + case Format(indentOffset, trimRight): + text = FormatterHelper.formatSnippet(filePath, text, TokenTreeEntryPoint.FieldLevel, indentOffset); + if (trimRight) { + text = text.rtrim(); + } + if (range.start.character != 0) { + var doc:Null = context.documents.getHaxe(filePath.toUri()); + if (doc != null) { + final beforeRange:Range = { + start: { + line: range.start.line, + character: 0 + }, + end: { + line: range.start.line, + character: range.start.character + } + }; + var beforeText = doc.getText(beforeRange); + if (beforeText.trim().length == 0) { + range.start.character = 0; + } else { + text = text.ltrim(); + } + } + } + } + return text; + } + + public function posToRange(pos:refactor.discover.IdentifierPos):Range { + if (!FileSystem.exists(filePath.toString())) { + var posNull:Position = {line: 0, character: 0}; + return {start: posNull, end: posNull}; + } + var doc:Null = context.documents.getHaxe(filePath.toUri()); + if (doc == null) { + // document currently not loaded -> load and find line number and character pos to build edit Range + var content:String = sys.io.File.getContent(filePath.toString()); + var lineSeparator:String = detectLineSeparator(content); + var separatorLength:Int = lineSeparator.length; + var lines:Array = content.split(lineSeparator); + var startPos:Null = null; + var endPos:Null = null; + var curLineStart:Int = 0; + var curLine:Int = 0; + + var startOffset:Int = converter.byteOffsetToCharacterOffset(content, pos.start); + var endOffset:Int = converter.byteOffsetToCharacterOffset(content, pos.end); + + for (line in lines) { + var length:Int = line.length + separatorLength; + if (startOffset > curLineStart + length) { + curLineStart += length; + curLine++; + continue; + } + if (startOffset >= curLineStart && startOffset < curLineStart + length) { + startPos = {line: curLine, character: startOffset - curLineStart}; + } + if (endOffset >= curLineStart && endOffset < curLineStart + length) { + endPos = {line: curLine, character: endOffset - curLineStart}; + break; + } + curLineStart += length; + curLine++; + } + if ((startPos == null) || (endPos == null)) { + throw '$filePath not found'; + } + return {start: cast startPos, end: cast endPos}; + } + return 
doc.rangeAt(converter.byteOffsetToCharacterOffset(doc.content, pos.start), converter.byteOffsetToCharacterOffset(doc.content, pos.end)); + } + + function detectLineSeparator(code:String):String { + var lineSeparator:String; + for (i in 0...code.length) { + var char = code.charAt(i); + if ((char == "\r") || (char == "\n")) { + lineSeparator = char; + if ((char == "\r") && (i + 1 < code.length)) { + char = code.charAt(i + 1); + if (char == "\n") { + lineSeparator += char; + } + } + return lineSeparator; + } + } + return "\n"; + } + + public function endEdits() { + for (create in creates) { + list.addEdit(create); + } + list.addEdit({ + textDocument: { + uri: filePath.toUri(), + version: null + }, + edits: edits + }); + for (rename in renames) { + list.addEdit(rename); + } + for (delete in deletes) { + list.addEdit(delete); + } + } +} diff --git a/src/haxeLanguageServer/features/haxe/refactoring/EditList.hx b/src/haxeLanguageServer/features/haxe/refactoring/EditList.hx new file mode 100644 index 00000000..6e7453be --- /dev/null +++ b/src/haxeLanguageServer/features/haxe/refactoring/EditList.hx @@ -0,0 +1,22 @@ +package haxeLanguageServer.features.haxe.refactoring; + +import haxe.extern.EitherType; +import languageServerProtocol.Types.CreateFile; +import languageServerProtocol.Types.DeleteFile; +import languageServerProtocol.Types.RenameFile; +import languageServerProtocol.Types.TextDocumentEdit; + +using Lambda; +using haxeLanguageServer.helper.PathHelper; + +class EditList { + public var documentChanges:Array>>>; + + public function new() { + documentChanges = []; + } + + public function addEdit(edit:EitherType>>) { + documentChanges.push(edit); + } +} diff --git a/src/haxeLanguageServer/features/haxe/refactoring/LanguageServerTyper.hx b/src/haxeLanguageServer/features/haxe/refactoring/LanguageServerTyper.hx new file mode 100644 index 00000000..57ee2336 --- /dev/null +++ b/src/haxeLanguageServer/features/haxe/refactoring/LanguageServerTyper.hx @@ -0,0 +1,137 @@ +package haxeLanguageServer.features.haxe.refactoring; + +import haxe.display.Display.DisplayMethods; +import haxe.display.Display.HoverDisplayItemOccurence; +import haxe.display.JsonModuleTypes.JsonType; +import haxeLanguageServer.protocol.DisplayPrinter; +import js.lib.Promise; +import refactor.typing.ITypeList; +import refactor.typing.ITyper; +import refactor.typing.TypeHintType; + +using Lambda; +using haxeLanguageServer.helper.PathHelper; + +class LanguageServerTyper implements ITyper { + final context:Context; + final printer:DisplayPrinter; + final fullPrinter:DisplayPrinter; + + public var typeList:Null; + + public function new(context:Context) { + this.context = context; + printer = new DisplayPrinter(); + fullPrinter = new DisplayPrinter(Always); + } + + public function resolveType(filePath:String, pos:Int):Promise> { + final params = { + file: new FsPath(filePath), + offset: pos, + wasAutoTriggered: true + }; + #if debug + trace('[refactor] requesting type info for $filePath@$pos'); + #end + var promise = new Promise(function(resolve:(value:Null) -> Void, reject) { + context.callHaxeMethod(DisplayMethods.Hover, params, null, function(hover) { + if (hover == null) { + #if debug + trace('[refactor] received no type info for $filePath@$pos'); + #end + resolve(null); + } else { + final typeHint:Null = buildTypeHint(hover, '$filePath@$pos'); + #if debug + trace('[refactor] received type info for $filePath@$pos: ${refactor.PrintHelper.typeHintToString(typeHint)}'); + #end + resolve(typeHint); + } + return null; + }, 
reject.handler());
+		});
+		return promise;
+	}
+
+	function buildType(jsonType:JsonType):Null {
+		switch (jsonType.kind) {
+			case TMono:
+				return UnknownType("?");
+			case TInst | TEnum | TType | TAbstract:
+				final path = jsonType.args;
+				final name = printer.printPath(path.path);
+				var fullPath = fullPrinter.printPath(path.path);
+				final typeName = path.path?.typeName;
+
+				if (typeName != null) {
+					if (typeName.startsWith("Abstract<")) {
+						fullPath = typeName.substring(9, typeName.length - 1);
+					}
+					if (typeName.startsWith("Class<")) {
+						fullPath = typeName.substring(6, typeName.length - 1);
+					}
+				}
+				final type = typeList?.getType(fullPath);
+				final params:Array = [];
+				if (path.params.length > 0) {
+					for (param in path.params) {
+						final paramType = buildType(param);
+						if (paramType == null) {
+							continue;
+						}
+						params.push(paramType);
+					}
+				}
+				if (type == null) {
+					return LibType(name, fullPath, params);
+				}
+				return ClasspathType(type, params);
+			case TDynamic:
+				final path = jsonType.args;
+				if (path == null) {
+					return LibType("Dynamic", "Dynamic", []);
+				}
+				final paramType = buildType(path);
+				if (paramType == null) {
+					return LibType("Dynamic", "Dynamic", []);
+				}
+				return LibType("Dynamic", "Dynamic", [paramType]);
+			case TAnonymous:
+				final path = jsonType.args;
+				final fields:Array = [];
+				for (field in path.fields) {
+					final fieldType = buildType(field.type);
+					if (fieldType != null) {
+						fields.push(NamedType(field.name, fieldType));
+					}
+				}
+				return StructType(fields);
+			case TFun:
+				final path = jsonType.args;
+				final args:Array = [];
+				for (arg in path.args) {
+					final argType = buildType(arg.t);
+					if (argType == null) {
+						continue;
+					}
+					args.push(argType);
+				}
+				final retVal = buildType(path.ret);
+				return FunctionType(args, retVal);
+		}
+		return null;
+	}
+
+	function buildTypeHint(item:HoverDisplayItemOccurence, location:String):Null {
+		if (typeList == null) {
+			return null;
+		}
+
+		var type = item?.item?.type;
+		if (type == null) {
+			return null;
+		}
+		return buildType(type);
+	}
+}
diff --git a/src/haxeLanguageServer/features/haxe/refactoring/RefactorCache.hx b/src/haxeLanguageServer/features/haxe/refactoring/RefactorCache.hx
new file mode 100644
index 00000000..4ea6d70f
--- /dev/null
+++ b/src/haxeLanguageServer/features/haxe/refactoring/RefactorCache.hx
@@ -0,0 +1,306 @@
+package haxeLanguageServer.features.haxe.refactoring;
+
+import haxe.Exception;
+import haxe.PosInfos;
+import haxe.display.Server.ServerMethods;
+import haxe.io.Path;
+import refactor.cache.IFileCache;
+import refactor.cache.MemCache;
+import refactor.discover.FileContentType;
+import refactor.discover.FileList;
+import refactor.discover.NameMap;
+import refactor.discover.TraverseSources;
+import refactor.discover.TypeList;
+import refactor.discover.UsageCollector;
+import refactor.discover.UsageContext;
+import refactor.refactor.CanRefactorContext;
+import refactor.refactor.RefactorContext;
+import refactor.rename.CanRenameContext;
+import refactor.rename.RenameContext;
+import sys.FileSystem;
+import tokentree.TokenTree;
+
+using haxeLanguageServer.helper.PathHelper;
+
+class RefactorCache {
+	final context:Context;
+
+	public final cache:IFileCache;
+	public final typer:LanguageServerTyper;
+	public final converter:Haxe3DisplayOffsetConverter;
+	public final usageCollector:UsageCollector;
+	public final nameMap:NameMap;
+	public final fileList:FileList;
+	public final typeList:TypeList;
+	public var classPaths:Array;
+
+	public function new(context:Context) {
+		this.context = context;
+
+		cache = new MemCache();
+		converter = new Haxe3DisplayOffsetConverter();
+		typer = new LanguageServerTyper(context);
+		usageCollector = new UsageCollector();
+		nameMap = new NameMap();
+		fileList = new FileList();
+		typeList = new TypeList();
+		classPaths = [];
+		initClassPaths();
+	}
+
+	function clearCache() {
+		cache.clear();
+		nameMap.clear();
+		fileList.clear();
+		typeList.clear();
+	}
+
+	public function initClassPaths() {
+		if (context.config.user.disableRefactorCache) {
+			return;
+		}
+		clearCache();
+		if (!context.haxeServer.supports(ServerMethods.Contexts)) {
+			initFromSetting();
+			return;
+		}
+		final currentWorkingDir = Path.removeTrailingSlashes(Sys.getCwd());
+		context.callHaxeMethod(ServerMethods.Contexts, null, null, function(contexts) {
+			classPaths = [];
+			for (ctx in contexts) {
+				if (ctx?.desc == "after_init_macros") {
+					for (path in ctx.classPaths) {
+						if (path == "") {
+							continue;
+						}
+						if (Path.isAbsolute(path)) {
+							if (!path.startsWith(currentWorkingDir)) {
+								continue;
+							}
+						}
+						classPaths.push(path);
+					}
+					break;
+				}
+			}
+			if (classPaths.length <= 0) {
+				initFromSetting();
+				return "";
+			}
+			trace("[RefactorCache] detected classpaths: " + classPaths);
+
+			init();
+			return "";
+		}, (err) -> initFromSetting());
+	}
+
+	function initFromSetting() {
+		classPaths = ["src", "source", "Source", "test", "tests"];
+		if (context.config.user.renameSourceFolders != null) {
+			classPaths = context.config.user.renameSourceFolders;
+		}
+		init();
+	}
+
+	function init() {
+		if (context.config.user.disableRefactorCache) {
+			return;
+		}
+		final onResolve:(?result:Null, ?debugInfo:Null) -> Void = context.startTimer("refactor/cache/init");
+
+		var endProgress = context.startProgress("Building Refactoring Cache…");
+
+		final usageContext:UsageContext = makeUsageContext();
+		typer.typeList = usageContext.typeList;
+
+		final workspacePath = context.workspacePath.normalize();
+		final srcFolders = classPaths.map(f -> Path.join([workspacePath.toString(), f]));
+
+		try {
+			TraverseSources.traverseSources(srcFolders, usageContext);
+			usageContext.usageCollector.updateImportHx(usageContext);
+		} catch (e:Exception) {
+			#if debug
+			trace("failed to updateFileCache: " + e);
+			#end
+		}
+
+		endProgress();
+		onResolve();
+	}
+
+	public function updateFileCache() {
+		init();
+	}
+
+	public function updateSingleFileCache(uri:String) {
+		if (context.config.user.disableRefactorCache) {
+			return;
+		}
+		final onResolve:(?result:Null, ?debugInfo:Null) -> Void = context.startTimer("refactor/cache/updateFile");
+
+		final usageContext:UsageContext = makeUsageContext();
+		usageContext.fileName = uri;
+		try {
+			TraverseSources.collectIdentifierData(usageContext);
+		} catch (e:Exception) {
+			#if debug
+			trace("failed to updateSingleFileCache: " + e);
+			#end
+		}
+		onResolve();
+	}
+
+	public function invalidateFile(uri:String) {
+		if (context.config.user.disableRefactorCache) {
+			return;
+		}
+		final onResolve:(?result:Null, ?debugInfo:Null) -> Void = context.startTimer("refactor/cache/invalidateFile");
+
+		cache.invalidateFile(uri, nameMap, typeList);
+		fileList.removeFile(uri);
+
+		if (fileList.wasRecentlyRenamed(uri)) {
+			// delay for (potentially) recently renamed files, because vscode sends us
+			// a notification of edits in that file but not when it gets renamed.
+			// so in case of a type rename we don't want to end up having the renamed type twice:
+			// once for `pack.NewtypeName` and once for `pack.OldTypeName.NewTypeName`
+			// (helps solve renaming FlxSprite -> FlxCola -> FlxSprite)
+			haxe.Timer.delay(() -> {
+				if (FileSystem.exists(uri)) {
+					updateSingleFileCache(uri);
+				}
+			}, 250);
+			onResolve();
+			return;
+		}
+
+		if (FileSystem.exists(uri)) {
+			updateSingleFileCache(uri);
+		}
+		onResolve();
+	}
+
+	public function makeUsageContext():UsageContext {
+		return {
+			fileReader: readFile,
+			fileName: "",
+			file: null,
+			usageCollector: usageCollector,
+			nameMap: nameMap,
+			fileList: fileList,
+			typeList: typeList,
+			type: null,
+			cache: cache
+		};
+	}
+
+	public function makeCanRenameContext(doc:HaxeDocument, filePath:FsPath, position:Position):CanRenameContext {
+		return {
+			nameMap: nameMap,
+			fileList: fileList,
+			typeList: typeList,
+			what: {
+				fileName: filePath.toString(),
+				toName: "",
+				pos: converter.characterOffsetToByteOffset(doc.content, doc.offsetAt(position))
+			},
+			verboseLog: function(text:String, ?pos:PosInfos) {
+				#if debug
+				trace('[canRename] $text');
+				#end
+			},
+
+			typer: typer,
+			fileReader: readFile,
+			converter: converter.byteOffsetToCharacterOffset,
+		};
+	}
+
+	public function makeRenameContext(doc:HaxeDocument, filePath:FsPath, position:Position, newName:String, editList:EditList):RenameContext {
+		return {
+			nameMap: nameMap,
+			fileList: fileList,
+			typeList: typeList,
+			what: {
+				fileName: filePath.toString(),
+				toName: newName,
+				pos: converter.characterOffsetToByteOffset(doc.content, doc.offsetAt(position))
+			},
+			forRealExecute: true,
+			docFactory: (filePath:String) -> new EditDoc(new FsPath(filePath), editList, context, converter),
+			verboseLog: function(text:String, ?pos:PosInfos) {
+				#if debug
+				trace('[rename] $text');
+				#end
+			},
+
+			typer: typer,
+			fileReader: readFile,
+			converter: converter.byteOffsetToCharacterOffset,
+		};
+	}
+
+	public function makeCanRefactorContext(doc:Null, range:Range):Null {
+		if (doc == null) {
+			return null;
+		}
+		return {
+			nameMap: nameMap,
+			fileList: fileList,
+			typeList: typeList,
+			what: {
+				fileName: doc.uri.toFsPath().toString(),
+				posStart: converter.characterOffsetToByteOffset(doc.content, doc.offsetAt(range.start)),
+				posEnd: converter.characterOffsetToByteOffset(doc.content, doc.offsetAt(range.end))
+			},
+			verboseLog: function(text:String, ?pos:PosInfos) {
+				#if debug
+				trace('[Refactor] $text');
+				#end
+			},
+			typer: typer,
+			fileReader: readFile,
+			converter: converter.byteOffsetToCharacterOffset,
+		};
+	}
+
+	public function makeRefactorContext(doc:Null, range:Range, editList:EditList):Null {
+		if (doc == null) {
+			return null;
+		}
+		return {
+			nameMap: nameMap,
+			fileList: fileList,
+			typeList: typeList,
+			what: {
+				fileName: doc.uri.toFsPath().toString(),
+				posStart: converter.characterOffsetToByteOffset(doc.content, doc.offsetAt(range.start)),
+				posEnd: converter.characterOffsetToByteOffset(doc.content, doc.offsetAt(range.end))
+			},
+			verboseLog: function(text:String, ?pos:PosInfos) {
+				#if debug
+				trace('[refactor] $text');
+				#end
+			},
+			typer: typer,
+			fileReader: readFile,
+			forRealExecute: true,
+			docFactory: (filePath:String) -> new EditDoc(new FsPath(filePath), editList, context, converter),
+			converter: converter.byteOffsetToCharacterOffset,
+		};
+	}
+
+	function readFile(path:String):FileContentType {
+		var fsPath = new FsPath(path);
+		var doc:Null = context.documents.getHaxe(fsPath.toUri());
+		if (doc == null) {
+			return simpleFileReader(path);
+		}
+		var root:Null = doc?.tokens?.tree;
+		if (root != null) {
+			return Token(root, doc.content);
+		}
+		return Text(doc.content);
+	}
+}
diff --git a/src/haxeLanguageServer/helper/FormatterHelper.hx b/src/haxeLanguageServer/helper/FormatterHelper.hx
index 26bb6085..3457bae7 100644
--- a/src/haxeLanguageServer/helper/FormatterHelper.hx
+++ b/src/haxeLanguageServer/helper/FormatterHelper.hx
@@ -23,4 +23,15 @@ class FormatterHelper {
 		}
 		return code;
 	}
+
+	public static function formatSnippet(path:FsPath, code:String, entryPoint:TokenTreeEntryPoint, ?indentOffset:Int):String {
+		final config = Formatter.loadConfig(path.toString());
+		switch Formatter.format(Code(code, Snippet), config, null, entryPoint, indentOffset) {
+			case Success(formattedCode):
+				return formattedCode;
+			case Failure(_):
+			case Disabled:
+		}
+		return code;
+	}
 }
diff --git a/test/haxeLanguageServer/helper/IdentifierHelperTest.hx b/test/haxeLanguageServer/helper/IdentifierHelperTest.hx
index 757890d3..8f369d56 100644
--- a/test/haxeLanguageServer/helper/IdentifierHelperTest.hx
+++ b/test/haxeLanguageServer/helper/IdentifierHelperTest.hx
@@ -43,7 +43,7 @@ class IdentifierHelperTest extends Test {
 		function assert(expected, original, ?posInfos)
 			Assert.equals(expected, addNamesToSignatureType(original), posInfos);
 
-		function assertUnchanged(expectedAndOriginal, ?posInfos)
+		function assertUnchanged(expectedAndOriginal:Any, ?posInfos)
 			assert(expectedAndOriginal, expectedAndOriginal, posInfos);
 
 		assertUnchanged("String");
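Reviewer note, not part of the patch above: a minimal Haxe sketch of how a feature class might consume the shared RefactorCache introduced by this diff, assuming the constructor-injection pattern the patch uses for features. The class name ExampleRefactorClient and the method canRefactorSelection are hypothetical; only makeCanRefactorContext and its null-on-missing-document behaviour come from the diff itself.

// Hypothetical illustration only — not part of this diff.
import haxeLanguageServer.Context;
import haxeLanguageServer.features.haxe.refactoring.RefactorCache;

class ExampleRefactorClient {
	final context:Context;
	final refactorCache:RefactorCache;

	// The shared cache is passed in by Context, mirroring how the patch wires features to it.
	public function new(context:Context, refactorCache:RefactorCache) {
		this.context = context;
		this.refactorCache = refactorCache;
	}

	// Ask the cache for a CanRefactorContext covering the current selection;
	// makeCanRefactorContext returns null when no document is available.
	public function canRefactorSelection(doc:Null<HaxeDocument>, range:Range):Bool {
		return refactorCache.makeCanRefactorContext(doc, range) != null;
	}
}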