Extension config #18219

Merged — 9 commits, Apr 30, 2025
Changes from 1 commit
Run fantomas
abonie committed Apr 30, 2025
commit 13b59093ce663d78ea9043ba7619a64cbf7c9748
8 changes: 2 additions & 6 deletions src/FSharp.Compiler.LanguageServer/FSharpLanguageServer.fs
@@ -89,12 +89,8 @@ type FSharpLanguageServer
FSharpLanguageServer.Create(LspLogger System.Diagnostics.Trace.TraceInformation, initialWorkspace, addExtraHandlers, config)

static member Create
(
logger: ILspLogger,
initialWorkspace,
?addExtraHandlers: Action<IServiceCollection>,
?config: FSharpLanguageServerConfig
) =
(logger: ILspLogger, initialWorkspace, ?addExtraHandlers: Action<IServiceCollection>, ?config: FSharpLanguageServerConfig)
=

let struct (clientStream, serverStream) = FullDuplexStream.CreatePair()

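For orientation (not part of the diff), here is a minimal, hypothetical sketch of a caller of the reformatted `Create` overload. Only the signature and the `LspLogger System.Diagnostics.Trace.TraceInformation` expression come from the code above; the namespace opens, `initialWorkspace`, and the optional config value are assumptions:

```fsharp
open System
open Microsoft.Extensions.DependencyInjection
// Assumption: FSharpLanguageServer and FSharpLanguageServerConfig come from the
// FSharp.Compiler.LanguageServer project shown in the diff and are opened by the caller.

let startServer initialWorkspace (config: FSharpLanguageServerConfig option) =
    // Hypothetical extra-handler registration; additional LSP handlers would be
    // added to the service collection here.
    let extraHandlers = Action<IServiceCollection>(fun _services -> ())

    FSharpLanguageServer.Create(
        LspLogger System.Diagnostics.Trace.TraceInformation,
        initialWorkspace,
        extraHandlers,
        ?config = config
    )
```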
288 changes: 140 additions & 148 deletions vsintegration/src/FSharp.Editor/Classification/ClassificationService.fs
@@ -152,164 +152,156 @@ type internal FSharpClassificationService [<ImportingConstructor>] () =
member _.AddLexicalClassifications(_: SourceText, _: TextSpan, _: List<ClassifiedSpan>, _: CancellationToken) = ()

member _.AddSyntacticClassificationsAsync
(
document: Document,
textSpan: TextSpan,
result: List<ClassifiedSpan>,
cancellationToken: CancellationToken
) =
(document: Document, textSpan: TextSpan, result: List<ClassifiedSpan>, cancellationToken: CancellationToken)
=

if not (document |> shouldProduceClassification) then
System.Threading.Tasks.Task.CompletedTask
else

cancellableTask {
use _logBlock = Logger.LogBlock(LogEditorFunctionId.Classification_Syntactic)

let! cancellationToken = CancellableTask.getCancellationToken ()

let defines, langVersion, strictIndentation = document.GetFsharpParsingOptions()

let! sourceText = document.GetTextAsync(cancellationToken)

// For closed documents, only get classification for the text within the span.
// This may be inaccurate for multi-line tokens such as string literals, but this is ok for now
// as it's better than having to tokenize a big part of a file which in return will allocate a lot and hurt find all references performance.
let isOpenDocument = document.Project.Solution.Workspace.IsDocumentOpen document.Id

let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSyntacticClassifications, eventProps)

if not isOpenDocument then
let classifiedSpans =
getLexicalClassifications (document.FilePath, defines, sourceText, textSpan, cancellationToken)

result.AddRange(classifiedSpans)
else
Tokenizer.classifySpans (
document.Id,
sourceText,
textSpan,
Some(document.FilePath),
defines,
Some langVersion,
strictIndentation,
result,
cancellationToken
)
}
|> CancellableTask.startAsTask cancellationToken
else

cancellableTask {
use _logBlock = Logger.LogBlock(LogEditorFunctionId.Classification_Syntactic)

let! cancellationToken = CancellableTask.getCancellationToken ()

let defines, langVersion, strictIndentation = document.GetFsharpParsingOptions()

let! sourceText = document.GetTextAsync(cancellationToken)

// For closed documents, only get classification for the text within the span.
// This may be inaccurate for multi-line tokens such as string literals, but this is ok for now
// as it's better than having to tokenize a big part of a file which in return will allocate a lot and hurt find all references performance.
let isOpenDocument = document.Project.Solution.Workspace.IsDocumentOpen document.Id

let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSyntacticClassifications, eventProps)

if not isOpenDocument then
let classifiedSpans =
getLexicalClassifications (document.FilePath, defines, sourceText, textSpan, cancellationToken)

result.AddRange(classifiedSpans)
else
Tokenizer.classifySpans (
document.Id,
sourceText,
textSpan,
Some(document.FilePath),
defines,
Some langVersion,
strictIndentation,
result,
cancellationToken
)
}
|> CancellableTask.startAsTask cancellationToken

member _.AddSemanticClassificationsAsync
(
document: Document,
textSpan: TextSpan,
result: List<ClassifiedSpan>,
cancellationToken: CancellationToken
) =
(document: Document, textSpan: TextSpan, result: List<ClassifiedSpan>, cancellationToken: CancellationToken)
=

if not (document |> shouldProduceClassification) then
System.Threading.Tasks.Task.CompletedTask
else

cancellableTask {
use _logBlock = Logger.LogBlock(LogEditorFunctionId.Classification_Semantic)

let! sourceText = document.GetTextAsync(cancellationToken)

// If we are trying to get semantic classification for a document that is not open, get the results from the background and cache it.
// We do this for find all references when it is populating results.
// We cache it temporarily so we do not have to continuously call into the checker and perform a background operation.
let isOpenDocument = document.Project.Solution.Workspace.IsDocumentOpen document.Id

if not isOpenDocument then
match! unopenedDocumentsSemanticClassificationCache.TryGetValueAsync document with
| ValueSome classificationDataLookup ->
let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
"cacheHit", true
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSemanticClassifications, eventProps)

addSemanticClassificationByLookup sourceText textSpan classificationDataLookup result
| ValueNone ->
let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
"cacheHit", false
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSemanticClassifications, eventProps)

let! classificationData = document.GetFSharpSemanticClassificationAsync(nameof (FSharpClassificationService))

let classificationDataLookup = toSemanticClassificationLookup classificationData
do! unopenedDocumentsSemanticClassificationCache.SetAsync(document, classificationDataLookup)
addSemanticClassificationByLookup sourceText textSpan classificationDataLookup result
else

match! openedDocumentsSemanticClassificationCache.TryGetValueAsync document with
| ValueSome classificationDataLookup ->
let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
"cacheHit", true
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSemanticClassifications, eventProps)

addSemanticClassificationByLookup sourceText textSpan classificationDataLookup result
| ValueNone ->

let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
"cacheHit", false
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSemanticClassifications, eventProps)

let! _, checkResults = document.GetFSharpParseAndCheckResultsAsync(nameof (IFSharpClassificationService))

let targetRange =
RoslynHelpers.TextSpanToFSharpRange(document.FilePath, textSpan, sourceText)

let classificationData = checkResults.GetSemanticClassification(Some targetRange)

if classificationData.Length > 0 then
let classificationDataLookup = itemToSemanticClassificationLookup classificationData
else

cancellableTask {
use _logBlock = Logger.LogBlock(LogEditorFunctionId.Classification_Semantic)

let! sourceText = document.GetTextAsync(cancellationToken)

// If we are trying to get semantic classification for a document that is not open, get the results from the background and cache it.
// We do this for find all references when it is populating results.
// We cache it temporarily so we do not have to continuously call into the checker and perform a background operation.
let isOpenDocument = document.Project.Solution.Workspace.IsDocumentOpen document.Id

if not isOpenDocument then
match! unopenedDocumentsSemanticClassificationCache.TryGetValueAsync document with
| ValueSome classificationDataLookup ->
let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
"cacheHit", true
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSemanticClassifications, eventProps)

addSemanticClassificationByLookup sourceText textSpan classificationDataLookup result
| ValueNone ->
let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
"cacheHit", false
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSemanticClassifications, eventProps)

let! classificationData = document.GetFSharpSemanticClassificationAsync(nameof (FSharpClassificationService))

let classificationDataLookup = toSemanticClassificationLookup classificationData
do! unopenedDocumentsSemanticClassificationCache.SetAsync(document, classificationDataLookup)
addSemanticClassificationByLookup sourceText textSpan classificationDataLookup result
else

addSemanticClassification sourceText textSpan classificationData result
}
|> CancellableTask.ifCanceledReturn ()
|> CancellableTask.startAsTask cancellationToken
match! openedDocumentsSemanticClassificationCache.TryGetValueAsync document with
| ValueSome classificationDataLookup ->
let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
"cacheHit", true
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSemanticClassifications, eventProps)

addSemanticClassificationByLookup sourceText textSpan classificationDataLookup result
| ValueNone ->

let eventProps: (string * obj) array =
[|
"context.document.project.id", document.Project.Id.Id.ToString()
"context.document.id", document.Id.Id.ToString()
"isOpenDocument", isOpenDocument
"textSpanLength", textSpan.Length
"cacheHit", false
|]

use _eventDuration =
TelemetryReporter.ReportSingleEventWithDuration(TelemetryEvents.AddSemanticClassifications, eventProps)

let! _, checkResults = document.GetFSharpParseAndCheckResultsAsync(nameof (IFSharpClassificationService))

let targetRange =
RoslynHelpers.TextSpanToFSharpRange(document.FilePath, textSpan, sourceText)

let classificationData = checkResults.GetSemanticClassification(Some targetRange)

if classificationData.Length > 0 then
let classificationDataLookup = itemToSemanticClassificationLookup classificationData
do! unopenedDocumentsSemanticClassificationCache.SetAsync(document, classificationDataLookup)

addSemanticClassification sourceText textSpan classificationData result
}
|> CancellableTask.ifCanceledReturn ()
|> CancellableTask.startAsTask cancellationToken

// Do not perform classification if we don't have project options (#defines matter)
member _.AdjustStaleClassification(_: SourceText, classifiedSpan: ClassifiedSpan) : ClassifiedSpan = classifiedSpan
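As context (not part of the diff): both classification members above follow the same shape — build the work as a `cancellableTask`, optionally turn cancellation into a completed unit result, and only start it as a `Task` once the caller's `CancellationToken` is available. A minimal sketch of that shape; `cancellableTask` and the `CancellableTask` helpers are internal to this repository, and their signatures are assumed here from the way the diff uses them:

```fsharp
open System.Threading
open System.Threading.Tasks

// Sketch only: mirrors the control flow of AddSemanticClassificationsAsync above.
let addClassificationsAsync
    (shouldClassify: bool)
    (doWork: CancellationToken -> unit)
    (cancellationToken: CancellationToken)
    : Task =
    if not shouldClassify then
        Task.CompletedTask
    else
        cancellableTask {
            // Re-read the token inside the computation, as the code above does.
            let! cancellationToken = CancellableTask.getCancellationToken ()
            doWork cancellationToken
        }
        |> CancellableTask.ifCanceledReturn ()           // cancellation becomes a unit result
        |> CancellableTask.startAsTask cancellationToken
```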
@@ -151,14 +151,20 @@ type internal FSharpDocumentDiagnosticAnalyzer [<ImportingConstructor>] () =
interface IFSharpDocumentDiagnosticAnalyzer with

member _.AnalyzeSyntaxAsync(document: Document, cancellationToken: CancellationToken) : Task<ImmutableArray<Diagnostic>> =
if document.Project.IsFSharpMetadata || (not (document |> shouldProduceDiagnostics)) then
if
document.Project.IsFSharpMetadata
|| (not (document |> shouldProduceDiagnostics))
then
Task.FromResult ImmutableArray.Empty
else
FSharpDocumentDiagnosticAnalyzer.GetDiagnostics(document, DiagnosticsType.Syntax)
|> CancellableTask.start cancellationToken

member _.AnalyzeSemanticsAsync(document: Document, cancellationToken: CancellationToken) : Task<ImmutableArray<Diagnostic>> =
if document.Project.IsFSharpMiscellaneousOrMetadata && not document.IsFSharpScript || (not (document |> shouldProduceDiagnostics)) then
if
document.Project.IsFSharpMiscellaneousOrMetadata && not document.IsFSharpScript
|| (not (document |> shouldProduceDiagnostics))
then
Task.FromResult ImmutableArray.Empty
else
FSharpDocumentDiagnosticAnalyzer.GetDiagnostics(document, DiagnosticsType.Semantic)
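A small readability note on the reformatted conditions above (behaviour is unchanged): in F#, `&&` binds more tightly than `||`, so placing the line break before `||` keeps each `&&` group on its own line and mirrors how the expression already parses. A tiny self-contained illustration:

```fsharp
let a, b, c = true, false, true

// && binds tighter than ||, so both forms parse and evaluate identically.
let implicitGrouping = a && b || c   // parsed as (a && b) || c
let explicitGrouping = (a && b) || c

printfn "%b %b" implicitGrouping explicitGrouping   // prints: true true
```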