From 9775798f9645ab599c80f22b25f9dcd99848020a Mon Sep 17 00:00:00 2001
From: Abel Braaksma
Date: Thu, 23 May 2024 22:59:35 +0200
Subject: [PATCH] temp

---
 src/Compiler/Driver/CompilerDiagnostics.fs |  3 ++
 src/Compiler/Interactive/fsi.fs            | 13 +++++---
 src/Compiler/Service/ServiceLexing.fs      |  6 ++--
 src/FSharp.Core/prim-types-prelude.fs      |  3 ++
 .../FSharp.Core/PrimTypes.fs               | 26 ++++++++--------
 .../LanguageService/Tokenizer.fs           | 30 +++++++++----------
 6 files changed, 47 insertions(+), 34 deletions(-)

diff --git a/src/Compiler/Driver/CompilerDiagnostics.fs b/src/Compiler/Driver/CompilerDiagnostics.fs
index 823eb4ccbf15..6c912ddc57ae 100644
--- a/src/Compiler/Driver/CompilerDiagnostics.fs
+++ b/src/Compiler/Driver/CompilerDiagnostics.fs
@@ -394,6 +394,9 @@ type PhasedDiagnostic with
         | 3579 -> false // alwaysUseTypedStringInterpolation - off by default
         | 3582 -> false // infoIfFunctionShadowsUnionCase - off by default
         | 3570 -> false // tcAmbiguousDiscardDotLambda - off by default
+        | 1104 -> false // lexhlpIdentifiersContainingAtSymbolReserved
+        | 1105 -> false // lexhlpIdentifierReserved
+        | 9999 -> false // mlCompatKeyword
         | _ ->
             match x.Exception with
             | DiagnosticEnabledWithLanguageFeature(_, _, _, enabled) -> enabled
diff --git a/src/Compiler/Interactive/fsi.fs b/src/Compiler/Interactive/fsi.fs
index d932b95eda1d..c013a3e430a0 100644
--- a/src/Compiler/Interactive/fsi.fs
+++ b/src/Compiler/Interactive/fsi.fs
@@ -3584,7 +3584,7 @@ type FsiStdinLexerProvider
         tokenizer

     // Create a new lexer to read stdin
-    member _.CreateStdinLexer diagnosticsLogger =
+    member _.CreateStdinLexer (diagnosticsLogger: DiagnosticsLogger) =
         let lexbuf =
             match fsiConsoleInput.TryGetConsole() with
             | Some console when fsiOptions.EnableConsoleKeyProcessing && not fsiOptions.UseServerPrompt ->
@@ -3595,20 +3595,24 @@ type FsiStdinLexerProvider
             | _ -> LexbufFromLineReader fsiStdinSyphon (fun () -> fsiConsoleInput.In.ReadLine() |> removeZeroCharsFromString)

         fsiStdinSyphon.Reset()
+        fprintfn fsiConsoleOutput.Out "...In CreateStdinLexer"
         CreateLexerForLexBuffer(stdinMockFileName, lexbuf, diagnosticsLogger)

     // Create a new lexer to read an "included" script file
-    member _.CreateIncludedScriptLexer(sourceFileName, reader, diagnosticsLogger) =
+    member _.CreateIncludedScriptLexer(sourceFileName, reader, (diagnosticsLogger: DiagnosticsLogger)) =
         let lexbuf =
             UnicodeLexing.StreamReaderAsLexbuf(true, tcConfigB.langVersion, tcConfigB.strictIndentation, reader)

+        fprintfn fsiConsoleOutput.Out "...In CreateIncludedScriptLexer"
+        diagnosticsLogger.DiagnosticSink({ Exception = exn "foo bar"; Phase = BuildPhase.DefaultPhase}, FSharpDiagnosticSeverity.Warning)
         CreateLexerForLexBuffer(sourceFileName, lexbuf, diagnosticsLogger)

     // Create a new lexer to read a string
-    member _.CreateStringLexer(sourceFileName, source, diagnosticsLogger) =
+    member _.CreateStringLexer(sourceFileName, source, (diagnosticsLogger: DiagnosticsLogger)) =
         let lexbuf =
             UnicodeLexing.StringAsLexbuf(true, tcConfigB.langVersion, tcConfigB.strictIndentation, source)
-
+
+        fprintfn fsiConsoleOutput.Out "...In CreateStringLexer"
         CreateLexerForLexBuffer(sourceFileName, lexbuf, diagnosticsLogger)

     member _.ConsoleInput = fsiConsoleInput
@@ -3790,6 +3794,7 @@ type FsiInteractionProcessor
             istate, Completed None

         | ParsedHashDirective("nowarn", ParsedHashDirectiveArguments numbers, m) ->
+            fprintfn fsiConsoleOutput.Out "...Parsing #nowarn"
             List.iter (fun (d: string) -> tcConfigB.TurnWarningOff(m, d)) numbers
             istate, Completed None

diff --git a/src/Compiler/Service/ServiceLexing.fs b/src/Compiler/Service/ServiceLexing.fs
index 9abe2199dc00..2a3c7677266e 100644
--- a/src/Compiler/Service/ServiceLexing.fs
+++ b/src/Compiler/Service/ServiceLexing.fs
@@ -428,6 +428,7 @@ module internal TokenClassifications =
         | CHAR _ -> (FSharpTokenColorKind.String, FSharpTokenCharKind.String, FSharpTokenTriggerClass.None)

         | EOF _ -> failwith "tokenInfo"
+        |> fun (_x, y, z) -> (FSharpTokenColorKind.String, y, z)

 module internal TestExpose =
     let TokenInfo tok = TokenClassifications.tokenInfo tok
@@ -1205,8 +1206,9 @@ type FSharpLineTokenizer(lexbuf: UnicodeLexing.Lexbuf, maxLength: int option, fi

         tokenDataOption, lexintFinal

-    static member ColorStateOfLexState(lexState: FSharpTokenizerLexState) =
-        LexerStateEncoding.colorStateOfLexState lexState
+    static member ColorStateOfLexState(_lexState: FSharpTokenizerLexState) =
+        //LexerStateEncoding.colorStateOfLexState lexState
+        FSharpTokenizerColorState.String

     static member LexStateOfColorState(colorState: FSharpTokenizerColorState) =
         {
diff --git a/src/FSharp.Core/prim-types-prelude.fs b/src/FSharp.Core/prim-types-prelude.fs
index 9a002450382d..8edf1f523564 100644
--- a/src/FSharp.Core/prim-types-prelude.fs
+++ b/src/FSharp.Core/prim-types-prelude.fs
@@ -2,6 +2,9 @@

 namespace Microsoft.FSharp.Core

+    #nowarn "42"
+    #nowarn "44"
+
     //-------------------------------------------------------------------------
     // Basic type abbreviations

diff --git a/tests/FSharp.Core.UnitTests/FSharp.Core/PrimTypes.fs b/tests/FSharp.Core.UnitTests/FSharp.Core/PrimTypes.fs
index 42cc1ff1b27e..31dae4e01191 100644
--- a/tests/FSharp.Core.UnitTests/FSharp.Core/PrimTypes.fs
+++ b/tests/FSharp.Core.UnitTests/FSharp.Core/PrimTypes.fs
@@ -1150,13 +1150,13 @@ module RangeTests =
                 | :? Value as b' -> compare a.Value b'.Value
                 | _ -> failwith "unsupported"

-    [<Fact>]
-    let ``Range.UserDefined``() =
-        // using a user defined type, as opposed to a primitive, as the base type
-        let rangeList = [Value 0 .. Step 2 .. Value 2]
-        let rangeArray = [Value 0 .. Step 2 .. Value 2]
-        Assert.AreEqual(2, rangeList.Length)
-        Assert.AreEqual(2, rangeArray.Length)
+    //[<Fact>]
+    //let ``Range.UserDefined``() =
+    //    // using a user defined type, as opposed to a primitive, as the base type
+    //    let rangeList = [Value 0 .. Step 2 .. Value 2]
+    //    let rangeArray = [Value 0 .. Step 2 .. Value 2]
+    //    Assert.AreEqual(2, rangeList.Length)
+    //    Assert.AreEqual(2, rangeArray.Length)

     /// These tests' arguments are intentionally _not_ inlined
@@ -1243,12 +1243,12 @@ module RangeTests =
         let zero, one, min0, max0 = System.UIntPtr 0u, System.UIntPtr 1u, System.UIntPtr System.UInt64.MinValue, System.UIntPtr System.UInt64.MaxValue
         RangeTestsHelpers.unsigned zero one min0 max0

-    [<Fact>]
-    let ``Range.UserDefined``() =
-        let rangeList = [Value 0 .. Step 2 .. Value 2]
-        let rangeArray = [Value 0 .. Step 2 .. Value 2]
-        Assert.AreEqual(2, rangeList.Length)
-        ()
+    //[<Fact>]
+    //let ``Range.UserDefined``() =
+    //    let rangeList = [Value 0 .. Step 2 .. Value 2]
+    //    let rangeArray = [Value 0 .. Step 2 .. Value 2]
+    //    Assert.AreEqual(2, rangeList.Length)
+    //    ()

 open NonStructuralComparison
diff --git a/vsintegration/src/FSharp.Editor/LanguageService/Tokenizer.fs b/vsintegration/src/FSharp.Editor/LanguageService/Tokenizer.fs
index 5b873129f22a..513fcd993698 100644
--- a/vsintegration/src/FSharp.Editor/LanguageService/Tokenizer.fs
+++ b/vsintegration/src/FSharp.Editor/LanguageService/Tokenizer.fs
@@ -493,19 +493,19 @@ module internal Tokenizer =

     let compilerTokenToRoslynToken (colorKind: FSharpTokenColorKind) : string =
         match colorKind with
-        | FSharpTokenColorKind.Comment -> ClassificationTypeNames.Comment
-        | FSharpTokenColorKind.Identifier -> ClassificationTypeNames.Identifier
-        | FSharpTokenColorKind.Keyword -> ClassificationTypeNames.Keyword
+        | FSharpTokenColorKind.Comment -> ClassificationTypeNames.StringLiteral
+        | FSharpTokenColorKind.Identifier -> ClassificationTypeNames.StringLiteral
+        | FSharpTokenColorKind.Keyword -> ClassificationTypeNames.StringLiteral
         | FSharpTokenColorKind.String -> ClassificationTypeNames.StringLiteral
-        | FSharpTokenColorKind.Text -> ClassificationTypeNames.Text
-        | FSharpTokenColorKind.UpperIdentifier -> ClassificationTypeNames.Identifier
-        | FSharpTokenColorKind.Number -> ClassificationTypeNames.NumericLiteral
-        | FSharpTokenColorKind.InactiveCode -> ClassificationTypeNames.ExcludedCode
-        | FSharpTokenColorKind.PreprocessorKeyword -> ClassificationTypeNames.PreprocessorKeyword
-        | FSharpTokenColorKind.Operator -> ClassificationTypeNames.Operator
-        | FSharpTokenColorKind.Punctuation -> ClassificationTypeNames.Punctuation
+        | FSharpTokenColorKind.Text -> ClassificationTypeNames.StringLiteral
+        | FSharpTokenColorKind.UpperIdentifier -> ClassificationTypeNames.StringLiteral
+        | FSharpTokenColorKind.Number -> ClassificationTypeNames.StringLiteral
+        | FSharpTokenColorKind.InactiveCode -> ClassificationTypeNames.StringLiteral
+        | FSharpTokenColorKind.PreprocessorKeyword -> ClassificationTypeNames.StringLiteral
+        | FSharpTokenColorKind.Operator -> ClassificationTypeNames.StringLiteral
+        | FSharpTokenColorKind.Punctuation -> ClassificationTypeNames.StringLiteral
         | FSharpTokenColorKind.Default
-        | _ -> ClassificationTypeNames.Text
+        | _ -> ClassificationTypeNames.StringLiteral

     let private scanSourceLine
         (
@@ -545,8 +545,8 @@
                 {
                     LeftColumn = info.LeftColumn
                     RightColumn = info.RightColumn - 2
-                    ColorClass = FSharpTokenColorKind.Number
-                    CharClass = FSharpTokenCharKind.Literal
+                    ColorClass = FSharpTokenColorKind.String
+                    CharClass = FSharpTokenCharKind.String
                     FSharpTokenTriggerClass = info.FSharpTokenTriggerClass
                     Tag = info.Tag
                     TokenName = "INT32"
@@ -560,8 +560,8 @@
                 {
                     LeftColumn = info.RightColumn - 1
                     RightColumn = info.RightColumn
-                    ColorClass = FSharpTokenColorKind.Operator
-                    CharClass = FSharpTokenCharKind.Operator
+                    ColorClass = FSharpTokenColorKind.String
+                    CharClass = FSharpTokenCharKind.String
                     FSharpTokenTriggerClass = info.FSharpTokenTriggerClass
                     Tag = FSharpTokenTag.DOT_DOT
                     TokenName = "DOT_DOT"