diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index ca3ff2030d..21b46482ca 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -140,7 +140,7 @@ flag cabal common cabal if flag(cabal) - build-depends: hls-cabal-plugin + build-depends: hls-cabal-plugin cpp-options: -Dhls_cabal library hls-cabal-plugin @@ -223,7 +223,7 @@ flag class common class if flag(class) - build-depends: hls-class-plugin + build-depends: hls-class-plugin cpp-options: -Dhls_class library hls-class-plugin @@ -287,7 +287,7 @@ flag callHierarchy common callHierarchy if flag(callHierarchy) - build-depends: hls-call-hierarchy-plugin + build-depends: hls-call-hierarchy-plugin cpp-options: -Dhls_callHierarchy library hls-call-hierarchy-plugin @@ -348,7 +348,7 @@ flag eval common eval if flag(eval) - build-depends: hls-eval-plugin + build-depends: hls-eval-plugin cpp-options: -Dhls_eval library hls-eval-plugin @@ -429,7 +429,7 @@ test-suite hls-eval-plugin-tests common importLens if flag(importLens) - build-depends: hls-explicit-imports-plugin + build-depends: hls-explicit-imports-plugin cpp-options: -Dhls_importLens flag importLens @@ -494,7 +494,7 @@ flag rename common rename if flag(rename) - build-depends: hls-rename-plugin + build-depends: hls-rename-plugin cpp-options: -Dhls_rename library hls-rename-plugin @@ -550,7 +550,7 @@ flag retrie common retrie if flag(retrie) - build-depends: hls-retrie-plugin + build-depends: hls-retrie-plugin cpp-options: -Dhls_retrie library hls-retrie-plugin @@ -615,7 +615,7 @@ flag hlint common hlint if flag(hlint) && (impl(ghc < 9.8.0) || flag(ignore-plugins-ghc-bounds)) - build-depends: hls-hlint-plugin + build-depends: hls-hlint-plugin cpp-options: -Dhls_hlint library hls-hlint-plugin @@ -695,7 +695,7 @@ flag stan common stan if flag(stan) && (impl(ghc > 8.8.1) && impl(ghc <= 9.2.3) || impl(ghc >= 9.4.0) && impl(ghc < 9.10.0)) - build-depends: hls-stan-plugin + build-depends: hls-stan-plugin cpp-options: -Dhls_stan library hls-stan-plugin @@ -769,7 +769,7 @@ flag moduleName common moduleName if flag(moduleName) - build-depends: hls-module-name-plugin + build-depends: hls-module-name-plugin cpp-options: -Dhls_moduleName library hls-module-name-plugin @@ -814,7 +814,7 @@ flag pragmas common pragmas if flag(pragmas) - build-depends: hls-pragmas-plugin + build-depends: hls-pragmas-plugin cpp-options: -Dhls_pragmas library hls-pragmas-plugin @@ -862,7 +862,7 @@ flag splice common splice if flag(splice) - build-depends: hls-splice-plugin + build-depends: hls-splice-plugin cpp-options: -Dhls_splice library hls-splice-plugin @@ -1040,7 +1040,7 @@ flag codeRange common codeRange if flag(codeRange) - build-depends: hls-code-range-plugin + build-depends: hls-code-range-plugin cpp-options: -Dhls_codeRange library hls-code-range-plugin @@ -1100,7 +1100,7 @@ flag changeTypeSignature common changeTypeSignature if flag(changeTypeSignature) - build-depends: hls-change-type-signature-plugin + build-depends: hls-change-type-signature-plugin cpp-options: -Dhls_changeTypeSignature library hls-change-type-signature-plugin @@ -1160,7 +1160,7 @@ flag gadt common gadt if flag(gadt) - build-depends: hls-gadt-plugin + build-depends: hls-gadt-plugin cpp-options: -Dhls_gadt library hls-gadt-plugin @@ -1213,7 +1213,7 @@ flag explicitFixity common explicitFixity if flag(explicitFixity) - build-depends: hls-explicit-fixity-plugin + build-depends: hls-explicit-fixity-plugin cpp-options: -DexplicitFixity library hls-explicit-fixity-plugin @@ -1260,7 +1260,7 @@ flag 
explicitFields common explicitFields if flag(explicitFields) - build-depends: hls-explicit-record-fields-plugin + build-depends: hls-explicit-record-fields-plugin cpp-options: -DexplicitFields library hls-explicit-record-fields-plugin @@ -1284,7 +1284,7 @@ library hls-explicit-record-fields-plugin if flag(pedantic) ghc-options: -Werror -Wwarn=incomplete-record-updates - + test-suite hls-explicit-record-fields-plugin-tests import: warnings default-language: Haskell2010 @@ -1309,7 +1309,7 @@ flag overloadedRecordDot common overloadedRecordDot if flag(overloadedRecordDot) - build-depends: hls-overloaded-record-dot-plugin + build-depends: hls-overloaded-record-dot-plugin cpp-options: -Dhls_overloaded_record_dot library hls-overloaded-record-dot-plugin @@ -1356,7 +1356,7 @@ flag floskell common floskell if flag(floskell) && (impl(ghc < 9.7) || flag(ignore-plugins-ghc-bounds)) - build-depends: hls-floskell-plugin + build-depends: hls-floskell-plugin cpp-options: -Dhls_floskell library hls-floskell-plugin @@ -1398,7 +1398,7 @@ flag fourmolu common fourmolu if flag(fourmolu) - build-depends: hls-fourmolu-plugin + build-depends: hls-fourmolu-plugin cpp-options: -Dhls_fourmolu library hls-fourmolu-plugin @@ -1451,7 +1451,7 @@ flag ormolu common ormolu if flag(ormolu) - build-depends: hls-ormolu-plugin + build-depends: hls-ormolu-plugin cpp-options: -Dhls_ormolu library hls-ormolu-plugin @@ -1504,7 +1504,7 @@ flag stylishHaskell common stylishHaskell if flag(stylishHaskell) && (impl(ghc < 9.8.0) || flag(ignore-plugins-ghc-bounds)) - build-depends: hls-stylish-haskell-plugin + build-depends: hls-stylish-haskell-plugin cpp-options: -Dhls_stylishHaskell library hls-stylish-haskell-plugin @@ -1549,7 +1549,7 @@ flag refactor common refactor if flag(refactor) - build-depends: hls-refactor-plugin + build-depends: hls-refactor-plugin cpp-options: -Dhls_refactor library hls-refactor-plugin @@ -1665,7 +1665,7 @@ flag semanticTokens common semanticTokens if flag(semanticTokens) - build-depends: hls-semantic-tokens-plugin + build-depends: hls-semantic-tokens-plugin cpp-options: -Dhls_semanticTokens library hls-semantic-tokens-plugin @@ -1679,6 +1679,7 @@ library hls-semantic-tokens-plugin Ide.Plugin.SemanticTokens.Query Ide.Plugin.SemanticTokens.SemanticConfig Ide.Plugin.SemanticTokens.Utils + Ide.Plugin.SemanticTokens.Tokenize Ide.Plugin.SemanticTokens.Internal hs-source-dirs: plugins/hls-semantic-tokens-plugin/src @@ -1688,6 +1689,7 @@ library hls-semantic-tokens-plugin , containers , extra , hiedb + , text-rope , mtl >= 2.2 , ghcide == 2.6.0.0 , hls-plugin-api == 2.6.0.0 diff --git a/hls-plugin-api/src/Ide/Plugin/Properties.hs b/hls-plugin-api/src/Ide/Plugin/Properties.hs index 3e14bda908..5c0d9a60e1 100644 --- a/hls-plugin-api/src/Ide/Plugin/Properties.hs +++ b/hls-plugin-api/src/Ide/Plugin/Properties.hs @@ -15,6 +15,7 @@ module Ide.Plugin.Properties ( PropertyType (..), ToHsType, + NotElem, MetaData (..), PropertyKey (..), SPropertyKey (..), diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index 4c22af78db..881221bb04 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -5,7 +5,9 @@ {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} +{-# 
LANGUAGE PolyKinds #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TemplateHaskell #-} @@ -21,8 +23,9 @@ import Control.Monad.Except (ExceptT, liftEither, withExceptT) import Control.Monad.Trans (lift) import Control.Monad.Trans.Except (runExceptT) -import Data.Aeson (ToJSON (toJSON)) -import qualified Data.Map as Map +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as M +import qualified Data.Set as S import Development.IDE (Action, GetDocMap (GetDocMap), GetHieAst (GetHieAst), @@ -34,7 +37,6 @@ import Development.IDE (Action, cmapWithPrio, define, fromNormalizedFilePath, hieKind, logPriority, - usePropertyAction, use_) import Development.IDE.Core.PluginUtils (runActionE, useWithStaleE) @@ -54,6 +56,7 @@ import Ide.Plugin.Error (PluginError (PluginIn import Ide.Plugin.SemanticTokens.Mappings import Ide.Plugin.SemanticTokens.Query import Ide.Plugin.SemanticTokens.SemanticConfig (mkSemanticConfigFunctions) +import Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) import Ide.Plugin.SemanticTokens.Types import Ide.Types import qualified Language.LSP.Protocol.Lens as L @@ -91,6 +94,7 @@ semanticTokensFull recorder state pid param = do -- Local names token type from 'hieAst' -- Name locations from 'hieAst' -- Visible names from 'tmrRenamed' + -- -- It then combines this information to compute the semantic tokens for the file. getSemanticTokensRule :: Recorder (WithPriority SemanticLog) -> Rules () @@ -98,30 +102,28 @@ getSemanticTokensRule recorder = define (cmapWithPrio LogShake recorder) $ \GetSemanticTokens nfp -> handleError recorder $ do (HAR {..}) <- lift $ use_ GetHieAst nfp (DKMap {getTyThingMap}, _) <- lift $ useWithStale_ GetDocMap nfp - ast <- handleMaybe (LogNoAST $ show nfp) $ getAsts hieAst Map.!? (HiePath . mkFastString . fromNormalizedFilePath) nfp + ast <- handleMaybe (LogNoAST $ show nfp) $ getAsts hieAst M.!? (HiePath . mkFastString . fromNormalizedFilePath) nfp virtualFile <- handleMaybeM LogNoVF $ getVirtualFile nfp -- get current location from the old ones - let spanNamesMap = hieAstSpanNames virtualFile ast - let names = nameSetElemsStable $ unionNameSets $ Map.elems spanNamesMap - let localSemanticMap = mkLocalNameSemanticFromAst names (hieKindFunMasksKind hieKind) refMap + let spanIdMap = M.filter (not . 
null) $ hieAstSpanIdentifiers virtualFile ast + let names = S.unions $ M.elems spanIdMap + let localSemanticMap = mkLocalIdSemanticFromAst names (hieKindFunMasksKind hieKind) refMap -- get imported name semantic map - let importedNameSemanticMap = foldr (getTypeExclude localSemanticMap getTyThingMap) emptyNameEnv names - let sMap = plusNameEnv_C (<>) importedNameSemanticMap localSemanticMap - let rangeTokenType = extractSemanticTokensFromNames sMap spanNamesMap + let importedIdSemanticMap = M.mapMaybe id + $ M.fromSet (getTypeThing getTyThingMap) (names `S.difference` M.keysSet localSemanticMap) + let sMap = M.unionWith (<>) importedIdSemanticMap localSemanticMap + let rangeTokenType = extractSemanticTokensFromNames sMap spanIdMap return $ RangeHsSemanticTokenTypes rangeTokenType where - -- ignore one already in discovered in local - getTypeExclude :: - NameEnv a -> + getTypeThing :: NameEnv TyThing -> - Name -> - NameEnv HsSemanticTokenType -> - NameEnv HsSemanticTokenType - getTypeExclude localEnv tyThingMap n nameMap - | n `elemNameEnv` localEnv = nameMap - | otherwise = - let tyThing = lookupNameEnv tyThingMap n - in maybe nameMap (extendNameEnv nameMap n) (tyThing >>= tyThingSemantic) + Identifier -> + Maybe HsSemanticTokenType + getTypeThing tyThingMap n + | (Right name) <- n = + let tyThing = lookupNameEnv tyThingMap name + in (tyThing >>= tyThingSemantic) + | otherwise = Nothing -- | Persistent rule to ensure that semantic tokens doesn't block on startup persistentGetSemanticTokensRule :: Rules () diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index 013d77a9e6..1003708b41 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -13,7 +13,7 @@ module Ide.Plugin.SemanticTokens.Mappings where import qualified Data.Array as A import Data.List.Extra (chunksOf, (!?)) -import qualified Data.Map as Map +import qualified Data.Map.Strict as Map import Data.Maybe (mapMaybe) import qualified Data.Set as Set import Data.Text (Text, unpack) @@ -45,6 +45,7 @@ toLspTokenType conf tk = case tk of TTypeFamily -> stTypeFamily conf TRecordField -> stRecordField conf TPatternSynonym -> stPatternSynonym conf + TModule -> stModule conf lspTokenReverseMap :: SemanticTokensConfig -> Map.Map SemanticTokenTypes HsSemanticTokenType lspTokenReverseMap config @@ -114,15 +115,15 @@ recoverFunMaskArray flattened = unflattened -- The recursion in 'unflattened' is crucial - it's what gives us sharing -- function indicator check. unflattened :: A.Array TypeIndex Bool - unflattened = fmap (\flatTy -> go (fmap (unflattened A.!) flatTy)) flattened + unflattened = fmap (go . 
fmap (unflattened A.!)) flattened - -- Unfold an 'HieType' whose subterms have already been unfolded + -- Unfold an 'HieType' whose sub-terms have already been unfolded go :: HieType Bool -> Bool go (HTyVarTy _name) = False go (HAppTy _f _x) = False go (HLitTy _lit) = False go (HForAllTy ((_n, _k), _af) b) = b - go (HFunTy _ _ _) = True + go (HFunTy {}) = True go (HQualTy _constraint b) = b go (HCastTy b) = b go HCoercionTy = False diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index 174048049f..847da4e61f 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -1,4 +1,6 @@ {-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} @@ -6,28 +8,28 @@ -- The query module is used to query the semantic tokens from the AST module Ide.Plugin.SemanticTokens.Query where -import Data.Either (rights) import Data.Foldable (fold) -import qualified Data.Map as M -import qualified Data.Map as Map -import Data.Maybe (fromMaybe, listToMaybe, - mapMaybe) -import qualified Data.Set as S +import qualified Data.Map.Strict as M +import Data.Maybe (listToMaybe, mapMaybe) +import Data.Set (Set) import qualified Data.Set as Set import Data.Text (Text) import Development.IDE.Core.PositionMapping (PositionMapping, toCurrentRange) import Development.IDE.GHC.Compat -import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) import Ide.Plugin.SemanticTokens.Mappings import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, - HsSemanticTokenType, - NameSemanticMap, + HsSemanticTokenType (TModule), + IdSemanticMap, + RangeIdSetMap, SemanticTokensConfig) -import Language.LSP.Protocol.Types -import Language.LSP.VFS (VirtualFile, - codePointRangeToRange) -import Prelude hiding (span) +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), + SemanticTokenAbsolute (SemanticTokenAbsolute), + SemanticTokens, + defaultSemanticTokensLegend, + makeSemanticTokens) +import Prelude hiding (length, span) --------------------------------------------------------- @@ -35,17 +37,17 @@ import Prelude hiding (span) --------------------------------------------------------- -mkLocalNameSemanticFromAst :: [Name] -> HieFunMaskKind a -> RefMap a -> NameSemanticMap -mkLocalNameSemanticFromAst names hieKind rm = mkNameEnv (mapMaybe (nameNameSemanticFromHie hieKind rm) names) +mkLocalIdSemanticFromAst :: Set Identifier -> HieFunMaskKind a -> RefMap a -> IdSemanticMap +mkLocalIdSemanticFromAst names hieKind rm = M.mapMaybe (idIdSemanticFromHie hieKind rm) $ M.fromSet id names -nameNameSemanticFromHie :: forall a. HieFunMaskKind a -> RefMap a -> Name -> Maybe (Name, HsSemanticTokenType) -nameNameSemanticFromHie hieKind rm ns = do - st <- nameSemanticFromRefMap rm ns - return (ns, st) +idIdSemanticFromHie :: forall a. 
HieFunMaskKind a -> RefMap a -> Identifier -> Maybe HsSemanticTokenType +idIdSemanticFromHie _ _ (Left _) = Just TModule +idIdSemanticFromHie hieKind rm ns = do + idSemanticFromRefMap rm ns where - nameSemanticFromRefMap :: RefMap a -> Name -> Maybe HsSemanticTokenType - nameSemanticFromRefMap rm' name' = do - spanInfos <- Map.lookup (Right name') rm' + idSemanticFromRefMap :: RefMap a -> Identifier -> Maybe HsSemanticTokenType + idSemanticFromRefMap rm' name' = do + spanInfos <- M.lookup name' rm' let typeTokenType = foldMap (typeSemantic hieKind) $ listToMaybe $ mapMaybe (identType . snd) spanInfos contextInfoTokenType <- foldMap (contextInfosMaybeTokenType . identInfo . snd) spanInfos fold [typeTokenType, Just contextInfoTokenType] @@ -53,54 +55,21 @@ nameNameSemanticFromHie hieKind rm ns = do contextInfosMaybeTokenType :: Set.Set ContextInfo -> Maybe HsSemanticTokenType contextInfosMaybeTokenType details = foldMap infoTokenType (Set.toList details) ------------------------------------ - --- * extract location from HieAST a - ------------------------------------ - --- | get only visible names from HieAST --- we care only the leaf node of the AST --- and filter out the derived and evidence names -hieAstSpanNames :: VirtualFile -> HieAST a -> M.Map Range NameSet -hieAstSpanNames vf ast = - if null (nodeChildren ast) - then getIds ast - else M.unionsWith unionNameSet $ map (hieAstSpanNames vf) (nodeChildren ast) - where - getIds ast' = fromMaybe mempty $ do - range <- codePointRangeToRange vf $ realSrcSpanToCodePointRange $ nodeSpan ast' - return $ M.singleton range (getNodeIds' ast') - getNodeIds' = - Map.foldl' combineNodeIds mempty - . Map.filterWithKey (\k _ -> k == SourceInfo) - . getSourcedNodeInfo - . sourcedNodeInfo - combineNodeIds :: NameSet -> NodeInfo a -> NameSet - ad `combineNodeIds` (NodeInfo _ _ bd) = ad `unionNameSet` xs - where - xs = mkNameSet $ rights $ M.keys $ M.filterWithKey inclusion bd - inclusion :: Identifier -> IdentifierDetails a -> Bool - inclusion a b = not $ exclusion a b - exclusion :: Identifier -> IdentifierDetails a -> Bool - exclusion idt IdentifierDetails {identInfo = infos} = case idt of - Left _ -> True - Right _ -> any isEvidenceContext (S.toList infos) ------------------------------------------------- --- * extract semantic tokens from NameSemanticMap +-- * extract semantic tokens from IdSemanticMap ------------------------------------------------- -extractSemanticTokensFromNames :: NameSemanticMap -> M.Map Range NameSet -> M.Map Range HsSemanticTokenType -extractSemanticTokensFromNames nsm = Map.mapMaybe (foldMap (lookupNameEnv nsm) . nameSetElemsStable) +extractSemanticTokensFromNames :: IdSemanticMap -> RangeIdSetMap -> M.Map Range HsSemanticTokenType +extractSemanticTokensFromNames nsm = M.mapMaybe (foldMap (`M.lookup` nsm)) rangeSemanticMapSemanticTokens :: SemanticTokensConfig -> PositionMapping -> M.Map Range HsSemanticTokenType -> Either Text SemanticTokens rangeSemanticMapSemanticTokens stc mapping = makeSemanticTokens defaultSemanticTokensLegend . mapMaybe (\(range, ty) -> flip toAbsSemanticToken ty <$> range) - . Map.toAscList + . M.toAscList . 
M.mapKeys (toCurrentRange mapping) where toAbsSemanticToken :: Range -> HsSemanticTokenType -> SemanticTokenAbsolute diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index 7afcc879da..b3d8aeb7ad 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -1,39 +1,47 @@ -{-# LANGUAGE GADTs #-} -{-# LANGUAGE OverloadedLabels #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE TemplateHaskellQuotes #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeOperators #-} module Ide.Plugin.SemanticTokens.SemanticConfig where import Data.Char (toLower) import Data.Default (def) import qualified Data.Set as S +import Data.Text (Text) import qualified Data.Text as T -import Development.IDE (usePropertyAction) -import Ide.Plugin.Properties (defineEnumProperty, +import Development.IDE (Action, usePropertyAction) +import GHC.TypeLits (KnownSymbol) +import Ide.Plugin.Properties (KeyNameProxy, NotElem, + Properties, + PropertyKey (type PropertyKey), + PropertyType (type TEnum), + defineEnumProperty, emptyProperties) import Ide.Plugin.SemanticTokens.Types +import Ide.Types (PluginId) import Language.Haskell.TH import Language.LSP.Protocol.Types (LspEnum (..), SemanticTokenTypes) - - docName :: HsSemanticTokenType -> T.Text docName tt = case tt of - TVariable -> "variables" - TFunction -> "functions" - TDataConstructor -> "data constructors" - TTypeVariable -> "type variables" - TClassMethod -> "typeclass methods" - TPatternSynonym -> "pattern synonyms" - TTypeConstructor -> "type constructors" - TClass -> "typeclasses" - TTypeSynonym -> "type synonyms" - TTypeFamily -> "type families" - TRecordField -> "record fields" + TVariable -> "variables" + TFunction -> "functions" + TDataConstructor -> "data constructors" + TTypeVariable -> "type variables" + TClassMethod -> "typeclass methods" + TPatternSynonym -> "pattern synonyms" + TTypeConstructor -> "type constructors" + TClass -> "typeclasses" + TTypeSynonym -> "type synonyms" + TTypeFamily -> "type families" + TRecordField -> "record fields" + TModule -> "modules" toConfigName :: String -> String toConfigName = ("st" <>) @@ -52,12 +60,17 @@ allHsTokenTypes :: [HsSemanticTokenType] allHsTokenTypes = enumFrom minBound lowerFirst :: String -> String -lowerFirst [] = [] -lowerFirst (x:xs) = toLower x : xs +lowerFirst [] = [] +lowerFirst (x : xs) = toLower x : xs allHsTokenNameStrings :: [String] allHsTokenNameStrings = map (drop 1 . show) allHsTokenTypes +defineSemanticProperty :: + (NotElem s r, KnownSymbol s) => + (KeyNameProxy s, Text, SemanticTokenTypes) -> + Properties r -> + Properties ('PropertyKey s (TEnum SemanticTokenTypes) : r) defineSemanticProperty (lb, tokenType, st) = defineEnumProperty lb @@ -79,7 +92,8 @@ mkSemanticConfigFunctions = do let pid = mkName "pid" let semanticConfigPropertiesName = mkName "semanticConfigProperties" let useSemanticConfigActionName = mkName "useSemanticConfigAction" - let allLabels = map (LabelE . (<> "Token"). lowerFirst) allHsTokenNameStrings + let allLabelStrs = map ((<> "Token") . 
lowerFirst) allHsTokenNameStrings + allLabels = map (LabelE . (<> "Token") . lowerFirst) allHsTokenNameStrings allFieldsNames = map (mkName . toConfigName) allHsTokenNameStrings allVariableNames = map (mkName . ("_variable_" <>) . toConfigName) allHsTokenNameStrings -- <- useSemanticConfigAction label pid config @@ -94,6 +108,7 @@ mkSemanticConfigFunctions = do -- get and then update record bb = DoE Nothing $ getProperties ++ [NoBindS $ AppE (VarE 'return) recordUpdate] let useSemanticConfigAction = FunD useSemanticConfigActionName [Clause [VarP pid] (NormalB bb) []] + let useSemanticConfigActionSig = SigD useSemanticConfigActionName (ArrowT `AppT` ConT ''PluginId `AppT` (ConT ''Action `AppT` ConT ''SemanticTokensConfig)) -- SemanticConfigProperties nameAndDescList <- @@ -105,5 +120,16 @@ mkSemanticConfigFunctions = do ) $ zip allLabels allHsTokenTypes let body = foldr (AppE . AppE (VarE 'defineSemanticProperty)) (VarE 'emptyProperties) nameAndDescList + let propertiesType = + foldr + ( \la -> + AppT + ( PromotedConsT + `AppT` (AppT (ConT 'PropertyKey) (LitT (StrTyLit la)) `AppT` AppT (ConT 'TEnum) (ConT ''SemanticTokenTypes)) + ) + ) + PromotedNilT + allLabelStrs let semanticConfigProperties = FunD semanticConfigPropertiesName [Clause [] (NormalB body) []] - return [semanticConfigProperties, useSemanticConfigAction] + let semanticConfigPropertiesSig = SigD semanticConfigPropertiesName (AppT (ConT ''Properties) propertiesType) + return [semanticConfigPropertiesSig, semanticConfigProperties, useSemanticConfigActionSig, useSemanticConfigAction] diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs new file mode 100644 index 0000000000..d4c3882884 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -0,0 +1,197 @@ +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} + +module Ide.Plugin.SemanticTokens.Tokenize (hieAstSpanIdentifiers) where + +import Control.Lens (Identity (runIdentity)) +import Control.Monad (forM_, guard) +import Control.Monad.State.Strict (MonadState (get), + MonadTrans (lift), + execStateT, modify, put) +import Control.Monad.Trans.State.Strict (StateT) +import Data.Char (isAlpha, isAlphaNum) +import qualified Data.Map.Strict as M +import qualified Data.Map.Strict as Map +import qualified Data.Set as S +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.Rope as Char +import Data.Text.Utf16.Rope (toText) +import qualified Data.Text.Utf16.Rope as Utf16 +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import Ide.Plugin.SemanticTokens.Types (RangeIdSetMap) +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), UInt, mkRange) +import Language.LSP.VFS hiding (line) +import Prelude hiding (length, span) + +type Tokenizer m a = StateT PTokenState m a + + +data PTokenState = PTokenState + { rangeIdSetMap :: !RangeIdSetMap, + rope :: !Rope, -- the remains of rope we are working on + cursor :: !Char.Position, -- the cursor position of the current rope to the start of the original file in code point position + columnsInUtf16 :: !UInt -- the column of the start of the current rope in utf16 + } + +runTokenizer :: (Monad m) => Tokenizer m a -> 
PTokenState -> m RangeIdSetMap +runTokenizer p st = rangeIdSetMap <$> execStateT p st + +data SplitResult + = NoSplit (Text, Range) -- does not need to split, token text, token range + | Split (Text, Range, Range) -- token text, prefix range(module range), token range + deriving (Show) + + +mkPTokenState :: VirtualFile -> PTokenState +mkPTokenState vf = + PTokenState + { rangeIdSetMap = mempty, + rope = Rope.fromText $ toText vf._file_text, + cursor = Char.Position 0 0, + columnsInUtf16 = 0 + } + +addRangeIdSetMap :: (Monad m) => Range -> Identifier -> Tokenizer m () +addRangeIdSetMap r i = modify $ \s -> s {rangeIdSetMap = Map.insertWith (<>) r (S.singleton i) $ rangeIdSetMap s} + +-- lift a Tokenizer Maybe () to Tokenizer m (), +-- if the Maybe is Nothing, do nothing, recover the state +-- if the Maybe is Just (), do the action, and keep the state +liftMaybeM :: (Monad m) => Tokenizer Maybe () -> Tokenizer m () +liftMaybeM p = do + st <- get + forM_ (execStateT p st) put + +hieAstSpanIdentifiers :: VirtualFile -> HieAST a -> RangeIdSetMap +hieAstSpanIdentifiers vf ast = runIdentity $ runTokenizer (foldAst ast) (mkPTokenState vf) + +-- | foldAst +-- visit every leaf node in the ast in depth first order +foldAst :: (Monad m) => HieAST t -> Tokenizer m () +foldAst ast = if null (nodeChildren ast) + then liftMaybeM (visitLeafIds ast) + else mapM_ foldAst $ nodeChildren ast + +visitLeafIds :: HieAST t -> Tokenizer Maybe () +visitLeafIds leaf = liftMaybeM $ do + let span = nodeSpan leaf + (ran, token) <- focusTokenAt leaf + -- if `focusTokenAt` succeed, we can safely assume we have shift the cursor correctly + -- we do not need to recover the cursor state, even if the following computation failed + liftMaybeM $ do + -- only handle the leaf node with single column token + guard $ srcSpanStartLine span == srcSpanEndLine span + splitResult <- lift $ splitRangeByText token ran + mapM_ (combineNodeIds ran splitResult) $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf + where + combineNodeIds :: (Monad m) => Range -> SplitResult -> NodeInfo a -> Tokenizer m () + combineNodeIds ran ranSplit (NodeInfo _ _ bd) = mapM_ (getIdentifier ran ranSplit) (M.keys bd) + getIdentifier :: (Monad m) => Range -> SplitResult -> Identifier -> Tokenizer m () + getIdentifier ran ranSplit idt = liftMaybeM $ do + case idt of + Left _moduleName -> addRangeIdSetMap ran idt + Right name -> do + occStr <- lift $ T.pack <$> case (occNameString . 
nameOccName) name of + -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} + '$' : 's' : 'e' : 'l' : ':' : xs -> Just $ takeWhile (/= ':') xs + -- other generated names that should not be visible + '$' : c : _ | isAlphaNum c -> Nothing + c : ':' : _ | isAlphaNum c -> Nothing + ns -> Just ns + case ranSplit of + (NoSplit (tk, r)) -> do + guard $ tk == occStr + addRangeIdSetMap r idt + (Split (tk, r1, r2)) -> do + guard $ tk == occStr + addRangeIdSetMap r1 (Left $ mkModuleName "") + addRangeIdSetMap r2 idt + +focusTokenAt :: + -- | leaf node we want to focus on + HieAST a -> + -- | (token, remains) + Tokenizer Maybe (Range, Text) +focusTokenAt leaf = do + PTokenState{cursor, rope, columnsInUtf16} <- get + let span = nodeSpan leaf + let (tokenStartPos, tokenEndPos) = srcSpanCharPositions span + -- tokenStartOff: the offset position of the token start position to the cursor position + tokenStartOff <- lift $ tokenStartPos `sub` cursor + -- tokenOff: the offset position of the token end position to the token start position + tokenOff <- lift $ tokenEndPos `sub` tokenStartPos + (gap, tokenStartRope) <- lift $ charSplitAtPositionMaybe tokenStartOff rope + (token, remains) <- lift $ charSplitAtPositionMaybe tokenOff tokenStartRope + -- ncs: token start column in utf16 + let ncs = newColumn columnsInUtf16 gap + -- nce: token end column in utf16 + let nce = newColumn ncs token + -- compute the new range for utf16, tuning the columns is enough + let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span + modify $ \s -> s {columnsInUtf16 = nce, rope = remains, cursor = tokenEndPos} + return (ran, token) + where + srcSpanCharPositions :: RealSrcSpan -> (Char.Position, Char.Position) + srcSpanCharPositions real = + ( realSrcLocRopePosition $ realSrcSpanStart real, + realSrcLocRopePosition $ realSrcSpanEnd real + ) + charSplitAtPositionMaybe :: Char.Position -> Rope -> Maybe (Text, Rope) + charSplitAtPositionMaybe tokenOff rpe = do + let (prefix, suffix) = Rope.charSplitAtPosition tokenOff rpe + guard $ Rope.charLengthAsPosition prefix == tokenOff + return (Rope.toText prefix, suffix) + sub :: Char.Position -> Char.Position -> Maybe Char.Position + sub (Char.Position l1 c1) (Char.Position l2 c2) + | l1 == l2 && c1 > c2 = Just $ Char.Position 0 (c1 - c2) + | l1 > l2 = Just $ Char.Position (l1 - l2) c1 + | otherwise = Nothing + realSrcLocRopePosition :: RealSrcLoc -> Char.Position + realSrcLocRopePosition real = Char.Position (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) + -- | newColumn + -- rope do not treat single \n in our favor + -- for example, the row length of "123\n" and "123" are both 1 + -- we are forced to use text to compute new column + newColumn :: UInt -> Text -> UInt + newColumn n rp = case T.breakOnEnd "\n" rp of + ("", nEnd) -> n + utf16Length nEnd + (_, nEnd) -> utf16Length nEnd + codePointRangeToRangeWith :: UInt -> UInt -> CodePointRange -> Range + codePointRangeToRangeWith newStartCol newEndCol (CodePointRange (CodePointPosition startLine _) (CodePointPosition endLine _)) = + Range (Position startLine newStartCol) (Position endLine newEndCol) + +-- | splitRangeByText +-- split a qualified identifier into module name and identifier and/or strip the (), `` +-- for `ModuleA.b`, break it into `ModuleA.` and `b` +-- for `(b)`, strip `()`, and get `b` +-- for `(ModuleA.b)`, strip `()` and break it into `ModuleA.` and `b` +splitRangeByText :: Text -> Range -> Maybe SplitResult +splitRangeByText tk ran = do + let (ran', 
tk') = case T.uncons tk of + Just ('(', xs) -> (subOneRange ran, T.takeWhile (/= ')') xs) + Just ('`', xs) -> (subOneRange ran, T.takeWhile (/= '`') xs) + _ -> (ran, tk) + let (prefix, tk'') = T.breakOnEnd "." tk' + splitRange tk'' (utf16PositionPosition $ Rope.utf16LengthAsPosition $ Rope.fromText prefix) ran' + where + splitRange :: Text -> Position -> Range -> Maybe SplitResult + splitRange tx (Position l c) r@(Range (Position l1 c1) (Position l2 c2)) + | l1 + l > l2 || (l1 + l == l2 && c > c2) = Nothing -- out of range + | l==0 && c==0 = Just $ NoSplit (tx, r) + | otherwise = let c' = if l <= 0 then c1+c else c + in Just $ Split (tx, mkRange l1 c1 (l1 + l) c', mkRange (l1 + l) c' l2 c2) + subOneRange :: Range -> Range + subOneRange (Range (Position l1 c1) (Position l2 c2)) = Range (Position l1 (c1 + 1)) (Position l2 (c2 - 1)) + utf16PositionPosition :: Utf16.Position -> Position + utf16PositionPosition (Utf16.Position l c) = Position (fromIntegral l) (fromIntegral c) + + +utf16Length :: Integral i => Text -> i +utf16Length = fromIntegral . Utf16.length . Utf16.fromText diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index 5be028ace8..214069b1ed 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -17,7 +17,7 @@ import Control.DeepSeq (NFData (rnf), rwhnf) import qualified Data.Array as A import Data.Default (Default (def)) import Data.Generics (Typeable) -import qualified Data.Map as M +import qualified Data.Map.Strict as M import Development.IDE (Pretty (pretty), RuleResult) import qualified Development.IDE.Core.Shake as Shake import Development.IDE.GHC.Compat hiding (loc) @@ -25,6 +25,8 @@ import Development.IDE.Graph.Classes (Hashable) import GHC.Generics (Generic) import Language.LSP.Protocol.Types -- import template haskell +import Data.Map.Strict (Map) +import Data.Set (Set) import Language.Haskell.TH.Syntax (Lift) @@ -43,6 +45,7 @@ data HsSemanticTokenType | TTypeSynonym -- Type synonym | TTypeFamily -- type family | TRecordField -- from match bind + | TModule -- module name deriving (Eq, Ord, Show, Enum, Bounded, Generic, Lift) @@ -65,6 +68,7 @@ instance Default SemanticTokensConfig where , stTypeSynonym = SemanticTokenTypes_Type , stTypeFamily = SemanticTokenTypes_Interface , stRecordField = SemanticTokenTypes_Property + , stModule = SemanticTokenTypes_Namespace } -- | SemanticTokensConfig_ is a configuration for the semantic tokens plugin. -- it contains map between the hs semantic token type and default token type. 
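
For reference, the qualified-name handling that splitRangeByText (above) performs can be pictured with a small standalone sketch, using plain Data.Text only and none of the Rope or Range bookkeeping; splitQualified is an illustrative helper, not part of the plugin:

import           Data.Text (Text)
import qualified Data.Text as T

-- Split an occurrence as it appears in source into an optional module prefix
-- and the bare identifier, mirroring the idea behind splitRangeByText:
--   "ModuleA.b"   -> (Just "ModuleA.", "b")
--   "(Prelude.+)" -> (Just "Prelude.", "+")   -- parentheses stripped first
--   "`elem`"      -> (Nothing, "elem")        -- backticks stripped first
splitQualified :: Text -> (Maybe Text, Text)
splitQualified occ =
  let bare = case T.uncons occ of
        Just ('(', rest) -> T.takeWhile (/= ')') rest
        Just ('`', rest) -> T.takeWhile (/= '`') rest
        _                -> occ
      (prefix, ident) = T.breakOnEnd "." bare
   in (if T.null prefix then Nothing else Just prefix, ident)

In the plugin the prefix half is recorded as a TModule token and the remainder keeps the identifier's own semantic type, which is what produces pairs such as 7:8-18 TModule "Data.List." followed by 7:18-22 TClassMethod "elem" in TQualifiedName.expected.
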
@@ -80,6 +84,7 @@ data SemanticTokensConfig = STC , stTypeSynonym :: !SemanticTokenTypes , stTypeFamily :: !SemanticTokenTypes , stRecordField :: !SemanticTokenTypes + , stModule :: !SemanticTokenTypes } deriving (Generic, Show) @@ -108,7 +113,9 @@ data Loc = Loc instance Show Loc where show (Loc line startChar len) = show line <> ":" <> show startChar <> "-" <> show (startChar + len) -type NameSemanticMap = NameEnv HsSemanticTokenType +type RangeIdSetMap = Map Range (Set Identifier) + +type IdSemanticMap = Map Identifier HsSemanticTokenType data GetSemanticTokens = GetSemanticTokens deriving (Eq, Show, Typeable, Generic) @@ -117,14 +124,14 @@ instance Hashable GetSemanticTokens instance NFData GetSemanticTokens -data RangeHsSemanticTokenTypes = RangeHsSemanticTokenTypes {rangeSemanticMap :: M.Map Range HsSemanticTokenType} +newtype RangeHsSemanticTokenTypes = RangeHsSemanticTokenTypes {rangeSemanticMap :: M.Map Range HsSemanticTokenType} instance NFData RangeHsSemanticTokenTypes where rnf :: RangeHsSemanticTokenTypes -> () rnf (RangeHsSemanticTokenTypes a) = rwhnf a instance Show RangeHsSemanticTokenTypes where - show = const "GlobalNameMap" + show = const "RangeHsSemanticTokenTypes" type instance RuleResult GetSemanticTokens = RangeHsSemanticTokenTypes diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index fb29c14729..7b22284850 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -1,17 +1,19 @@ -{-# LANGUAGE StandaloneDeriving #-} -{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternGuards #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -Wno-orphans #-} module Ide.Plugin.SemanticTokens.Utils where -import Data.ByteString (ByteString) -import Data.ByteString.Char8 (unpack) -import qualified Data.Map as Map -import Development.IDE (Position (..), Range (..)) +import Data.ByteString (ByteString) +import Data.ByteString.Char8 (unpack) +import qualified Data.Map.Strict as Map +import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat -import Ide.Plugin.SemanticTokens.Types -import Prelude hiding (span) +import Prelude hiding (length, span) deriving instance Show DeclType deriving instance Show BindType @@ -83,14 +85,6 @@ nameTypesString xs = unlines | (span, name) <- xs] -nameMapString :: NameSemanticMap -> [Name] -> String -nameMapString nsm names = unlines - [ showSDocUnsafe (ppr name) ++ " " ++ show tokenType - | name <- names - , let tokenType = lookupNameEnv nsm name - ] - - showSpan :: RealSrcSpan -> String showSpan x = show (srcSpanStartLine x) <> ":" <> show (srcSpanStartCol x) <> "-" <> show (srcSpanEndCol x) @@ -99,3 +93,9 @@ showSpan x = show (srcSpanStartLine x) <> ":" <> show (srcSpanStartCol x) <> "-" mkRange :: (Integral a1, Integral a2) => a1 -> a2 -> a2 -> Range mkRange startLine startCol len = Range (Position (fromIntegral startLine) (fromIntegral startCol)) (Position (fromIntegral startLine) (fromIntegral $ startCol + len)) + + +rangeShortStr :: Range -> String +rangeShortStr (Range (Position startLine startColumn) (Position endLine endColumn)) = + show startLine <> ":" <> show startColumn <> "-" <> show endLine <> ":" <> show endColumn + diff --git a/plugins/hls-semantic-tokens-plugin/test/Main.hs 
b/plugins/hls-semantic-tokens-plugin/test/Main.hs index 5174939646..25744672b2 100644 --- a/plugins/hls-semantic-tokens-plugin/test/Main.hs +++ b/plugins/hls-semantic-tokens-plugin/test/Main.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE ExplicitNamespaces #-} {-# LANGUAGE FlexibleInstances #-} @@ -11,7 +12,7 @@ import Data.Aeson (KeyValue (..), Value (..), object) import Data.Default import Data.Functor (void) -import Data.Map as Map hiding (map) +import Data.Map.Strict as Map hiding (map) import Data.String (fromString) import Data.Text hiding (length, map, unlines) @@ -164,37 +165,43 @@ semanticTokensTests = testGroup "other semantic Token test" [ testCase "module import test" $ do - let file1 = "TModuleA.hs" + let file1 = "TModula𐐀bA.hs" let file2 = "TModuleB.hs" - let expect = - [ SemanticTokenOriginal TVariable (Loc 5 1 2) "go", - SemanticTokenOriginal TDataConstructor (Loc 5 6 4) "Game" - ] Test.Hls.runSessionWithServerInTmpDir def semanticTokensPlugin (mkFs $ FS.directProjectMulti [file1, file2]) $ do doc1 <- openDoc file1 "haskell" doc2 <- openDoc file2 "haskell" - _check1 <- waitForAction "TypeCheck" doc1 + check1 <- waitForAction "TypeCheck" doc1 check2 <- waitForAction "TypeCheck" doc2 + case check1 of + Right (WaitForIdeRuleResult _) -> return () + Left _ -> error "TypeCheck1 failed" case check2 of Right (WaitForIdeRuleResult _) -> return () Left _ -> error "TypeCheck2 failed" - textContent2 <- documentContents doc2 - let vfs = VirtualFile 0 0 (Rope.fromText textContent2) - res2 <- Test.getSemanticTokens doc2 - case res2 ^? Language.LSP.Protocol.Types._L of - Just tokens -> do - either - (error . show) - (\xs -> liftIO $ xs @?= expect) - $ recoverSemanticTokens def vfs tokens - return () - _ -> error "No tokens found" - liftIO $ 1 @?= 1, + + + result <- docSemanticTokensString def doc2 + let expect = unlines [ + "3:8-18 TModule \"TModula\\66560bA\"" + , "4:18-28 TModule \"TModula\\66560bA\"" + , "6:1-3 TVariable \"go\"" + , "6:6-10 TDataConstructor \"Game\"" + , "8:1-5 TVariable \"a\\66560bb\"" + , "8:8-19 TModule \"TModula\\66560bA.\"" + , "8:19-22 TRecordField \"a\\66560b\"" + , "8:23-25 TVariable \"go\"" + ] + liftIO $ result @?= expect, goldenWithSemanticTokensWithDefaultConfig "mixed constancy test result generated from one ghc version" "T1", goldenWithSemanticTokensWithDefaultConfig "pattern bind" "TPatternSynonym", goldenWithSemanticTokensWithDefaultConfig "type family" "TTypefamily", - goldenWithSemanticTokensWithDefaultConfig "TUnicodeSyntax" "TUnicodeSyntax" + goldenWithSemanticTokensWithDefaultConfig "TUnicodeSyntax" "TUnicodeSyntax", + goldenWithSemanticTokensWithDefaultConfig "TQualifiedName" "TQualifiedName" + -- it is not supported in ghc92 +#if MIN_VERSION_ghc(9,4,0) + , goldenWithSemanticTokensWithDefaultConfig "TDoc" "TDoc" +#endif ] semanticTokensDataTypeTests :: TestTree diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected index 062d4749d3..5377bb2728 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/T1.expected @@ -22,7 +22,8 @@ 21:7-10 TPatternSynonym "One" 23:6-9 TTypeConstructor "Doo" 23:12-15 TDataConstructor "Doo" -23:16-27 TTypeConstructor "Prelude.Int" +23:16-24 TModule "Prelude." 
+23:24-27 TTypeConstructor "Int" 24:6-10 TTypeSynonym "Bar1" 24:13-16 TTypeConstructor "Int" 25:6-10 TTypeSynonym "Bar2" @@ -72,7 +73,8 @@ 41:1-3 TFunction "go" 41:6-9 TRecordField "foo" 42:1-4 TFunction "add" -42:7-18 TClassMethod "(Prelude.+)" +42:8-16 TModule "Prelude." +42:16-17 TClassMethod "+" 47:1-5 TVariable "main" 47:9-11 TTypeConstructor "IO" 48:1-5 TVariable "main" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected index 9c2118cd3a..2c2cd492a0 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TDatatypeImported.expected @@ -1,3 +1,4 @@ +3:8-17 TModule "System.IO" 5:1-3 TVariable "go" 5:7-9 TTypeConstructor "IO" 6:1-3 TVariable "go" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.expected new file mode 100644 index 0000000000..405308c3c8 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.expected @@ -0,0 +1,5 @@ +4:5-10 TVariable "hello" +5:1-6 TVariable "hello" +5:10-13 TTypeConstructor "Int" +6:1-6 TVariable "hello" +6:9-15 TClassMethod "length" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.hs new file mode 100644 index 0000000000..dc5801b0e6 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TDoc.hs @@ -0,0 +1,9 @@ +module TDoc where + +-- | +-- `hello` +hello :: Int +hello = length "Hello, Haskell!" + + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodBind.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodBind.expected index a1392ff1d9..9468da2fc0 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodBind.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodBind.expected @@ -3,5 +3,5 @@ 4:16-19 TTypeConstructor "Int" 5:10-12 TClass "Eq" 5:13-16 TTypeConstructor "Foo" -6:5-9 TClassMethod "(==)" +6:6-8 TClassMethod "==" 6:12-21 TVariable "undefined" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodUse.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodUse.expected index 36e41ff096..e55735f77a 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodUse.expected +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TInstanceClassMethodUse.expected @@ -1,2 +1,2 @@ 4:1-3 TFunction "go" -4:9-13 TClassMethod "(==)" +4:10-12 TClassMethod "==" diff --git "a/plugins/hls-semantic-tokens-plugin/test/testdata/TModula\360\220\220\200bA.hs" "b/plugins/hls-semantic-tokens-plugin/test/testdata/TModula\360\220\220\200bA.hs" new file mode 100644 index 0000000000..f111eb396b --- /dev/null +++ "b/plugins/hls-semantic-tokens-plugin/test/testdata/TModula\360\220\220\200bA.hs" @@ -0,0 +1,5 @@ +module TModula𐐀bA where + +data Game = Game {a𐐀b :: Int} + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleA.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleA.hs deleted file mode 100644 index 7d2c2bb034..0000000000 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleA.hs +++ /dev/null @@ -1,3 +0,0 @@ -module TModuleA where - -data Game = Game Int diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleB.hs 
b/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleB.hs index 15ae4a7c44..f90f0484b0 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleB.hs +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TModuleB.hs @@ -1,5 +1,8 @@ module TModuleB where -import TModuleA +import TModula𐐀bA +import qualified TModula𐐀bA go = Game 1 + +a𐐀bb = TModula𐐀bA.a𐐀b go diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected new file mode 100644 index 0000000000..cdbe36bc46 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.expected @@ -0,0 +1,12 @@ +3:18-27 TModule "Data.List" +6:1-2 TVariable "a" +6:5-13 TModule "Prelude." +6:13-22 TVariable "undefined" +7:1-2 TVariable "b" +7:8-18 TModule "Data.List." +7:18-22 TClassMethod "elem" +8:1-2 TVariable "c" +8:6-14 TModule "Prelude." +8:14-15 TClassMethod "+" +9:1-2 TVariable "d" +9:6-7 TClassMethod "+" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs new file mode 100644 index 0000000000..5dbdcc1d52 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TQualifiedName.hs @@ -0,0 +1,9 @@ +module TQualifiedName where + +import qualified Data.List + + +a = Prelude.undefined +b = 1 `Data.List.elem` [1, 2] +c = (Prelude.+) 1 1 +d = (+) 1 1 diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/TRecordDuplicateRecordFields.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/TRecordDuplicateRecordFields.hs index 7258b5fc27..395a1d3731 100644 --- a/plugins/hls-semantic-tokens-plugin/test/testdata/TRecordDuplicateRecordFields.hs +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/TRecordDuplicateRecordFields.hs @@ -2,4 +2,4 @@ module TRecordDuplicateRecordFields where -data Foo = Foo { boo :: !String } \ No newline at end of file +data Foo = Foo { boo :: !String } diff --git a/test/testdata/schema/ghc92/default-config.golden.json b/test/testdata/schema/ghc92/default-config.golden.json index e55282483d..78ee03b5d2 100644 --- a/test/testdata/schema/ghc92/default-config.golden.json +++ b/test/testdata/schema/ghc92/default-config.golden.json @@ -122,6 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", + "moduleToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json b/test/testdata/schema/ghc92/vscode-extension-schema.golden.json index 844079ff9b..fcff330b84 100644 --- a/test/testdata/schema/ghc92/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc92/vscode-extension-schema.golden.json @@ -485,6 +485,62 @@ "scope": "resource", "type": "string" }, + "haskell.plugin.semanticTokens.config.moduleToken": { + "default": "namespace", + "description": "LSP semantic token type to use for modules", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: 
interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, "haskell.plugin.semanticTokens.config.patternSynonymToken": { "default": "macro", "description": "LSP semantic token type to use for pattern synonyms", diff --git a/test/testdata/schema/ghc94/default-config.golden.json b/test/testdata/schema/ghc94/default-config.golden.json index e792c5fe8b..6bd1d4a642 100644 --- a/test/testdata/schema/ghc94/default-config.golden.json +++ b/test/testdata/schema/ghc94/default-config.golden.json @@ -122,6 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", + "moduleToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json index fe3b42bfdf..73ed5b0855 100644 --- a/test/testdata/schema/ghc94/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc94/vscode-extension-schema.golden.json @@ -485,6 +485,62 @@ "scope": "resource", "type": "string" }, + "haskell.plugin.semanticTokens.config.moduleToken": { + "default": "namespace", + "description": "LSP semantic token type to use for modules", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, "haskell.plugin.semanticTokens.config.patternSynonymToken": { "default": "macro", "description": "LSP semantic token type to use for pattern synonyms", diff --git a/test/testdata/schema/ghc96/default-config.golden.json b/test/testdata/schema/ghc96/default-config.golden.json index e792c5fe8b..6bd1d4a642 100644 --- 
a/test/testdata/schema/ghc96/default-config.golden.json +++ b/test/testdata/schema/ghc96/default-config.golden.json @@ -122,6 +122,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", + "moduleToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json index fe3b42bfdf..73ed5b0855 100644 --- a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json @@ -485,6 +485,62 @@ "scope": "resource", "type": "string" }, + "haskell.plugin.semanticTokens.config.moduleToken": { + "default": "namespace", + "description": "LSP semantic token type to use for modules", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, "haskell.plugin.semanticTokens.config.patternSynonymToken": { "default": "macro", "description": "LSP semantic token type to use for pattern synonyms", diff --git a/test/testdata/schema/ghc98/default-config.golden.json b/test/testdata/schema/ghc98/default-config.golden.json index b42d8f4e51..3a1db12be3 100644 --- a/test/testdata/schema/ghc98/default-config.golden.json +++ b/test/testdata/schema/ghc98/default-config.golden.json @@ -115,6 +115,7 @@ "classToken": "class", "dataConstructorToken": "enumMember", "functionToken": "function", + "moduleToken": "namespace", "patternSynonymToken": "macro", "recordFieldToken": "property", "typeConstructorToken": "enum", diff --git a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json index 861b8a37e0..d79f94383b 100644 --- a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json @@ -467,6 +467,62 @@ "scope": "resource", "type": "string" }, + "haskell.plugin.semanticTokens.config.moduleToken": { + "default": "namespace", + "description": "LSP semantic token type to use for modules", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + 
"keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, "haskell.plugin.semanticTokens.config.patternSynonymToken": { "default": "macro", "description": "LSP semantic token type to use for pattern synonyms",