summaryrefslogtreecommitdiff
path: root/script-beta/core/semantic-tokens.lua
blob: 3c134105764d92db4d44bdbe424ae04c7deee699 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
local files          = require 'files'
local guide          = require 'parser.guide'
local await          = require 'await'
local TokenTypes     = require 'define.TokenTypes'
local TokenModifiers = require 'define.TokenModifiers'
local vm             = require 'vm'

-- Dispatch table: maps an AST node type to a handler that appends
-- semantic-token records onto `results`.
local Care = {}

--- Assignment to a global: highlight the name as a deprecated namespace
--- (writing globals is discouraged in this codebase).
Care['setglobal'] = function (source, results)
    local token = {
        start      = source.start,
        finish     = source.finish,
        type       = TokenTypes.namespace,
        -- NOTE: 'modifieres' [sic] is the field name buildTokens reads;
        -- keep the spelling consistent across all handlers.
        modifieres = TokenModifiers.deprecated,
    }
    results[#results+1] = token
end
--- Read of a global: known library globals are marked static, unknown
--- globals are marked deprecated. `_G` itself is never highlighted.
Care['getglobal'] = function (source, results)
    local lib = vm.getLibrary(source, 'simple')
    -- `_G` resolves to a library but gets no token of its own.
    if lib and source[1] == '_G' then
        return
    end
    local modifier
    if lib then
        modifier = TokenModifiers.static
    else
        modifier = TokenModifiers.deprecated
    end
    results[#results+1] = {
        start      = source.start,
        finish     = source.finish,
        type       = TokenTypes.namespace,
        -- 'modifieres' [sic]: field name expected by buildTokens.
        modifieres = modifier,
    }
end
--- Field key in a table constructor (`{ key = ... }`): highlight the key
--- as a property declaration.
Care['tablefield'] = function (source, results)
    local field = source.field
    -- FIX: on incomplete/broken syntax trees the field node may be absent;
    -- indexing nil here crashed the whole semantic-token request.
    if not field then
        return
    end
    results[#results+1] = {
        start      = field.start,
        finish     = field.finish,
        type       = TokenTypes.property,
        -- 'modifieres' [sic]: field name expected by buildTokens.
        modifieres = TokenModifiers.declaration,
    }
end
--- Read of a local variable. Currently only one case is highlighted:
--- locals declared as function parameters.
Care['getlocal'] = function (source, results)
    local loc = source.node
    local parent = loc.parent
    -- Guard: only function parameters get a token (for now).
    if not (parent and parent.type == 'funcargs') then
        return
    end
    results[#results+1] = {
        start      = source.start,
        finish     = source.finish,
        type       = TokenTypes.parameter,
        -- 'modifieres' [sic]: field name expected by buildTokens.
        modifieres = TokenModifiers.declaration,
    }
end
-- Writes to a local use exactly the same highlighting rules as reads.
Care['setlocal'] = Care['getlocal']

--- Flatten sorted token records into the LSP 3.16 semantic-token wire
--- format: a flat integer array of 5 values per token, with line and
--- start-character encoded as deltas from the previous token.
-- @param results sorted array of {start, finish, type, modifieres} records
-- @param lines   line index used by guide.positionOf to map offsets
-- @return flat integer array (5 entries per token)
local function buildTokens(results, lines)
    local tokens = {}
    local prevLine, prevChar = 0, 0
    local n = 0
    for _, source in ipairs(results) do
        local row, col  = guide.positionOf(lines, source.start)
        -- guide positions are 1-based; LSP wants 0-based.
        local curLine   = row - 1
        local curChar   = col - 1
        local deltaLine = curLine - prevLine
        -- The start-character delta is relative to the previous token only
        -- when both sit on the same line; otherwise it is absolute.
        local deltaChar = curChar
        if deltaLine == 0 then
            deltaChar = curChar - prevChar
        end
        prevLine, prevChar = curLine, curChar
        -- see https://microsoft.github.io/language-server-protocol/specifications/specification-3-16/#textDocument_semanticTokens
        tokens[n + 1] = deltaLine
        tokens[n + 2] = deltaChar
        tokens[n + 3] = source.finish - source.start + 1 -- token length
        tokens[n + 4] = source.type
        tokens[n + 5] = source.modifieres or 0           -- 'modifieres' [sic]
        n = n + 5
    end
    return tokens
end

--- Module entry point: compute LSP semantic tokens for the byte range
--- [start, finish] of the document `uri`.
-- @param uri    document identifier
-- @param start  first byte offset of the requested range
-- @param finish last byte offset of the requested range
-- @return flat LSP token integer array, or nil when no AST is available
return function (uri, start, finish)
    local ast = files.getAst(uri)
    if not ast then
        return nil
    end
    -- FIX: fetch the line index only after the AST check, so files with no
    -- AST no longer pay for a useless getLines lookup.
    local lines = files.getLines(uri)

    local results = {}
    local count = 0
    guide.eachSource(ast.ast, function (source)
        local method = Care[source.type]
        if not method then
            return
        end
        -- Skip nodes entirely outside the requested range.
        if source.start > finish or source.finish < start then
            return
        end
        method(source, results)
        count = count + 1
        -- Yield periodically so a huge file does not block the server.
        if count % 100 == 0 then
            await.delay()
        end
    end)

    -- The LSP delta encoding requires tokens in document order.
    table.sort(results, function (a, b)
        return a.start < b.start
    end)

    return buildTokens(results, lines)
end