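# Persist SublimeHaskell's symbol database as JSON: one cache file per cabal
# sandbox (under CABAL_CACHE_PATH) and one per project (under
# PROJECTS_CACHE_PATH), written and read back through the serializers below.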
import json
import os

import sublime

if int(sublime.version()) < 3000:
    from sublime_haskell_common import *
    import symbols
else:
    from SublimeHaskell.sublime_haskell_common import *
    import SublimeHaskell.symbols as symbols

CACHE_PATH = None
CABAL_CACHE_PATH = None
PROJECTS_CACHE_PATH = None


def swap_dict(d):
    """
    {key => (item, value)} => {item => (key, value)}
    """
    return dict((v[0], (k, v[1])) for k, v in d.items())


def as_object(serializers, dct):
    """
    JSON object hook: reconstruct a symbols object from a decoded dict.

    serializers is a dictionary type => (name, list of fields in constructor
    order), as produced by symbol_serializers(). A dict carrying a known
    '__type__' tag is turned back into the corresponding object, an unknown
    tag raises RuntimeError, and a dict without a tag is returned unchanged.
    """
    sers = swap_dict(serializers)

    if '__type__' in dct:
        if dct['__type__'] in sers:
            (load_type, fields) = sers[dct['__type__']]
            return load_type(*[dct.get(f) for f in fields])
        else:
            raise RuntimeError("Unknown type '{0}'".format(dct['__type__']))
    else:
        return dct
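
# Illustrative example (field values are made up): decoding a dict like
#   {'__type__': 'location', 'filename': 'src/Main.hs', 'line': 1, 'column': 1, 'project': None}
# calls symbols.Location('src/Main.hs', 1, 1, None), because the 'location'
# serializer below lists the constructor arguments in exactly that order.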


class SymbolsEncoder(json.JSONEncoder):
    def __init__(self, serializers = None, **kwargs):
        super(SymbolsEncoder, self).__init__(**kwargs)
        self.serializers = serializers

    def default(self, obj):
        # Known symbols types are flattened to a dict of their serialized fields
        # plus a '__type__' tag; everything else falls back to the base encoder.
        if self.serializers and type(obj) in self.serializers:
            (name, args) = self.serializers[type(obj)]
            result = dict((k, v) for k, v in obj.__dict__.items() if k in args)
            result.update({'__type__': name})
            return result
        return json.JSONEncoder.default(self, obj)
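

# Maps each symbols class to its JSON '__type__' tag and the field names, in
# the order its constructor is called with on load; encode_json and
# decode_json below share this table so cached objects round-trip.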
def symbol_serializers():
    return {
        symbols.Location: ('location', ['filename', 'line', 'column', 'project']),
        symbols.Symbol: ('symbol', ['what', 'name', 'docs', 'location']),
        symbols.Import: ('import', ['module', 'is_qualified', 'import_as']),
        symbols.Module: ('module', ['name', 'exports', 'imports', 'declarations', 'location', 'cabal', 'last_inspection_time']),
        symbols.Declaration: ('declaration', ['name', 'what', 'docs', 'location']),
        symbols.Function: ('function', ['name', 'type', 'docs', 'location']),
        symbols.TypeBase: ('typebase', ['name', 'what', 'context', 'args', 'definition', 'docs', 'location']),
        symbols.Type: ('type', ['name', 'context', 'args', 'definition', 'docs', 'location']),
        symbols.Newtype: ('newtype', ['name', 'context', 'args', 'definition', 'docs', 'location']),
        symbols.Data: ('data', ['name', 'context', 'args', 'definition', 'docs', 'location']),
        symbols.Class: ('class', ['name', 'context', 'args', 'docs', 'location'])}


def encode_json(obj, **kwargs):
    return json.dumps(obj, cls = SymbolsEncoder, serializers = symbol_serializers(), **kwargs)


def decode_json(s):
    return json.loads(s, object_hook = lambda v: as_object(symbol_serializers(), v))
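
# Round-trip sketch (illustrative only; assumes symbols.Location takes the
# arguments listed for the 'location' serializer above):
#
#     loc = symbols.Location('src/Main.hs', 1, 1, None)
#     assert isinstance(decode_json(encode_json(loc)), symbols.Location)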


def escape_path(path):
    path = os.path.abspath(os.path.normcase(path))
    folders = []
    (base, name) = os.path.split(path)
    while name:
        folders.append(name)
        (base, name) = os.path.split(base)
    if base:
        folders.append(''.join(filter(lambda c: c.isalpha() or c.isdigit(), base)))
    folders.reverse()
    return '.'.join(folders)
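
# escape_path flattens an absolute path into a single dot-separated file name,
# keeping only alphanumeric characters from the drive/root part; illustratively,
# 'C:\\Users\\Me\\Proj' becomes 'c.users.me.proj' on Windows (normcase lowercases).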


def dump_cabal_cache(database, cabal_name = None):
    if not cabal_name:
        cabal_name = current_cabal()
    formatted_json = None
    with database.get_cabal_modules(cabal_name) as cabal_modules:
        cabal_path = escape_path(cabal_name) if cabal_name != 'cabal' else 'cabal'
        cabal_json = os.path.join(CABAL_CACHE_PATH, cabal_path + '.json')
        formatted_json = encode_json(cabal_modules, indent = 2)
    with open(cabal_json, 'w') as f:
        f.write(formatted_json)


def dump_project_cache(database, project_path):
    formatted_json = None
    project_modules = database.get_project_modules(project_path)
    with database.files:
        project_json = os.path.join(PROJECTS_CACHE_PATH, escape_path(project_path) + '.json')
        formatted_json = encode_json(project_modules, indent = 2)
    with open(project_json, 'w') as f:
        f.write(formatted_json)
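

# The load_* functions are the inverse of the dump_* functions: they read the
# JSON written above (if present) and feed the decoded modules back into the
# symbol database.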
def load_cabal_cache(database, cabal_name = None):
    if not cabal_name:
        cabal_name = current_cabal()
    formatted_json = None
    cabal_path = escape_path(cabal_name) if cabal_name != 'cabal' else 'cabal'
    cabal_json = os.path.join(CABAL_CACHE_PATH, cabal_path + '.json')
    if os.path.exists(cabal_json):
        with open(cabal_json, 'r') as f:
            formatted_json = f.read()
    if formatted_json:
        cabal_modules = decode_json(formatted_json)
        for m in cabal_modules.values():
            database.add_module(m, cabal_name)


def load_project_cache(database, project_path):
    formatted_json = None
    project_json = os.path.join(PROJECTS_CACHE_PATH, escape_path(project_path) + '.json')
    if os.path.exists(project_json):
        with open(project_json, 'r') as f:
            formatted_json = f.read()
    if formatted_json:
        project_modules = decode_json(formatted_json)
        for m in project_modules.values():
            database.add_file(m.location.filename, m)


def plugin_loaded():
    global CACHE_PATH
    global CABAL_CACHE_PATH
    global PROJECTS_CACHE_PATH
    package_path = sublime_haskell_package_path()
    cache_path = sublime_haskell_cache_path()
    CACHE_PATH = os.path.join(cache_path, 'cache')
    CABAL_CACHE_PATH = os.path.join(CACHE_PATH, 'cabal')
    PROJECTS_CACHE_PATH = os.path.join(CACHE_PATH, 'projects')

    if not os.path.exists(CACHE_PATH):
        os.mkdir(CACHE_PATH)
    if not os.path.exists(CABAL_CACHE_PATH):
        os.mkdir(CABAL_CACHE_PATH)
    if not os.path.exists(PROJECTS_CACHE_PATH):
        os.mkdir(PROJECTS_CACHE_PATH)
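

# Sublime Text 3 invokes plugin_loaded() automatically once the API is ready;
# Sublime Text 2 does not, so call it at import time there.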
if int(sublime.version()) < 3000:
    plugin_loaded()