Coverage for C:\leo.repo\leo-editor\leo\core\leoCache.py: 39%


438 statements  

#@+leo-ver=5-thin
#@+node:ekr.20100208065621.5894: * @file leoCache.py
"""A module encapsulating Leo's file caching"""
#@+<< imports >>
#@+node:ekr.20100208223942.10436: ** << imports >> (leoCache)
import fnmatch
import glob  # Needed by PickleShareDB.keys; see below.
import os
import pickle
import sqlite3
import stat
from typing import Any, Dict, Sequence
import zlib
from leo.core import leoGlobals as g
#@-<< imports >>
# pylint: disable=raise-missing-from

# Abbreviations used throughout.
abspath = g.os_path_abspath
basename = g.os_path_basename
expanduser = g.os_path_expanduser
isdir = g.os_path_isdir
isfile = g.os_path_isfile
join = g.os_path_join
normcase = g.os_path_normcase
split = g.os_path_split

26#@+node:ekr.20100208062523.5885: ** class CommanderCacher 

27class CommanderCacher: 

28 """A class to manage per-commander caches.""" 

29 

30 def __init__(self): 

31 try: 

32 path = join(g.app.homeLeoDir, 'db', 'global_data') 

33 self.db = SqlitePickleShare(path) 

34 except Exception: 

35 self.db = {} # type:ignore 

36 #@+others 

37 #@+node:ekr.20100209160132.5759: *3* cacher.clear 

38 def clear(self): 

39 """Clear the cache for all commanders.""" 

40 # Careful: self.db may be a Python dict. 

41 try: 

42 self.db.clear() 

43 except Exception: 

44 g.trace('unexpected exception') 

45 g.es_exception() 

46 self.db = {} # type:ignore 

47 #@+node:ekr.20180627062431.1: *3* cacher.close 

48 def close(self): 

49 # Careful: self.db may be a dict. 

50 if hasattr(self.db, 'conn'): 

51 # pylint: disable=no-member 

52 self.db.conn.commit() 

53 self.db.conn.close() 

54 #@+node:ekr.20180627042809.1: *3* cacher.commit 

55 def commit(self): 

56 # Careful: self.db may be a dict. 

57 if hasattr(self.db, 'conn'): 

58 # pylint: disable=no-member 

59 self.db.conn.commit() 

60 #@+node:ekr.20180611054447.1: *3* cacher.dump 

61 def dump(self): 

62 """Dump the indicated cache if --trace-cache is in effect.""" 

63 dump_cache(g.app.commander_db, tag='Commander Cache') 

64 #@+node:ekr.20180627053508.1: *3* cacher.get_wrapper 

65 def get_wrapper(self, c, fn=None): 

66 """Return a new wrapper for c.""" 

67 return CommanderWrapper(c, fn=fn) 

    #@+node:ekr.20100208065621.5890: *3* cacher.test
    def test(self):

        # pylint: disable=no-member
        if g.app.gui.guiName() == 'nullGui':
            # The null gui doesn't normally set g.app.db.
            g.app.setGlobalDb()
        # Fixes bug 670108.
        assert g.app.db is not None  # a PickleShareDB instance.
        # Make sure g.guessExternalEditor works.
        g.app.db.get("LEO_EDITOR")
        # self.initFileDB('~/testpickleshare')
        db = self.db
        db.clear()
        assert not list(db.items())
        db['hello'] = 15
        db['aku ankka'] = [1, 2, 313]
        db['paths/nest/ok/keyname'] = [1, (5, 46)]
        db.uncache()  # frees memory, causes re-reads later
        # print(db.keys())
        db.clear()
        return True

    #@+node:ekr.20100210163813.5747: *3* cacher.save
    def save(self, c, fn):
        """
        Save the per-commander cache.

        If fn is given, change the cache key to fn. The save and save-as
        commands pass fn; save-to does not.
        """
        self.commit()
        if fn:
            # 1484: Change only the key!
            if isinstance(c.db, CommanderWrapper):
                c.db.key = fn
                self.commit()
            else:
                g.trace('cannot happen', c.db.__class__.__name__)
    #@-others

#@+node:ekr.20180627052459.1: ** class CommanderWrapper
class CommanderWrapper:
    """A class to distinguish keys from separate commanders."""

    def __init__(self, c, fn=None):
        self.c = c
        self.db = g.app.db
        self.key = fn or c.mFileName
        self.user_keys = set()

    def get(self, key, default=None):
        value = self.db.get(f"{self.key}:::{key}")
        return default if value is None else value

    def keys(self):
        return sorted(list(self.user_keys))

    def __contains__(self, key):
        return f"{self.key}:::{key}" in self.db

    def __delitem__(self, key):
        if key in self.user_keys:
            self.user_keys.remove(key)
        del self.db[f"{self.key}:::{key}"]

    def __getitem__(self, key):
        return self.db[f"{self.key}:::{key}"]  # May (properly) raise KeyError

    def __setitem__(self, key, value):
        self.user_keys.add(key)
        self.db[f"{self.key}:::{key}"] = value
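
# What follows is a minimal illustrative sketch, not part of leoCache.py:
# CommanderWrapper stores every per-commander value in the shared g.app.db
# under the composite key f"{c.mFileName}:::{key}". The dict and file name
# below are hypothetical stand-ins for g.app.db and c.mFileName.

def _sketch_commander_keys():  # pragma: no cover (illustrative only)
    shared_db = {}  # stands in for g.app.db
    file_key = '/home/user/workbook.leo'  # stands in for c.mFileName
    # __setitem__ composes the key:
    shared_db[f"{file_key}:::windowState"] = 'maximized'
    # __contains__ and __getitem__ use the same composite key:
    assert f"{file_key}:::windowState" in shared_db
    assert shared_db[f"{file_key}:::windowState"] == 'maximized'
    # CommanderCacher.save re-keys a renamed file by changing only
    # self.key, so later writes use the new prefix.
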

#@+node:ekr.20180627041556.1: ** class GlobalCacher
class GlobalCacher:
    """A singleton global cacher, g.app.db"""

    def __init__(self):
        """Ctor for the GlobalCacher class."""
        trace = 'cache' in g.app.debug
        try:
            path = join(g.app.homeLeoDir, 'db', 'g_app_db')
            if trace:
                print('path for g.app.db:', repr(path))
            self.db = SqlitePickleShare(path)
            if trace and self.db is not None:
                self.dump(tag='Startup')
        except Exception:
            if trace:
                g.es_exception()
            # Use a plain dict as a dummy.
            self.db = {}  # type:ignore
    #@+others
    #@+node:ekr.20180627045750.1: *3* g_cacher.clear
    def clear(self):
        """Clear the global cache."""
        # Careful: self.db may be a Python dict.
        if 'cache' in g.app.debug:
            g.trace('clear g.app.db')
        try:
            self.db.clear()
        except TypeError:
            # self.db is a plain dict.
            self.db.clear()
        except Exception:
            g.trace('unexpected exception')
            g.es_exception()
            self.db = {}  # type:ignore
    #@+node:ekr.20180627042948.1: *3* g_cacher.commit_and_close()
    def commit_and_close(self):
        # Careful: self.db may be a dict.
        if hasattr(self.db, 'conn'):
            # pylint: disable=no-member
            if 'cache' in g.app.debug:
                self.dump(tag='Shutdown')
            self.db.conn.commit()
            self.db.conn.close()
    #@+node:ekr.20180627045953.1: *3* g_cacher.dump
    def dump(self, tag=''):
        """Dump the indicated cache if --trace-cache is in effect."""
        tag0 = 'Global Cache'
        tag2 = f"{tag0}: {tag}" if tag else tag0
        dump_cache(self.db, tag2)  # Careful: g.app.db may not be set yet.
    #@-others

#@+node:ekr.20100208223942.5967: ** class PickleShareDB
_sentinel = object()


class PickleShareDB:
    """ The main 'connection' object for PickleShare database """
    #@+others
    #@+node:ekr.20100208223942.5968: *3* Birth & special methods
    #@+node:ekr.20100208223942.5969: *4* __init__ (PickleShareDB)
    def __init__(self, root):
        """
        Init the PickleShareDB class.
        root: The directory that contains the data. Created if it doesn't exist.
        """
        self.root = abspath(expanduser(root))
        if not isdir(self.root) and not g.unitTesting:
            self._makedirs(self.root)
        self.cache = {}
        # Keys are normalized file names.
        # Values are tuples (obj, orig_mod_time)

        def loadz(fileobj):
            if fileobj:
                # Retain this code for maximum compatibility.
                try:
                    val = pickle.loads(zlib.decompress(fileobj.read()))
                except ValueError:
                    g.es("Unpickling error - Python 3 data accessed from Python 2?")
                    return None
                return val
            return None

        def dumpz(val, fileobj):
            if fileobj:
                try:
                    # Use Python 2's highest protocol, 2, if possible
                    data = pickle.dumps(val, 2)
                except Exception:
                    # Use best available if that doesn't work (unlikely)
                    data = pickle.dumps(val, pickle.HIGHEST_PROTOCOL)
                compressed = zlib.compress(data)
                fileobj.write(compressed)

        self.loader = loadz
        self.dumper = dumpz

    #@+node:ekr.20100208223942.5970: *4* __contains__(PickleShareDB)
    def __contains__(self, key):

        return self.has_key(key)  # NOQA
    #@+node:ekr.20100208223942.5971: *4* __delitem__
    def __delitem__(self, key):
        """ del db["key"] """
        fn = join(self.root, key)
        self.cache.pop(fn, None)
        try:
            os.remove(fn)
        except OSError:
            # notfound and permission denied are ok - we
            # lost, the other process wins the conflict
            pass
    #@+node:ekr.20100208223942.5972: *4* __getitem__ (PickleShareDB)
    def __getitem__(self, key):
        """ db['key'] reading """
        fn = join(self.root, key)
        try:
            mtime = os.stat(fn)[stat.ST_MTIME]
        except OSError:
            raise KeyError(key)
        if fn in self.cache and mtime == self.cache[fn][1]:
            obj = self.cache[fn][0]
            return obj
        try:
            # The cached item has expired, need to read
            obj = self.loader(self._openFile(fn, 'rb'))
        except Exception:
            raise KeyError(key)
        self.cache[fn] = (obj, mtime)
        return obj
    #@+node:ekr.20100208223942.5973: *4* __iter__
    def __iter__(self):

        for k in list(self.keys()):
            yield k
    #@+node:ekr.20100208223942.5974: *4* __repr__
    def __repr__(self):
        return f"PickleShareDB('{self.root}')"

    #@+node:ekr.20100208223942.5975: *4* __setitem__ (PickleShareDB)
    def __setitem__(self, key, value):
        """ db['key'] = 5 """
        fn = join(self.root, key)
        parent, junk = split(fn)
        if parent and not isdir(parent):
            self._makedirs(parent)
        self.dumper(value, self._openFile(fn, 'wb'))
        try:
            mtime = os.path.getmtime(fn)
            self.cache[fn] = (value, mtime)
        except OSError as e:
            if e.errno != 2:  # 2 == ENOENT: the file has vanished; ignore.
                raise

    #@+node:ekr.20100208223942.10452: *3* _makedirs
    def _makedirs(self, fn, mode=0o777):

        os.makedirs(fn, mode)
    #@+node:ekr.20100208223942.10458: *3* _openFile (PickleShareDB)
    def _openFile(self, fn, mode='r'):
        """ Open this file. Return a file object.

        Do not print an error message.
        It is not an error for this to fail.
        """
        try:
            return open(fn, mode)
        except Exception:
            return None
    #@+node:ekr.20100208223942.10454: *3* _walkfiles & helpers
    def _walkfiles(self, s, pattern=None):
        """ D.walkfiles() -> iterator over files in D, recursively.

        The optional argument, pattern, limits the results to files
        with names that match the pattern. For example,
        mydir.walkfiles('*.tmp') yields only files with the .tmp
        extension.
        """
        for child in self._listdir(s):
            if isfile(child):
                if pattern is None or self._fn_match(child, pattern):
                    yield child
            elif isdir(child):
                for f in self._walkfiles(child, pattern):
                    yield f
    #@+node:ekr.20100208223942.10456: *4* _listdir
    def _listdir(self, s, pattern=None):
        """ D.listdir() -> List of items in this directory.

        Use D.files() or D.dirs() instead if you want a listing
        of just files or just subdirectories.

        The elements of the list are path objects.

        With the optional 'pattern' argument, this only lists
        items whose names match the given pattern.
        """
        names = os.listdir(s)
        if pattern is not None:
            names = fnmatch.filter(names, pattern)
        return [join(s, child) for child in names]
    #@+node:ekr.20100208223942.10464: *4* _fn_match
    def _fn_match(self, s, pattern):
        """ Return True if self.name matches the given pattern.

        pattern - A filename pattern with wildcards, for example '*.py'.
        """
        return fnmatch.fnmatch(basename(s), pattern)

    #@+node:ekr.20100208223942.5978: *3* clear (PickleShareDB)
    def clear(self):
        # Deletes all files in the fcache subdirectory.
        # It would be more thorough to delete everything
        # below the root directory, but it's not necessary.
        for z in self.keys():
            self.__delitem__(z)
    #@+node:ekr.20100208223942.5979: *3* get
    def get(self, key, default=None):

        try:
            val = self[key]
            return val
        except KeyError:
            return default
    #@+node:ekr.20100208223942.5980: *3* has_key (PickleShareDB)
    def has_key(self, key):

        try:
            self[key]
        except KeyError:
            return False
        return True

    #@+node:ekr.20100208223942.5981: *3* items
    def items(self):
        # Return (key, value) pairs, matching dict.items.
        return [(z, self[z]) for z in self]

    #@+node:ekr.20100208223942.5982: *3* keys & helpers (PickleShareDB)
    # Called by clear, and during unit testing.

    def keys(self, globpat=None):
        """Return all keys in DB, or all keys matching a glob"""
        if globpat is None:
            files = self._walkfiles(self.root)
        else:
            # Do not call g.glob_glob here.
            # Match the pattern with the stdlib glob module.
            files = glob.glob(join(self.root, globpat))
        result = [self._normalized(p) for p in files if isfile(p)]
        return result

    #@+node:ekr.20100208223942.5976: *4* _normalized
    def _normalized(self, p):
        """ Make a key suitable for user's eyes """
        # os.path.relpath doesn't work here.
        return self._relpathto(self.root, p).replace('\\', '/')
    #@+node:ekr.20100208223942.10460: *4* _relpathto
    # Used only by _normalized.

    def _relpathto(self, src, dst):
        """ Return a relative path from self to dst.

        If there is no relative path from self to dst, for example if
        they reside on different drives in Windows, then this returns
        dst.abspath().
        """
        origin = abspath(src)
        dst = abspath(dst)
        orig_list = self._splitall(normcase(origin))
        # Don't normcase dst! We want to preserve the case.
        dest_list = self._splitall(dst)
        if orig_list[0] != normcase(dest_list[0]):
            # Can't get here from there.
            return dst
        # Find the location where the two paths start to differ.
        i = 0
        for start_seg, dest_seg in zip(orig_list, dest_list):
            if start_seg != normcase(dest_seg):
                break
            i += 1
        # Now i is the point where the two paths diverge.
        # Need a certain number of "os.pardir"s to work up
        # from the origin to the point of divergence.
        segments = [os.pardir] * (len(orig_list) - i)
        # Need to add the diverging part of dest_list.
        segments += dest_list[i:]
        if segments:
            return join(*segments)
        # If they happen to be identical, use os.curdir.
        return os.curdir
    #@+node:ekr.20100208223942.10462: *4* _splitall
    # Used by relpathto.

    def _splitall(self, s):
        """ Return a list of the path components in this path.

        The first item in the list will be a path. Its value will be
        either os.curdir, os.pardir, empty, or the root directory of
        this path (for example, '/' or 'C:\\'). The other items in
        the list will be strings.

        path.path.joinpath(*result) will yield the original path.
        """
        parts = []
        loc = s
        while loc != os.curdir and loc != os.pardir:
            prev = loc
            loc, child = split(prev)
            if loc == prev:
                break
            parts.append(child)
        parts.append(loc)
        parts.reverse()
        return parts
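    # Worked example for the two helpers above (illustrative): with
    # src = '/tmp/db' and dst = '/tmp/db/paths/nest/key', _splitall
    # returns ['/', 'tmp', 'db'] and ['/', 'tmp', 'db', 'paths', 'nest', 'key'];
    # the lists diverge at i == 3, so _relpathto returns
    # join('paths', 'nest', 'key'), which _normalized turns into the
    # user-visible key 'paths/nest/key'.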

    #@+node:ekr.20100208223942.5989: *3* uncache
    def uncache(self, *items):
        """ Removes all, or specified items from cache

        Use this after reading a large amount of large objects
        to free up memory, when you won't be needing the objects
        for a while.
        """
        if not items:
            self.cache = {}
        for it in items:
            self.cache.pop(it, None)
    #@-others
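
# A minimal sketch, not part of leoCache.py, of PickleShareDB's on-disk
# format: each key is a relative file path under root, and each value is
# a zlib-compressed pickle, written by dumpz and read back by loadz.
# The temporary directory below is a hypothetical stand-in for
# ~/.leo/db/global_data.

def _sketch_pickleshare_format():  # pragma: no cover (illustrative only)
    import tempfile
    root = tempfile.mkdtemp()
    fn = os.path.join(root, 'hello')  # db['hello'] = 15 writes this file.
    with open(fn, 'wb') as f:
        f.write(zlib.compress(pickle.dumps(15, 2)))  # as dumpz does
    with open(fn, 'rb') as f:
        assert pickle.loads(zlib.decompress(f.read())) == 15  # as loadz does
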

#@+node:vitalije.20170716201700.1: ** class SqlitePickleShare
_sentinel = object()


class SqlitePickleShare:
    """ The main 'connection' object for SqlitePickleShare database """
    #@+others
    #@+node:vitalije.20170716201700.2: *3* Birth & special methods
    def init_dbtables(self, conn):
        sql = 'create table if not exists cachevalues(key text primary key, data blob);'
        conn.execute(sql)
    #@+node:vitalije.20170716201700.3: *4* __init__ (SqlitePickleShare)
    def __init__(self, root):
        """
        Init the SqlitePickleShare class.
        root: The directory that contains the data. Created if it doesn't exist.
        """
        self.root = abspath(expanduser(root))
        if not isdir(self.root) and not g.unitTesting:
            self._makedirs(self.root)
        dbfile = ':memory:' if g.unitTesting else join(root, 'cache.sqlite')
        self.conn = sqlite3.connect(dbfile, isolation_level=None)
        self.init_dbtables(self.conn)
        self.cache = {}
        # Keys are normalized file names.
        # Values are tuples (obj, orig_mod_time)

        def loadz(data):
            if data:
                # Retain this code for maximum compatibility.
                try:
                    val = pickle.loads(zlib.decompress(data))
                except (ValueError, TypeError):
                    g.es("Unpickling error - Python 3 data accessed from Python 2?")
                    return None
                return val
            return None

        def dumpz(val):
            try:
                # Use Python 2's highest protocol, 2, if possible
                data = pickle.dumps(val, protocol=2)
            except Exception:
                # Use best available if that doesn't work (unlikely)
                data = pickle.dumps(val, pickle.HIGHEST_PROTOCOL)
            return sqlite3.Binary(zlib.compress(data))

        self.loader = loadz
        self.dumper = dumpz
        self.reset_protocol_in_values()

    #@+node:vitalije.20170716201700.4: *4* __contains__(SqlitePickleShare)
    def __contains__(self, key):

        return self.has_key(key)  # NOQA
    #@+node:vitalije.20170716201700.5: *4* __delitem__
    def __delitem__(self, key):
        """ del db["key"] """
        try:
            self.conn.execute(
                '''delete from cachevalues
                where key=?''', (key,))
        except sqlite3.OperationalError:
            pass
    #@+node:vitalije.20170716201700.6: *4* __getitem__
    def __getitem__(self, key):
        """ db['key'] reading """
        try:
            obj = None
            for row in self.conn.execute(
                '''select data from cachevalues
                where key=?''', (key,)):
                obj = self.loader(row[0])
                break
            else:
                raise KeyError(key)
        except sqlite3.Error:
            raise KeyError(key)
        return obj
    #@+node:vitalije.20170716201700.7: *4* __iter__
    def __iter__(self):

        for k in list(self.keys()):
            yield k
    #@+node:vitalije.20170716201700.8: *4* __repr__
    def __repr__(self):
        return f"SqlitePickleShare('{self.root}')"
    #@+node:vitalije.20170716201700.9: *4* __setitem__
    def __setitem__(self, key, value):
        """ db['key'] = 5 """
        try:
            data = self.dumper(value)
            self.conn.execute(
                '''replace into cachevalues(key, data) values(?,?);''',
                (key, data))
        except sqlite3.OperationalError:
            g.es_exception()

    #@+node:vitalije.20170716201700.10: *3* _makedirs
    def _makedirs(self, fn, mode=0o777):

        os.makedirs(fn, mode)
    #@+node:vitalije.20170716201700.11: *3* _openFile (SqlitePickleShare)
    def _openFile(self, fn, mode='r'):
        """ Open this file. Return a file object.

        Do not print an error message.
        It is not an error for this to fail.
        """
        try:
            return open(fn, mode)
        except Exception:
            return None
    #@+node:vitalije.20170716201700.12: *3* _walkfiles & helpers
    def _walkfiles(self, s, pattern=None):
        """ D.walkfiles() -> iterator over files in D, recursively.

        The optional argument, pattern, limits the results to files
        with names that match the pattern. For example,
        mydir.walkfiles('*.tmp') yields only files with the .tmp
        extension.
        """
        # A stub: SqlitePickleShare stores values in sqlite, not in files.

    #@+node:vitalije.20170716201700.13: *4* _listdir
    def _listdir(self, s, pattern=None):
        """ D.listdir() -> List of items in this directory.

        Use D.files() or D.dirs() instead if you want a listing
        of just files or just subdirectories.

        The elements of the list are path objects.

        With the optional 'pattern' argument, this only lists
        items whose names match the given pattern.
        """
        names = os.listdir(s)
        if pattern is not None:
            names = fnmatch.filter(names, pattern)
        return [join(s, child) for child in names]
    #@+node:vitalije.20170716201700.14: *4* _fn_match
    def _fn_match(self, s, pattern):
        """ Return True if self.name matches the given pattern.

        pattern - A filename pattern with wildcards, for example '*.py'.
        """
        return fnmatch.fnmatch(basename(s), pattern)

    #@+node:vitalije.20170716201700.15: *3* clear (SqlitePickleShare)
    def clear(self):
        # Deletes all rows in the cachevalues table, not files on disk.
        self.conn.execute('delete from cachevalues;')

    #@+node:vitalije.20170716201700.16: *3* get (SqlitePickleShare)
    def get(self, key, default=None):

        if not self.has_key(key):
            return default
        try:
            val = self[key]
            return val
        except Exception:  # #1444: Was KeyError.
            return default

    #@+node:vitalije.20170716201700.17: *3* has_key (SqlitePickleShare)
    def has_key(self, key):
        sql = 'select 1 from cachevalues where key=?;'
        for row in self.conn.execute(sql, (key,)):
            return True
        return False

    #@+node:vitalije.20170716201700.18: *3* items
    def items(self):
        # Note: yields the raw (compressed, pickled) blobs, not loaded values.
        sql = 'select key,data from cachevalues;'
        for key, data in self.conn.execute(sql):
            yield key, data

    #@+node:vitalije.20170716201700.19: *3* keys
    # Called by clear, and during unit testing.

    def keys(self, globpat=None):
        """Return all keys in DB, or all keys matching a glob"""
        if globpat is None:
            sql = 'select key from cachevalues;'
            args: Sequence[Any] = tuple()
        else:
            sql = "select key from cachevalues where key glob ?;"
            # pylint: disable=trailing-comma-tuple
            args = globpat,
        for key in self.conn.execute(sql, args):
            # Note: yields 1-tuples (sqlite rows); dump_cache unpacks key[0].
            yield key

    #@+node:vitalije.20170818091008.1: *3* reset_protocol_in_values
    def reset_protocol_in_values(self):
        PROTOCOLKEY = '__cache_pickle_protocol__'
        if self.get(PROTOCOLKEY, 3) == 2:
            return
        #@+others
        #@+node:vitalije.20170818115606.1: *4* viewrendered special case
        import json
        row = self.get('viewrendered_default_layouts') or (None, None)
        row = json.loads(json.dumps(row[0])), json.loads(json.dumps(row[1]))
        self['viewrendered_default_layouts'] = row
        #@+node:vitalije.20170818115617.1: *4* do_block
        def do_block(cur):
            itms = tuple((self.dumper(self.loader(v)), k) for k, v in cur)
            if itms:
                self.conn.executemany('update cachevalues set data=? where key=?', itms)
                self.conn.commit()
                return itms[-1][1]
            return None
        #@-others
        self.conn.isolation_level = 'DEFERRED'

        sql0 = '''select key, data from cachevalues order by key limit 50'''
        sql1 = '''select key, data from cachevalues where key > ? order by key limit 50'''
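        # Re-pickle every stored value with protocol 2, walking the table
        # in batches of 50 rows ordered by key: do_block returns the last
        # key of each batch, and sql1 uses it as the cursor for the next.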

        block = self.conn.execute(sql0)
        lk = do_block(block)
        while lk:
            lk = do_block(self.conn.execute(sql1, (lk,)))
        self[PROTOCOLKEY] = 2
        self.conn.commit()

        self.conn.isolation_level = None
    #@+node:vitalije.20170716201700.23: *3* uncache
    def uncache(self, *items):
        """not used in SqlitePickleShare"""
        pass
    #@-others
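
# A minimal sketch, not part of leoCache.py, of SqlitePickleShare's
# storage: one table, cachevalues(key text primary key, data blob),
# holding zlib-compressed pickles. It uses an in-memory database, just
# as the class itself does under unit tests.

def _sketch_sqlite_pickleshare():  # pragma: no cover (illustrative only)
    conn = sqlite3.connect(':memory:', isolation_level=None)
    conn.execute('create table if not exists cachevalues(key text primary key, data blob);')
    blob = sqlite3.Binary(zlib.compress(pickle.dumps([1, 2, 313], 2)))  # as dumpz does
    conn.execute('replace into cachevalues(key, data) values(?,?);', ('aku ankka', blob))
    for row in conn.execute('select data from cachevalues where key=?;', ('aku ankka',)):
        assert pickle.loads(zlib.decompress(row[0])) == [1, 2, 313]  # as loadz does
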

#@+node:ekr.20180627050237.1: ** function: dump_cache
def dump_cache(db, tag):
    """Dump the given cache."""
    print(f'\n===== {tag} =====\n')
    if db is None:
        print('db is None!')
        return
    # Create a dict, sorted by file prefixes.
    d: Dict[str, Any] = {}
    for key in db.keys():
        key = key[0]  # SqlitePickleShare.keys yields 1-tuples.
        val = db.get(key)
        data = key.split(':::')
        if len(data) == 2:
            fn, key2 = data
        else:
            fn, key2 = 'None', key
        aList = d.get(fn, [])
        aList.append((key2, val))
        d[fn] = aList
    # Print the dict.
    files = 0
    for key in sorted(d.keys()):
        if key != 'None':
            dump_list('File: ' + key, d.get(key))
            files += 1
    if d.get('None'):
        heading = f"All others ({tag})" if files else None
        dump_list(heading, d.get('None'))


def dump_list(heading, aList):
    if heading:
        print(f'\n{heading}...\n')
    for aTuple in aList:
        key, val = aTuple
        if isinstance(val, str):
            if key.startswith('windowState'):
                print(key)
            elif key.endswith(('leo_expanded', 'leo_marked')):
                if val:
                    print(f"{key:30}:")
                    g.printObj(val.split(','))
                else:
                    print(f"{key:30}: []")
            else:
                print(f"{key:30}: {val}")
        elif isinstance(val, (int, float)):
            print(f"{key:30}: {val}")
        else:
            print(f"{key:30}:")
            g.printObj(val)
#@-others
#@@language python
#@@tabwidth -4
#@@pagewidth 70

#@-leo