Coverage for C:\Repos\leo-editor\leo\core\leoCache.py: 39%
438 statements
coverage.py v6.4, created at 2022-05-24 10:21 -0500
#@+leo-ver=5-thin
#@+node:ekr.20100208065621.5894: * @file leoCache.py
"""A module encapsulating Leo's file caching."""
#@+<< imports >>
#@+node:ekr.20100208223942.10436: ** << imports >> (leoCache)
import fnmatch
import glob
import os
import pickle
import sqlite3
import stat
from typing import Any, Dict, Sequence
import zlib
from leo.core import leoGlobals as g
#@-<< imports >>
# pylint: disable=raise-missing-from
# Abbreviations used throughout.
abspath = g.os_path_abspath
basename = g.os_path_basename
expanduser = g.os_path_expanduser
isdir = g.os_path_isdir
isfile = g.os_path_isfile
join = g.os_path_join
normcase = g.os_path_normcase
split = g.os_path_split
#@+others
#@+node:ekr.20100208062523.5885: ** class CommanderCacher
class CommanderCacher:
    """A class to manage per-commander caches."""

    def __init__(self):
        try:
            path = join(g.app.homeLeoDir, 'db', 'global_data')
            self.db = SqlitePickleShare(path)
        except Exception:
            self.db = {}  # type:ignore
    #@+others
    #@+node:ekr.20100209160132.5759: *3* cacher.clear
    def clear(self):
        """Clear the cache for all commanders."""
        # Careful: self.db may be a Python dict.
        try:
            self.db.clear()
        except Exception:
            g.trace('unexpected exception')
            g.es_exception()
            self.db = {}  # type:ignore
    #@+node:ekr.20180627062431.1: *3* cacher.close
    def close(self):
        # Careful: self.db may be a dict.
        if hasattr(self.db, 'conn'):
            # pylint: disable=no-member
            self.db.conn.commit()
            self.db.conn.close()
    #@+node:ekr.20180627042809.1: *3* cacher.commit
    def commit(self):
        # Careful: self.db may be a dict.
        if hasattr(self.db, 'conn'):
            # pylint: disable=no-member
            self.db.conn.commit()
    #@+node:ekr.20180611054447.1: *3* cacher.dump
    def dump(self):
        """Dump the indicated cache if --trace-cache is in effect."""
        dump_cache(g.app.commander_db, tag='Commander Cache')
    #@+node:ekr.20180627053508.1: *3* cacher.get_wrapper
    def get_wrapper(self, c, fn=None):
        """Return a new wrapper for c."""
        return CommanderWrapper(c, fn=fn)
    #@+node:ekr.20100208065621.5890: *3* cacher.test
    def test(self):
        # pylint: disable=no-member
        if g.app.gui.guiName() == 'nullGui':
            # Null guis don't normally set g.app.gui.db.
            g.app.setGlobalDb()
        # Fixes bug 670108.
        assert g.app.db is not None  # a PickleShareDB instance.
        # Make sure g.guessExternalEditor works.
        g.app.db.get("LEO_EDITOR")
        # self.initFileDB('~/testpickleshare')
        db = self.db
        db.clear()
        assert not list(db.items())
        db['hello'] = 15
        db['aku ankka'] = [1, 2, 313]
        db['paths/nest/ok/keyname'] = [1, (5, 46)]
        db.uncache()  # Frees memory; causes re-reads later.
        # print(db.keys())
        db.clear()
        return True
    #@+node:ekr.20100210163813.5747: *3* cacher.save
    def save(self, c, fn):
        """
        Save the per-commander cache.

        If fn is given, change the cache prefix (key) to fn:
        save and save-as pass fn; save-to does not.
        """
        self.commit()
        if fn:
            # 1484: Change only the key!
            if isinstance(c.db, CommanderWrapper):
                c.db.key = fn
                self.commit()
            else:
                g.trace('can not happen', c.db.__class__.__name__)
    #@-others
#@+node:ekr.20180627052459.1: ** class CommanderWrapper
class CommanderWrapper:
    """A class to distinguish keys from separate commanders."""

    def __init__(self, c, fn=None):
        self.c = c
        self.db = g.app.db
        self.key = fn or c.mFileName
        self.user_keys = set()

    def get(self, key, default=None):
        value = self.db.get(f"{self.key}:::{key}")
        return default if value is None else value

    def keys(self):
        return sorted(list(self.user_keys))

    def __contains__(self, key):
        return f"{self.key}:::{key}" in self.db

    def __delitem__(self, key):
        if key in self.user_keys:
            self.user_keys.remove(key)
        del self.db[f"{self.key}:::{key}"]

    def __getitem__(self, key):
        return self.db[f"{self.key}:::{key}"]  # May (properly) raise KeyError.

    def __setitem__(self, key, value):
        self.user_keys.add(key)
        self.db[f"{self.key}:::{key}"] = value
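# A minimal usage sketch (not part of Leo's sources) of the key scheme above:
# every access is rewritten to '<file name>:::<key>' before it reaches the
# single shared g.app.db, so two commanders never collide.
#
#   c.db = CommanderWrapper(c)          # suppose c.mFileName == 'a.leo'
#   c.db['windowState'] = 'maximized'   # stores key 'a.leo:::windowState'
#   'windowState' in c.db               # True: probes the prefixed key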
#@+node:ekr.20180627041556.1: ** class GlobalCacher
class GlobalCacher:
    """A singleton global cacher, g.app.db"""

    def __init__(self):
        """Ctor for the GlobalCacher class."""
        trace = 'cache' in g.app.debug
        try:
            path = join(g.app.homeLeoDir, 'db', 'g_app_db')
            if trace:
                print('path for g.app.db:', repr(path))
            self.db = SqlitePickleShare(path)
            if trace and self.db is not None:
                self.dump(tag='Startup')
        except Exception:
            if trace:
                g.es_exception()
            # Use a plain dict as a dummy.
            self.db = {}  # type:ignore
    #@+others
    #@+node:ekr.20180627045750.1: *3* g_cacher.clear
    def clear(self):
        """Clear the global cache."""
        # Careful: self.db may be a Python dict.
        if 'cache' in g.app.debug:
            g.trace('clear g.app.db')
        try:
            self.db.clear()
        except TypeError:
            self.db.clear()
        except Exception:
            g.trace('unexpected exception')
            g.es_exception()
            self.db = {}  # type:ignore
    #@+node:ekr.20180627042948.1: *3* g_cacher.commit_and_close()
    def commit_and_close(self):
        # Careful: self.db may be a dict.
        if hasattr(self.db, 'conn'):
            # pylint: disable=no-member
            if 'cache' in g.app.debug:
                self.dump(tag='Shutdown')
            self.db.conn.commit()
            self.db.conn.close()
    #@+node:ekr.20180627045953.1: *3* g_cacher.dump
    def dump(self, tag=''):
        """Dump the indicated cache if --trace-cache is in effect."""
        tag0 = 'Global Cache'
        tag2 = f"{tag0}: {tag}" if tag else tag0
        dump_cache(self.db, tag2)  # Careful: g.app.db may not be set yet.
    #@-others
#@+node:ekr.20100208223942.5967: ** class PickleShareDB
_sentinel = object()


class PickleShareDB:
    """ The main 'connection' object for PickleShare database """
    #@+others
    #@+node:ekr.20100208223942.5968: *3* Birth & special methods
    #@+node:ekr.20100208223942.5969: *4* __init__ (PickleShareDB)
    def __init__(self, root):
        """
        Init the PickleShareDB class.
        root: The directory that contains the data. Created if it doesn't exist.
        """
        self.root = abspath(expanduser(root))
        if not isdir(self.root) and not g.unitTesting:
            self._makedirs(self.root)
        # Keys are normalized file names.
        # Values are tuples (obj, orig_mod_time)
        self.cache = {}

        def loadz(fileobj):
            if fileobj:
                # Retain this code for maximum compatibility.
                try:
                    val = pickle.loads(
                        zlib.decompress(fileobj.read()))
                except ValueError:
                    g.es("Unpickling error - Python 3 data accessed from Python 2?")
                    return None
                return val
            return None

        def dumpz(val, fileobj):
            if fileobj:
                try:
                    # Use Python 2's highest protocol, 2, if possible.
                    data = pickle.dumps(val, 2)
                except Exception:
                    # Use the best available protocol if that fails (unlikely).
                    data = pickle.dumps(val, pickle.HIGHEST_PROTOCOL)
                compressed = zlib.compress(data)
                fileobj.write(compressed)

        self.loader = loadz
        self.dumper = dumpz
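    # Round-trip sketch (illustrative, not part of the class): loadz and dumpz
    # are inverses, modulo the zlib compression layer.
    #
    #   data = zlib.compress(pickle.dumps([1, 2], 2))   # what dumpz writes
    #   pickle.loads(zlib.decompress(data))             # what loadz returns: [1, 2]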
    #@+node:ekr.20100208223942.5970: *4* __contains__(PickleShareDB)
    def __contains__(self, key):
        return self.has_key(key)  # NOQA
    #@+node:ekr.20100208223942.5971: *4* __delitem__
    def __delitem__(self, key):
        """ del db["key"] """
        fn = join(self.root, key)
        self.cache.pop(fn, None)
        try:
            os.remove(fn)
        except OSError:
            # Not-found and permission-denied errors are ok:
            # we lost; the other process wins the conflict.
            pass
    #@+node:ekr.20100208223942.5972: *4* __getitem__ (PickleShareDB)
    def __getitem__(self, key):
        """ db['key'] reading """
        fn = join(self.root, key)
        try:
            mtime = (os.stat(fn)[stat.ST_MTIME])
        except OSError:
            raise KeyError(key)
        if fn in self.cache and mtime == self.cache[fn][1]:
            obj = self.cache[fn][0]
            return obj
        try:
            # The cached item has expired: reread it.
            obj = self.loader(self._openFile(fn, 'rb'))
        except Exception:
            raise KeyError(key)
        self.cache[fn] = (obj, mtime)
        return obj
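    # Cache-invalidation sketch (illustrative; 'a/b' is a hypothetical key):
    # __getitem__ trusts the in-memory copy only while the file's mtime is
    # unchanged.
    #
    #   db = PickleShareDB('~/testpickleshare')
    #   db['a/b'] = 1   # writes <root>/a/b and caches (1, mtime)
    #   db['a/b']       # same mtime: served from self.cache, no disk read
    #   # ...another process rewrites <root>/a/b...
    #   db['a/b']       # mtime differs: reloaded from disk via loadz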
    #@+node:ekr.20100208223942.5973: *4* __iter__
    def __iter__(self):
        for k in list(self.keys()):
            yield k
    #@+node:ekr.20100208223942.5974: *4* __repr__
    def __repr__(self):
        return f"PickleShareDB('{self.root}')"
    #@+node:ekr.20100208223942.5975: *4* __setitem__ (PickleShareDB)
    def __setitem__(self, key, value):
        """ db['key'] = 5 """
        fn = join(self.root, key)
        parent, junk = split(fn)
        if parent and not isdir(parent):
            self._makedirs(parent)
        self.dumper(value, self._openFile(fn, 'wb'))
        try:
            mtime = os.path.getmtime(fn)
            self.cache[fn] = (value, mtime)
        except OSError as e:
            if e.errno != 2:  # 2 == errno.ENOENT: the file vanished; ignore.
                raise
    #@+node:ekr.20100208223942.10452: *3* _makedirs
    def _makedirs(self, fn, mode=0o777):
        os.makedirs(fn, mode)
    #@+node:ekr.20100208223942.10458: *3* _openFile (PickleShareDB)
    def _openFile(self, fn, mode='r'):
        """ Open this file. Return a file object.

        Do not print an error message.
        It is not an error for this to fail.
        """
        try:
            return open(fn, mode)
        except Exception:
            return None
    #@+node:ekr.20100208223942.10454: *3* _walkfiles & helpers
    def _walkfiles(self, s, pattern=None):
        """ D.walkfiles() -> iterator over files in D, recursively.

        The optional argument, pattern, limits the results to files
        with names that match the pattern. For example,
        mydir.walkfiles('*.tmp') yields only files with the .tmp
        extension.
        """
        for child in self._listdir(s):
            if isfile(child):
                if pattern is None or self._fn_match(child, pattern):
                    yield child
            elif isdir(child):
                for f in self._walkfiles(child, pattern):
                    yield f
    #@+node:ekr.20100208223942.10456: *4* _listdir
    def _listdir(self, s, pattern=None):
        """ D.listdir() -> List of items in this directory.

        Use D.files() or D.dirs() instead if you want a listing
        of just files or just subdirectories.

        The elements of the list are path objects.

        With the optional 'pattern' argument, this only lists
        items whose names match the given pattern.
        """
        names = os.listdir(s)
        if pattern is not None:
            names = fnmatch.filter(names, pattern)
        return [join(s, child) for child in names]
    #@+node:ekr.20100208223942.10464: *4* _fn_match
    def _fn_match(self, s, pattern):
        """ Return True if self.name matches the given pattern.

        pattern - A filename pattern with wildcards, for example '*.py'.
        """
        return fnmatch.fnmatch(basename(s), pattern)
    #@+node:ekr.20100208223942.5978: *3* clear (PickleShareDB)
    def clear(self):
        # Deletes all files in the fcache subdirectory.
        # It would be more thorough to delete everything
        # below the root directory, but it's not necessary.
        for z in self.keys():
            self.__delitem__(z)
    #@+node:ekr.20100208223942.5979: *3* get
    def get(self, key, default=None):
        try:
            val = self[key]
            return val
        except KeyError:
            return default
    #@+node:ekr.20100208223942.5980: *3* has_key (PickleShareDB)
    def has_key(self, key):
        try:
            self[key]
        except KeyError:
            return False
        return True
    #@+node:ekr.20100208223942.5981: *3* items
    def items(self):
        # Note: despite the name, this returns keys, not (key, value) pairs.
        return [z for z in self]
    #@+node:ekr.20100208223942.5982: *3* keys & helpers (PickleShareDB)
    # Called by clear, and during unit testing.

    def keys(self, globpat=None):
        """Return all keys in DB, or all keys matching a glob"""
        if globpat is None:
            files = self._walkfiles(self.root)
        else:
            # Do not call g.glob_glob here.
            files = glob.glob(join(self.root, globpat))
        result = [self._normalized(p) for p in files if isfile(p)]
        return result
    #@+node:ekr.20100208223942.5976: *4* _normalized
    def _normalized(self, p):
        """ Make a key suitable for user's eyes """
        # os.path.relpath doesn't work here.
        return self._relpathto(self.root, p).replace('\\', '/')
    #@+node:ekr.20100208223942.10460: *4* _relpathto
    # Used only by _normalized.

    def _relpathto(self, src, dst):
        """ Return a relative path from src to dst.

        If there is no relative path from src to dst, for example if
        they reside on different drives in Windows, then this returns
        dst's absolute path.
        """
        origin = abspath(src)
        dst = abspath(dst)
        orig_list = self._splitall(normcase(origin))
        # Don't normcase dst! We want to preserve the case.
        dest_list = self._splitall(dst)
        if orig_list[0] != normcase(dest_list[0]):
            # Can't get here from there.
            return dst
        # Find the location where the two paths start to differ.
        i = 0
        for start_seg, dest_seg in zip(orig_list, dest_list):
            if start_seg != normcase(dest_seg):
                break
            i += 1
        # Now i is the point where the two paths diverge.
        # Need a certain number of "os.pardir"s to work up
        # from the origin to the point of divergence.
        segments = [os.pardir] * (len(orig_list) - i)
        # Need to add the diverging part of dest_list.
        segments += dest_list[i:]
        if segments:
            return join(*segments)
        # If they happen to be identical, use os.curdir.
        return os.curdir
    #@+node:ekr.20100208223942.10462: *4* _splitall
    # Used by _relpathto.

    def _splitall(self, s):
        """ Return a list of the path components in this path.

        The first item in the list will be a path. Its value will be
        either os.curdir, os.pardir, empty, or the root directory of
        this path (for example, '/' or 'C:\\'). The other items in
        the list will be strings.

        join(*result) will yield the original path.
        """
        parts = []
        loc = s
        while loc != os.curdir and loc != os.pardir:
            prev = loc
            loc, child = split(prev)
            if loc == prev:
                break
            parts.append(child)
        parts.append(loc)
        parts.reverse()
        return parts
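    # Illustrative only: what _splitall and _relpathto compute on POSIX paths.
    #
    #   db._splitall('/usr/local/lib')           # -> ['/', 'usr', 'local', 'lib']
    #   db._relpathto('/usr/local', '/usr/bin')  # -> '../bin'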
    #@+node:ekr.20100208223942.5989: *3* uncache
    def uncache(self, *items):
        """ Removes all, or specified items from cache.

        Use this after reading a large amount of large objects
        to free up memory, when you won't be needing the objects
        for a while.
        """
        if not items:
            self.cache = {}
        for it in items:
            self.cache.pop(it, None)
    #@-others
#@+node:vitalije.20170716201700.1: ** class SqlitePickleShare
_sentinel = object()


class SqlitePickleShare:
    """ The main 'connection' object for SqlitePickleShare database """
    #@+others
    #@+node:vitalije.20170716201700.2: *3* Birth & special methods
    def init_dbtables(self, conn):
        sql = 'create table if not exists cachevalues(key text primary key, data blob);'
        conn.execute(sql)
    #@+node:vitalije.20170716201700.3: *4* __init__ (SqlitePickleShare)
    def __init__(self, root):
        """
        Init the SqlitePickleShare class.
        root: The directory that contains the data. Created if it doesn't exist.
        """
        self.root = abspath(expanduser(root))
        if not isdir(self.root) and not g.unitTesting:
            self._makedirs(self.root)
        dbfile = ':memory:' if g.unitTesting else join(root, 'cache.sqlite')
        self.conn = sqlite3.connect(dbfile, isolation_level=None)
        self.init_dbtables(self.conn)
        # Keys are normalized file names.
        # Values are tuples (obj, orig_mod_time)
        self.cache = {}

        def loadz(data):
            if data:
                # Retain this code for maximum compatibility.
                try:
                    val = pickle.loads(zlib.decompress(data))
                except (ValueError, TypeError):
                    g.es("Unpickling error - Python 3 data accessed from Python 2?")
                    return None
                return val
            return None

        def dumpz(val):
            try:
                # Use Python 2's highest protocol, 2, if possible.
                data = pickle.dumps(val, protocol=2)
            except Exception:
                # Use the best available protocol if that fails (unlikely).
                data = pickle.dumps(val, pickle.HIGHEST_PROTOCOL)
            return sqlite3.Binary(zlib.compress(data))

        self.loader = loadz
        self.dumper = dumpz
        self.reset_protocol_in_values()
    #@+node:vitalije.20170716201700.4: *4* __contains__(SqlitePickleShare)
    def __contains__(self, key):
        return self.has_key(key)  # NOQA
    #@+node:vitalije.20170716201700.5: *4* __delitem__
    def __delitem__(self, key):
        """ del db["key"] """
        try:
            self.conn.execute(
                '''delete from cachevalues
                where key=?''', (key,))
        except sqlite3.OperationalError:
            pass
    #@+node:vitalije.20170716201700.6: *4* __getitem__
    def __getitem__(self, key):
        """ db['key'] reading """
        try:
            obj = None
            for row in self.conn.execute(
                '''select data from cachevalues
                where key=?''', (key,)):
                obj = self.loader(row[0])
                break
            else:
                # No row matched: the key does not exist.
                raise KeyError(key)
        except sqlite3.Error:
            raise KeyError(key)
        return obj
    #@+node:vitalije.20170716201700.7: *4* __iter__
    def __iter__(self):
        for k in list(self.keys()):
            yield k
    #@+node:vitalije.20170716201700.8: *4* __repr__
    def __repr__(self):
        return f"SqlitePickleShare('{self.root}')"
    #@+node:vitalije.20170716201700.9: *4* __setitem__
    def __setitem__(self, key, value):
        """ db['key'] = 5 """
        try:
            data = self.dumper(value)
            self.conn.execute(
                '''replace into cachevalues(key, data) values(?,?);''',
                (key, data))
        except sqlite3.OperationalError:
            g.es_exception()
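    # Storage sketch (illustrative): each value is pickled, zlib-compressed,
    # and wrapped in sqlite3.Binary, then written with REPLACE INTO, which
    # acts as an upsert: it inserts the key or overwrites the existing row.
    #
    #   db['k'] = [1, 2]   # replace into cachevalues(...) -> new row for 'k'
    #   db['k'] = [3]      # same statement: the old row for 'k' is replaced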
    #@+node:vitalije.20170716201700.10: *3* _makedirs
    def _makedirs(self, fn, mode=0o777):
        os.makedirs(fn, mode)
    #@+node:vitalije.20170716201700.11: *3* _openFile (SqlitePickleShare)
    def _openFile(self, fn, mode='r'):
        """ Open this file. Return a file object.

        Do not print an error message.
        It is not an error for this to fail.
        """
        try:
            return open(fn, mode)
        except Exception:
            return None
    #@+node:vitalije.20170716201700.12: *3* _walkfiles & helpers
    def _walkfiles(self, s, pattern=None):
        """ D.walkfiles() -> iterator over files in D, recursively.

        The optional argument, pattern, limits the results to files
        with names that match the pattern. For example,
        mydir.walkfiles('*.tmp') yields only files with the .tmp
        extension.
        """
        # A stub: the sqlite backend stores no per-key files to walk.
    #@+node:vitalije.20170716201700.13: *4* _listdir
    def _listdir(self, s, pattern=None):
        """ D.listdir() -> List of items in this directory.

        Use D.files() or D.dirs() instead if you want a listing
        of just files or just subdirectories.

        The elements of the list are path objects.

        With the optional 'pattern' argument, this only lists
        items whose names match the given pattern.
        """
        names = os.listdir(s)
        if pattern is not None:
            names = fnmatch.filter(names, pattern)
        return [join(s, child) for child in names]
    #@+node:vitalije.20170716201700.14: *4* _fn_match
    def _fn_match(self, s, pattern):
        """ Return True if self.name matches the given pattern.

        pattern - A filename pattern with wildcards, for example '*.py'.
        """
        return fnmatch.fnmatch(basename(s), pattern)
    #@+node:vitalije.20170716201700.15: *3* clear (SqlitePickleShare)
    def clear(self):
        # Deletes all rows from the cachevalues table.
        self.conn.execute('delete from cachevalues;')
    #@+node:vitalije.20170716201700.16: *3* get (SqlitePickleShare)
    def get(self, key, default=None):
        if not self.has_key(key):
            return default
        try:
            val = self[key]
            return val
        except Exception:  # #1444: Was KeyError.
            return default
    #@+node:vitalije.20170716201700.17: *3* has_key (SqlitePickleShare)
    def has_key(self, key):
        sql = 'select 1 from cachevalues where key=?;'
        for row in self.conn.execute(sql, (key,)):
            return True
        return False
    #@+node:vitalije.20170716201700.18: *3* items
    def items(self):
        # Note: yields the raw compressed blobs, not unpickled values.
        sql = 'select key,data from cachevalues;'
        for key, data in self.conn.execute(sql):
            yield key, data
    #@+node:vitalije.20170716201700.19: *3* keys
    # Called by dump_cache, and during unit testing.

    def keys(self, globpat=None):
        """Return all keys in DB, or all keys matching a glob"""
        if globpat is None:
            sql = 'select key from cachevalues;'
            args: Sequence[Any] = tuple()
        else:
            sql = "select key from cachevalues where key glob ?;"
            # pylint: disable=trailing-comma-tuple
            args = globpat,
        for key in self.conn.execute(sql, args):
            yield key
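    # Row-shape note (illustrative): sqlite3 cursors yield tuples, so keys()
    # yields 1-tuples such as ('a.leo:::windowState',), not bare strings.
    # Callers unpack accordingly, as dump_cache does below:
    #
    #   for (key,) in db.keys():
    #       print(key)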
    #@+node:vitalije.20170818091008.1: *3* reset_protocol_in_values
    def reset_protocol_in_values(self):
        PROTOCOLKEY = '__cache_pickle_protocol__'
        if self.get(PROTOCOLKEY, 3) == 2:
            return  # Values have already been rewritten at protocol 2.
        #@+others
        #@+node:vitalije.20170818115606.1: *4* viewrendered special case
        import json
        row = self.get('viewrendered_default_layouts') or (None, None)
        row = json.loads(json.dumps(row[0])), json.loads(json.dumps(row[1]))
        self['viewrendered_default_layouts'] = row
        #@+node:vitalije.20170818115617.1: *4* do_block
        def do_block(cur):
            itms = tuple((self.dumper(self.loader(v)), k) for k, v in cur)
            if itms:
                self.conn.executemany('update cachevalues set data=? where key=?', itms)
                self.conn.commit()
                return itms[-1][1]
            return None
        #@-others
        self.conn.isolation_level = 'DEFERRED'

        sql0 = '''select key, data from cachevalues order by key limit 50'''
        sql1 = '''select key, data from cachevalues where key > ? order by key limit 50'''

        block = self.conn.execute(sql0)
        lk = do_block(block)
        while lk:
            lk = do_block(self.conn.execute(sql1, (lk,)))
        self[PROTOCOLKEY] = 2
        self.conn.commit()

        self.conn.isolation_level = None
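    # Migration sketch (illustrative): rows are re-pickled in batches of 50
    # using keyset pagination; each batch resumes after the last key handled,
    # so the whole table is never held in memory at once.
    #
    #   batch 1: select ... order by key limit 50           -> last key 'k49'
    #   batch 2: select ... where key > 'k49' ... limit 50  -> last key 'k99'
    #   ...until do_block returns None (no rows left).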
    #@+node:vitalije.20170716201700.23: *3* uncache
    def uncache(self, *items):
        """Not used in SqlitePickleShare."""
        pass
    #@-others
#@+node:ekr.20180627050237.1: ** function: dump_cache
def dump_cache(db, tag):
    """Dump the given cache."""
    print(f'\n===== {tag} =====\n')
    if db is None:
        print('db is None!')
        return
    # Create a dict, sorted by file prefixes.
    d: Dict[str, Any] = {}
    for key in db.keys():
        key = key[0]  # SqlitePickleShare.keys yields 1-tuples.
        val = db.get(key)
        data = key.split(':::')
        if len(data) == 2:
            fn, key2 = data
        else:
            fn, key2 = 'None', key
        aList = d.get(fn, [])
        aList.append((key2, val))
        d[fn] = aList
    # Print the dict.
    files = 0
    for key in sorted(d.keys()):
        if key != 'None':
            dump_list('File: ' + key, d.get(key))
            files += 1
    if d.get('None'):
        heading = f"All others ({tag})" if files else None
        dump_list(heading, d.get('None'))

def dump_list(heading, aList):
    if heading:
        print(f'\n{heading}...\n')
    for aTuple in aList:
        key, val = aTuple
        if isinstance(val, str):
            if key.startswith('windowState'):
                print(key)
            elif key.endswith(('leo_expanded', 'leo_marked')):
                if val:
                    print(f"{key:30}:")
                    g.printObj(val.split(','))
                else:
                    print(f"{key:30}: []")
            else:
                print(f"{key:30}: {val}")
        elif isinstance(val, (int, float)):
            print(f"{key:30}: {val}")
        else:
            print(f"{key:30}:")
            g.printObj(val)
#@-others
#@@language python
#@@tabwidth -4
#@@pagewidth 70
#@-leo