From 6b46661b7d0b58b2c178bf8249cb5caacd76391e Mon Sep 17 00:00:00 2001 From: shouldsee Date: Fri, 2 Sep 2022 16:28:50 +0800 Subject: [PATCH 01/15] refactor data storage to allow saving on a per-window basis --- py/visdom/server.py | 376 +++++++++++++++++++++++++++++++++------ py/visdom/test_simple.py | 6 + 2 files changed, 328 insertions(+), 54 deletions(-) create mode 100644 py/visdom/test_simple.py diff --git a/py/visdom/server.py b/py/visdom/server.py index 4c48a1fd..f1c51db1 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -8,6 +8,7 @@ """Server""" +DEBUG = 1 import argparse import copy import getpass @@ -20,6 +21,7 @@ import os import sys import time +import shutil import traceback import uuid import warnings @@ -131,34 +133,241 @@ def hash_password(password): - -class LazyEnvData(Mapping): - def __init__(self, env_path_file): - self._env_path_file = env_path_file - self._raw_dict = None - - def lazy_load_data(self): - if self._raw_dict is not None: - return - +class B(object): + VERBOSE = 0 + @staticmethod + def safe_dir(tree): + os.makedirs(tree) if not os.path.exists(tree) else None + return tree + @staticmethod + def join(x,y): + return os.path.join(x,y) + @staticmethod + def SJ(x,y): + return B.safe_dir(B.join(x,y)) + J = join +# class BasicLazyMapping(Mapping): + +class SimpleJsonStorage(object): + ''' + One file per env + ''' + @staticmethod + def read_env_from_file(fn): try: - with open(self._env_path_file, 'r') as fn: - env_data = tornado.escape.json_decode(fn.read()) + with open( fn, 'r') as f: + env_data = tornado.escape.json_decode(f.read()) except Exception as e: raise ValueError( "Failed loading environment json: {} - {}".format( self._env_path_file, repr(e))) - self._raw_dict = { + _raw_dict = { 'jsons': env_data['jsons'], 'reload': env_data['reload'] } + return _raw_dict + + @staticmethod + def get_valid_env_list(fn): + ret = [] + for k in os.listdir( B.safe_dir(fn) ): + if k.endswith('.json'): + eid = k.rsplit('.',1)[0] + ffn = 
B.J(fn,k) + ret.append((eid, ffn)) + return ret + + # [i for i in os.listdir(env_path) if '.json' in i] + + @staticmethod + def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH): + ''' + This function save serialized data to disk + + Should perform incremental data writing. + + Incremental writing is implemented as an existence check. + If the file associated with the + + up-to-date criteria + - key exists, + - mtime equals to the mtime + and mtime equals to the stored mtime, then + ''' + env_ids = [i for i in eids if i in state] + if env_path is not None: + for env_id in env_ids: + ''' + This was unsafe: use .temp to make sure atomicity + ''' + # env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) + # import pdb; pdb.set_trace() + x = {k:v for k,v in state[env_id].items()} + x['reload'] + x['jsons'] + # os.makedirs(tree) if not os.path.exists(tree) else None + LED.atomic_dump_json(env_path, env_id, x) + + return env_ids + + @staticmethod + def atomic_dump_json(xdir, key, x): + env_path_file = os.path.join(xdir, "{0}.json".format(key)) + with open(env_path_file+".temp", 'w') as f: + f.write(json.dumps(x)) + shutil.move(env_path_file+'.temp',env_path_file) + return env_path_file + + @staticmethod + def serialize_all(state, env_path=DEFAULT_ENV_PATH): + serialize_env(state, list(state.keys()), env_path=env_path) + + +class SimpleWindowJsonStorage(object): + ''' + One json per window + ''' + def read_env_from_file(self, tree): + + xo = {} + + xo['jsons'] = xxo = {} + ttree = B.SJ(tree,'jsons') + for k in os.listdir(ttree): + if k.endswith('.json'): + window = k.rsplit('.json',1)[0] + v = self.safe_parse_file(B.J(ttree,k)) + xxo[window] = v + + xo['reload'] = self.safe_parse_file(B.J(tree,'reload.json')) + # import pdb; pdb.set_trace() + return xo + # # for k,v in os.listdir() + # # for key in 'jsons reload': + # + # env_data = self.safe_parse_file(fn) + # _raw_dict = { + # 'jsons': env_data['jsons'], + # 'reload': env_data['reload'] + # } + # return 
_raw_dict + @staticmethod + def get_valid_env_list(fn): + ret = [] + for key in os.listdir( B.safe_dir(fn) ): + ffn = B.J(fn,key) + if os.path.isdir(ffn): + if key =='view': continue + ret.append((key,ffn)) + # if k.endswith('.json'): + # eid = k.rsplit('.',1)[0] + # ret.append((eid, ffn)) + return ret + + # def safe_parse_json(self, fn): + # return self.safe_parse_file(fn+'.json') + + @staticmethod + def safe_parse_file(fn): + try: + with open( fn, 'r') as f: + env_data = tornado.escape.json_decode(f.read()) + except Exception as e: + raise ValueError( + "Failed loading file: {} - {}".format( + fn, repr(e))) + return env_data + + + + @staticmethod + def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH): + ''' + This function save serialized data to disk + + Should perform incremental data writing. + + Incremental writing is implemented as an existence check. + If the file associated with the + + up-to-date criteria + - key exists, + - mtime equals to the mtime + and mtime equals to the stored mtime, then + ''' + env_ids = [i for i in eids if i in state] + if env_path is not None: + for env_id in env_ids: + ''' + This was unsafe: use .temp to make sure atomicity + ''' + # env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) + # import pdb; pdb.set_trace() + x = {k:v for k,v in state[env_id].items()} + x['reload'] + x['jsons'] + + + ''' + This is the root tree for a env + ''' + tree = B.SJ(env_path, env_id) + for key, xx in x.items(): + if key in 'reload'.split(): + 'single file ' + LED.atomic_dump_json(tree, key, x[key]) + elif key in 'jsons'.split(): + 'dir storage' + ttree = B.SJ(tree,key) + for kk,xxx in xx.items(): + LED.atomic_dump_json(ttree, kk, xxx) + else: + raise NotImplementedError('cannot serialize key %s'%key) + return env_ids + + @staticmethod + def atomic_dump_json(xdir, key, x): + env_path_file = os.path.join(xdir, "{0}.json".format(key)) + with open(env_path_file+".temp", 'w') as f: + f.write(json.dumps(x)) + 
shutil.move(env_path_file+'.temp',env_path_file) + return env_path_file + + @staticmethod + def serialize_all(state, env_path=DEFAULT_ENV_PATH): + serialize_env(state, list(state.keys()), env_path=env_path) + + + +# class LazyEnvData(Mapping, SimpleJsonStorage): +class LazyEnvData(Mapping, SimpleWindowJsonStorage): + def __init__(self, env_path_file): + self._env_path_file = env_path_file + self._raw_dict = None + + def lazy_load_data(self): + ''' + This is now the only data entrypoint + ''' + if self._raw_dict is not None: + return + self._raw_dict = self.read_env_from_file(self._env_path_file) def __getitem__(self, key): + ''' + Upon reading, should lazy load each json + ''' self.lazy_load_data() + if key =='windows': key = 'jsons' return self._raw_dict.__getitem__(key) def __setitem__(self, key, value): + ''' + upon setitem, needs to cache data to disk + ''' + # self.send_data_to_disk(env_path_file, key, value) self.lazy_load_data() + if key =='windows': key = 'jsons' return self._raw_dict.__setitem__(key, value) def __iter__(self): @@ -168,6 +377,9 @@ def __iter__(self): def __len__(self): self.lazy_load_data() return len(self._raw_dict) +serialize_all = LazyEnvData.serialize_all +serialize_env = LazyEnvData.serialize_env +LED = LazyEnvData tornado_settings = { @@ -178,22 +390,41 @@ def __len__(self): "compiled_template_cache": False } - -def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH): - env_ids = [i for i in eids if i in state] - if env_path is not None: - for env_id in env_ids: - env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) - with open(env_path_file, 'w') as fn: - if isinstance(state[env_id], LazyEnvData): - fn.write(json.dumps(state[env_id]._raw_dict)) - else: - fn.write(json.dumps(state[env_id])) - return env_ids - - -def serialize_all(state, env_path=DEFAULT_ENV_PATH): - serialize_env(state, list(state.keys()), env_path=env_path) +if 0: + + def serialize_env_old(state, eids, env_path=DEFAULT_ENV_PATH): + ''' + This function 
save serialized data to disk + + Should perform incremental data writing. + + Incremental writing is implemented as an existence check. + + ''' + env_ids = [i for i in eids if i in state] + if env_path is not None: + for env_id in env_ids: + ''' + This was unsafe . use .temp to make sure atomicity + ''' + env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) + with open(env_path_file+".temp", 'w') as fn: + if isinstance(state[env_id], LazyEnvData): + fn.write(json.dumps(state[env_id]._raw_dict)) + else: + fn.write(json.dumps(state[env_id])) + shutil.move(env_path_file+'.temp',env_path_file) + return env_ids + + # +# # +# def get_dict(x): +# if isinstance(x, LazyEnvData): +# return x._raw_dict +# else: +# return x +# # fn.write(json.dumps(state[env_id])) +# class Application(tornado.web.Application): @@ -286,6 +517,13 @@ def load_layouts(self): return "" def load_state(self): + ''' + This function loads serialized data from disk + + How often this needs to be executed? + + + ''' state = {} env_path = self.env_path if env_path is None: @@ -296,29 +534,28 @@ def load_state(self): ) return {'main': {'jsons': {}, 'reload': {}}} ensure_dir_exists(env_path) - env_jsons = [i for i in os.listdir(env_path) if '.json' in i] - for env_json in env_jsons: - eid = env_json.replace('.json', '') - env_path_file = os.path.join(env_path, env_json) - - if self.eager_data_loading: - try: - with open(env_path_file, 'r') as fn: - env_data = tornado.escape.json_decode(fn.read()) - except Exception as e: - logging.warn( - "Failed loading environment json: {} - {}".format( - env_path_file, repr(e))) - continue - - state[eid] = {'jsons': env_data['jsons'], - 'reload': env_data['reload']} - else: - state[eid] = LazyEnvData(env_path_file) - if 'main' not in state and 'main.json' not in env_jsons: + ''' + [Note] listdir is used to reconstruct env list from file list + ''' + eid_file_pair = LED.get_valid_env_list(env_path) + # env_jsons = [i for i in os.listdir(env_path) if '.json' 
in i] + for eid, env_path_file in eid_file_pair: + state[eid] = LazyEnvData(env_path_file) + + if self.eager_data_loading: + for k,x in state.items(): + x.lazy_load_data() + + # state[eid].lazy_load_data() + + ''' + Creating default env + ''' + if 'main' not in state: + # and 'main.json' not in env_jsons: state['main'] = {'jsons': {}, 'reload': {}} - serialize_env(state, ['main'], env_path=self.env_path) + # serialize_env(state, ['main'], env_path=self.env_path) return state @@ -357,6 +594,7 @@ def broadcast_envs(handler, target_subs=None): if target_subs is None: target_subs = handler.subs.values() for sub in target_subs: + # print('[DEBUG1]',msg.keys()) sub.write_message(json.dumps( {'command': 'env_update', 'data': list(handler.state.keys())} )) @@ -365,10 +603,12 @@ def broadcast_envs(handler, target_subs=None): def send_to_sources(handler, msg): target_sources = handler.sources.values() for source in target_sources: + # print('[DEBUG2]',msg.keys()) source.write_message(json.dumps(msg)) class BaseWebSocketHandler(tornado.websocket.WebSocketHandler): + # def def get_current_user(self): """ This method determines the self.current_user @@ -379,6 +619,23 @@ def get_current_user(self): return self.get_secure_cookie("user_password") except Exception: # Not using secure cookies return None + def write_message(self,*a,**kw): + ''' + Debugging interceptor + ''' + # assert 0 + + x = a[0] + if B.VERBOSE >=5: + print('[DEBUG3]',type(x),(inspect.stack()[1].function),repr(a[0])[:20]) + # if not x + # if isinstance(x,str): + # x = json.loads(x) + # + # if 'command' not in json.loads(a[0]).keys(): + # assert 0, json.loads(a[0]).keys() + # print('[DEBUG2]',a) + return super().write_message(*a,**kw) class VisSocketHandler(BaseWebSocketHandler): @@ -496,7 +753,6 @@ def get_messages(self): self.last_read_time = time.time() return to_send - class SocketHandler(BaseWebSocketHandler): def initialize(self, app): self.port = app.port @@ -731,6 +987,9 @@ def close(self): 
self.subs.pop(self.sid, None) def write_message(self, msg): + # import pdb; pdb.set_trace() + # print('[DEBUG2]',msg.keys()) + # assert 0 self.messages.append(msg) def get_messages(self): @@ -1074,12 +1333,12 @@ def update(p, args): del new_data["z"] if updateDir in ["appendRow", "prependRow"]: - checkdir = "y" + checkdir = "y" if len(plot["z"][0]) != len(dz[0]): logging.error("ERROR: There is a mismatch between the number of columns in existing plot ('%i') and new data ('%i')." % (len(plot["z"]), len(dz))) return p else: - checkdir = "x" + checkdir = "x" if len(plot["z"]) != len(dz): logging.error("ERROR: There is a mismatch between the number of rows in existing plot ('%i') and new data ('%i')." % (len(plot["z"]), len(dz))) return p @@ -1456,12 +1715,17 @@ def load_env(state, eid, socket, env_path=DEFAULT_ENV_PATH): env = {} if eid in state: env = state.get(eid) + print('[B1]') elif env_path is not None: p = os.path.join(env_path, eid.strip(), '.json') if os.path.exists(p): with open(p, 'r') as fn: env = tornado.escape.json_decode(fn.read()) state[eid] = env + print('[B2]') + + if B.VERBOSE >=3: + print(repr(env.get('jsons',{}))[:10]) if 'reload' in env: socket.write_message( @@ -1805,14 +2069,18 @@ def get(self, text): raise Exception(error_text) + # function that downloads and installs javascript, css, and font dependencies: def download_scripts(proxies=None, install_dir=None): import visdom + if DEBUG: + visdom.__version__ = 'test' print("Checking for scripts.") # location in which to download stuff: if install_dir is None: - install_dir = os.path.dirname(visdom.__file__) + # install_dir = os.path.dirname(visdom.__file__) + install_dir = os.path.dirname(os.path.realpath(__file__)) # all files that need to be downloaded: b = 'https://unpkg.com/' diff --git a/py/visdom/test_simple.py b/py/visdom/test_simple.py new file mode 100644 index 00000000..6fdbdb4e --- /dev/null +++ b/py/visdom/test_simple.py @@ -0,0 +1,6 @@ +import visdom +import numpy as np +vis = 
visdom.Visdom(env='test') +for i in range(5): + vis.scatter(np.random.random((50,2)),win=i) +vis.save(['test']) From 860a426894378a2d38e3f0d15f1a7af5562a9b2c Mon Sep 17 00:00:00 2001 From: shouldsee Date: Fri, 2 Sep 2022 19:14:15 +0800 Subject: [PATCH 02/15] cleaning --- py/visdom/server.py | 21 ++++++--------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/py/visdom/server.py b/py/visdom/server.py index f1c51db1..63edb9a3 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -623,18 +623,10 @@ def write_message(self,*a,**kw): ''' Debugging interceptor ''' - # assert 0 - x = a[0] - if B.VERBOSE >=5: - print('[DEBUG3]',type(x),(inspect.stack()[1].function),repr(a[0])[:20]) - # if not x - # if isinstance(x,str): - # x = json.loads(x) - # - # if 'command' not in json.loads(a[0]).keys(): - # assert 0, json.loads(a[0]).keys() - # print('[DEBUG2]',a) + if 5 >= logging.root.level: + x = a[0] + logging.log(5, str(['[DEBUG3]',type(x),(inspect.stack()[1].function),repr(a[0])[:20]]) ) return super().write_message(*a,**kw) @@ -1715,17 +1707,16 @@ def load_env(state, eid, socket, env_path=DEFAULT_ENV_PATH): env = {} if eid in state: env = state.get(eid) - print('[B1]') elif env_path is not None: p = os.path.join(env_path, eid.strip(), '.json') if os.path.exists(p): with open(p, 'r') as fn: env = tornado.escape.json_decode(fn.read()) state[eid] = env - print('[B2]') - if B.VERBOSE >=3: - print(repr(env.get('jsons',{}))[:10]) + # if B.VERBOSE >=3: + + logging.log(10, repr(env.get('jsons',{}))[:10]) if 'reload' in env: socket.write_message( From edc4fd2b619f8cc01a9091e7547f792a07f8c4b3 Mon Sep 17 00:00:00 2001 From: shouldsee Date: Fri, 2 Sep 2022 20:28:40 +0800 Subject: [PATCH 03/15] allows switching caching behaviour by class construction --- py/visdom/Makefile | 8 ++ py/visdom/server.py | 243 ++++++++++++++++++++++++++++---------------- 2 files changed, 162 insertions(+), 89 deletions(-) create mode 100644 py/visdom/Makefile diff --git 
a/py/visdom/Makefile b/py/visdom/Makefile new file mode 100644 index 00000000..afa0fba8 --- /dev/null +++ b/py/visdom/Makefile @@ -0,0 +1,8 @@ +clean: + rm -rf ./test_data/* + +serve_test: + python3 -mpdb -cc server.py -env_path test_data -eager_data_loading -logging_level 5 + +test_simple: + python3 test_simple.py diff --git a/py/visdom/server.py b/py/visdom/server.py index 63edb9a3..7580c1f0 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -60,6 +60,14 @@ assert sys.version_info[0] >= 3, 'To use visdom with python 2, downgrade to v0.1.8.9' +def dset(x,key=None): + def wrapper(v,key=key): + if key is None: + key = v.__name__ + x[key] = v + return v + return wrapper + def warn_once(msg, warningtype=None): """ Raise a warning, but only once. @@ -147,7 +155,8 @@ def SJ(x,y): return B.safe_dir(B.join(x,y)) J = join # class BasicLazyMapping(Mapping): - +STORES = {} +@dset(STORES) class SimpleJsonStorage(object): ''' One file per env @@ -206,7 +215,7 @@ def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH): x['reload'] x['jsons'] # os.makedirs(tree) if not os.path.exists(tree) else None - LED.atomic_dump_json(env_path, env_id, x) + LazyEnvData.atomic_dump_json(env_path, env_id, x) return env_ids @@ -223,10 +232,12 @@ def serialize_all(state, env_path=DEFAULT_ENV_PATH): serialize_env(state, list(state.keys()), env_path=env_path) +@dset(STORES) class SimpleWindowJsonStorage(object): ''' One json per window ''' + @classmethod def read_env_from_file(self, tree): xo = {} @@ -242,15 +253,7 @@ def read_env_from_file(self, tree): xo['reload'] = self.safe_parse_file(B.J(tree,'reload.json')) # import pdb; pdb.set_trace() return xo - # # for k,v in os.listdir() - # # for key in 'jsons reload': - # - # env_data = self.safe_parse_file(fn) - # _raw_dict = { - # 'jsons': env_data['jsons'], - # 'reload': env_data['reload'] - # } - # return _raw_dict + @staticmethod def get_valid_env_list(fn): ret = [] @@ -264,8 +267,6 @@ def get_valid_env_list(fn): # ret.append((eid, 
ffn)) return ret - # def safe_parse_json(self, fn): - # return self.safe_parse_file(fn+'.json') @staticmethod def safe_parse_file(fn): @@ -315,12 +316,12 @@ def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH): for key, xx in x.items(): if key in 'reload'.split(): 'single file ' - LED.atomic_dump_json(tree, key, x[key]) + LazyEnvData.atomic_dump_json(tree, key, x[key]) elif key in 'jsons'.split(): 'dir storage' ttree = B.SJ(tree,key) for kk,xxx in xx.items(): - LED.atomic_dump_json(ttree, kk, xxx) + LazyEnvData.atomic_dump_json(ttree, kk, xxx) else: raise NotImplementedError('cannot serialize key %s'%key) return env_ids @@ -336,51 +337,134 @@ def atomic_dump_json(xdir, key, x): @staticmethod def serialize_all(state, env_path=DEFAULT_ENV_PATH): serialize_env(state, list(state.keys()), env_path=env_path) +# store_dict['SimpleJsonStorage'] +def get_led_cls(sel_led, sel_store): + ''' + LED stands for LazyEnvData + ''' # class LazyEnvData(Mapping, SimpleJsonStorage): -class LazyEnvData(Mapping, SimpleWindowJsonStorage): - def __init__(self, env_path_file): - self._env_path_file = env_path_file - self._raw_dict = None - - def lazy_load_data(self): - ''' - This is now the only data entrypoint - ''' - if self._raw_dict is not None: - return - self._raw_dict = self.read_env_from_file(self._env_path_file) - - def __getitem__(self, key): + # if sel_store == '' + _store = STORES[sel_store] + LEDS = {} + @dset(LEDS) + class LazyEnvData(Mapping): ''' - Upon reading, should lazy load each json + Per-env laziness ''' - self.lazy_load_data() - if key =='windows': key = 'jsons' - return self._raw_dict.__getitem__(key) - - def __setitem__(self, key, value): + store = _store + serialize_env = _store.serialize_env + get_valid_env_list = _store.get_valid_env_list + read_env_from_file = _store.read_env_from_file + atomic_dump_json = _store.atomic_dump_json + # def serialize_env(self,*a) + # SimpleWindowJsonStorage + def __init__(self, env_path_file): + self._env_path_file = 
env_path_file + self._raw_dict = None + # self.ser + + def lazy_load_data(self): + ''' + This is now the only data entrypoint + ''' + if self._raw_dict is not None: + return + self._raw_dict = self.read_env_from_file(self._env_path_file) + + def __getitem__(self, key): + ''' + Upon reading, should lazy load each json + ''' + self.lazy_load_data() + return self._raw_dict.__getitem__(key) + + def __setitem__(self, key, value): + ''' + upon setitem, needs to cache data to disk + ''' + # self.send_data_to_disk(env_path_file, key, value) + self.lazy_load_data() + return self._raw_dict.__setitem__(key, value) + + def __iter__(self): + self.lazy_load_data() + return iter(self._raw_dict) + + def __len__(self): + self.lazy_load_data() + return len(self._raw_dict) + + @dset(LEDS) + class LazyEnvDataPerWindow(Mapping): ''' - upon setitem, needs to cache data to disk + Per-env laziness ''' - # self.send_data_to_disk(env_path_file, key, value) - self.lazy_load_data() - if key =='windows': key = 'jsons' - return self._raw_dict.__setitem__(key, value) - - def __iter__(self): - self.lazy_load_data() - return iter(self._raw_dict) - - def __len__(self): - self.lazy_load_data() - return len(self._raw_dict) -serialize_all = LazyEnvData.serialize_all + store = _store + serialize_env = _store.serialize_env + get_valid_env_list = _store.get_valid_env_list + read_env_from_file = _store.read_env_from_file + # def serialize_env(self,*a) + # SimpleWindowJsonStorage + def __init__(self, env_path_file): + self._env_path_file = env_path_file + self._raw_dict = None + # self.ser + + def lazy_load_data(self): + ''' + This reduces to a constraint checker checker + ''' + self.check_constraint() + + def check_constraint(self): + ''' + For each valid file in directory + check the plot exists in + ''' + + # self['re'] + + + if self._raw_dict is not None: + return + self._raw_dict = self.read_env_from_file(self._env_path_file) + + def __getitem__(self, key): + ''' + Upon reading, should lazy load each 
json + ''' + self.lazy_load_data() + return self._raw_dict.__getitem__(key) + + def __setitem__(self, key, value): + ''' + upon setitem, needs to cache data to disk + ''' + # self.send_data_to_disk(env_path_file, key, value) + self.lazy_load_data() + return self._raw_dict.__setitem__(key, value) + + def __iter__(self): + self.lazy_load_data() + return iter(self._raw_dict) + + def __len__(self): + self.lazy_load_data() + return len(self._raw_dict) + + return LEDS[sel_led] + # return LazyEnvData + +# serialize_all = LazyEnvData.serialize_all +# serialize_env = LazyEnvData.serialize_env +# LED = LazyEnvData + +# LED = get_led_cls(None, 'SimpleJsonStorage') +LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonStorage') serialize_env = LazyEnvData.serialize_env -LED = LazyEnvData - tornado_settings = { "autoescape": None, @@ -390,43 +474,6 @@ def __len__(self): "compiled_template_cache": False } -if 0: - - def serialize_env_old(state, eids, env_path=DEFAULT_ENV_PATH): - ''' - This function save serialized data to disk - - Should perform incremental data writing. - - Incremental writing is implemented as an existence check. - - ''' - env_ids = [i for i in eids if i in state] - if env_path is not None: - for env_id in env_ids: - ''' - This was unsafe . 
use .temp to make sure atomicity - ''' - env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) - with open(env_path_file+".temp", 'w') as fn: - if isinstance(state[env_id], LazyEnvData): - fn.write(json.dumps(state[env_id]._raw_dict)) - else: - fn.write(json.dumps(state[env_id])) - shutil.move(env_path_file+'.temp',env_path_file) - return env_ids - - # -# # -# def get_dict(x): -# if isinstance(x, LazyEnvData): -# return x._raw_dict -# else: -# return x -# # fn.write(json.dumps(state[env_id])) -# - - class Application(tornado.web.Application): def __init__(self, port=DEFAULT_PORT, base_url='', env_path=DEFAULT_ENV_PATH, readonly=False, @@ -538,7 +585,7 @@ def load_state(self): ''' [Note] listdir is used to reconstruct env list from file list ''' - eid_file_pair = LED.get_valid_env_list(env_path) + eid_file_pair = LazyEnvData.get_valid_env_list(env_path) # env_jsons = [i for i in os.listdir(env_path) if '.json' in i] for eid, env_path_file in eid_file_pair: state[eid] = LazyEnvData(env_path_file) @@ -1118,7 +1165,13 @@ def broadcast(self, msg, eid): def register_window(self, p, eid): + ''' + :param p: a dict containing the plotted data + :param eid: a string pointing to the registered environment + ''' # in case env doesn't exist + + ### get default new window is_new_env = False if eid not in self.state: is_new_env = True @@ -1126,16 +1179,28 @@ def register_window(self, p, eid): env = self.state[eid]['jsons'] + '## allocating window index' if p['id'] in env: + '## window_id already in env' p['i'] = env[p['id']]['i'] else: + '## allocate index for new window id' p['i'] = len(env) + ''' + ## this is the __setitem__ call. 
+ Adds per-window caching logic into this call + ''' env[p['id']] = p + + '## sync window to clients' broadcast(self, p, eid) if is_new_env: + '## sync env list to clients' broadcast_envs(self) + + '## write response to caller through handler' self.write(p['id']) From 1c556b718e386e2a9f3c7eb11798e99995f5cd42 Mon Sep 17 00:00:00 2001 From: shouldsee Date: Fri, 2 Sep 2022 21:46:31 +0800 Subject: [PATCH 04/15] [refactor] half way on autosave: impl abstract xt type --- py/visdom/server.py | 352 ++++++++++++++++++++++++-------------------- 1 file changed, 189 insertions(+), 163 deletions(-) diff --git a/py/visdom/server.py b/py/visdom/server.py index 7580c1f0..dec99f90 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -155,26 +155,67 @@ def SJ(x,y): return B.safe_dir(B.join(x,y)) J = join # class BasicLazyMapping(Mapping): + + STORES = {} + +def NIF(*a,**kw): raise NotImplementedError() +class StoragePrototype(object): + serialze_env = NIF + serialize_env_single = NIF + get_valid_env_list = NIF + lazy_read_env_from_file = NIF + # atomic_dump_json = NIF + @staticmethod + def callback_before_setitem(state, key, v): + ''' + this callback is blocking. 
could be used for sanity check + ''' + return None + + @staticmethod + def setitem_callback_setitem(state, key, v): + ''' + Autosave uses this callback to save to disk + ''' + return None + + @staticmethod + def atomic_dump_json(xdir, key, x): + xdir = B.safe_dir(xdir) + env_path_file = os.path.join(xdir, "{0}.json".format(key)) + with open(env_path_file+".temp", 'w') as f: + f.write(json.dumps(x)) + shutil.move(env_path_file+'.temp',env_path_file) + return env_path_file + + # setitem_cll + @dset(STORES) -class SimpleJsonStorage(object): +class SimpleJsonStorage(StoragePrototype): ''' One file per env ''' @staticmethod - def read_env_from_file(fn): - try: - with open( fn, 'r') as f: - env_data = tornado.escape.json_decode(f.read()) - except Exception as e: - raise ValueError( - "Failed loading environment json: {} - {}".format( - self._env_path_file, repr(e))) - _raw_dict = { - 'jsons': env_data['jsons'], - 'reload': env_data['reload'] - } - return _raw_dict + def lazy_read_env_from_file(fn, state): + ''' + :param state: a dict-like object to trigger laziness + ''' + if state is not None: + return state + else: + try: + with open( fn, 'r') as f: + env_data = tornado.escape.json_decode(f.read()) + except Exception as e: + raise ValueError( + "Failed loading environment json: {} - {}".format( + self._env_path_file, repr(e))) + _raw_dict = { + 'jsons': env_data['jsons'], + 'reload': env_data['reload'] + } + return _raw_dict @staticmethod def get_valid_env_list(fn): @@ -188,8 +229,9 @@ def get_valid_env_list(fn): # [i for i in os.listdir(env_path) if '.json' in i] - @staticmethod - def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH): + # @staticmethod + @classmethod + def serialize_env(cls, state, eids, env_path): ''' This function save serialized data to disk @@ -204,28 +246,22 @@ def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH): and mtime equals to the stored mtime, then ''' env_ids = [i for i in eids if i in state] - if env_path is not None: - for 
env_id in env_ids: - ''' - This was unsafe: use .temp to make sure atomicity - ''' - # env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) - # import pdb; pdb.set_trace() - x = {k:v for k,v in state[env_id].items()} - x['reload'] - x['jsons'] - # os.makedirs(tree) if not os.path.exists(tree) else None - LazyEnvData.atomic_dump_json(env_path, env_id, x) + if env_path is None: raise RuntimeError('env_path is None') + + for env_id in env_ids: + ''' + This was unsafe: use .temp to make sure atomicity + ''' + # env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) + # import pdb; pdb.set_trace() + x = {k:v for k,v in state[env_id].items()} + x['reload'] + x['jsons'] + # os.makedirs(tree) if not os.path.exists(tree) else None + cls.atomic_dump_json(env_path, env_id, x) return env_ids - @staticmethod - def atomic_dump_json(xdir, key, x): - env_path_file = os.path.join(xdir, "{0}.json".format(key)) - with open(env_path_file+".temp", 'w') as f: - f.write(json.dumps(x)) - shutil.move(env_path_file+'.temp',env_path_file) - return env_path_file @staticmethod def serialize_all(state, env_path=DEFAULT_ENV_PATH): @@ -233,26 +269,48 @@ def serialize_all(state, env_path=DEFAULT_ENV_PATH): @dset(STORES) -class SimpleWindowJsonStorage(object): +class SimpleWindowJsonStorage(StoragePrototype): ''' One json per window ''' - @classmethod - def read_env_from_file(self, tree): - - xo = {} - - xo['jsons'] = xxo = {} - ttree = B.SJ(tree,'jsons') - for k in os.listdir(ttree): - if k.endswith('.json'): - window = k.rsplit('.json',1)[0] - v = self.safe_parse_file(B.J(ttree,k)) - xxo[window] = v - xo['reload'] = self.safe_parse_file(B.J(tree,'reload.json')) - # import pdb; pdb.set_trace() + schema_dict = { + 'jsons':'filedir', + 'reload':'file', + } + @classmethod + def lazy_read_env_from_file(self, tree, state): + ''' + Only loads windows that are not in state dict + ''' + # xo = {} + if state is None: + state = init_default_env() + + xo = state + # xo['jsons'] = xxo 
= {} + key = 'jsons' + for key,xt in self.schema_dict.items(): + if xt == 'filedir': + xxo = xo[key] + ttree = B.SJ(tree, key) + for k in os.listdir(ttree): + if not k.endswith('.json'): + continue + window = k.rsplit('.json',1)[0] + not_up_to_date = window not in xxo + if not_up_to_date: + v = self.safe_parse_file(B.J(ttree,k)) + xxo[window] = v + elif xt=='file': + if xo[key].__len__(): + pass + else: + xo[key] = self.safe_parse_file(B.J(tree,key + '.json')) + else: + raise NotImplementedError('lazy_read_env_from_file(%s,%s)'%(key,xt)) return xo + # @staticmethod def get_valid_env_list(fn): @@ -262,9 +320,6 @@ def get_valid_env_list(fn): if os.path.isdir(ffn): if key =='view': continue ret.append((key,ffn)) - # if k.endswith('.json'): - # eid = k.rsplit('.',1)[0] - # ret.append((eid, ffn)) return ret @@ -280,9 +335,8 @@ def safe_parse_file(fn): return env_data - - @staticmethod - def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH): + @classmethod + def serialize_env(self, state, eids, env_path): ''' This function save serialized data to disk @@ -297,47 +351,72 @@ def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH): and mtime equals to the stored mtime, then ''' env_ids = [i for i in eids if i in state] - if env_path is not None: - for env_id in env_ids: - ''' - This was unsafe: use .temp to make sure atomicity - ''' - # env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) - # import pdb; pdb.set_trace() - x = {k:v for k,v in state[env_id].items()} - x['reload'] - x['jsons'] - - - ''' - This is the root tree for a env - ''' - tree = B.SJ(env_path, env_id) - for key, xx in x.items(): - if key in 'reload'.split(): - 'single file ' - LazyEnvData.atomic_dump_json(tree, key, x[key]) - elif key in 'jsons'.split(): - 'dir storage' - ttree = B.SJ(tree,key) - for kk,xxx in xx.items(): - LazyEnvData.atomic_dump_json(ttree, kk, xxx) - else: - raise NotImplementedError('cannot serialize key %s'%key) + if env_path is None: raise RuntimeError('env_path 
is None') + # if env_path is not None: + for env_id in env_ids: + tree = self.serialize_env_single(state, env_id, env_path) return env_ids - @staticmethod - def atomic_dump_json(xdir, key, x): - env_path_file = os.path.join(xdir, "{0}.json".format(key)) - with open(env_path_file+".temp", 'w') as f: - f.write(json.dumps(x)) - shutil.move(env_path_file+'.temp',env_path_file) - return env_path_file + # @staticmethod + @classmethod + def write_key_value(cls, tree, key, xx, xt): + ret = [] + if xt == 'file': + 'single file ' + rett = cls.atomic_dump_json(tree, key, xx) + ret.append( rett ) + + elif xt == 'filedir': + '''dir storage + Note no further directory embedding is allowed + ''' + ttree = B.SJ(tree,key) + for kk,xxx in xx.items(): + rett = cls.write_key_value(ttree, kk, xxx, 'file') + ret.append(rett) + # ret.append( cls.atomic_dump_json(ttree, kk, xxx) ) + else: + raise NotImplementedError('cannot serialize key %s'%key) + return ret + + @classmethod + def serialize_env_single(self, state, env_id, env_path): + x = {k:v for k,v in state[env_id].items()} + x['reload'] + x['jsons'] + + ''' + This is the root tree for a env + ''' + tree = B.SJ(env_path, env_id) + for key, xt in self.schema_dict.items(): + xx = x[key] + self.write_key_value(tree, key, xx, xt) + return tree @staticmethod def serialize_all(state, env_path=DEFAULT_ENV_PATH): serialize_env(state, list(state.keys()), env_path=env_path) -# store_dict['SimpleJsonStorage'] + + +@dset(STORES) +class SimpleWindowJsonAutoSave(SimpleWindowJsonStorage): + ''' + One json per window + + + ''' + pass + @staticmethod + def setitem_callback_setitem(state, key, v): + ''' + Autosave uses this callback to save to disk + + Needs to capture the setitem call of json dict + ''' + return None + + # def @@ -355,10 +434,10 @@ class LazyEnvData(Mapping): Per-env laziness ''' store = _store - serialize_env = _store.serialize_env - get_valid_env_list = _store.get_valid_env_list - read_env_from_file = _store.read_env_from_file - 
atomic_dump_json = _store.atomic_dump_json + serialize_env = _store.serialize_env + get_valid_env_list = _store.get_valid_env_list + lazy_read_env_from_file = _store.lazy_read_env_from_file + atomic_dump_json = _store.atomic_dump_json # def serialize_env(self,*a) # SimpleWindowJsonStorage def __init__(self, env_path_file): @@ -370,9 +449,7 @@ def lazy_load_data(self): ''' This is now the only data entrypoint ''' - if self._raw_dict is not None: - return - self._raw_dict = self.read_env_from_file(self._env_path_file) + self._raw_dict = self.lazy_read_env_from_file(self._env_path_file, self._raw_dict) def __getitem__(self, key): ''' @@ -387,65 +464,10 @@ def __setitem__(self, key, value): ''' # self.send_data_to_disk(env_path_file, key, value) self.lazy_load_data() - return self._raw_dict.__setitem__(key, value) - - def __iter__(self): - self.lazy_load_data() - return iter(self._raw_dict) - - def __len__(self): - self.lazy_load_data() - return len(self._raw_dict) - - @dset(LEDS) - class LazyEnvDataPerWindow(Mapping): - ''' - Per-env laziness - ''' - store = _store - serialize_env = _store.serialize_env - get_valid_env_list = _store.get_valid_env_list - read_env_from_file = _store.read_env_from_file - # def serialize_env(self,*a) - # SimpleWindowJsonStorage - def __init__(self, env_path_file): - self._env_path_file = env_path_file - self._raw_dict = None - # self.ser - - def lazy_load_data(self): - ''' - This reduces to a constraint checker checker - ''' - self.check_constraint() - - def check_constraint(self): - ''' - For each valid file in directory - check the plot exists in - ''' - - # self['re'] - - - if self._raw_dict is not None: - return - self._raw_dict = self.read_env_from_file(self._env_path_file) - - def __getitem__(self, key): - ''' - Upon reading, should lazy load each json - ''' - self.lazy_load_data() - return self._raw_dict.__getitem__(key) - - def __setitem__(self, key, value): - ''' - upon setitem, needs to cache data to disk - ''' - # 
self.send_data_to_disk(env_path_file, key, value) - self.lazy_load_data() - return self._raw_dict.__setitem__(key, value) + self._store.callback_before_setitem(self,key,value) + ret = self._raw_dict.__setitem__(key, value) + self._store.callback_after_setitem(self,key,value) + return ret def __iter__(self): self.lazy_load_data() @@ -463,7 +485,9 @@ def __len__(self): # LED = LazyEnvData # LED = get_led_cls(None, 'SimpleJsonStorage') -LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonStorage') + +# LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonStorage') +LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonAutoSave') serialize_env = LazyEnvData.serialize_env tornado_settings = { @@ -473,6 +497,8 @@ def __len__(self): "template_path": get_path('static'), "compiled_template_cache": False } +def init_default_env(): + return {'jsons': {}, 'reload': {}} class Application(tornado.web.Application): def __init__(self, port=DEFAULT_PORT, base_url='', @@ -579,7 +605,7 @@ def load_state(self): 'env_path=None.', RuntimeWarning ) - return {'main': {'jsons': {}, 'reload': {}}} + return {'main': init_default_env()} ensure_dir_exists(env_path) ''' @@ -601,7 +627,7 @@ def load_state(self): ''' if 'main' not in state: # and 'main.json' not in env_jsons: - state['main'] = {'jsons': {}, 'reload': {}} + state['main'] = init_default_env() # serialize_env(state, ['main'], env_path=self.env_path) return state @@ -1175,7 +1201,7 @@ def register_window(self, p, eid): is_new_env = False if eid not in self.state: is_new_env = True - self.state[eid] = {'jsons': {}, 'reload': {}} + self.state[eid] = init_default_env() env = self.state[eid]['jsons'] @@ -1958,7 +1984,7 @@ def post(self, args): if 'eid' in msg_args: eid = msg_args['eid'] if eid not in self.state: - self.state[eid] = {'jsons': {}, 'reload': {}} + self.state[eid] = init_default_env() broadcast_envs(self) @@ -2038,7 +2064,7 @@ def wrap_func(handler, args): data = json.loads(args['data']) 
if eid not in handler.state: - handler.state[eid] = {'jsons': {}, 'reload': {}} + handler.state[eid] = init_default_env() if 'win' in args and args['win'] is None: handler.state[eid]['jsons'] = data From daacf25c12060fa0c757e8612394aeac6b006f95 Mon Sep 17 00:00:00 2001 From: shouldsee Date: Sat, 3 Sep 2022 00:04:48 +0800 Subject: [PATCH 05/15] [refactor] creating infra for auto-saving. ckpt before capturing init events --- py/visdom/server.py | 351 +++++++++++++++++++++++++++----------------- 1 file changed, 220 insertions(+), 131 deletions(-) diff --git a/py/visdom/server.py b/py/visdom/server.py index dec99f90..a4d7f085 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -161,10 +161,24 @@ def SJ(x,y): def NIF(*a,**kw): raise NotImplementedError() class StoragePrototype(object): - serialze_env = NIF - serialize_env_single = NIF + + serialize_env = NIF get_valid_env_list = NIF lazy_read_env_from_file = NIF + serialize_env_single = NIF + ''' + This function save serialized data to disk + + Should perform incremental data writing. + + Incremental writing is implemented as an existence check. + If the file associated with the + + up-to-date criteria + - key exists, + - mtime equals to the mtime + and mtime equals to the stored mtime, then + ''' # atomic_dump_json = NIF @staticmethod def callback_before_setitem(state, key, v): @@ -189,7 +203,23 @@ def atomic_dump_json(xdir, key, x): shutil.move(env_path_file+'.temp',env_path_file) return env_path_file - # setitem_cll + @classmethod + def serialize_env_list(self, state, eids, env_path, schema): + ''' + save to disk gracefully. 
no error if mismatched + ''' + env_ids = [i for i in eids if i in state] + if env_path is None: raise RuntimeError('env_path is None') + # if env_path is not None: + for env_id in env_ids: + tree = self.serialize_env_single(state, env_id, env_path, schema) + return env_ids + + + # @staticmethod + # def serialize_all(state, env_path=DEFAULT_ENV_PATH): + # serialize_env_list(state, list(state.keys()), env_path=env_path) + @dset(STORES) class SimpleJsonStorage(StoragePrototype): @@ -197,10 +227,13 @@ class SimpleJsonStorage(StoragePrototype): One file per env ''' @staticmethod - def lazy_read_env_from_file(fn, state): + def lazy_read_env_from_file(fn, state, schema): ''' :param state: a dict-like object to trigger laziness ''' + del schema ;'not used here' + + if state is not None: return state else: @@ -231,7 +264,16 @@ def get_valid_env_list(fn): # @staticmethod @classmethod - def serialize_env(cls, state, eids, env_path): + def serialize_env_single(cls, state, env_id, env_path, schema): + del schema; '[EXTRA_VAR]' + + # env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) + # import pdb; pdb.set_trace() + x = {k:v for k,v in state[env_id].items()} + x['reload'] + x['jsons'] + # os.makedirs(tree) if not os.path.exists(tree) else None + cls.atomic_dump_json(env_path, env_id, x) ''' This function save serialized data to disk @@ -245,27 +287,11 @@ def serialize_env(cls, state, eids, env_path): - mtime equals to the mtime and mtime equals to the stored mtime, then ''' - env_ids = [i for i in eids if i in state] - if env_path is None: raise RuntimeError('env_path is None') - for env_id in env_ids: - ''' - This was unsafe: use .temp to make sure atomicity - ''' - # env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) - # import pdb; pdb.set_trace() - x = {k:v for k,v in state[env_id].items()} - x['reload'] - x['jsons'] - # os.makedirs(tree) if not os.path.exists(tree) else None - cls.atomic_dump_json(env_path, env_id, x) - return env_ids - - - 
@staticmethod - def serialize_all(state, env_path=DEFAULT_ENV_PATH): - serialize_env(state, list(state.keys()), env_path=env_path) + ''' + This was unsafe: use .temp to make sure atomicity + ''' @dset(STORES) @@ -274,129 +300,154 @@ class SimpleWindowJsonStorage(StoragePrototype): One json per window ''' - schema_dict = { - 'jsons':'filedir', - 'reload':'file', - } @classmethod - def lazy_read_env_from_file(self, tree, state): + def lazy_read_file_xt(self, tree, par, key, xt): ''' - Only loads windows that are not in state dict + :param tree: a directory + :param par: dict-like parent + :param key: name of child storage + :param xt: string describe the type of data ''' - # xo = {} - if state is None: - state = init_default_env() - - xo = state - # xo['jsons'] = xxo = {} - key = 'jsons' - for key,xt in self.schema_dict.items(): - if xt == 'filedir': - xxo = xo[key] - ttree = B.SJ(tree, key) - for k in os.listdir(ttree): - if not k.endswith('.json'): - continue - window = k.rsplit('.json',1)[0] - not_up_to_date = window not in xxo - if not_up_to_date: - v = self.safe_parse_file(B.J(ttree,k)) - xxo[window] = v - elif xt=='file': - if xo[key].__len__(): - pass - else: - xo[key] = self.safe_parse_file(B.J(tree,key + '.json')) - else: - raise NotImplementedError('lazy_read_env_from_file(%s,%s)'%(key,xt)) - return xo - # - @staticmethod - def get_valid_env_list(fn): + ''' + [TBC] init events + ''' + xxo = par.get(key,{}) ret = [] - for key in os.listdir( B.safe_dir(fn) ): - ffn = B.J(fn,key) - if os.path.isdir(ffn): - if key =='view': continue - ret.append((key,ffn)) - return ret + if xt == 'filedir': + ttree = B.SJ(tree, key) + for k in os.listdir(ttree): + if not k.endswith('.json'): + continue + window = k.rsplit('.json',1)[0] + self.lazy_read_file_xt(ttree, xxo, window, 'file') + elif xt=='file': + if xxo.__len__(): + pass + else: + xxo = self.safe_parse_json(B.J(tree,key)) + logging.debug(f'lazy_read_file_xt({tree!r}, {key!r})') - @staticmethod - def 
safe_parse_file(fn): - try: - with open( fn, 'r') as f: - env_data = tornado.escape.json_decode(f.read()) - except Exception as e: - raise ValueError( - "Failed loading file: {} - {}".format( - fn, repr(e))) - return env_data + elif isinstance(xt, Mapping): + if xxo is None: + # xxo = init_default_struct(xt) + xxo = init_default_env() - @classmethod - def serialize_env(self, state, eids, env_path): - ''' - This function save serialized data to disk + if key is not None: + ttree = B.SJ(tree, key) + else: + 'root={None:env}. a null parent for loading' + ttree = tree - Should perform incremental data writing. + for keyy,xtt in xt.items(): + self.lazy_read_file_xt(ttree, xxo, keyy, xtt) + else: + raise NotImplementedError('lazy_read_env_from_file(%s,%s)'%(key,xt)) - Incremental writing is implemented as an existence check. - If the file associated with the + if isinstance(par, LazyContainerPrototype): + '[CRITICAL] The lazy_read method is exempted from setitem callback' + par._pure_setitem(key,xxo) + else: + par[key] = xxo + return xxo + # return par,key - up-to-date criteria - - key exists, - - mtime equals to the mtime - and mtime equals to the stored mtime, then - ''' - env_ids = [i for i in eids if i in state] - if env_path is None: raise RuntimeError('env_path is None') - # if env_path is not None: - for env_id in env_ids: - tree = self.serialize_env_single(state, env_id, env_path) - return env_ids - # @staticmethod @classmethod - def write_key_value(cls, tree, key, xx, xt): + def write_key_value_xt(cls, tree, key, data, xt): + ''' + :param tree: a directory + :param key: name of child storage + :param data: value to be stored, usually a dict like + :param xt: string describe the type of data + ''' ret = [] if xt == 'file': 'single file ' - rett = cls.atomic_dump_json(tree, key, xx) + rett = cls.atomic_dump_json(tree, key, data) ret.append( rett ) + logging.debug(f'write_key_value_xt({tree!r}, {key!r})') elif xt == 'filedir': - '''dir storage - Note no further 
directory embedding is allowed ''' - ttree = B.SJ(tree,key) - for kk,xxx in xx.items(): - rett = cls.write_key_value(ttree, kk, xxx, 'file') + dir storage + !Note that no further directory embedding is allowed + ''' + if isinstance(data, LazyContainerPrototype): + '[CRITICAL] exemption is required. data is a dict-like filedir obj' + it = data._pure_items() + else: + it = data.items() + + ttree = B.SJ(tree, key) + for kk,xxx in it: + rett = cls.write_key_value_xt(ttree, kk, xxx, 'file') ret.append(rett) # ret.append( cls.atomic_dump_json(ttree, kk, xxx) ) + elif isinstance(xt, Mapping): + ''' + Recurse according to schema_dict=xt + ''' + ttree = B.SJ(tree, key) + for key, xtt in xt.items(): + if isinstance(data, LazyContainerPrototype): + '''[CRITICAL] the serialize method is exempted from getitem callback + if the serialize method triggered getitem callback, + then will lazy_read the whole directory, which is unnecessary + ''' + ddata = data._pure_getitem(key) + else: + ddata = data.__getitem__(key) + cls.write_key_value_xt(ttree, key, ddata, xtt) else: raise NotImplementedError('cannot serialize key %s'%key) return ret @classmethod - def serialize_env_single(self, state, env_id, env_path): - x = {k:v for k,v in state[env_id].items()} - x['reload'] - x['jsons'] - + def lazy_read_env_from_file(self, tree, env, schema): ''' - This is the root tree for a env + Only loads windows that are not in state dict ''' - tree = B.SJ(env_path, env_id) - for key, xt in self.schema_dict.items(): - xx = x[key] - self.write_key_value(tree, key, xx, xt) - return tree + # xo = {} + root = {None:env} + env = self.lazy_read_file_xt(tree, root, None, schema) + # self.schema_dict) + return env + + @classmethod + def serialize_env_single(self, state, env_id, env_path, schema): + return self.write_key_value_xt( env_path, env_id, state[env_id], schema) + # xt=self.schema_dict) + + + @staticmethod + def get_valid_env_list(fn): + ret = [] + for key in os.listdir( B.safe_dir(fn) ): + ffn = 
B.J(fn,key) + if os.path.isdir(ffn): + if key =='view': continue + ret.append((key,ffn)) + return ret + @staticmethod - def serialize_all(state, env_path=DEFAULT_ENV_PATH): - serialize_env(state, list(state.keys()), env_path=env_path) + def safe_parse_file(fn): + try: + with open( fn, 'r') as f: + env_data = tornado.escape.json_decode(f.read()) + except Exception as e: + raise ValueError( + "Failed loading file: {} - {}".format( + fn, repr(e))) + return env_data + + @classmethod + def safe_parse_json(self, fn): + return self.safe_parse_file(fn+'.json') + @dset(STORES) @@ -404,7 +455,6 @@ class SimpleWindowJsonAutoSave(SimpleWindowJsonStorage): ''' One json per window - ''' pass @staticmethod @@ -416,8 +466,10 @@ def setitem_callback_setitem(state, key, v): ''' return None - # def + # def :param data: value to be stored, usually a dict like +class LazyContainerPrototype(Mapping): + pass def get_led_cls(sel_led, sel_store): @@ -428,28 +480,45 @@ def get_led_cls(sel_led, sel_store): # if sel_store == '' _store = STORES[sel_store] LEDS = {} + # @dset(LEDS) + # class LazyContainerPrototype(Mapping): + # pass + @dset(LEDS,'LazyEnvData') @dset(LEDS) class LazyEnvData(Mapping): ''' Per-env laziness ''' + schema = { + 'jsons':'filedir', + 'reload':'file', + } + store = _store - serialize_env = _store.serialize_env + serialize_env_list = _store.serialize_env_list get_valid_env_list = _store.get_valid_env_list lazy_read_env_from_file = _store.lazy_read_env_from_file atomic_dump_json = _store.atomic_dump_json - # def serialize_env(self,*a) # SimpleWindowJsonStorage def __init__(self, env_path_file): self._env_path_file = env_path_file self._raw_dict = None + # _pure_getitem = self._raw_dict.__getitem__ + # _pure_setitem = self._raw_dict.__setitem__ + # _pure_items = self._raw_dict.items # self.ser + def _pure_setitem(self,k,v): + return self._raw_dict.__setitem__(k,v) + def _pure_getitem(self,k): + return self._raw_dict.__getitem__(k) + def _pure_items(self): + return 
self._raw_dict.items() def lazy_load_data(self): ''' This is now the only data entrypoint ''' - self._raw_dict = self.lazy_read_env_from_file(self._env_path_file, self._raw_dict) + self._raw_dict = self.lazy_read_env_from_file(self._env_path_file, self._raw_dict, self.schema) def __getitem__(self, key): ''' @@ -480,15 +549,10 @@ def __len__(self): return LEDS[sel_led] # return LazyEnvData -# serialize_all = LazyEnvData.serialize_all -# serialize_env = LazyEnvData.serialize_env -# LED = LazyEnvData - -# LED = get_led_cls(None, 'SimpleJsonStorage') # LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonStorage') LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonAutoSave') -serialize_env = LazyEnvData.serialize_env +serialize_env = LazyEnvData.serialize_env_list ## [LEGACY] tornado_settings = { "autoescape": None, @@ -498,8 +562,23 @@ def __len__(self): "compiled_template_cache": False } def init_default_env(): + '[TBC]' return {'jsons': {}, 'reload': {}} +def init_default_strut(schema_dict): + ''' + needs to eval the schema to get the actual value + + if the value is string, then fine and terminal. + + [TBC] this is to do with the variable initing. 
+ variable are bound to (tree,key) pair upon initing + ''' + for k, xt in schema_dict.items(): + + pass + return + class Application(tornado.web.Application): def __init__(self, port=DEFAULT_PORT, base_url='', env_path=DEFAULT_ENV_PATH, readonly=False, @@ -628,7 +707,6 @@ def load_state(self): if 'main' not in state: # and 'main.json' not in env_jsons: state['main'] = init_default_env() - # serialize_env(state, ['main'], env_path=self.env_path) return state @@ -887,7 +965,7 @@ def on_message(self, message): copy.deepcopy(self.state[msg['prev_eid']]) self.state[msg['eid']]['reload'] = msg['data'] self.eid = msg['eid'] - serialize_env(self.state, [self.eid], env_path=self.env_path) + self.app.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ##[TBCovered] elif cmd == 'delete_env': if 'eid' in msg: logging.info('closing environment {}'.format(msg['eid'])) @@ -1020,7 +1098,7 @@ def on_message(self, message): copy.deepcopy(self.state[msg['prev_eid']]) self.state[msg['eid']]['reload'] = msg['data'] self.eid = msg['eid'] - serialize_env(self.state, [self.eid], env_path=self.env_path) + self.app.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ### [TBCR] elif cmd == 'delete_env': if 'eid' in msg: logging.info('closing environment {}'.format(msg['eid'])) @@ -1756,7 +1834,7 @@ def wrap_func(handler, args): assert prev_eid in handler.state, 'env to be forked doesn\'t exit' handler.state[eid] = copy.deepcopy(handler.state[prev_eid]) - serialize_env(handler.state, [eid], env_path=handler.app.env_path) + handler.app.serialize_env_list_wschema(handler.state, [eid], env_path=handler.app.env_path) ### [TBCR] broadcast_envs(handler) handler.write(eid) @@ -2030,13 +2108,15 @@ def initialize(self, app): self.port = app.port self.env_path = app.env_path self.login_enabled = app.login_enabled + # self.store = app.store + self.serialize_env_list_wschema = app.serialize_env_list_wschema @staticmethod def wrap_func(handler, args): envs = args['data'] envs = 
[escape_eid(eid) for eid in envs] # this drops invalid env ids - ret = serialize_env(handler.state, envs, env_path=handler.env_path) + ret = handler.serialize_env_list_wschema(handler.state, envs, env_path=handler.env_path) handler.write(json.dumps(ret)) @check_auth @@ -2311,6 +2391,15 @@ def start_server(port=DEFAULT_PORT, hostname=DEFAULT_HOSTNAME, readonly=readonly, user_credential=user_credential, use_frontend_client_polling=use_frontend_client_polling, eager_data_loading=eager_data_loading) + + def serialize_env_list_wschema(state, eids, env_path, LazyEnvData=LazyEnvData): + ''' + Bind schema with Store's method to be used by app + ''' + return LazyEnvData.store.serialize_env_list( state, eids, env_path, schema = LazyEnvData.schema) + app.serialize_env_list_wschema = serialize_env_list_wschema + + if bind_local: app.listen(port, max_buffer_size=1024 ** 3, address='127.0.0.1') else: From a22c842dc32efdbf97b9f155b05092bd68a59f7b Mon Sep 17 00:00:00 2001 From: shouldsee Date: Sat, 3 Sep 2022 11:42:28 +0800 Subject: [PATCH 06/15] [refactor]migrate serialize_env_list_wschema and lazy_read --- py/visdom/__init__.py | 21 +-- py/visdom/server.py | 332 +++++++++++++++++++++++++++++++++--------- 2 files changed, 271 insertions(+), 82 deletions(-) diff --git a/py/visdom/__init__.py b/py/visdom/__init__.py index c1f91f4d..d40f3e2e 100644 --- a/py/visdom/__init__.py +++ b/py/visdom/__init__.py @@ -5,6 +5,7 @@ # # This source code is licensed under the license found in the # LICENSE file in the root directory of this source tree. +# __version__ = 'test' import os.path import requests @@ -405,7 +406,7 @@ def __init__( 'base_url should not end with / as it is appended automatically' self.ipv6 = ipv6 - self.env = env + self.env = env self.env_list={f'{env}'} # default env self.send = send self.event_handlers = {} # Haven't registered any events @@ -801,7 +802,7 @@ def get_env_list(self): This function returns a list of all of the env names that are currently in the server. 
""" - if self.offline: + if self.offline: return list(self.env_list) else: return json.loads(self._send({}, endpoint='env_state', quiet=True)) @@ -1694,7 +1695,7 @@ def line(self, Y, X=None, win=None, env=None, opts=None, update=None, assert X.ndim == 1 or X.ndim == 2, 'X should have 1 or 2 dim' else: X = np.linspace(0, 1, Y.shape[0]) - + if Y.ndim == 2 and X.ndim == 1: X = np.tile(X, (Y.shape[1], 1)).transpose() @@ -2180,7 +2181,7 @@ def sunburst(self, labels, parents, values=None, win=None, env=None, opts=None): line_width=opts.get("marker_width") assert len(parents.tolist())==len(labels.tolist()), "length of parents and labels should be equal" - + data_dict=[{ 'labels': labels.tolist(), "parents":parents.tolist(), @@ -2196,7 +2197,7 @@ def sunburst(self, labels, parents, values=None, win=None, env=None, opts=None): data_dict[0]['values']=values.tolist() - data=data_dict + data=data_dict return self._send({ 'data': data, 'win': win, @@ -2397,7 +2398,7 @@ def graph(self, edges, edgeLabels = None, nodeLabels = None, opts=dict(), env=No """ This function draws interactive network graphs. It takes list of edges as one of the arguments. The user can also provide custom edge Labels and node Labels in edgeLabels and nodeLabels respectively. - Along with that we have different parameters in opts for making it more user friendly. + Along with that we have different parameters in opts for making it more user friendly. 
Args: edges : list, required @@ -2419,7 +2420,7 @@ def graph(self, edges, edgeLabels = None, nodeLabels = None, opts=dict(), env=No except: raise RuntimeError( "networkx must be installed to plot Graph figures") - + G = nx.Graph() G.add_edges_from(edges) node_data = list(G.nodes()) @@ -2434,7 +2435,7 @@ def graph(self, edges, edgeLabels = None, nodeLabels = None, opts=dict(), env=No if nodeLabels is not None: assert len(nodeLabels) == len(node_data),\ "length of nodeLabels does not match with the length of nodes {len1} != {len2}".format(len1 = len(nodeLabels), len2 = len(node_data)) - + for i in range(len(node_data)): if i != node_data[i]: raise RuntimeError("The nodes should be numbered from 0 to n-1 for n nodes! {} node is missing!".format(i)) @@ -2455,7 +2456,7 @@ def graph(self, edges, edgeLabels = None, nodeLabels = None, opts=dict(), env=No edge["target"] = int(link_data[i][1]) edge["label"] = str(edgeLabels[i]) if edgeLabels is not None else str(link_data[i][0])+"-"+str(link_data[i][1]) edges.append(edge) - + for i in range(len(node_data)): node = {} node["name"] = int(node_data[i]) @@ -2463,7 +2464,7 @@ def graph(self, edges, edgeLabels = None, nodeLabels = None, opts=dict(), env=No if opts['scheme'] == 'different': node["club"] = int(i) nodes.append(node) - + data = [{ 'content': {"nodes": nodes, "edges": edges}, diff --git a/py/visdom/server.py b/py/visdom/server.py index a4d7f085..93732a3d 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -153,6 +153,16 @@ def join(x,y): @staticmethod def SJ(x,y): return B.safe_dir(B.join(x,y)) + @staticmethod + def safe_join_with_none(x,y): + ''' + if y is None, then do not extend original path + ''' + if y is None: + return x + else: + return B.safe_dir(B.join(x,y)) + SJN = safe_join_with_none J = join # class BasicLazyMapping(Mapping): @@ -185,17 +195,18 @@ def callback_before_setitem(state, key, v): ''' this callback is blocking. 
could be used for sanity check ''' - return None + return v @staticmethod - def setitem_callback_setitem(state, key, v): + def callback_after_setitem(state, key, v): ''' Autosave uses this callback to save to disk ''' return None - @staticmethod - def atomic_dump_json(xdir, key, x): + # @staticmethod + @classmethod + def atomic_dump_json(self, xdir, key, x): xdir = B.safe_dir(xdir) env_path_file = os.path.join(xdir, "{0}.json".format(key)) with open(env_path_file+".temp", 'w') as f: @@ -204,7 +215,7 @@ def atomic_dump_json(xdir, key, x): return env_path_file @classmethod - def serialize_env_list(self, state, eids, env_path, schema): + def _serialize_env_list_(self, state, eids, env_path, schema): ''' save to disk gracefully. no error if mismatched ''' @@ -215,6 +226,22 @@ def serialize_env_list(self, state, eids, env_path, schema): tree = self.serialize_env_single(state, env_id, env_path, schema) return env_ids +json._dumps_old = json.dumps + +class CustomEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, LazyContainerPrototype): + return obj._raw_dict + return super().default(obj) + +def dumps(*a,**kw): + kw['cls'] = CustomEncoder + return json._dumps_old(*a,**kw) +json.dumps = dumps + + # f.write(json.dumps(x,cls=self.CustomEncoder)) + + # @staticmethod # def serialize_all(state, env_path=DEFAULT_ENV_PATH): @@ -243,12 +270,21 @@ def lazy_read_env_from_file(fn, state, schema): except Exception as e: raise ValueError( "Failed loading environment json: {} - {}".format( - self._env_path_file, repr(e))) + self.fn, repr(e))) _raw_dict = { 'jsons': env_data['jsons'], 'reload': env_data['reload'] } return _raw_dict + @classmethod + def legacy_load_state(self, state, env_path, LazyEnvData): + ''' + Old logic via path scanning + ''' + eid_file_pair = self.get_valid_env_list(env_path) + # env_jsons = [i for i in os.listdir(env_path) if '.json' in i] + for eid, env_path_file in eid_file_pair: + state[eid] = LazyEnvData(env_path_file) @staticmethod def 
get_valid_env_list(fn): @@ -293,12 +329,60 @@ def serialize_env_single(cls, state, env_id, env_path, schema): This was unsafe: use .temp to make sure atomicity ''' +SCHEMAS = {} +SCHEMAS['venv'] = { +'jsons':'filedir', +'reload':'file', +} +SCHEMAS['file'] = 'file' @dset(STORES) class SimpleWindowJsonStorage(StoragePrototype): ''' One json per window ''' + schema_mapper = {} + schema_mapper['vstate'] = SCHEMAS['venv'] + schema_mapper['filedir'] = SCHEMAS['file'] + + @classmethod + def map_schema(self,par_schema,key): + if isinstance(par_schema,Mapping): + return par_schema[key] + else: + return self.schema_mapper[par_schema] + @classmethod + def legacy_load_state(self, state, env_path, LazyEnvData): + pass + + # @staticmethod + # def cast_to_schema(v, schema): + # if schema == 'file': + # assert 0 + # elif schema == 'filedir': + # assert 0 + # elif isinstance(schema,Mapping): + # assert 0 + # # for + # else: + # raise NotImplementedError(f'cast_to_schema({schema!r})') + # return + + @classmethod + def callback_before_setitem(self, state, key, v): + ''' + casting incoming data according to schemas + + need to inherit tree path from state + ''' + # return v + assert key is not None,f'not allowed in {state!r}' + v = state.LazyContainerCurrentBase( B.J( state.tree,key), self.map_schema( state.schema, key), v) + logging.log(5,f'callback_before_setitem({repr(state)[:10]},{key!r},{v!r})') + # tree = state.tree + # v = self.cast_to_schema(v, state.schema[key]) + return v + @classmethod def lazy_read_file_xt(self, tree, par, key, xt): @@ -312,41 +396,67 @@ def lazy_read_file_xt(self, tree, par, key, xt): ''' [TBC] init events ''' - xxo = par.get(key,{}) - ret = [] + if isinstance(par, LazyContainerPrototype): + xxo = par._pure_get(key,None) + else: + xxo = par.get(key,None) + + ret = [] if xt == 'filedir': - ttree = B.SJ(tree, key) + if xxo is None: xxo = {} + ttree = B.SJN(tree, key) for k in os.listdir(ttree): if not k.endswith('.json'): continue window = 
k.rsplit('.json',1)[0] - self.lazy_read_file_xt(ttree, xxo, window, 'file') + self.lazy_read_file_xt(ttree, xxo, window, self.schema_mapper[xt]) elif xt=='file': - if xxo.__len__(): + if xxo is not None: pass else: - xxo = self.safe_parse_json(B.J(tree,key)) + assert key is not None + fn = B.J(tree,key) + # if not os.path.exists(fn): + # xxo = {} + # logging.debug(f'lazy_read_file_xt({tree!r}, {key!r}):Empty file!') + # else: + xxo = self.safe_parse_json(fn) logging.debug(f'lazy_read_file_xt({tree!r}, {key!r})') - elif isinstance(xt, Mapping): - - if xxo is None: - # xxo = init_default_struct(xt) - xxo = init_default_env() - - if key is not None: - ttree = B.SJ(tree, key) - else: - 'root={None:env}. a null parent for loading' - ttree = tree + elif xt=='vstate': + if xxo is None: xxo = {} + ttree = B.SJN(tree, key) + for k in os.listdir(ttree): + if k in 'view'.split(): + continue + env_id = k + self.lazy_read_file_xt(ttree, xxo, env_id, self.schema_mapper[xt]) + elif isinstance(xt, Mapping): + ''' + ''' + if xxo is None: xxo = {} + 'an empty dict is enough. xt schema will lead the parsing' + # xxo = {} + + # if key is not None: + ttree = B.SJN(tree, key) + # else: + # 'root={None:env}. 
a null parent for loading' + # ttree = tree + # import pdb; pdb.set_trace() for keyy,xtt in xt.items(): self.lazy_read_file_xt(ttree, xxo, keyy, xtt) else: raise NotImplementedError('lazy_read_env_from_file(%s,%s)'%(key,xt)) + # import pdb; pdb.set_trace() if isinstance(par, LazyContainerPrototype): + ''' + [CRITICAL] use callback to bind input data stream to schema structure + ''' + xxo = self.callback_before_setitem( par, key, xxo) '[CRITICAL] The lazy_read method is exempted from setitem callback' par._pure_setitem(key,xxo) else: @@ -366,6 +476,10 @@ def write_key_value_xt(cls, tree, key, data, xt): ret = [] if xt == 'file': 'single file ' + assert key is not None, f'Not allowed for type {xt!r}' + # assert data.__len__() + # if data.__len__()==0: + # import pdb; pdb.set_trace() rett = cls.atomic_dump_json(tree, key, data) ret.append( rett ) logging.debug(f'write_key_value_xt({tree!r}, {key!r})') @@ -381,7 +495,7 @@ def write_key_value_xt(cls, tree, key, data, xt): else: it = data.items() - ttree = B.SJ(tree, key) + ttree = B.SJN(tree, key) for kk,xxx in it: rett = cls.write_key_value_xt(ttree, kk, xxx, 'file') ret.append(rett) @@ -390,7 +504,7 @@ def write_key_value_xt(cls, tree, key, data, xt): ''' Recurse according to schema_dict=xt ''' - ttree = B.SJ(tree, key) + ttree = B.SJN(tree, key) for key, xtt in xt.items(): if isinstance(data, LazyContainerPrototype): '''[CRITICAL] the serialize method is exempted from getitem callback @@ -469,6 +583,7 @@ def setitem_callback_setitem(state, key, v): # def :param data: value to be stored, usually a dict like class LazyContainerPrototype(Mapping): + # def __init__(self,): pass @@ -483,42 +598,77 @@ def get_led_cls(sel_led, sel_store): # @dset(LEDS) # class LazyContainerPrototype(Mapping): # pass - @dset(LEDS,'LazyEnvData') @dset(LEDS) - class LazyEnvData(Mapping): - ''' - Per-env laziness - ''' - schema = { - 'jsons':'filedir', - 'reload':'file', - } - + class LazyContainerCurrent(LazyContainerPrototype): + # def 
__init__(self,): store = _store - serialize_env_list = _store.serialize_env_list - get_valid_env_list = _store.get_valid_env_list - lazy_read_env_from_file = _store.lazy_read_env_from_file - atomic_dump_json = _store.atomic_dump_json - # SimpleWindowJsonStorage - def __init__(self, env_path_file): - self._env_path_file = env_path_file - self._raw_dict = None - # _pure_getitem = self._raw_dict.__getitem__ - # _pure_setitem = self._raw_dict.__setitem__ - # _pure_items = self._raw_dict.items - # self.ser + ___serialize_env_list = _store._serialize_env_list_ + get_valid_env_list = _store.get_valid_env_list + lazy_read_env_from_file = _store.lazy_read_env_from_file + atomic_dump_json = _store.atomic_dump_json + + def __init__(self, tree, schema, value): + self._tree = tree + self.schema = schema + # self._raw_dict = None + # if isinstanceself.schema + v = value + if v is None: + v = {} + elif isinstance(v,LazyContainerPrototype): + # v = + v = v._raw_dict + # v = {k:vv for v} + else: + v = v + self._raw_dict = (v) + + @property + def tree(self): + return self._tree + def _pure_setitem(self,k,v): return self._raw_dict.__setitem__(k,v) + def _pure_getitem(self,k): return self._raw_dict.__getitem__(k) + + def _pure_get(self,k,v=None): + return self._raw_dict.get(k,v) + def _pure_items(self): return self._raw_dict.items() - def lazy_load_data(self): + # def serialize_env_list(self, state, eids, env_path, schema): + # def serialize_env_single(self, state, env_id, env_path, schema): + # ''' + # trigger children to save + # ''' + # return self.write_key_value_xt( env_path, env_id, state[env_id], schema) + + + def lazy_load_all_children(self): + ''' + Loads all child nodes according to a up-to-date criteria + + Only loads windows that are not in state dict ''' - This is now the only data entrypoint + root = {None: self} + env = self.store.lazy_read_file_xt( self.tree, root, None, self.schema) + # import pdb; pdb.set_trace() + self._raw_dict = env._raw_dict + # assert 0 + # 
return env + lazy_load_data = lazy_load_all_children + + def save_all_children(self): ''' - self._raw_dict = self.lazy_read_env_from_file(self._env_path_file, self._raw_dict, self.schema) + save all + ''' + # return self.write_key_value_xt( tree, None, self._raw_dict, schema) + return self.store.write_key_value_xt( self.tree, None, self, self.schema) + + # return self.store.serialize_env_single( state, eid, env_path, schema = self.schema) def __getitem__(self, key): ''' @@ -533,9 +683,9 @@ def __setitem__(self, key, value): ''' # self.send_data_to_disk(env_path_file, key, value) self.lazy_load_data() - self._store.callback_before_setitem(self,key,value) - ret = self._raw_dict.__setitem__(key, value) - self._store.callback_after_setitem(self,key,value) + value = self.store.callback_before_setitem(self,key,value) + ret = self._raw_dict.__setitem__(key, value) + _ = self.store.callback_after_setitem(self,key,value) return ret def __iter__(self): @@ -545,6 +695,34 @@ def __iter__(self): def __len__(self): self.lazy_load_data() return len(self._raw_dict) + LazyContainerCurrent.LazyContainerCurrentBase =LazyContainerCurrent + + + @dset(LEDS) + class LazyEnvData(LazyContainerCurrent): + ''' + Per-env laziness + ''' + schema = SCHEMAS['venv'] + LCC = LazyContainerCurrent + # SimpleWindowJsonStorage + def __init__(self, tree): + super().__init__(tree, self.schema, None) + + @classmethod + def serialize_env_list_wschema(self, state, eids, env_path): + ''' + Bind schema with Store's method to be used by app + ''' + eids = [i for i in eids if i in state] + for eid in eids: + # import pdb; pdb.set_trace() + v = state[eid] + v.save_all_children() + # self.store.serialize_env_single( state, eid, env_path, schema = self.schema) + return eids + + return LEDS[sel_led] # return LazyEnvData @@ -552,7 +730,7 @@ def __len__(self): # LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonStorage') LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonAutoSave') 
-serialize_env = LazyEnvData.serialize_env_list ## [LEGACY]
+# serialize_env = LazyEnvData.serialize_env_list ## [LEGACY]

 tornado_settings = {
     "autoescape": None,
@@ -583,7 +761,9 @@ class Application(tornado.web.Application):
     def __init__(self, port=DEFAULT_PORT, base_url='',
                  env_path=DEFAULT_ENV_PATH, readonly=False,
                  user_credential=None, use_frontend_client_polling=False,
-                 eager_data_loading=False):
+                 eager_data_loading=False, LED=None):
+
+        self.LED = LED
         self.eager_data_loading = eager_data_loading
         self.env_path = env_path
         self.state = self.load_state()
@@ -634,7 +814,7 @@ def __init__(self, port=DEFAULT_PORT, base_url='',
         super(Application, self).__init__(handlers, **tornado_settings)

     def get_last_access(self):
-        if len(self.subs) > 0 or len(self.sources) > 0:
             # update the last access time to now, as someone
             # is currently connected to the server
             self.last_access = time.time()
@@ -679,6 +859,8 @@ def load_state(self):
         state = {}
         env_path = self.env_path
         if env_path is None:
+            assert 0,'[TBC] needs to fix support for env_path = None'
+
             warn_once(
                 'Saving and loading to disk has no effect when running with '
                 'env_path=None.',
@@ -690,10 +872,12 @@
             '''
             [Note] listdir is used to reconstruct env list from file list
             '''
-            eid_file_pair = LazyEnvData.get_valid_env_list(env_path)
-            # env_jsons = [i for i in os.listdir(env_path) if '.json' in i]
-            for eid, env_path_file in eid_file_pair:
-                state[eid] = LazyEnvData(env_path_file)
+            # state = self.LED(env_path)
            'state is a list of directory of LEDs'
+            state = self.LED.LCC(env_path, schema = 'vstate', value= {})
+
+            state.store.legacy_load_state(state, env_path, LED)
+

        if self.eager_data_loading:
            for k,x in state.items():
@@ -770,15 +954,21 @@ def get_current_user(self):
            return self.get_secure_cookie("user_password")
        except Exception:  # Not using secure cookies
            return None
-    def write_message(self,*a,**kw):
+    def write_message(self, msg, *a, 
**kw): ''' Debugging interceptor ''' + # if not isinstance(msg,str): + # # import pdb; pdb.set_trace() + # msg = json.dumps(msg) + if isinstance(msg, LazyContainerPrototype): + # msg = msg._raw_dict + msg = json.dumps(msg) if 5 >= logging.root.level: - x = a[0] - logging.log(5, str(['[DEBUG3]',type(x),(inspect.stack()[1].function),repr(a[0])[:20]]) ) - return super().write_message(*a,**kw) + x = msg + logging.log(5, str(['[DEBUG3]',type(x),(inspect.stack()[1].function),repr(x)[:20]]) ) + return super().write_message(msg, *a,**kw) class VisSocketHandler(BaseWebSocketHandler): @@ -2390,14 +2580,12 @@ def start_server(port=DEFAULT_PORT, hostname=DEFAULT_HOSTNAME, app = Application(port=port, base_url=base_url, env_path=env_path, readonly=readonly, user_credential=user_credential, use_frontend_client_polling=use_frontend_client_polling, - eager_data_loading=eager_data_loading) - - def serialize_env_list_wschema(state, eids, env_path, LazyEnvData=LazyEnvData): - ''' - Bind schema with Store's method to be used by app - ''' - return LazyEnvData.store.serialize_env_list( state, eids, env_path, schema = LazyEnvData.schema) - app.serialize_env_list_wschema = serialize_env_list_wschema + eager_data_loading=eager_data_loading, + LED=LED) + app.serialize_env_list_wschema = LED.serialize_env_list_wschema + # app.LCC = LazyContainerCurrent + # app.LED = LazyEnvData + # ContainerCurrent if bind_local: From ea846c7e18e151bc34c820d6d12e2fdc2eb4ad55 Mon Sep 17 00:00:00 2001 From: shouldsee Date: Sat, 3 Sep 2022 12:34:55 +0800 Subject: [PATCH 07/15] [refactor] autosave is now working! 
--- py/visdom/server.py | 140 ++++++++++++++++++++++---------------------- 1 file changed, 70 insertions(+), 70 deletions(-) diff --git a/py/visdom/server.py b/py/visdom/server.py index 93732a3d..63a769f0 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -168,7 +168,6 @@ def safe_join_with_none(x,y): STORES = {} - def NIF(*a,**kw): raise NotImplementedError() class StoragePrototype(object): @@ -206,13 +205,19 @@ def callback_after_setitem(state, key, v): # @staticmethod @classmethod - def atomic_dump_json(self, xdir, key, x): - xdir = B.safe_dir(xdir) - env_path_file = os.path.join(xdir, "{0}.json".format(key)) - with open(env_path_file+".temp", 'w') as f: + # def atomic_dump_json(self, xdir, key, x): + def atomic_dump_json(self, fn, x): + ''' + All i know about the file is its extension + ''' + target_file = fn +'.json' + # xdir = B.safe_dir(xdir) + # target_file = B.SJN(xdir,key)+'.json' + # env_path_file = os.path.join(xdir, "{0}.json".format(key)) + with open(target_file+".temp", 'w') as f: f.write(json.dumps(x)) - shutil.move(env_path_file+'.temp',env_path_file) - return env_path_file + shutil.move(target_file+'.temp',target_file) + return target_file @classmethod def _serialize_env_list_(self, state, eids, env_path, schema): @@ -226,27 +231,36 @@ def _serialize_env_list_(self, state, eids, env_path, schema): tree = self.serialize_env_single(state, env_id, env_path, schema) return env_ids -json._dumps_old = json.dumps - -class CustomEncoder(json.JSONEncoder): - def default(self, obj): - if isinstance(obj, LazyContainerPrototype): - return obj._raw_dict - return super().default(obj) - -def dumps(*a,**kw): - kw['cls'] = CustomEncoder - return json._dumps_old(*a,**kw) -json.dumps = dumps - - # f.write(json.dumps(x,cls=self.CustomEncoder)) - +if 1: + ''' + Override json.dumps() + ''' + json._dumps_old = json.dumps + class CustomEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, LazyContainerPrototype): + return obj._raw_dict + 
return super().default(obj) + + def dumps(*a,**kw): + kw['cls'] = CustomEncoder + return json._dumps_old(*a,**kw) + json.dumps = dumps + + # f.write(json.dumps(x,cls=self.CustomEncoder)) # @staticmethod # def serialize_all(state, env_path=DEFAULT_ENV_PATH): # serialize_env_list(state, list(state.keys()), env_path=env_path) +def legacy_save_children(x): + if isinstance(x, LazyContainerPrototype): + # import pdb; pdb.set_trace() + x.save_children() + else: + pass + @dset(STORES) class SimpleJsonStorage(StoragePrototype): @@ -297,7 +311,6 @@ def get_valid_env_list(fn): return ret # [i for i in os.listdir(env_path) if '.json' in i] - # @staticmethod @classmethod def serialize_env_single(cls, state, env_id, env_path, schema): @@ -309,7 +322,9 @@ def serialize_env_single(cls, state, env_id, env_path, schema): x['reload'] x['jsons'] # os.makedirs(tree) if not os.path.exists(tree) else None - cls.atomic_dump_json(env_path, env_id, x) + # cls.atomic_dump_json(env_path, env_id, x) + cls.atomic_dump_json(B.SJN(env_path,env_id),x) + # (env_path, env_id, x) ''' This function save serialized data to disk @@ -322,11 +337,8 @@ def serialize_env_single(cls, state, env_id, env_path, schema): - key exists, - mtime equals to the mtime and mtime equals to the stored mtime, then - ''' - - ''' - This was unsafe: use .temp to make sure atomicity + Old saving was unsafe: use .temp to make sure atomicity ''' SCHEMAS = {} @@ -354,7 +366,6 @@ def map_schema(self,par_schema,key): @classmethod def legacy_load_state(self, state, env_path, LazyEnvData): pass - # @staticmethod # def cast_to_schema(v, schema): # if schema == 'file': @@ -367,7 +378,6 @@ def legacy_load_state(self, state, env_path, LazyEnvData): # else: # raise NotImplementedError(f'cast_to_schema({schema!r})') # return - @classmethod def callback_before_setitem(self, state, key, v): ''' @@ -383,7 +393,9 @@ def callback_before_setitem(self, state, key, v): # v = self.cast_to_schema(v, state.schema[key]) return v - + @staticmethod + 
def add_extension(fn): + return fn+'.json' @classmethod def lazy_read_file_xt(self, tree, par, key, xt): ''' @@ -417,12 +429,13 @@ def lazy_read_file_xt(self, tree, par, key, xt): else: assert key is not None fn = B.J(tree,key) - # if not os.path.exists(fn): - # xxo = {} - # logging.debug(f'lazy_read_file_xt({tree!r}, {key!r}):Empty file!') - # else: - xxo = self.safe_parse_json(fn) - logging.debug(f'lazy_read_file_xt({tree!r}, {key!r})') + fn = self.add_extension(fn) + if not os.path.exists(fn): + xxo = {} + logging.debug(f'lazy_read_file_xt({tree!r}, {key!r}):Empty file!') + else: + xxo = self.safe_parse_file(fn) + logging.debug(f'lazy_read_file_xt({tree!r}, {key!r})') elif xt=='vstate': if xxo is None: xxo = {} @@ -476,11 +489,12 @@ def write_key_value_xt(cls, tree, key, data, xt): ret = [] if xt == 'file': 'single file ' - assert key is not None, f'Not allowed for type {xt!r}' + # assert key is not None, f'None not allowed write_key_value_xt({tree!r}, {key!r},{xt!r})' # assert data.__len__() # if data.__len__()==0: # import pdb; pdb.set_trace() - rett = cls.atomic_dump_json(tree, key, data) + # rett = cls.atomic_dump_json(tree, key, data) + rett = cls.atomic_dump_json(B.SJN(tree,key),data) ret.append( rett ) logging.debug(f'write_key_value_xt({tree!r}, {key!r})') @@ -558,10 +572,6 @@ def safe_parse_file(fn): fn, repr(e))) return env_data - @classmethod - def safe_parse_json(self, fn): - return self.safe_parse_file(fn+'.json') - @dset(STORES) @@ -583,8 +593,9 @@ def setitem_callback_setitem(state, key, v): # def :param data: value to be stored, usually a dict like class LazyContainerPrototype(Mapping): - # def __init__(self,): pass + def save_children(self): + return None def get_led_cls(sel_led, sel_store): @@ -595,9 +606,7 @@ def get_led_cls(sel_led, sel_store): # if sel_store == '' _store = STORES[sel_store] LEDS = {} - # @dset(LEDS) - # class LazyContainerPrototype(Mapping): - # pass + @dset(LEDS) class LazyContainerCurrent(LazyContainerPrototype): # def 
__init__(self,): @@ -610,15 +619,11 @@ class LazyContainerCurrent(LazyContainerPrototype): def __init__(self, tree, schema, value): self._tree = tree self.schema = schema - # self._raw_dict = None - # if isinstanceself.schema v = value if v is None: v = {} elif isinstance(v,LazyContainerPrototype): - # v = v = v._raw_dict - # v = {k:vv for v} else: v = v self._raw_dict = (v) @@ -633,21 +638,14 @@ def _pure_setitem(self,k,v): def _pure_getitem(self,k): return self._raw_dict.__getitem__(k) - def _pure_get(self,k,v=None): + def _pure_get(self, k, v=None): return self._raw_dict.get(k,v) def _pure_items(self): return self._raw_dict.items() - # def serialize_env_list(self, state, eids, env_path, schema): # def serialize_env_single(self, state, env_id, env_path, schema): - # ''' - # trigger children to save - # ''' - # return self.write_key_value_xt( env_path, env_id, state[env_id], schema) - - - def lazy_load_all_children(self): + def lazy_load_children(self): ''' Loads all child nodes according to a up-to-date criteria @@ -655,19 +653,17 @@ def lazy_load_all_children(self): ''' root = {None: self} env = self.store.lazy_read_file_xt( self.tree, root, None, self.schema) - # import pdb; pdb.set_trace() self._raw_dict = env._raw_dict # assert 0 # return env - lazy_load_data = lazy_load_all_children + lazy_load_data = lazy_load_children - def save_all_children(self): + def save_children(self): ''' save all ''' # return self.write_key_value_xt( tree, None, self._raw_dict, schema) return self.store.write_key_value_xt( self.tree, None, self, self.schema) - # return self.store.serialize_env_single( state, eid, env_path, schema = self.schema) def __getitem__(self, key): @@ -683,7 +679,7 @@ def __setitem__(self, key, value): ''' # self.send_data_to_disk(env_path_file, key, value) self.lazy_load_data() - value = self.store.callback_before_setitem(self,key,value) + value = self.store.callback_before_setitem(self,key,value) ret = self._raw_dict.__setitem__(key, value) _ = 
self.store.callback_after_setitem(self,key,value) return ret @@ -697,7 +693,6 @@ def __len__(self): return len(self._raw_dict) LazyContainerCurrent.LazyContainerCurrentBase =LazyContainerCurrent - @dset(LEDS) class LazyEnvData(LazyContainerCurrent): ''' @@ -713,17 +708,19 @@ def __init__(self, tree): def serialize_env_list_wschema(self, state, eids, env_path): ''' Bind schema with Store's method to be used by app + + [TBC] needs legacy impl for + - serialize_env_list_wschema() + - lazy_load_data() ''' eids = [i for i in eids if i in state] for eid in eids: # import pdb; pdb.set_trace() v = state[eid] - v.save_all_children() + v.save_children() # self.store.serialize_env_single( state, eid, env_path, schema = self.schema) return eids - - return LEDS[sel_led] # return LazyEnvData @@ -1471,6 +1468,7 @@ def register_window(self, p, eid): is_new_env = True self.state[eid] = init_default_env() + '## this is the "jsons" container!!! not the env container' env = self.state[eid]['jsons'] '## allocating window index' @@ -1484,9 +1482,11 @@ def register_window(self, p, eid): ''' ## this is the __setitem__ call. Adds per-window caching logic into this call + + needs to save_children() ''' env[p['id']] = p - + legacy_save_children(env[p['id']]) '## sync window to clients' broadcast(self, p, eid) From 9b0f58914b7a441d0bf43a55005fffc0ddff74e9 Mon Sep 17 00:00:00 2001 From: shouldsee Date: Sat, 3 Sep 2022 14:27:52 +0800 Subject: [PATCH 08/15] [add,bugfix] adds cmdline -cache_type JPE,JPW,JPWA. fix bugs related to these modes parser.add_argument('-cache_type', metavar='cache_type', type=str, default=DEFAULT_CACHE_TYPE, help='''specify how the received data should be synced between memory and disk. 
- JPE/OneJsonPerEnv: one json per environment - JPW/OneJsonPerWindow: one json per window - JPWA/OneJsonPerWindowAutoSave: one json per window, autosave to disk when plotting''') --- py/visdom/Makefile | 17 +++- py/visdom/server.py | 223 +++++++++++++++++++++++--------------------- 2 files changed, 134 insertions(+), 106 deletions(-) diff --git a/py/visdom/Makefile b/py/visdom/Makefile index afa0fba8..a9dbc0c4 100644 --- a/py/visdom/Makefile +++ b/py/visdom/Makefile @@ -4,5 +4,18 @@ clean: serve_test: python3 -mpdb -cc server.py -env_path test_data -eager_data_loading -logging_level 5 -test_simple: - python3 test_simple.py +serve_test_jpwa: + python3 -mpdb -cc server.py -env_path test_data -eager_data_loading -logging_level 5 -cache_type JPWA + +serve_test_jpe: + python3 -mpdb -cc server.py -env_path test_data -eager_data_loading -logging_level 5 -cache_type JPE + +serve_test_jpw: + python3 -mpdb -cc server.py -env_path test_data -eager_data_loading -logging_level 5 -cache_type JPW + + +test_save.py: + python3 test_save.py +test_cache.py: + python3 test_cache.py +.PHONY: test_cache.py test_save.py diff --git a/py/visdom/server.py b/py/visdom/server.py index 63a769f0..268e8356 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -8,7 +8,6 @@ """Server""" -DEBUG = 1 import argparse import copy import getpass @@ -49,6 +48,7 @@ DEFAULT_PORT = 8097 DEFAULT_HOSTNAME = "localhost" DEFAULT_BASE_URL = "/" +DEFAULT_CACHE_TYPE = 'OneJsonPerEnv' here = os.path.abspath(os.path.dirname(__file__)) COMPACT_SEPARATORS = (',', ':') @@ -163,6 +163,16 @@ def safe_join_with_none(x,y): else: return B.safe_dir(B.join(x,y)) SJN = safe_join_with_none + def join_safe_with_none(x,y): + ''' + make sure dir exists,then join. 
this is for file pointers + if y is None, then do not extend original path + ''' + if y is None: + return x + else: + return B.join(B.safe_dir(x),y) + JSN = join_safe_with_none J = join # class BasicLazyMapping(Mapping): @@ -249,21 +259,24 @@ def dumps(*a,**kw): return json._dumps_old(*a,**kw) json.dumps = dumps - # f.write(json.dumps(x,cls=self.CustomEncoder)) - # @staticmethod - # def serialize_all(state, env_path=DEFAULT_ENV_PATH): - # serialize_env_list(state, list(state.keys()), env_path=env_path) def legacy_save_children(x): if isinstance(x, LazyContainerPrototype): - # import pdb; pdb.set_trace() - x.save_children() + if issubclass(x.store, SimpleWindowJsonAutoSave): + # import pdb; pdb.set_trace() + x.save_children() else: pass +@dset(STORES,'JPE') +@dset(STORES,'OneJsonPerEnv') @dset(STORES) class SimpleJsonStorage(StoragePrototype): + SCHEMAS = {} + SCHEMAS['venv'] = 'file' + SCHEMAS['file'] = 'file' + ''' One file per env ''' @@ -275,7 +288,8 @@ def lazy_read_env_from_file(fn, state, schema): del schema ;'not used here' - if state is not None: + # if state is not None: + if state.__len__(): return state else: try: @@ -284,12 +298,13 @@ def lazy_read_env_from_file(fn, state, schema): except Exception as e: raise ValueError( "Failed loading environment json: {} - {}".format( - self.fn, repr(e))) + fn, repr(e))) _raw_dict = { 'jsons': env_data['jsons'], 'reload': env_data['reload'] } return _raw_dict + @classmethod def legacy_load_state(self, state, env_path, LazyEnvData): ''' @@ -317,13 +332,12 @@ def serialize_env_single(cls, state, env_id, env_path, schema): del schema; '[EXTRA_VAR]' # env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) - # import pdb; pdb.set_trace() x = {k:v for k,v in state[env_id].items()} - x['reload'] - x['jsons'] - # os.makedirs(tree) if not os.path.exists(tree) else None - # cls.atomic_dump_json(env_path, env_id, x) - cls.atomic_dump_json(B.SJN(env_path,env_id),x) + + if x == {}: + import pdb; pdb.set_trace() + + 
cls.atomic_dump_json(B.JSN(env_path,env_id),x) # (env_path, env_id, x) ''' This function save serialized data to disk @@ -341,18 +355,24 @@ def serialize_env_single(cls, state, env_id, env_path, schema): Old saving was unsafe: use .temp to make sure atomicity ''' -SCHEMAS = {} -SCHEMAS['venv'] = { -'jsons':'filedir', -'reload':'file', -} -SCHEMAS['file'] = 'file' + +@dset(STORES,'JPW') +@dset(STORES,'OneJsonPerWindow') @dset(STORES) class SimpleWindowJsonStorage(StoragePrototype): ''' One json per window ''' + + SCHEMAS = {} + SCHEMAS['venv'] = { + 'jsons':'filedir', + 'reload':'file', + } + SCHEMAS['file'] = 'file' + + schema_mapper = {} schema_mapper['vstate'] = SCHEMAS['venv'] schema_mapper['filedir'] = SCHEMAS['file'] @@ -366,18 +386,7 @@ def map_schema(self,par_schema,key): @classmethod def legacy_load_state(self, state, env_path, LazyEnvData): pass - # @staticmethod - # def cast_to_schema(v, schema): - # if schema == 'file': - # assert 0 - # elif schema == 'filedir': - # assert 0 - # elif isinstance(schema,Mapping): - # assert 0 - # # for - # else: - # raise NotImplementedError(f'cast_to_schema({schema!r})') - # return + @classmethod def callback_before_setitem(self, state, key, v): ''' @@ -424,6 +433,9 @@ def lazy_read_file_xt(self, tree, par, key, xt): window = k.rsplit('.json',1)[0] self.lazy_read_file_xt(ttree, xxo, window, self.schema_mapper[xt]) elif xt=='file': + ''' + [LazyCriteria] key is already in container dict and mapped to not None value + ''' if xxo is not None: pass else: @@ -453,12 +465,7 @@ def lazy_read_file_xt(self, tree, par, key, xt): 'an empty dict is enough. xt schema will lead the parsing' # xxo = {} - # if key is not None: ttree = B.SJN(tree, key) - # else: - # 'root={None:env}. 
a null parent for loading' - # ttree = tree - # import pdb; pdb.set_trace() for keyy,xtt in xt.items(): self.lazy_read_file_xt(ttree, xxo, keyy, xtt) else: @@ -488,13 +495,9 @@ def write_key_value_xt(cls, tree, key, data, xt): ''' ret = [] if xt == 'file': - 'single file ' - # assert key is not None, f'None not allowed write_key_value_xt({tree!r}, {key!r},{xt!r})' - # assert data.__len__() - # if data.__len__()==0: - # import pdb; pdb.set_trace() - # rett = cls.atomic_dump_json(tree, key, data) - rett = cls.atomic_dump_json(B.SJN(tree,key),data) + 'single file: if key is None, B.SJN will use tree only' + fn = B.JSN(tree,key) + rett = cls.atomic_dump_json(fn,data) ret.append( rett ) logging.debug(f'write_key_value_xt({tree!r}, {key!r})') @@ -538,16 +541,13 @@ def lazy_read_env_from_file(self, tree, env, schema): ''' Only loads windows that are not in state dict ''' - # xo = {} root = {None:env} env = self.lazy_read_file_xt(tree, root, None, schema) - # self.schema_dict) return env @classmethod def serialize_env_single(self, state, env_id, env_path, schema): return self.write_key_value_xt( env_path, env_id, state[env_id], schema) - # xt=self.schema_dict) @staticmethod @@ -573,7 +573,8 @@ def safe_parse_file(fn): return env_data - +@dset(STORES,'JPWA') +@dset(STORES,'OneJsonPerWindowAutoSave') @dset(STORES) class SimpleWindowJsonAutoSave(SimpleWindowJsonStorage): ''' @@ -581,14 +582,6 @@ class SimpleWindowJsonAutoSave(SimpleWindowJsonStorage): ''' pass - @staticmethod - def setitem_callback_setitem(state, key, v): - ''' - Autosave uses this callback to save to disk - - Needs to capture the setitem call of json dict - ''' - return None # def :param data: value to be stored, usually a dict like @@ -598,7 +591,7 @@ def save_children(self): return None -def get_led_cls(sel_led, sel_store): +def get_led_cls(sel_store): ''' LED stands for LazyEnvData ''' @@ -651,20 +644,34 @@ def lazy_load_children(self): Only loads windows that are not in state dict ''' - root = {None: 
self} - env = self.store.lazy_read_file_xt( self.tree, root, None, self.schema) - self._raw_dict = env._raw_dict - # assert 0 - # return env + if issubclass(self.store, SimpleWindowJsonStorage): + root = {None: self} + env = self.store.lazy_read_file_xt( self.tree, root, None, self.schema) + self._raw_dict = env._raw_dict + elif issubclass(self.store,SimpleJsonStorage): + ''' + One file per env caching scheme + ''' + if self.schema == 'vstate': + pass + elif self.schema == 'file': + self._raw_dict = v = self.store.lazy_read_env_from_file(self.tree, self._raw_dict, None) + else: + raise NotImplementedError(self.schema,self.store) + else: + raise NotImplementedError(self.store) + + lazy_load_data = lazy_load_children def save_children(self): ''' - save all + save all children nodes + + if called for env, then save the env + if called for window, then save the window file ''' - # return self.write_key_value_xt( tree, None, self._raw_dict, schema) return self.store.write_key_value_xt( self.tree, None, self, self.schema) - # return self.store.serialize_env_single( state, eid, env_path, schema = self.schema) def __getitem__(self, key): ''' @@ -691,18 +698,6 @@ def __iter__(self): def __len__(self): self.lazy_load_data() return len(self._raw_dict) - LazyContainerCurrent.LazyContainerCurrentBase =LazyContainerCurrent - - @dset(LEDS) - class LazyEnvData(LazyContainerCurrent): - ''' - Per-env laziness - ''' - schema = SCHEMAS['venv'] - LCC = LazyContainerCurrent - # SimpleWindowJsonStorage - def __init__(self, tree): - super().__init__(tree, self.schema, None) @classmethod def serialize_env_list_wschema(self, state, eids, env_path): @@ -717,17 +712,28 @@ def serialize_env_list_wschema(self, state, eids, env_path): for eid in eids: # import pdb; pdb.set_trace() v = state[eid] - v.save_children() - # self.store.serialize_env_single( state, eid, env_path, schema = self.schema) + if isinstance(self.store, SimpleWindowJsonAutoSave): + v.save_children() + else: + 
self.store.serialize_env_single( state, eid, env_path, schema = self.schema) return eids - return LEDS[sel_led] - # return LazyEnvData + LazyContainerCurrent.LazyContainerCurrentBase =LazyContainerCurrent + + @dset(LEDS) + class LazyEnvData(LazyContainerCurrent): + ''' + Per-env laziness + ''' + schema = _store.SCHEMAS['venv'] + LCC = LazyContainerCurrent + def __init__(self, tree): + super().__init__(tree, self.schema, None) -# LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonStorage') -LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonAutoSave') -# serialize_env = LazyEnvData.serialize_env_list ## [LEGACY] + + return LazyEnvData + tornado_settings = { "autoescape": None, @@ -758,9 +764,10 @@ class Application(tornado.web.Application): def __init__(self, port=DEFAULT_PORT, base_url='', env_path=DEFAULT_ENV_PATH, readonly=False, user_credential=None, use_frontend_client_polling=False, - eager_data_loading=False, LED=None): - - self.LED = LED + eager_data_loading=False, cache_type=None): + if cache_type is None: cache_type = DEFAULT_CACHE_TYPE + self.LED = get_led_cls(cache_type) + # self.LED = LED self.eager_data_loading = eager_data_loading self.env_path = env_path self.state = self.load_state() @@ -873,15 +880,12 @@ def load_state(self): 'state is a list of directory of LEDs' state = self.LED.LCC(env_path, schema = 'vstate', value= {}) - state.store.legacy_load_state(state, env_path, LED) - + state.store.legacy_load_state(state, env_path, self.LED) if self.eager_data_loading: for k,x in state.items(): x.lazy_load_data() - # state[eid].lazy_load_data() - ''' Creating default env ''' @@ -1152,7 +1156,7 @@ def on_message(self, message): copy.deepcopy(self.state[msg['prev_eid']]) self.state[msg['eid']]['reload'] = msg['data'] self.eid = msg['eid'] - self.app.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ##[TBCovered] + self.app.LED.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ##[TBCovered] elif 
cmd == 'delete_env': if 'eid' in msg: logging.info('closing environment {}'.format(msg['eid'])) @@ -1285,7 +1289,7 @@ def on_message(self, message): copy.deepcopy(self.state[msg['prev_eid']]) self.state[msg['eid']]['reload'] = msg['data'] self.eid = msg['eid'] - self.app.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ### [TBCR] + self.app.LED.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ### [TBCR] elif cmd == 'delete_env': if 'eid' in msg: logging.info('closing environment {}'.format(msg['eid'])) @@ -2024,7 +2028,7 @@ def wrap_func(handler, args): assert prev_eid in handler.state, 'env to be forked doesn\'t exit' handler.state[eid] = copy.deepcopy(handler.state[prev_eid]) - handler.app.serialize_env_list_wschema(handler.state, [eid], env_path=handler.app.env_path) ### [TBCR] + handler.app.LED.serialize_env_list_wschema(handler.state, [eid], env_path=handler.app.env_path) ### [TBCR] broadcast_envs(handler) handler.write(eid) @@ -2299,7 +2303,7 @@ def initialize(self, app): self.env_path = app.env_path self.login_enabled = app.login_enabled # self.store = app.store - self.serialize_env_list_wschema = app.serialize_env_list_wschema + self.serialize_env_list_wschema = app.LED.serialize_env_list_wschema @staticmethod def wrap_func(handler, args): @@ -2425,6 +2429,7 @@ def get(self, text): # function that downloads and installs javascript, css, and font dependencies: def download_scripts(proxies=None, install_dir=None): import visdom + DEBUG = '--debug' in sys.argv if DEBUG: visdom.__version__ = 'test' print("Checking for scripts.") @@ -2570,22 +2575,23 @@ def download_scripts(proxies=None, install_dir=None): with open(built_path, 'w+') as build_file: build_file.write(visdom.__version__) - def start_server(port=DEFAULT_PORT, hostname=DEFAULT_HOSTNAME, base_url=DEFAULT_BASE_URL, env_path=DEFAULT_ENV_PATH, readonly=False, print_func=None, user_credential=None, use_frontend_client_polling=False, bind_local=False, - 
eager_data_loading=False): + eager_data_loading=False, + cache_type = None, + # DEFAULT_cache_type, + + ): print("It's Alive!") app = Application(port=port, base_url=base_url, env_path=env_path, readonly=readonly, user_credential=user_credential, use_frontend_client_polling=use_frontend_client_polling, eager_data_loading=eager_data_loading, - LED=LED) - app.serialize_env_list_wschema = LED.serialize_env_list_wschema - # app.LCC = LazyContainerCurrent - # app.LED = LazyEnvData - # ContainerCurrent + cache_type = cache_type, + ) + if bind_local: @@ -2622,6 +2628,14 @@ def main(print_func=None): parser.add_argument('-env_path', metavar='env_path', type=str, default=DEFAULT_ENV_PATH, help='path to serialized session to reload.') + parser.add_argument('-cache_type', metavar='cache_type', type=str, + default=DEFAULT_CACHE_TYPE, + help='''specify how the received data should be synced + between memory and disk. + - JPE/OneJsonPerEnv: one json per environment + - JPW/OneJsonPerWindow: one json per window + - JPWA/OneJsonPerWindowAutoSave: one json per window, + autosave to disk when plotting''') parser.add_argument('-logging_level', metavar='logger_level', default='INFO', help='logging level (default = INFO). Can take ' @@ -2722,7 +2736,8 @@ def main(print_func=None): print_func=print_func, user_credential=user_credential, use_frontend_client_polling=FLAGS.use_frontend_client_polling, bind_local=FLAGS.bind_local, - eager_data_loading=FLAGS.eager_data_loading) + eager_data_loading=FLAGS.eager_data_loading, + cache_type = FLAGS.cache_type) def download_scripts_and_run(): download_scripts() From ff05aa779f475dda6263efba34c91731c409dcf2 Mon Sep 17 00:00:00 2001 From: shouldsee Date: Sat, 3 Sep 2022 14:27:52 +0800 Subject: [PATCH 09/15] [add,bugfix] adds cmdline -cache_type JPE,JPW,JPWA. 
fix bugs related to these modes parser.add_argument('-cache_type', metavar='cache_type', type=str, default=DEFAULT_CACHE_TYPE, help='''specify how the received data should be synced between memory and disk. - JPE/OneJsonPerEnv: one json per environment - JPW/OneJsonPerWindow: one json per window - JPWA/OneJsonPerWindowAutoSave: one json per window, autosave to disk when plotting''') --- py/visdom/Makefile | 17 +++- py/visdom/server.py | 224 ++++++++++++++++++++++++-------------------- 2 files changed, 135 insertions(+), 106 deletions(-) diff --git a/py/visdom/Makefile b/py/visdom/Makefile index afa0fba8..a9dbc0c4 100644 --- a/py/visdom/Makefile +++ b/py/visdom/Makefile @@ -4,5 +4,18 @@ clean: serve_test: python3 -mpdb -cc server.py -env_path test_data -eager_data_loading -logging_level 5 -test_simple: - python3 test_simple.py +serve_test_jpwa: + python3 -mpdb -cc server.py -env_path test_data -eager_data_loading -logging_level 5 -cache_type JPWA + +serve_test_jpe: + python3 -mpdb -cc server.py -env_path test_data -eager_data_loading -logging_level 5 -cache_type JPE + +serve_test_jpw: + python3 -mpdb -cc server.py -env_path test_data -eager_data_loading -logging_level 5 -cache_type JPW + + +test_save.py: + python3 test_save.py +test_cache.py: + python3 test_cache.py +.PHONY: test_cache.py test_save.py diff --git a/py/visdom/server.py b/py/visdom/server.py index 63a769f0..c35b05d9 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -8,7 +8,6 @@ """Server""" -DEBUG = 1 import argparse import copy import getpass @@ -49,6 +48,7 @@ DEFAULT_PORT = 8097 DEFAULT_HOSTNAME = "localhost" DEFAULT_BASE_URL = "/" +DEFAULT_CACHE_TYPE = 'OneJsonPerEnv' here = os.path.abspath(os.path.dirname(__file__)) COMPACT_SEPARATORS = (',', ':') @@ -163,6 +163,16 @@ def safe_join_with_none(x,y): else: return B.safe_dir(B.join(x,y)) SJN = safe_join_with_none + def join_safe_with_none(x,y): + ''' + make sure dir exists,then join. 
this is for file pointers + if y is None, then do not extend original path + ''' + if y is None: + return x + else: + return B.join(B.safe_dir(x),y) + JSN = join_safe_with_none J = join # class BasicLazyMapping(Mapping): @@ -249,21 +259,24 @@ def dumps(*a,**kw): return json._dumps_old(*a,**kw) json.dumps = dumps - # f.write(json.dumps(x,cls=self.CustomEncoder)) - # @staticmethod - # def serialize_all(state, env_path=DEFAULT_ENV_PATH): - # serialize_env_list(state, list(state.keys()), env_path=env_path) def legacy_save_children(x): if isinstance(x, LazyContainerPrototype): - # import pdb; pdb.set_trace() - x.save_children() + if issubclass(x.store, SimpleWindowJsonAutoSave): + # import pdb; pdb.set_trace() + x.save_children() else: pass +@dset(STORES,'JPE') +@dset(STORES,'OneJsonPerEnv') @dset(STORES) class SimpleJsonStorage(StoragePrototype): + SCHEMAS = {} + SCHEMAS['venv'] = 'file' + SCHEMAS['file'] = 'file' + ''' One file per env ''' @@ -275,7 +288,8 @@ def lazy_read_env_from_file(fn, state, schema): del schema ;'not used here' - if state is not None: + # if state is not None: + if state.__len__(): return state else: try: @@ -284,12 +298,13 @@ def lazy_read_env_from_file(fn, state, schema): except Exception as e: raise ValueError( "Failed loading environment json: {} - {}".format( - self.fn, repr(e))) + fn, repr(e))) _raw_dict = { 'jsons': env_data['jsons'], 'reload': env_data['reload'] } return _raw_dict + @classmethod def legacy_load_state(self, state, env_path, LazyEnvData): ''' @@ -317,13 +332,12 @@ def serialize_env_single(cls, state, env_id, env_path, schema): del schema; '[EXTRA_VAR]' # env_path_file = os.path.join(env_path, "{0}.json".format(env_id)) - # import pdb; pdb.set_trace() x = {k:v for k,v in state[env_id].items()} - x['reload'] - x['jsons'] - # os.makedirs(tree) if not os.path.exists(tree) else None - # cls.atomic_dump_json(env_path, env_id, x) - cls.atomic_dump_json(B.SJN(env_path,env_id),x) + + if x == {}: + import pdb; pdb.set_trace() + + 
cls.atomic_dump_json(B.JSN(env_path,env_id),x) # (env_path, env_id, x) ''' This function save serialized data to disk @@ -341,18 +355,24 @@ def serialize_env_single(cls, state, env_id, env_path, schema): Old saving was unsafe: use .temp to make sure atomicity ''' -SCHEMAS = {} -SCHEMAS['venv'] = { -'jsons':'filedir', -'reload':'file', -} -SCHEMAS['file'] = 'file' + +@dset(STORES,'JPW') +@dset(STORES,'OneJsonPerWindow') @dset(STORES) class SimpleWindowJsonStorage(StoragePrototype): ''' One json per window ''' + + SCHEMAS = {} + SCHEMAS['venv'] = { + 'jsons':'filedir', + 'reload':'file', + } + SCHEMAS['file'] = 'file' + + schema_mapper = {} schema_mapper['vstate'] = SCHEMAS['venv'] schema_mapper['filedir'] = SCHEMAS['file'] @@ -366,18 +386,7 @@ def map_schema(self,par_schema,key): @classmethod def legacy_load_state(self, state, env_path, LazyEnvData): pass - # @staticmethod - # def cast_to_schema(v, schema): - # if schema == 'file': - # assert 0 - # elif schema == 'filedir': - # assert 0 - # elif isinstance(schema,Mapping): - # assert 0 - # # for - # else: - # raise NotImplementedError(f'cast_to_schema({schema!r})') - # return + @classmethod def callback_before_setitem(self, state, key, v): ''' @@ -424,6 +433,9 @@ def lazy_read_file_xt(self, tree, par, key, xt): window = k.rsplit('.json',1)[0] self.lazy_read_file_xt(ttree, xxo, window, self.schema_mapper[xt]) elif xt=='file': + ''' + [LazyCriteria] key is already in container dict and mapped to not None value + ''' if xxo is not None: pass else: @@ -453,12 +465,7 @@ def lazy_read_file_xt(self, tree, par, key, xt): 'an empty dict is enough. xt schema will lead the parsing' # xxo = {} - # if key is not None: ttree = B.SJN(tree, key) - # else: - # 'root={None:env}. 
a null parent for loading' - # ttree = tree - # import pdb; pdb.set_trace() for keyy,xtt in xt.items(): self.lazy_read_file_xt(ttree, xxo, keyy, xtt) else: @@ -488,13 +495,9 @@ def write_key_value_xt(cls, tree, key, data, xt): ''' ret = [] if xt == 'file': - 'single file ' - # assert key is not None, f'None not allowed write_key_value_xt({tree!r}, {key!r},{xt!r})' - # assert data.__len__() - # if data.__len__()==0: - # import pdb; pdb.set_trace() - # rett = cls.atomic_dump_json(tree, key, data) - rett = cls.atomic_dump_json(B.SJN(tree,key),data) + 'single file: if key is None, B.SJN will use tree only' + fn = B.JSN(tree,key) + rett = cls.atomic_dump_json(fn,data) ret.append( rett ) logging.debug(f'write_key_value_xt({tree!r}, {key!r})') @@ -538,16 +541,13 @@ def lazy_read_env_from_file(self, tree, env, schema): ''' Only loads windows that are not in state dict ''' - # xo = {} root = {None:env} env = self.lazy_read_file_xt(tree, root, None, schema) - # self.schema_dict) return env @classmethod def serialize_env_single(self, state, env_id, env_path, schema): return self.write_key_value_xt( env_path, env_id, state[env_id], schema) - # xt=self.schema_dict) @staticmethod @@ -573,7 +573,8 @@ def safe_parse_file(fn): return env_data - +@dset(STORES,'JPWA') +@dset(STORES,'OneJsonPerWindowAutoSave') @dset(STORES) class SimpleWindowJsonAutoSave(SimpleWindowJsonStorage): ''' @@ -581,14 +582,6 @@ class SimpleWindowJsonAutoSave(SimpleWindowJsonStorage): ''' pass - @staticmethod - def setitem_callback_setitem(state, key, v): - ''' - Autosave uses this callback to save to disk - - Needs to capture the setitem call of json dict - ''' - return None # def :param data: value to be stored, usually a dict like @@ -598,7 +591,7 @@ def save_children(self): return None -def get_led_cls(sel_led, sel_store): +def get_led_cls(sel_store): ''' LED stands for LazyEnvData ''' @@ -651,20 +644,34 @@ def lazy_load_children(self): Only loads windows that are not in state dict ''' - root = {None: 
self} - env = self.store.lazy_read_file_xt( self.tree, root, None, self.schema) - self._raw_dict = env._raw_dict - # assert 0 - # return env + if issubclass(self.store, SimpleWindowJsonStorage): + root = {None: self} + env = self.store.lazy_read_file_xt( self.tree, root, None, self.schema) + self._raw_dict = env._raw_dict + elif issubclass(self.store,SimpleJsonStorage): + ''' + One file per env caching scheme + ''' + if self.schema == 'vstate': + pass + elif self.schema == 'file': + self._raw_dict = v = self.store.lazy_read_env_from_file(self.tree, self._raw_dict, None) + else: + raise NotImplementedError(self.schema,self.store) + else: + raise NotImplementedError(self.store) + + lazy_load_data = lazy_load_children def save_children(self): ''' - save all + save all children nodes + + if called for env, then save the env + if called for window, then save the window file ''' - # return self.write_key_value_xt( tree, None, self._raw_dict, schema) return self.store.write_key_value_xt( self.tree, None, self, self.schema) - # return self.store.serialize_env_single( state, eid, env_path, schema = self.schema) def __getitem__(self, key): ''' @@ -691,18 +698,6 @@ def __iter__(self): def __len__(self): self.lazy_load_data() return len(self._raw_dict) - LazyContainerCurrent.LazyContainerCurrentBase =LazyContainerCurrent - - @dset(LEDS) - class LazyEnvData(LazyContainerCurrent): - ''' - Per-env laziness - ''' - schema = SCHEMAS['venv'] - LCC = LazyContainerCurrent - # SimpleWindowJsonStorage - def __init__(self, tree): - super().__init__(tree, self.schema, None) @classmethod def serialize_env_list_wschema(self, state, eids, env_path): @@ -717,17 +712,28 @@ def serialize_env_list_wschema(self, state, eids, env_path): for eid in eids: # import pdb; pdb.set_trace() v = state[eid] - v.save_children() - # self.store.serialize_env_single( state, eid, env_path, schema = self.schema) + if isinstance(self.store, SimpleWindowJsonAutoSave): + v.save_children() + else: + 
self.store.serialize_env_single( state, eid, env_path, schema = self.schema) return eids - return LEDS[sel_led] - # return LazyEnvData + LazyContainerCurrent.LazyContainerCurrentBase =LazyContainerCurrent + + @dset(LEDS) + class LazyEnvData(LazyContainerCurrent): + ''' + Per-env laziness + ''' + schema = _store.SCHEMAS['venv'] + LCC = LazyContainerCurrent + def __init__(self, tree): + super().__init__(tree, self.schema, None) -# LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonStorage') -LED = LazyEnvData = get_led_cls( 'LazyEnvData', 'SimpleWindowJsonAutoSave') -# serialize_env = LazyEnvData.serialize_env_list ## [LEGACY] + + return LazyEnvData + tornado_settings = { "autoescape": None, @@ -758,9 +764,10 @@ class Application(tornado.web.Application): def __init__(self, port=DEFAULT_PORT, base_url='', env_path=DEFAULT_ENV_PATH, readonly=False, user_credential=None, use_frontend_client_polling=False, - eager_data_loading=False, LED=None): - - self.LED = LED + eager_data_loading=False, cache_type=None): + if cache_type is None: cache_type = DEFAULT_CACHE_TYPE + self.LED = get_led_cls(cache_type) + # self.LED = LED self.eager_data_loading = eager_data_loading self.env_path = env_path self.state = self.load_state() @@ -864,6 +871,7 @@ def load_state(self): RuntimeWarning ) return {'main': init_default_env()} + B.safe_dir(env_path) ensure_dir_exists(env_path) ''' @@ -873,15 +881,12 @@ def load_state(self): 'state is a list of directory of LEDs' state = self.LED.LCC(env_path, schema = 'vstate', value= {}) - state.store.legacy_load_state(state, env_path, LED) - + state.store.legacy_load_state(state, env_path, self.LED) if self.eager_data_loading: for k,x in state.items(): x.lazy_load_data() - # state[eid].lazy_load_data() - ''' Creating default env ''' @@ -1152,7 +1157,7 @@ def on_message(self, message): copy.deepcopy(self.state[msg['prev_eid']]) self.state[msg['eid']]['reload'] = msg['data'] self.eid = msg['eid'] - 
self.app.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ##[TBCovered] + self.app.LED.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ##[TBCovered] elif cmd == 'delete_env': if 'eid' in msg: logging.info('closing environment {}'.format(msg['eid'])) @@ -1285,7 +1290,7 @@ def on_message(self, message): copy.deepcopy(self.state[msg['prev_eid']]) self.state[msg['eid']]['reload'] = msg['data'] self.eid = msg['eid'] - self.app.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ### [TBCR] + self.app.LED.serialize_env_list_wschema(self.state, [self.eid], self.env_path) ### [TBCR] elif cmd == 'delete_env': if 'eid' in msg: logging.info('closing environment {}'.format(msg['eid'])) @@ -2024,7 +2029,7 @@ def wrap_func(handler, args): assert prev_eid in handler.state, 'env to be forked doesn\'t exit' handler.state[eid] = copy.deepcopy(handler.state[prev_eid]) - handler.app.serialize_env_list_wschema(handler.state, [eid], env_path=handler.app.env_path) ### [TBCR] + handler.app.LED.serialize_env_list_wschema(handler.state, [eid], env_path=handler.app.env_path) ### [TBCR] broadcast_envs(handler) handler.write(eid) @@ -2299,7 +2304,7 @@ def initialize(self, app): self.env_path = app.env_path self.login_enabled = app.login_enabled # self.store = app.store - self.serialize_env_list_wschema = app.serialize_env_list_wschema + self.serialize_env_list_wschema = app.LED.serialize_env_list_wschema @staticmethod def wrap_func(handler, args): @@ -2425,6 +2430,7 @@ def get(self, text): # function that downloads and installs javascript, css, and font dependencies: def download_scripts(proxies=None, install_dir=None): import visdom + DEBUG = '--debug' in sys.argv if DEBUG: visdom.__version__ = 'test' print("Checking for scripts.") @@ -2570,22 +2576,23 @@ def download_scripts(proxies=None, install_dir=None): with open(built_path, 'w+') as build_file: build_file.write(visdom.__version__) - def start_server(port=DEFAULT_PORT, 
hostname=DEFAULT_HOSTNAME, base_url=DEFAULT_BASE_URL, env_path=DEFAULT_ENV_PATH, readonly=False, print_func=None, user_credential=None, use_frontend_client_polling=False, bind_local=False, - eager_data_loading=False): + eager_data_loading=False, + cache_type = None, + # DEFAULT_cache_type, + + ): print("It's Alive!") app = Application(port=port, base_url=base_url, env_path=env_path, readonly=readonly, user_credential=user_credential, use_frontend_client_polling=use_frontend_client_polling, eager_data_loading=eager_data_loading, - LED=LED) - app.serialize_env_list_wschema = LED.serialize_env_list_wschema - # app.LCC = LazyContainerCurrent - # app.LED = LazyEnvData - # ContainerCurrent + cache_type = cache_type, + ) + if bind_local: @@ -2622,6 +2629,14 @@ def main(print_func=None): parser.add_argument('-env_path', metavar='env_path', type=str, default=DEFAULT_ENV_PATH, help='path to serialized session to reload.') + parser.add_argument('-cache_type', metavar='cache_type', type=str, + default=DEFAULT_CACHE_TYPE, + help='''specify how the received data should be synced + between memory and disk. + - JPE/OneJsonPerEnv: one json per environment + - JPW/OneJsonPerWindow: one json per window + - JPWA/OneJsonPerWindowAutoSave: one json per window, + autosave to disk when plotting''') parser.add_argument('-logging_level', metavar='logger_level', default='INFO', help='logging level (default = INFO). 
Can take ' @@ -2722,7 +2737,8 @@ def main(print_func=None): print_func=print_func, user_credential=user_credential, use_frontend_client_polling=FLAGS.use_frontend_client_polling, bind_local=FLAGS.bind_local, - eager_data_loading=FLAGS.eager_data_loading) + eager_data_loading=FLAGS.eager_data_loading, + cache_type = FLAGS.cache_type) def download_scripts_and_run(): download_scripts() From efc049e4140c88e8bd96385a6f362d5c1b7fc4cd Mon Sep 17 00:00:00 2001 From: shouldsee Date: Sat, 3 Sep 2022 16:18:10 +0800 Subject: [PATCH 10/15] [files] for Makefile --- py/visdom/test_cache.py | 6 ++++++ py/visdom/test_save.py | 6 ++++++ 2 files changed, 12 insertions(+) create mode 100644 py/visdom/test_cache.py create mode 100644 py/visdom/test_save.py diff --git a/py/visdom/test_cache.py b/py/visdom/test_cache.py new file mode 100644 index 00000000..713b5742 --- /dev/null +++ b/py/visdom/test_cache.py @@ -0,0 +1,6 @@ +import visdom +import numpy as np +vis = visdom.Visdom(env='test') +for i in range(5): + vis.scatter(np.random.random((50,2)),win=i) +#vis.save(['test']) diff --git a/py/visdom/test_save.py b/py/visdom/test_save.py new file mode 100644 index 00000000..6fdbdb4e --- /dev/null +++ b/py/visdom/test_save.py @@ -0,0 +1,6 @@ +import visdom +import numpy as np +vis = visdom.Visdom(env='test') +for i in range(5): + vis.scatter(np.random.random((50,2)),win=i) +vis.save(['test']) From cb5cb9f11bf3db8ffa8358c43d6f3853d47590a9 Mon Sep 17 00:00:00 2001 From: shouldsee Date: Tue, 27 Sep 2022 01:03:06 +0800 Subject: [PATCH 11/15] __delitem__ now triggers file deletion in JWPA. 
strict error in demo.py --- example/demo.py | 20 ++--- py/visdom/server.py | 177 ++++++++++++++++++++++++++++++++++++--- py/visdom/test_simple.py | 6 -- 3 files changed, 174 insertions(+), 29 deletions(-) delete mode 100644 py/visdom/test_simple.py diff --git a/example/demo.py b/example/demo.py index 5b9c2633..dc7d92d7 100644 --- a/example/demo.py +++ b/example/demo.py @@ -156,17 +156,17 @@ def run_demo(viz, env, args): np.random.seed(int(FLAGS.seed)) if FLAGS.run == "all": - try: +# try: run_demo(viz, FLAGS.env if FLAGS.env else None, FLAGS.args) - except Exception as e: - print( - "The visdom experienced an exception while running: {}\n" - "The demo displays up-to-date functionality with the GitHub " - "version, which may not yet be pushed to pip. Please upgrade " - "using `pip install -e .` or `easy_install .`\n" - "If this does not resolve the problem, please open an issue on " - "our GitHub.".format(repr(e)) - ) +# except Exception as e: +# print( +# "The visdom experienced an exception while running: {}\n" +# "The demo displays up-to-date functionality with the GitHub " +# "version, which may not yet be pushed to pip. 
Please upgrade " +# "using `pip install -e .` or `easy_install .`\n" +# "If this does not resolve the problem, please open an issue on " +# "our GitHub.".format(repr(e)) +# ) else: locals()[FLAGS.run](viz, FLAGS.run + FLAGS.env_suffix if not FLAGS.env else FLAGS.env, FLAGS.args) diff --git a/py/visdom/server.py b/py/visdom/server.py index c35b05d9..f5cb4c05 100644 --- a/py/visdom/server.py +++ b/py/visdom/server.py @@ -179,12 +179,14 @@ def join_safe_with_none(x,y): STORES = {} def NIF(*a,**kw): raise NotImplementedError() +NullValue = object() class StoragePrototype(object): serialize_env = NIF get_valid_env_list = NIF lazy_read_env_from_file = NIF serialize_env_single = NIF + managed_dir = NullValue ''' This function save serialized data to disk @@ -199,6 +201,15 @@ class StoragePrototype(object): and mtime equals to the stored mtime, then ''' # atomic_dump_json = NIF + @staticmethod + def callback_before_delitem(state,key): + # setitem(state, key, v): + ''' + this callback is blocking. 
could be used for sanity check + ''' + return key + + @staticmethod def callback_before_setitem(state, key, v): ''' @@ -277,6 +288,7 @@ class SimpleJsonStorage(StoragePrototype): SCHEMAS['venv'] = 'file' SCHEMAS['file'] = 'file' + ''' One file per env ''' @@ -357,6 +369,10 @@ def serialize_env_single(cls, state, env_id, env_path, schema): + +import os,shutil +import glob + @dset(STORES,'JPW') @dset(STORES,'OneJsonPerWindow') @dset(STORES) @@ -376,6 +392,7 @@ class SimpleWindowJsonStorage(StoragePrototype): schema_mapper = {} schema_mapper['vstate'] = SCHEMAS['venv'] schema_mapper['filedir'] = SCHEMAS['file'] + # schema_mapper @classmethod def map_schema(self,par_schema,key): @@ -387,17 +404,100 @@ def map_schema(self,par_schema,key): def legacy_load_state(self, state, env_path, LazyEnvData): pass + + @staticmethod + def path_is_parent(parent_path, child_path): + ''' + https://stackoverflow.com/a/37095733/8083313 + ''' + # Smooth out relative path names, note: if you are concerned about symbolic links, you should use os.path.realpath too + + parent_path = os.path.realpath(parent_path) + child_path = os.path.realpath(child_path) + + # Compare the common path of the parent and child path with the common path of just the parent path. Using the commonpath method on just the parent path will regularise the path name in the same way as the comparison that deals with both paths, removing any trailing path separator + return os.path.commonpath([parent_path]) == os.path.commonpath([parent_path, child_path]) + @classmethod - def callback_before_setitem(self, state, key, v): + def safe_rm_path(cls, tree,typ): + par = cls.managed_dir + if cls.managed_dir is None: + assert 0, f'This should not happen. 
self.managed_dir not specified for {cls!r}' + if not cls.path_is_parent(par, tree): + assert 0,f'parent({par}) needs to contain ({tree})' + + if typ=='file': + os.unlink(tree) + elif typ=='dir': + shutil.rmtree(tree) + else: + raise NotImplementedError(typ) + + @classmethod + def safe_rm_node(cls, node): + # if isinstance() + if node.schema == 'file': + cls.safe_rm_path(cls.add_extension(node.tree), 'file') + elif node.schema=='filedir': + cls.safe_rm_path(node.tree, 'dir') + elif isinstance(node.schema, Mapping): + for k,v in node.items(): + cls.safe_rm_node(v) + else: + raise NotImplementedError(node.schema) + + # # +'.json') + # + # tree = node.tree + # print(f'{par}\n{tree}\n{cls.path_is_parent(par,tree)},{os.path.exists(tree)}') + # if cls.path_is_parent(par, tree): + # + # for tree_match in glob.glob(tree+'*'): + # ''' + # This is to match file like .json + # ''' + # shutil.rmtree(tree_match) + # # res: + # for + # shutil.rmtree(tree) + + @classmethod + def callback_before_delitem(cls,node,key): + ''' + persistent file is removed if item is deleted from memory + ''' + if key in node: + if node.schema == 'file': + pass + else: + child = node.data[key] + cls.safe_rm_node(child) + # cls.safe_rmtree(child.tree) + + return + + + @classmethod + def callback_before_setitem(cls, self, key, v): ''' casting incoming data according to schemas need to inherit tree path from state ''' # return v - assert key is not None,f'not allowed in {state!r}' - v = state.LazyContainerCurrentBase( B.J( state.tree,key), self.map_schema( state.schema, key), v) - logging.log(5,f'callback_before_setitem({repr(state)[:10]},{key!r},{v!r})') + assert key is not None,f'not allowed in {self!r}' + if self.schema == 'file': + ''' + file is a terminal node. 
its content remain uncasted + ''' + v = v + else: + v = self.LazyContainerCurrentBase( B.J( self.tree,key), cls.map_schema( self.schema, key), v) + # print('-----') + + if 5 >= logging.root.level: + logging.log(5,f'callback_before_setitem({repr(self)[:10]},{key!r},{repr(v)[:20]})') + # print('-----'*2) # tree = state.tree # v = self.cast_to_schema(v, state.schema[key]) return v @@ -585,21 +685,29 @@ class SimpleWindowJsonAutoSave(SimpleWindowJsonStorage): # def :param data: value to be stored, usually a dict like -class LazyContainerPrototype(Mapping): +# from collections import Mapping +import collections +class LazyContainerPrototype(collections.UserDict): +# class LazyContainerPrototype(Mapping): pass def save_children(self): return None +# def __delitem__(self,k): +# print(f'[LCP.__delitem__] {k}') +# LCP = LazyContainerPrototype +# import pdb; pdb.set_trace() - -def get_led_cls(sel_store): +def get_led_cls(sel_store, env_path): ''' LED stands for LazyEnvData ''' # class LazyEnvData(Mapping, SimpleJsonStorage): # if sel_store == '' _store = STORES[sel_store] + _store.managed_dir = env_path LEDS = {} + _RaiseKeyError = object() @dset(LEDS) class LazyContainerCurrent(LazyContainerPrototype): # def __init__(self,): @@ -608,7 +716,7 @@ class LazyContainerCurrent(LazyContainerPrototype): get_valid_env_list = _store.get_valid_env_list lazy_read_env_from_file = _store.lazy_read_env_from_file atomic_dump_json = _store.atomic_dump_json - + # _env_path def __init__(self, tree, schema, value): self._tree = tree self.schema = schema @@ -619,7 +727,40 @@ def __init__(self, tree, schema, value): v = v._raw_dict else: v = v - self._raw_dict = (v) + # print(type(v)) + # super().__init__({}) + super().__init__({}) + self.data = v + + if tree=='visdom_data/main/jsons': + print(f'{self}.__init__') + + # self.data.update(v) + + # self._raw_dict = (v) + def _set_raw_dict(self,v): + self.data = v + @property + def _raw_dict(self): + return self.data + def __delitem__(self,key): + 
print(f'{self!r}.__delitem__({key})') + self.store.callback_before_delitem(self,key) + # print(key in self) + print(self) + super().__delitem__(key) + print(key in self.data) + print(f'{self!r}.__delitem__({key})') + # print(f'{self.__class__}({hex(id(self)})).__delitem__({key})') + # print(self) + # self.data.__delitem__(key) + # print(key in self.data) + # print(key in self.data) + # print(key in self) + + def __repr__(self): + return f'<{self.__class__}(data_id={hex(id(self.data))},tree={self.tree}) with {len(self.data)} elements>' + # @property def tree(self): @@ -647,7 +788,9 @@ def lazy_load_children(self): if issubclass(self.store, SimpleWindowJsonStorage): root = {None: self} env = self.store.lazy_read_file_xt( self.tree, root, None, self.schema) - self._raw_dict = env._raw_dict + # self._raw_dict = env._raw_dict + self._set_raw_dict(env._raw_dict) + # self._raw_dict.update(env._raw_dict) elif issubclass(self.store,SimpleJsonStorage): ''' One file per env caching scheme @@ -655,13 +798,16 @@ def lazy_load_children(self): if self.schema == 'vstate': pass elif self.schema == 'file': - self._raw_dict = v = self.store.lazy_read_env_from_file(self.tree, self._raw_dict, None) + v = self.store.lazy_read_env_from_file(self.tree, self._raw_dict, None) + # self._raw_dict = v + self._set_raw_dict(v) + # self._raw_dict.update(v) else: raise NotImplementedError(self.schema,self.store) else: raise NotImplementedError(self.store) - + # import pdb; pdb.set_trace() lazy_load_data = lazy_load_children def save_children(self): @@ -766,7 +912,7 @@ def __init__(self, port=DEFAULT_PORT, base_url='', user_credential=None, use_frontend_client_polling=False, eager_data_loading=False, cache_type=None): if cache_type is None: cache_type = DEFAULT_CACHE_TYPE - self.LED = get_led_cls(cache_type) + self.LED = get_led_cls(cache_type, env_path) # self.LED = LED self.eager_data_loading = eager_data_loading self.env_path = env_path @@ -1542,6 +1688,8 @@ def post(self): class 
ExistsHandler(BaseHandler): def initialize(self, app): self.state = app.state + print(self.state) + print(app.state) self.subs = app.subs self.sources = app.sources self.port = app.port @@ -1551,6 +1699,9 @@ def initialize(self, app): @staticmethod def wrap_func(handler, args): eid = extract_eid(args) + if not args['win']: + handler.write('false') + return if eid in handler.state and args['win'] in handler.state[eid]['jsons']: handler.write('true') else: diff --git a/py/visdom/test_simple.py b/py/visdom/test_simple.py deleted file mode 100644 index 6fdbdb4e..00000000 --- a/py/visdom/test_simple.py +++ /dev/null @@ -1,6 +0,0 @@ -import visdom -import numpy as np -vis = visdom.Visdom(env='test') -for i in range(5): - vis.scatter(np.random.random((50,2)),win=i) -vis.save(['test']) From 7cbb7c419a99177e8a3969bfe6cd2ae06839bcbc Mon Sep 17 00:00:00 2001 From: shouldsee Date: Tue, 27 Sep 2022 18:50:03 +0800 Subject: [PATCH 12/15] [adds] TextPane click callback --- js/TextPane.js | 6 + py/visdom/static/js/main.js | 117355 ++++++++++++++++++++++++++++++++- yarn.lock | 10197 ++- 3 files changed, 122398 insertions(+), 5160 deletions(-) diff --git a/js/TextPane.js b/js/TextPane.js index 09735764..1db93bec 100644 --- a/js/TextPane.js +++ b/js/TextPane.js @@ -29,6 +29,12 @@ class TextPane extends React.Component { key_code: e.keyCode, }); break; + case 'click': + this.props.appApi.sendPaneMessage({ + event_type: 'Click', + }); + break; + } }; diff --git a/py/visdom/static/js/main.js b/py/visdom/static/js/main.js index 0cca9747..bfe1b00c 100644 --- a/py/visdom/static/js/main.js +++ b/py/visdom/static/js/main.js @@ -1,3 +1,117352 @@ -/*! 
For license information please see main.js.LICENSE.txt */ -(()=>{var e={5993:(e,t)=>{"use strict";function n(){return!1}function r(){return!0}function i(){this.timeStamp=Date.now(),this.target=void 0,this.currentTarget=void 0}Object.defineProperty(t,"__esModule",{value:!0}),i.prototype={isEventObject:1,constructor:i,isDefaultPrevented:n,isPropagationStopped:n,isImmediatePropagationStopped:n,preventDefault:function(){this.isDefaultPrevented=r},stopPropagation:function(){this.isPropagationStopped=r},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=r,this.stopPropagation()},halt:function(e){e?this.stopImmediatePropagation():this.stopPropagation(),this.preventDefault()}},t.default=i,e.exports=t.default},4577:(e,t,n)=>{"use strict";function r(e){return e&&e.__esModule?e:{default:e}}Object.defineProperty(t,"__esModule",{value:!0});var i=r(n(5993)),o=r(n(7418)),a=!1,s=["altKey","bubbles","cancelable","ctrlKey","currentTarget","eventPhase","metaKey","shiftKey","target","timeStamp","view","type"];function l(e){return null==e}var c=[{reg:/^key/,props:["char","charCode","key","keyCode","which"],fix:function(e,t){l(e.which)&&(e.which=l(t.charCode)?t.keyCode:t.charCode),void 0===e.metaKey&&(e.metaKey=e.ctrlKey)}},{reg:/^touch/,props:["touches","changedTouches","targetTouches"]},{reg:/^hashchange$/,props:["newURL","oldURL"]},{reg:/^gesturechange$/i,props:["rotation","scale"]},{reg:/^(mousewheel|DOMMouseScroll)$/,props:[],fix:function(e,t){var n=void 0,r=void 0,i=void 0,o=t.wheelDelta,a=t.axis,s=t.wheelDeltaY,l=t.wheelDeltaX,c=t.detail;o&&(i=o/120),c&&(i=0-(c%3==0?c/3:c)),void 0!==a&&(a===e.HORIZONTAL_AXIS?(r=0,n=0-i):a===e.VERTICAL_AXIS&&(n=0,r=i)),void 0!==s&&(r=s/120),void 0!==l&&(n=-1*l/120),n||r||(r=i),void 0!==n&&(e.deltaX=n),void 0!==r&&(e.deltaY=r),void 
0!==i&&(e.delta=i)}},{reg:/^mouse|contextmenu|click|mspointer|(^DOMMouseScroll$)/i,props:["buttons","clientX","clientY","button","offsetX","relatedTarget","which","fromElement","toElement","offsetY","pageX","pageY","screenX","screenY"],fix:function(e,t){var n=void 0,r=void 0,i=void 0,o=e.target,a=t.button;return o&&l(e.pageX)&&!l(t.clientX)&&(r=(n=o.ownerDocument||document).documentElement,i=n.body,e.pageX=t.clientX+(r&&r.scrollLeft||i&&i.scrollLeft||0)-(r&&r.clientLeft||i&&i.clientLeft||0),e.pageY=t.clientY+(r&&r.scrollTop||i&&i.scrollTop||0)-(r&&r.clientTop||i&&i.clientTop||0)),e.which||void 0===a||(e.which=1&a?1:2&a?3:4&a?2:0),!e.relatedTarget&&e.fromElement&&(e.relatedTarget=e.fromElement===o?e.toElement:e.fromElement),e}}];function u(){return!0}function p(){return a}function h(e){var t=e.type,n="function"==typeof e.stopPropagation||"boolean"==typeof e.cancelBubble;i.default.call(this),this.nativeEvent=e;var r=p;"defaultPrevented"in e?r=e.defaultPrevented?u:p:"getPreventDefault"in e?r=e.getPreventDefault()?u:p:"returnValue"in e&&(r=e.returnValue===a?u:p),this.isDefaultPrevented=r;var o=[],l=void 0,h=void 0,f=s.concat();for(c.forEach((function(e){t.match(e.reg)&&(f=f.concat(e.props),e.fix&&o.push(e.fix))})),l=f.length;l;)this[h=f[--l]]=e[h];for(!this.target&&n&&(this.target=e.srcElement||document),this.target&&3===this.target.nodeType&&(this.target=this.target.parentNode),l=o.length;l;)(0,o[--l])(this,e);this.timeStamp=e.timeStamp||Date.now()}var f=i.default.prototype;(0,o.default)(h.prototype,f,{constructor:h,preventDefault:function(){var e=this.nativeEvent;e.preventDefault?e.preventDefault():e.returnValue=a,f.preventDefault.call(this)},stopPropagation:function(){var e=this.nativeEvent;e.stopPropagation?e.stopPropagation():e.cancelBubble=!0,f.stopPropagation.call(this)}}),t.default=h,e.exports=t.default},4953:(e,t,n)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t,n,r){function o(t){var r=new 
i.default(t);n.call(e,r)}if(e.addEventListener){var a=(s=!1,"object"==typeof r?s=r.capture||!1:"boolean"==typeof r&&(s=r),e.addEventListener(t,o,r||!1),{v:{remove:function(){e.removeEventListener(t,o,s)}}});if("object"==typeof a)return a.v}else if(e.attachEvent)return e.attachEvent("on"+t,o),{remove:function(){e.detachEvent("on"+t,o)}};var s};var r,i=(r=n(4577))&&r.__esModule?r:{default:r};e.exports=t.default},2945:(e,t,n)=>{e.exports={default:n(6981),__esModule:!0}},5861:(e,t,n)=>{e.exports={default:n(5627),__esModule:!0}},2242:(e,t,n)=>{e.exports={default:n(3391),__esModule:!0}},5345:(e,t,n)=>{e.exports={default:n(433),__esModule:!0}},3516:(e,t,n)=>{e.exports={default:n(25),__esModule:!0}},4275:(e,t,n)=>{e.exports={default:n(2392),__esModule:!0}},9663:(e,t)=>{"use strict";t.Z=function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}},2600:(e,t,n)=>{"use strict";var r,i=(r=n(2242))&&r.__esModule?r:{default:r};t.Z=function(){function e(e,t){for(var n=0;n{"use strict";var r,i=(r=n(2242))&&r.__esModule?r:{default:r};t.Z=function(e,t,n){return t in e?(0,i.default)(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}},8239:(e,t,n)=>{"use strict";var r,i=(r=n(2945))&&r.__esModule?r:{default:r};t.Z=i.default||function(e){for(var t=1;t{"use strict";var r=a(n(5345)),i=a(n(5861)),o=a(n(2444));function a(e){return e&&e.__esModule?e:{default:e}}t.Z=function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function, not "+(void 0===t?"undefined":(0,o.default)(t)));e.prototype=(0,i.default)(t&&t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}),t&&(r.default?(0,r.default)(e,t):e.__proto__=t)}},2723:(e,t)=>{"use strict";t.Z=function(e,t){var n={};for(var r in e)t.indexOf(r)>=0||Object.prototype.hasOwnProperty.call(e,r)&&(n[r]=e[r]);return n}},9135:(e,t,n)=>{"use strict";var r,i=(r=n(2444))&&r.__esModule?r:{default:r};t.Z=function(e,t){if(!e)throw new 
ReferenceError("this hasn't been initialised - super() hasn't been called");return!t||"object"!==(void 0===t?"undefined":(0,i.default)(t))&&"function"!=typeof t?e:t}},2444:(e,t,n)=>{"use strict";t.__esModule=!0;var r=a(n(4275)),i=a(n(3516)),o="function"==typeof i.default&&"symbol"==typeof r.default?function(e){return typeof e}:function(e){return e&&"function"==typeof i.default&&e.constructor===i.default&&e!==i.default.prototype?"symbol":typeof e};function a(e){return e&&e.__esModule?e:{default:e}}t.default="function"==typeof i.default&&"symbol"===o(r.default)?function(e){return void 0===e?"undefined":o(e)}:function(e){return e&&"function"==typeof i.default&&e.constructor===i.default&&e!==i.default.prototype?"symbol":void 0===e?"undefined":o(e)}},487:e=>{var t={utf8:{stringToBytes:function(e){return t.bin.stringToBytes(unescape(encodeURIComponent(e)))},bytesToString:function(e){return decodeURIComponent(escape(t.bin.bytesToString(e)))}},bin:{stringToBytes:function(e){for(var t=[],n=0;n{var n;!function(){"use strict";var r={}.hasOwnProperty;function i(){for(var e=[],t=0;t{"use strict";function r(e){var t,n,i="";if("string"==typeof e||"number"==typeof e)i+=e;else if("object"==typeof e)if(Array.isArray(e))for(t=0;ti})},2809:(e,t,n)=>{try{var r=n(4155)}catch(e){r=n(4155)}var i=/\s+/,o=Object.prototype.toString;function a(e){if(!e||!e.nodeType)throw new Error("A DOM element reference is required");this.el=e,this.list=e.classList}e.exports=function(e){return new a(e)},a.prototype.add=function(e){if(this.list)return this.list.add(e),this;var t=this.array();return~r(t,e)||t.push(e),this.el.className=t.join(" "),this},a.prototype.remove=function(e){if("[object RegExp]"==o.call(e))return this.removeMatching(e);if(this.list)return this.list.remove(e),this;var t=this.array(),n=r(t,e);return~n&&t.splice(n,1),this.el.className=t.join(" "),this},a.prototype.removeMatching=function(e){for(var t=this.array(),n=0;n{e.exports=function(e,t){if(e.indexOf)return e.indexOf(t);for(var 
n=0;n{n(2699),e.exports=n(4579).Object.assign},5627:(e,t,n)=>{n(6760);var r=n(4579).Object;e.exports=function(e,t){return r.create(e,t)}},3391:(e,t,n)=>{n(1477);var r=n(4579).Object;e.exports=function(e,t,n){return r.defineProperty(e,t,n)}},433:(e,t,n)=>{n(9349),e.exports=n(4579).Object.setPrototypeOf},25:(e,t,n)=>{n(6840),n(4058),n(8174),n(6461),e.exports=n(4579).Symbol},2392:(e,t,n)=>{n(1867),n(3871),e.exports=n(5103).f("iterator")},5663:e=>{e.exports=function(e){if("function"!=typeof e)throw TypeError(e+" is not a function!");return e}},9003:e=>{e.exports=function(){}},2159:(e,t,n)=>{var r=n(6727);e.exports=function(e){if(!r(e))throw TypeError(e+" is not an object!");return e}},7428:(e,t,n)=>{var r=n(7932),i=n(8728),o=n(6531);e.exports=function(e){return function(t,n,a){var s,l=r(t),c=i(l.length),u=o(a,c);if(e&&n!=n){for(;c>u;)if((s=l[u++])!=s)return!0}else for(;c>u;u++)if((e||u in l)&&l[u]===n)return e||u||0;return!e&&-1}}},2894:e=>{var t={}.toString;e.exports=function(e){return t.call(e).slice(8,-1)}},4579:e=>{var t=e.exports={version:"2.6.12"};"number"==typeof __e&&(__e=t)},3817:(e,t,n)=>{var r=n(5663);e.exports=function(e,t,n){if(r(e),void 0===t)return e;switch(n){case 1:return function(n){return e.call(t,n)};case 2:return function(n,r){return e.call(t,n,r)};case 3:return function(n,r,i){return e.call(t,n,r,i)}}return function(){return e.apply(t,arguments)}}},8333:e=>{e.exports=function(e){if(null==e)throw TypeError("Can't call method on "+e);return e}},9666:(e,t,n)=>{e.exports=!n(7929)((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},7467:(e,t,n)=>{var r=n(6727),i=n(3938).document,o=r(i)&&r(i.createElement);e.exports=function(e){return o?i.createElement(e):{}}},3338:e=>{e.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},337:(e,t,n)=>{var r=n(6162),i=n(8195),o=n(6274);e.exports=function(e){var t=r(e),n=i.f;if(n)for(var 
a,s=n(e),l=o.f,c=0;s.length>c;)l.call(e,a=s[c++])&&t.push(a);return t}},3856:(e,t,n)=>{var r=n(3938),i=n(4579),o=n(3817),a=n(1818),s=n(7069),l=function(e,t,n){var c,u,p,h=e&l.F,f=e&l.G,d=e&l.S,m=e&l.P,v=e&l.B,g=e&l.W,y=f?i:i[t]||(i[t]={}),b=y.prototype,w=f?r:d?r[t]:(r[t]||{}).prototype;for(c in f&&(n=t),n)(u=!h&&w&&void 0!==w[c])&&s(y,c)||(p=u?w[c]:n[c],y[c]=f&&"function"!=typeof w[c]?n[c]:v&&u?o(p,r):g&&w[c]==p?function(e){var t=function(t,n,r){if(this instanceof e){switch(arguments.length){case 0:return new e;case 1:return new e(t);case 2:return new e(t,n)}return new e(t,n,r)}return e.apply(this,arguments)};return t.prototype=e.prototype,t}(p):m&&"function"==typeof p?o(Function.call,p):p,m&&((y.virtual||(y.virtual={}))[c]=p,e&l.R&&b&&!b[c]&&a(b,c,p)))};l.F=1,l.G=2,l.S=4,l.P=8,l.B=16,l.W=32,l.U=64,l.R=128,e.exports=l},7929:e=>{e.exports=function(e){try{return!!e()}catch(e){return!0}}},3938:e=>{var t=e.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=t)},7069:e=>{var t={}.hasOwnProperty;e.exports=function(e,n){return t.call(e,n)}},1818:(e,t,n)=>{var r=n(4743),i=n(3101);e.exports=n(9666)?function(e,t,n){return r.f(e,t,i(1,n))}:function(e,t,n){return e[t]=n,e}},4881:(e,t,n)=>{var r=n(3938).document;e.exports=r&&r.documentElement},3758:(e,t,n)=>{e.exports=!n(9666)&&!n(7929)((function(){return 7!=Object.defineProperty(n(7467)("div"),"a",{get:function(){return 7}}).a}))},799:(e,t,n)=>{var r=n(2894);e.exports=Object("z").propertyIsEnumerable(0)?Object:function(e){return"String"==r(e)?e.split(""):Object(e)}},1421:(e,t,n)=>{var r=n(2894);e.exports=Array.isArray||function(e){return"Array"==r(e)}},6727:e=>{e.exports=function(e){return"object"==typeof e?null!==e:"function"==typeof e}},3945:(e,t,n)=>{"use strict";var r=n(8989),i=n(3101),o=n(5378),a={};n(1818)(a,n(2939)("iterator"),(function(){return 
this})),e.exports=function(e,t,n){e.prototype=r(a,{next:i(1,n)}),o(e,t+" Iterator")}},5700:(e,t,n)=>{"use strict";var r=n(6227),i=n(3856),o=n(7470),a=n(1818),s=n(5449),l=n(3945),c=n(5378),u=n(5089),p=n(2939)("iterator"),h=!([].keys&&"next"in[].keys()),f="keys",d="values",m=function(){return this};e.exports=function(e,t,n,v,g,y,b){l(n,t,v);var w,x,_,A=function(e){if(!h&&e in T)return T[e];switch(e){case f:case d:return function(){return new n(this,e)}}return function(){return new n(this,e)}},E=t+" Iterator",S=g==d,M=!1,T=e.prototype,C=T[p]||T["@@iterator"]||g&&T[g],P=C||A(g),O=g?S?A("entries"):P:void 0,k="Array"==t&&T.entries||C;if(k&&(_=u(k.call(new e)))!==Object.prototype&&_.next&&(c(_,E,!0),r||"function"==typeof _[p]||a(_,p,m)),S&&C&&C.name!==d&&(M=!0,P=function(){return C.call(this)}),r&&!b||!h&&!M&&T[p]||a(T,p,P),s[t]=P,s[E]=m,g)if(w={values:S?P:A(d),keys:y?P:A(f),entries:O},b)for(x in w)x in T||o(T,x,w[x]);else i(i.P+i.F*(h||M),t,w);return w}},5084:e=>{e.exports=function(e,t){return{value:t,done:!!e}}},5449:e=>{e.exports={}},6227:e=>{e.exports=!0},7177:(e,t,n)=>{var r=n(5730)("meta"),i=n(6727),o=n(7069),a=n(4743).f,s=0,l=Object.isExtensible||function(){return!0},c=!n(7929)((function(){return l(Object.preventExtensions({}))})),u=function(e){a(e,r,{value:{i:"O"+ ++s,w:{}}})},p=e.exports={KEY:r,NEED:!1,fastKey:function(e,t){if(!i(e))return"symbol"==typeof e?e:("string"==typeof e?"S":"P")+e;if(!o(e,r)){if(!l(e))return"F";if(!t)return"E";u(e)}return e[r].i},getWeak:function(e,t){if(!o(e,r)){if(!l(e))return!0;if(!t)return!1;u(e)}return e[r].w},onFreeze:function(e){return c&&p.NEED&&l(e)&&!o(e,r)&&u(e),e}}},8082:(e,t,n)=>{"use strict";var r=n(9666),i=n(6162),o=n(8195),a=n(6274),s=n(6530),l=n(799),c=Object.assign;e.exports=!c||n(7929)((function(){var e={},t={},n=Symbol(),r="abcdefghijklmnopqrst";return e[n]=7,r.split("").forEach((function(e){t[e]=e})),7!=c({},e)[n]||Object.keys(c({},t)).join("")!=r}))?function(e,t){for(var 
n=s(e),c=arguments.length,u=1,p=o.f,h=a.f;c>u;)for(var f,d=l(arguments[u++]),m=p?i(d).concat(p(d)):i(d),v=m.length,g=0;v>g;)f=m[g++],r&&!h.call(d,f)||(n[f]=d[f]);return n}:c},8989:(e,t,n)=>{var r=n(2159),i=n(7856),o=n(3338),a=n(7281)("IE_PROTO"),s=function(){},l=function(){var e,t=n(7467)("iframe"),r=o.length;for(t.style.display="none",n(4881).appendChild(t),t.src="javascript:",(e=t.contentWindow.document).open(),e.write("