Commit

use my.config instead of mycfg; minor cleanups and docstrings
karlicoss committed Apr 11, 2020
1 parent f31ff71 commit e5b3a1e
Showing 37 changed files with 142 additions and 110 deletions.
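
The diffs below all follow the same pattern: the ad-hoc mycfg package is replaced by my.config, and each module imports its own namespace from it (for example, from my.config import bluemaestro as config). The config itself is user-supplied and not part of this commit; what follows is a minimal illustrative sketch of a my/config/__init__.py that would satisfy a few of the imports below, with invented paths and attribute names taken from what the diffs reference:

# Illustrative sketch only -- not part of this commit; the paths are invented.
# The attribute names are the ones the modules in this diff actually reference.
from pathlib import Path

class bluemaestro:
    export_path = Path('~/data/bluemaestro/').expanduser()  # globbed for *.db
    cache_path  = '/tmp/bluemaestro.cache'

class emfit:
    export_path = Path('~/data/emfit/').expanduser()
    cache_path  = '/tmp/emfit.cache'
    tz = 'Europe/London'    # passed to pytz.timezone() in my/emfit
    excluded_sids = []      # session ids to skip

class github:
    export_dir = Path('~/data/github/').expanduser()
    gdpr_dir   = Path('~/data/github-gdpr/').expanduser()
    cache_dir  = '/tmp/github.cache'

Whether these namespaces are classes, submodules, or something like SimpleNamespace is left to the user; the modules only access attributes on them.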
6 changes: 3 additions & 3 deletions my/bluemaestro/__init__.py
@@ -14,22 +14,22 @@
 from ..common import mcachew, LazyLogger, get_files
 
 
-import mycfg
+from my.config import bluemaestro as config
 
 
 logger = LazyLogger('bluemaestro', level='debug')
 
 
 def _get_exports():
-    return get_files(mycfg.bluemaestro.export_path, glob='*.db')
+    return get_files(config.export_path, glob='*.db')
 
 
 class Measurement(NamedTuple):
     dt: datetime
     temp: float
 
 
-@mcachew(cache_path=mycfg.bluemaestro.cache_path)
+@mcachew(cache_path=config.cache_path)
 def _iter_measurements(dbs) -> Iterable[Measurement]:
     # I guess we can affort keeping them in sorted order
     points: Set[Measurement] = set()
7 changes: 3 additions & 4 deletions my/body/blood.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 """
 Blood tracking
 """
@@ -14,7 +13,7 @@
 
 from kython.org import parse_org_date
 
-from mycfg import paths
+from my.config import blood as config
 
 import pandas as pd # type: ignore
 
@@ -52,7 +51,7 @@ def try_float(s: str) -> Optional[float]:
 
 
 def iter_gluc_keto_data() -> Iterable[Result]:
-    o = porg.Org.from_file(str(paths.blood.blood_log))
+    o = porg.Org.from_file(str(config.blood_log))
     tbl = o.xpath('//table')
     for l in tbl.lines:
         kets = l['ket'].strip()
@@ -71,7 +70,7 @@ def iter_gluc_keto_data() -> Iterable[Result]:
 
 
 def iter_tests_data() -> Iterable[Result]:
-    o = porg.Org.from_file(str(paths.blood.blood_tests_log))
+    o = porg.Org.from_file(str(config.blood_tests_log))
     tbl = o.xpath('//table')
     for d in tbl.lines:
         try:
2 changes: 1 addition & 1 deletion my/body/weight.py
@@ -9,7 +9,7 @@
 from ..error import Res
 from ..notes import orgmode
 
-from mycfg import weight as config
+from my.config import weight as config
 
 
 log = LazyLogger('my.body.weight')
8 changes: 4 additions & 4 deletions my/books/kobo.py
@@ -4,12 +4,12 @@
 
 from typing import Callable, Union, List
 
-from mycfg import paths
-from mycfg.repos.kobuddy.src.kobuddy import *
+from my.config import kobo as config
+from my.config.repos.kobuddy.src.kobuddy import *
 # hmm, explicit imports make pylint a bit happier..
-from mycfg.repos.kobuddy.src.kobuddy import Highlight, set_databases, get_highlights
+from my.config.repos.kobuddy.src.kobuddy import Highlight, set_databases, get_highlights
 
-set_databases(paths.kobuddy.export_dir)
+set_databases(config.export_dir)
 
 # TODO maybe type over T?
 _Predicate = Callable[[str], bool]
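
The kobo diff above (and the github, instapaper and fbmessenger diffs further down) also switches the vendored exporter code from mycfg.repos.* to my.config.repos.*, which suggests the user config is a package carrying those checkouts under a repos/ subdirectory. A small hedged sanity check for such a layout is sketched below; the module names come from the imports in this commit, but the layout itself is an assumption rather than something the commit sets up:

# Assumed layout: my/config/repos/ contains kobuddy, ghexport, instapexport,
# fbmessengerexport as submodules or symlinks. Not established by this commit.
import importlib

for mod in (
    'my.config.repos.kobuddy.src.kobuddy',
    'my.config.repos.ghexport.dal',
    'my.config.repos.instapexport.dal',
    'my.config.repos.fbmessengerexport.dal',
):
    importlib.import_module(mod)  # raises ModuleNotFoundError if a checkout is missing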
2 changes: 1 addition & 1 deletion my/calendar/holidays.py
@@ -8,7 +8,7 @@
 from typing import Tuple, Iterator, List, Union
 
 
-from mycfg.holidays_data import HOLIDAYS_DATA
+from my.config.holidays_data import HOLIDAYS_DATA
 
 
 # pip3 install workalendar
10 changes: 6 additions & 4 deletions my/coding/codeforces.py
@@ -1,4 +1,8 @@
 #!/usr/bin/env python3
+from .. import init
+
+from my.config import codeforces as config
+
 from datetime import datetime
 from typing import NamedTuple
 from pathlib import Path
@@ -31,8 +35,7 @@ def make(cls, j) -> 'Contest':
 
 
 def get_contests() -> Cmap:
-    from mycfg import paths
-    last = max(get_files(paths.codeforces.export_path, 'allcontests*.json'))
+    last = max(get_files(config.export_path, 'allcontests*.json'))
     j = json.loads(last.read_text())
     d = {}
     for c in j['result']:
@@ -77,8 +80,7 @@ def make(cls, cmap, json) -> Iterator[Res['Competition']]:
 
 def iter_data() -> Iterator[Res[Competition]]:
     cmap = get_contests()
-    from mycfg import paths
-    last = max(get_files(paths.codeforces.export_path, 'codeforces*.json'))
+    last = max(get_files(config.export_path, 'codeforces*.json'))
 
     with wrap(json.loads(last.read_text())) as j:
         j['status'].ignore()
2 changes: 1 addition & 1 deletion my/coding/commits.py
@@ -7,7 +7,7 @@
 from typing import List, NamedTuple, Optional, Dict, Any, Iterator, Set
 
 from ..common import PathIsh, LazyLogger, mcachew
-from mycfg import commits as config
+from my.config import commits as config
 
 # pip3 install gitpython
 import git # type: ignore
12 changes: 7 additions & 5 deletions my/coding/github.py
@@ -2,6 +2,8 @@
 Github events and their metadata: comments/issues/pull requests
 """
 
+from .. import init
+
 from typing import Dict, List, Union, Any, NamedTuple, Tuple, Optional, Iterator, TypeVar, Set
 from datetime import datetime
 import json
@@ -14,8 +16,8 @@
 from ..common import get_files, mcachew
 from ..error import Res
 
-from mycfg import paths
-import mycfg.repos.ghexport.dal as ghexport
+from my.config import github as config
+import my.config.repos.ghexport.dal as ghexport
 
 
 logger = LazyLogger('my.github')
@@ -79,7 +81,7 @@ def _get_summary(e) -> Tuple[str, Optional[str], Optional[str]]:
 
 
 def get_dal():
-    sources = get_files(paths.github.export_dir, glob='*.json*')
+    sources = get_files(config.export_dir, glob='*.json*')
     sources = list(map(CPath, sources)) # TODO maybe move it to get_files? e.g. compressed=True arg?
     return ghexport.DAL(sources)
 
@@ -178,7 +180,7 @@ def iter_gdpr_events() -> Iterator[Res[Event]]:
     """
    Parses events from GDPR export (https://github.com/settings/admin)
     """
-    files = list(sorted(paths.github.gdpr_dir.glob('*.json')))
+    files = list(sorted(config.gdpr_dir.glob('*.json')))
     handler_map = {
         'schema' : None,
         'issue_events_': None, # eh, doesn't seem to have any useful bodies
@@ -215,7 +217,7 @@ def iter_gdpr_events() -> Iterator[Res[Event]]:
 
 
 # TODO hmm. not good, need to be lazier?...
-@mcachew(paths.github.cache_dir, hashf=lambda dal: dal.sources)
+@mcachew(config.cache_dir, hashf=lambda dal: dal.sources)
 def iter_backup_events(dal=get_dal()) -> Iterator[Event]:
     for d in dal.events():
         yield _parse_event(d)
7 changes: 5 additions & 2 deletions my/coding/topcoder.py
@@ -1,4 +1,8 @@
 #!/usr/bin/env python3
+from .. import init
+
+from my.config import topcoder as config
+
 from datetime import datetime
 from typing import NamedTuple
 from pathlib import Path
@@ -15,8 +19,7 @@
 
 # TODO json type??
 def _get_latest() -> Dict:
-    from mycfg import paths
-    pp = max(get_files(paths.topcoder.export_path, glob='*.json'))
+    pp = max(get_files(config.export_path, glob='*.json'))
     return json.loads(pp.read_text())
 
 
10 changes: 5 additions & 5 deletions my/emfit/__init__.py
@@ -16,7 +16,7 @@
 
 from ..common import get_files, LazyLogger, cproperty, group_by_key, mcachew
 
-import mycfg
+from my.config import emfit as config
 
 
 logger = LazyLogger('my.emfit', level='info')
@@ -36,7 +36,7 @@ def hhmm(minutes):
 Sid = str
 
 # TODO use tz provider for that?
-_TZ = pytz.timezone(mycfg.emfit.tz)
+_TZ = pytz.timezone(config.tz)
 
 # TODO use common tz thing?
 def fromts(ts) -> datetime:
@@ -299,19 +299,19 @@ def dir_hash(path: Path):
     return mtimes
 
 
-@mcachew(cache_path=mycfg.emfit.cache_path, hashf=dir_hash, logger=logger)
+@mcachew(cache_path=config.cache_path, hashf=dir_hash, logger=logger)
 def iter_datas_cached(path: Path) -> Iterator[Emfit]:
     # TODO use get_files?
     for f in sorted(path.glob('*.json')):
         sid = f.stem
-        if sid in mycfg.emfit.excluded_sids:
+        if sid in config.excluded_sids:
             continue
 
         em = EmfitOld(sid=sid, jj=json.loads(f.read_text()))
         yield from Emfit.make(em)
 
 
-def iter_datas(path=mycfg.emfit.export_path) -> Iterator[Emfit]:
+def iter_datas(path=config.export_path) -> Iterator[Emfit]:
     yield from iter_datas_cached(path)
 
 
6 changes: 3 additions & 3 deletions my/fbmessenger.py
@@ -8,12 +8,12 @@
 
 from .common import PathIsh
 
-import mycfg.repos.fbmessengerexport.dal as messenger
-from mycfg import paths
+import my.config.repos.fbmessengerexport.dal as messenger
+from my.config import fbmessenger as config
 
 
 def _dal() -> messenger.DAL:
-    return messenger.DAL(paths.fbmessenger.export_db)
+    return messenger.DAL(config.export_db)
 
 
 # TODO Result type?
5 changes: 3 additions & 2 deletions my/feedbin.py
@@ -5,7 +5,7 @@
 from .common import listify
 from ._rss import Subscription
 
-from mycfg import paths
+from my.config import feedbin as config
 
 import json
 from pathlib import Path
@@ -27,7 +27,8 @@ def parse_file(f: Path):
 
 def get_states() -> Dict[datetime, List[Subscription]]:
     res = {}
-    for f in sorted(Path(paths.feedbin.export_dir).glob('*.json')):
+    # TODO use get_files
+    for f in sorted(Path(config.export_dir).glob('*.json')):
         dts = f.stem.split('_')[-1]
         dt = isoparse(dts)
         subs = parse_file(f)
5 changes: 3 additions & 2 deletions my/feedly.py
@@ -5,7 +5,7 @@
 from .common import listify
 from ._rss import Subscription
 
-from mycfg import paths
+from my.config import feedly as config
 
 import json
 from pathlib import Path
@@ -30,7 +30,8 @@ def parse_file(f: Path):
 
 def get_states() -> Dict[datetime, List[Subscription]]:
     res = {}
-    for f in sorted(Path(paths.feedly.export_dir).glob('*.json')):
+    # TODO use get_files
+    for f in sorted(Path(config.export_dir).glob('*.json')):
         dts = f.stem.split('_')[-1]
         dt = datetime.strptime(dts, '%Y%m%d%H%M%S')
         dt = pytz.utc.localize(dt)
11 changes: 7 additions & 4 deletions my/foursquare.py
@@ -1,4 +1,7 @@
-#!/usr/bin/env python3
+'''
+Foursquare/Swarm checkins
+'''
+
 from datetime import datetime, timezone, timedelta
 from itertools import chain
 from pathlib import Path
@@ -9,14 +12,14 @@
 # TODO pytz for timezone???
 
 from .common import get_files, LazyLogger
+from my.config import foursquare as config
 
 
-logger = LazyLogger('my.foursquare')
+logger = LazyLogger(__package__)
 
 
 def _get_exports() -> List[Path]:
-    from mycfg import paths
-    return get_files(paths.foursquare.export_path, '*.json')
+    return get_files(config.export_path, '*.json')
 
 
 class Checkin:
6 changes: 3 additions & 3 deletions my/instapaper.py
@@ -7,12 +7,12 @@
 from .common import group_by_key, PathIsh, get_files
 
 
-from mycfg import paths
-import mycfg.repos.instapexport.dal as dal
+from my.config import instapaper as config
+import my.config.repos.instapexport.dal as dal
 
 
 def _get_files():
-    return get_files(paths.instapaper.export_path, glob='*.json')
+    return get_files(config.export_path, glob='*.json')
 
 
 def get_dal() -> dal.DAL:
4 changes: 2 additions & 2 deletions my/jawbone/__init__.py
@@ -7,10 +7,10 @@
 import logging
 import pytz
 
-from mycfg import paths
+from my.config import jawbone as config
 
 
-BDIR = paths.jawbone.export_dir
+BDIR = config.export_dir
 PHASES_FILE = BDIR / 'phases.json'
 SLEEPS_FILE = BDIR / 'sleeps.json'
 GRAPHS_DIR = BDIR / 'graphs'
4 changes: 2 additions & 2 deletions my/jawbone/plots.py
@@ -87,9 +87,9 @@ def iter_useful(data_file: str):
 
 # TODO <<< hmm. these files do contain deep and light sleep??
 # also steps stats??
-from mycfg import paths
+from my.config import jawbone as config
 
-p = paths.jawbone.export_dir / 'old_csv'
+p = config.export_dir / 'old_csv'
 # TODO with_my?
 files = [
     p / "2015.csv",
12 changes: 9 additions & 3 deletions my/lastfm/__init__.py
@@ -1,4 +1,9 @@
-#!/usr/bin/env python3
+'''
+Last.fm scrobbles
+'''
+
+from .. import init
+
 from functools import lru_cache
 from typing import NamedTuple, Dict, Any
 from datetime import datetime
@@ -7,7 +12,7 @@
 
 import pytz
 
-from mycfg import paths
+from my.config import lastfm as config
 
 # TODO Json type?
 # TODO memoised properties?
@@ -43,7 +48,7 @@ def track(self) -> str:
 # TODO memoise...?
 # TODO watch out, if we keep the app running it might expire
 def _iter_scrobbles():
-    last = max(Path(paths.lastfm.export_path).glob('*.json'))
+    # TODO use get_files
+    last = max(Path(config.export_path).glob('*.json'))
     # TODO mm, no timezone? hopefuly it's UTC
     j = json.loads(last.read_text())
 