
Commit 0267bc1

Add LazyIO, Fix LazyDB
1 parent be57a12 commit 0267bc1


18 files changed: +995 -297 lines changed


.gitignore

Lines changed: 2 additions & 1 deletion
@@ -9,4 +9,5 @@ __pycache__*
 build.sh
 *test.py
 *.egg-info*
-*.vscode
+*.vscode
+*test_ops

lazyops/__init__.py

Lines changed: 2 additions & 1 deletion
@@ -9,7 +9,7 @@
 from .lazyclasses import lazyclass

 from .envs import LazyEnv, get_logger, lazywatcher, lazywatch
-from .models import LazyData, LazyTime, LazyDate, LazyFormatter, LazyTimer
+from .models import LazyData, LazyTime, LazyDate, LazyFormatter, LazyTimer, LazyObject
 from .common import lazylibs, lazy_init, run_cmd, clone_repo, File
 from .utils import find_binary_in_path, timed_cache, latest_tf_ckpt, require_module
 from .utils import build_chunks, retryable, list_to_dict
@@ -38,6 +38,7 @@
     'LazyDate',
     'LazyFormatter',
     'LazyTimer',
+    'LazyObject',
     'lazylibs',
     'lazy_init',
     'run_cmd',

lazyops/apis.py

Lines changed: 4 additions & 1 deletion
@@ -8,7 +8,10 @@

 from typing import List, Dict, Any, Optional

-async def async_req(sess, url, method='POST', decode_json=True, with_url=False, *args, **kwargs):
+
+
+async def async_req(url, method='POST', decode_json=True, with_url=False, sess=None, headers=None, *args, **kwargs):
+    if not sess: sess = await aiohttp.ClientSession(headers=headers)
     async with sess.request(method, url=url, *args, **kwargs) as resp:
         data = await resp.json() if decode_json else await resp
         if not with_url:
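
The reworked async_req now takes the session as an optional keyword rather than a required positional argument. A minimal caller sketch (hypothetical usage, assuming async_req is importable from lazyops.apis and an explicit aiohttp session is supplied, as callers did before this commit):

import asyncio
import aiohttp

from lazyops.apis import async_req  # assumed import path for this module

async def main():
    # Pass the session explicitly; async_req issues the request and decodes JSON.
    async with aiohttp.ClientSession() as sess:
        data = await async_req('https://httpbin.org/get', method='GET', sess=sess)
        print(data)

asyncio.run(main())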

lazyops/envs.py

Lines changed: 7 additions & 3 deletions
@@ -90,7 +90,9 @@ def setup_logging(self):
         logger.propagate = self.config.get('propagate', False)
         return logger

-    def get_logger(self):
+    def get_logger(self, module=None):
+        if module:
+            return self.logger.getChild(module)
         return self.logger

     def debug(self, msg, *args, **kwargs):
@@ -122,7 +124,7 @@ def setup_new_logger(name, log_level='info', quiet_loggers=None, clear_handlers=
         'logfile_file': None,
         'logfile_log_level': "debug",
         'logfile_log_color': False,
-        'log_line_template': f"%(color_on)s[{name}] %(funcName)-5s%(color_off)s: %(message)s",
+        'log_line_template': f"%(color_on)s[{name}] %(name)-5s %(funcName)-5s%(color_off)s: %(message)s",
         'clear_handlers': clear_handlers,
         'quiet_loggers': quiet_loggers,
         'propagate': propagate
@@ -146,9 +148,11 @@ class EnvChecker:


     @classmethod
-    def get_logger(cls, name = 'LazyOps', *args, **kwargs):
+    def get_logger(cls, name = 'LazyOps', module=None, *args, **kwargs):
         if not EnvChecker.loggers.get(name):
             EnvChecker.loggers[name] = ThreadSafeHandler()
+        if module:
+            return EnvChecker.loggers[name].get(setup_new_logger, name=name, *args, **kwargs).get_logger(module=module)
         return EnvChecker.loggers[name].get(setup_new_logger, name=name, *args, **kwargs)

     @property
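
The new module argument yields a child logger. A hedged sketch of the call pattern, mirroring the call used in lazyops/lazydatabase/backends.py later in this commit (assumes get_logger is the top-level helper re-exported from lazyops):

from lazyops import get_logger  # re-exported from lazyops.envs

# Parent logger "LazyDB", child logger "LazyDB.backends" via Logger.getChild()
logger = get_logger('LazyDB', module='backends')
logger.info('backend module initialized')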

lazyops/examples/README.md

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+# LazyOps Examples to Test Out
+

lazyops/lazydatabase/__init__.py

Lines changed: 10 additions & 7 deletions
@@ -1,26 +1,29 @@
-from .schema import (
+from .backends import (
     LazyDBCacheBase,
     PklDBCache,
-    LazyDBIndex,
+)
+
+from .core import (
+    LazyDBModel,
     LazyDBConfig,
     LazyDBBase,
     LazyDB
 )

-pkldb_cache = PklDBCache
-lazydb_index = LazyDBIndex
+pkldb_backend = PklDBCache
+lazydb_model = LazyDBModel
 lazydb_config = LazyDBConfig
 lazydb = LazyDB

 __all__ = [
     'LazyDBCacheBase',
     'PklDBCache',
-    'LazyDBIndex',
+    'LazyDBModel',
     'LazyDBConfig',
     'LazyDBBase',
     'LazyDB',
-    'pkldb_cache',
-    'lazydb_index',
+    'pkldb_backend',
+    'lazydb_model',
     'lazydb_config',
     'lazydb'
 ]
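
With the renamed exports above, downstream imports change accordingly; a hypothetical example:

# Hypothetical caller: LazyDBIndex / pkldb_cache / lazydb_index are gone,
# replaced by LazyDBModel / pkldb_backend / lazydb_model.
from lazyops.lazydatabase import LazyDB, LazyDBModel, PklDBCache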

lazyops/lazydatabase/_base.py

Lines changed: 11 additions & 4 deletions
@@ -3,21 +3,28 @@
 lazy_init('passlib[argon2]', 'passlib')

 import abc
+import sys
 import threading
 import time
 import base64
+import asyncio
+import fileio
+
+
+from functools import cached_property
 from dataclasses import dataclass
 from pydantic.typing import NoneType
 from pydantic import create_model, ValidationError, validator
 from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union
 from passlib.context import CryptContext
 from uuid import uuid4
+from enum import Enum

-import fileio
 from lazyops.lazyclasses import lazyclass
 from lazyops.lazyrpc import BaseModel, Field, sjson_dumps, sjson_loads, jsonable_encoder
-from lazyops import LazyDate, fio, get_logger, timer, tstamp, LazyEnv
+from lazyops import LazyDate, fio, get_logger, timer, tstamp, LazyEnv, LazyObject
+from lazyops import timed_cache

 _lazydb_default_cache_path = fio.userdir('.lazydb', mkdirs=True)
-_lazydb_logger = get_logger('LazyDB')
-_lazydb_picker = fileio.src._pickler
+baselogger = get_logger('LazyDB')
+pkler = fileio.src._pickler

lazyops/lazydatabase/backends.py

Lines changed: 125 additions & 0 deletions
@@ -0,0 +1,125 @@
+from ._base import *
+from .models import LazyDBCacheBase
+
+logger = get_logger('LazyDB', module='backends')
+
+class PklDBCache(LazyDBCacheBase):
+    def dumps(self, data, *args, **kwargs):
+        return pkler.dumps(data, *args, **kwargs)
+
+    def loads(self, data, *args, **kwargs):
+        return pkler.loads(data, *args, **kwargs)
+
+    def restore(self, *args, **kwargs):
+        self.lock.acquire()
+        dbdata = {}
+        with self.lock:
+            try:
+                dbdata = fio.pklload(self.cache_filepath)
+            except Exception as e:
+                logger.error(f'Failed to Restore DB {self._cachename}: {str(e)}')
+                logger.error(f'Copying DB to Backup')
+                tempfn = fio.mod_fname(self.cache_filepath, suffix='_' + tstamp())
+                fio.copy(self.cache_filepath, tempfn)
+                logger.error(f'DB saved to {tempfn}')
+        self.lock.release()
+        return dbdata
+
+    def save(self, dbdata, *args, **kwargs):
+        _saved = False
+        self.lock.acquire()
+        with self.lock:
+            try:
+                fio.pklsave(dbdata, self.cache_filepath)
+                _saved = True
+            except Exception as e:
+                logger.error(f'Failed to Save DB {self._cachename}: {str(e)}')
+                _saved = False
+        self.lock.release()
+        return _saved
+
+class GSheetsDBCache(LazyDBCacheBase):
+    def __init__(self, sheets_url=None, cache_name='LazyCacheDB', auth_path=None, *args, **kwargs):
+        self.url = sheets_url
+        self.auth = auth_path
+        self.cache_name = cache_name
+        self.indexes = {}
+        self.setup_client()
+
+    def setup_client(self):
+        gspread = lazy_init('gspread')
+        self.gc = gspread.service_account(filename=self.auth)
+        if self.url:
+            self.sheet = self.gc.open_by_url(self.url)
+            logger.info(f'Loaded GSheetsDB from URL: {self.url}')
+        else:
+            try:
+                self.sheet = self.gc.open(self.cache_name)
+                logger.info(f'Loaded GSheetsDB from Name: {self.cache_name}')
+
+            except Exception as e:
+                self.sheet = None
+                logger.error(f'Failed to Load GSheetsDB from Name: {self.cache_name}')
+
+        if not self.sheet:
+            self.sheet = self.gc.create(self.cache_name)
+            logger.info(f'Created GSheetsDB by Name: {self.cache_name}')
+            self.url = self.sheet.url
+        self.refresh_index()
+
+    def create_indexes(self, dbdata):
+        for n, i in enumerate(dbdata):
+            if i not in self.all_wks:
+                idx = dbdata[i]
+                wks = self.sheet.add_worksheet(i, index=n)
+                # Create row 0 - header
+                wks.append_row(idx.schema_props)
+
+    def dump_indexes(self, dbdata):
+        for schema, idx in dbdata.items():
+            wks = self.sheet.worksheet(schema)
+            items = []
+            for i in idx.index.values():
+                d = i.dict()
+                items.append(list(d.values()))
+            wks.insert_rows(items, row=2)
+        self.refresh_index()
+
+    def refresh_index(self):
+        self.index = self.all_wks_dict
+
+    def get_header(self, wks):
+        return wks.row_values(1)
+
+    @property
+    def all_wks(self):
+        return list(self.sheet.worksheets())
+
+    @property
+    def all_wks_dict(self):
+        return {n: s for n, s in enumerate(self.all_wks)}
+
+    @property
+    def cache_file(self):
+        return None
+
+    @property
+    def cache_filepath(self):
+        return None
+
+    @property
+    def exists(self):
+        return bool(self.url)
+
+    # Need to figure out how to reconstruct the data
+    def dumps(self, data, *args, **kwargs):
+        pass
+
+    def loads(self, *args, **kwargs):
+        pass
+
+    def restore(self, *args, **kwargs):
+        pass
+
+    def save(self, db, *args, **kwargs):
+        pass
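
A standalone sketch of the dumps/loads round-trip that PklDBCache delegates to its pickler, using the standard-library pickle as a stand-in for fileio's _pickler (pkler in _base.py); equivalence of the two picklers is an assumption here:

import pickle

# Stand-in for pkler (fileio.src._pickler); assumed to behave like pickle.
record = {'users': {'abc-123': {'name': 'example'}}}
blob = pickle.dumps(record)            # what PklDBCache.dumps produces for a DB snapshot
assert pickle.loads(blob) == record    # PklDBCache.loads restores the same structure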
