Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Introduce caching of configured live accounts in each test process #3298

Merged
merged 3 commits into from
May 9, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
3 changes: 3 additions & 0 deletions python/mypy.ini
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
[mypy]

[mypy-py.*]
ignore_missing_imports = True

[mypy-deltachat.capi.*]
ignore_missing_imports = True

Expand Down
85 changes: 79 additions & 6 deletions python/src/deltachat/testplugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@

import pytest
import requests
import pathlib

from . import Account, const, account_hookimpl, get_core_info
from .events import FFIEventLogger, FFIEventTracker
Expand Down Expand Up @@ -178,6 +179,48 @@ def get_liveconfig_producer(self):
yield config
pytest.fail("more than {} live accounts requested.".format(MAX_LIVE_CREATED_ACCOUNTS))

def cache_maybe_retrieve_configured_db_files(self, cache_addr, db_target_path):
    """Try to seed *db_target_path*'s directory from the in-process account cache.

    :param cache_addr: e-mail address used as the cache key.
    :param db_target_path: path where the account database should live;
        must not exist yet (we only seed fresh account dirs).
    :returns: True if cached files for ``cache_addr`` were written out,
        False on a cache miss.
    """
    # NOTE: span was interleaved with PR-review UI text in the scrape;
    # this is the clean reconstruction of the merged code.
    db_target_path = pathlib.Path(db_target_path)
    assert not db_target_path.exists()

    if cache_addr not in self._addr2files:
        print("CACHE FAIL for", cache_addr)
        return False
    print("CACHE HIT for", cache_addr)
    # replay the whole recorded account directory (db, blobs, shm/wal files)
    targetdir = db_target_path.parent
    write_dict_to_dir(self._addr2files[cache_addr], targetdir)
    return True

def cache_maybe_store_configured_db_files(self, acc):
    """Snapshot a configured account's database directory into the in-process cache.

    Existing cache entries are never overwritten so the first fully
    configured account for an address wins.

    :param acc: a configured Account instance.
    :returns: True if a snapshot was stored, False if the address was
        already cached (previously this path returned None implicitly;
        False keeps truthiness identical but is explicit).
    """
    addr = acc.get_config("addr")
    assert acc.is_configured()
    # don't overwrite existing entries
    if addr in self._addr2files:
        return False
    print("storing cache for", addr)
    basedir = pathlib.Path(acc.get_blobdir()).parent
    self._addr2files[addr] = create_dict_from_files_in_path(basedir)
    return True


def create_dict_from_files_in_path(base):
    """Recursively snapshot every file below *base*.

    :param base: a pathlib.Path directory to walk.
    :returns: dict mapping each file's path relative to *base* to its
        byte content.
    """
    return {
        candidate.relative_to(base): candidate.read_bytes()
        for candidate in base.glob("**/*")
        if candidate.is_file()
    }


def write_dict_to_dir(dic, target_dir):
    """Write out a snapshot produced by create_dict_from_files_in_path.

    :param dic: non-empty dict mapping relative paths to byte content.
    :param target_dir: pathlib.Path of the directory to populate;
        intermediate directories are created as needed.
    """
    # NOTE: span was interleaved with a PR-review thread in the scrape;
    # this is the clean reconstruction of the merged code.
    assert dic
    for relpath, content in dic.items():
        path = target_dir.joinpath(relpath)
        # mkdir(parents=True, exist_ok=True) replaces the racy
        # exists()-then-makedirs pattern and drops the os dependency
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_bytes(content)


@pytest.fixture
def data(request):
Expand Down Expand Up @@ -225,6 +268,15 @@ def __init__(self, testprocess, init_time):
self.testprocess = testprocess
self.init_time = init_time

def log(self, *args):
    """Print *args* prefixed with "[acsetup]" and seconds elapsed since init_time."""
    elapsed = time.time() - self.init_time
    print("[acsetup]", "{:.3f}".format(elapsed), *args)

def add_configured(self, account):
    """Register *account*, which must already be fully configured, as CONFIGURED."""
    assert account.is_configured()
    self._account2state[account] = self.CONFIGURED
    addr = account.get_config("addr")
    self.log("added already configured account", account, addr)

def start_configure(self, account, reconfigure=False):
""" add an account and start its configure process. """
class PendingTracker:
Expand All @@ -235,7 +287,7 @@ def ac_configure_completed(this, success):
account.add_account_plugin(PendingTracker(), name="pending_tracker")
self._account2state[account] = self.CONFIGURING
account.configure(reconfigure=reconfigure)
print("started configure on pending", account)
self.log("started configure on", account)

def wait_one_configured(self, account):
""" wait until this account has successfully configured. """
Expand Down Expand Up @@ -328,6 +380,9 @@ def __init__(self, request, testprocess, tmpdir, data) -> None:
self.set_logging_default(False)
request.addfinalizer(self.finalize)

def log(self, *args):
    """Print *args* prefixed with "[acfactory]" and seconds elapsed since init_time."""
    elapsed = time.time() - self.init_time
    print("[acfactory]", "{:.3f}".format(elapsed), *args)

def finalize(self):
while self._finalizers:
fin = self._finalizers.pop()
Expand Down Expand Up @@ -359,9 +414,19 @@ def get_next_liveconfig(self):
assert "addr" in configdict and "mail_pw" in configdict
return configdict

def _get_cached_account(self, addr):
if addr in self.testprocess._addr2files:
return self._getaccount(addr)

def get_unconfigured_account(self):
    """Return a brand-new Account that has not been configured yet."""
    account = self._getaccount()
    return account

def _getaccount(self, try_cache_addr=None):
logid = "ac{}".format(len(self._accounts) + 1)
path = self.tmpdir.join(logid)
# we need to use fixed database basename for maybe_cache_* functions to work
path = self.tmpdir.mkdir(logid).join("dc.db")
if try_cache_addr:
self.testprocess.cache_maybe_retrieve_configured_db_files(try_cache_addr, path)
ac = Account(path.strpath, logging=self._logging)
ac._logid = logid # later instantiated FFIEventLogger needs this
ac._evtracker = ac.add_account_plugin(FFIEventTracker(ac))
Expand Down Expand Up @@ -394,7 +459,7 @@ def _preconfigure_key(self, account, addr):
def get_pseudo_configured_account(self):
# do a pseudo-configured account
ac = self.get_unconfigured_account()
acname = os.path.basename(ac.db_path)
acname = ac._logid
addr = "{}@offline.org".format(acname)
ac.update_config(dict(
addr=addr, displayname=acname, mail_pw="123",
Expand All @@ -405,7 +470,7 @@ def get_pseudo_configured_account(self):
self._acsetup.init_logging(ac)
return ac

def new_online_configuring_account(self, cloned_from=None, **kwargs):
def new_online_configuring_account(self, cloned_from=None, cache=False, **kwargs):
if cloned_from is None:
configdict = self.get_next_liveconfig()
else:
Expand All @@ -415,6 +480,12 @@ def new_online_configuring_account(self, cloned_from=None, **kwargs):
mail_pw=cloned_from.get_config("mail_pw"),
)
configdict.update(kwargs)
ac = self._get_cached_account(addr=configdict["addr"]) if cache else None
if ac is not None:
# make sure we consume a preconfig key, as if we had created a fresh account
self._preconfigured_keys.pop(0)
self._acsetup.add_configured(ac)
return ac
ac = self.prepare_account_from_liveconfig(configdict)
self._acsetup.start_configure(ac)
return ac
Expand All @@ -439,9 +510,11 @@ def bring_accounts_online(self):
print("all accounts online")

def get_online_accounts(self, num):
    """Create *num* cache-enabled live accounts and wait until all are online.

    :param num: number of accounts to create.
    :returns: list of configured, online Account instances.
    """
    # NOTE: the scraped diff showed both the pre- and post-PR variant of
    # the creation line; this reconstructs the merged (cache=True) version.
    accounts = [self.new_online_configuring_account(cache=True) for i in range(num)]
    self.bring_accounts_online()
    # we cache fully configured and started accounts
    for acc in accounts:
        self.testprocess.cache_maybe_store_configured_db_files(acc)
    return accounts

def run_bot_process(self, module, ffi=True):
Expand Down
32 changes: 30 additions & 2 deletions python/tests/test_4_lowlevel.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,33 @@
from __future__ import print_function

import os

from queue import Queue
from deltachat import capi, cutil, const
from deltachat import register_global_plugin
from deltachat.hookspec import global_hookimpl
from deltachat.capi import ffi
from deltachat.capi import lib
from deltachat.testplugin import ACSetup
from deltachat.testplugin import ACSetup, create_dict_from_files_in_path, write_dict_to_dir
# from deltachat.account import EventLogger


class TestACSetup:

def test_cache_writing(self, tmp_path):
    """Round-trip: snapshot a nested directory to a dict and replay it elsewhere."""
    source = tmp_path.joinpath("hello")
    source.mkdir()
    level1 = source.joinpath("dir1")
    level1.mkdir()
    level1.joinpath("file1").write_bytes(b'content1')
    level2 = level1.joinpath("dir2")
    level2.mkdir()
    level2.joinpath("file2").write_bytes(b"123")
    snapshot = create_dict_from_files_in_path(source)
    restored = tmp_path.joinpath("other")
    write_dict_to_dir(snapshot, restored)
    assert restored.joinpath("dir1", "dir2", "file2").exists()
    assert restored.joinpath("dir1", "file1").exists()

def test_basic_states(self, acfactory, monkeypatch, testprocess):
pc = ACSetup(init_time=0.0, testprocess=testprocess)
acc = acfactory.get_unconfigured_account()
Expand Down Expand Up @@ -46,6 +63,17 @@ def test_two_accounts_one_waited_all_started(self, monkeypatch, acfactory, testp
assert pc._account2state[ac1] == pc.IDLEREADY
assert pc._account2state[ac2] == pc.IDLEREADY

def test_store_and_retrieve_configured_account_cache(self, acfactory, tmpdir):
    """Storing caches a configured account once; retrieval recreates its db files."""
    account = acfactory.get_pseudo_configured_account()
    cache_holder = acfactory._acsetup.testprocess
    assert cache_holder.cache_maybe_store_configured_db_files(account)
    # a second store must be a no-op: existing entries are never overwritten
    assert not cache_holder.cache_maybe_store_configured_db_files(account)
    account_dir = tmpdir.mkdir("newaccount")
    addr = account.get_config("addr")
    target_db_path = account_dir.join("db").strpath
    assert cache_holder.cache_maybe_retrieve_configured_db_files(addr, target_db_path)
    # at least the db file plus the blob dir should have been replayed
    assert len(os.listdir(account_dir)) >= 2


def test_liveconfig_caching(acfactory, monkeypatch):
prod = [
Expand Down