setup to run against local memcache and log ndb global_cache
for #1149
snarfed committed Dec 20, 2024
1 parent c67fb95 commit a00ab0d
Showing 11 changed files with 42 additions and 46 deletions.
5 changes: 2 additions & 3 deletions app.yaml
@@ -31,10 +31,9 @@ env_variables:
APPVIEW_HOST: api.bsky.app
BGS_HOST: bsky.network
MOD_SERVICE_HOST: mod.bsky.app
MOD_SERVICE_DID: did:plc:ar7c4by46qjdydhdevvrndac
MOD_SERVICE_DID: did:plc:ar7c4by46qjdydhdevvrndac # ...or test against labeler.dholms.xyz / did:plc:vzxheqfwpbi3lxbgdh22js66
# https://console.cloud.google.com/memorystore/memcached/locations/us-central1/instances/bridgy-fed/details?project=bridgy-federated
MEMCACHE_HOST: '10.126.144.3'
# ...or test against labeler.dholms.xyz / did:plc:vzxheqfwpbi3lxbgdh22js66

# https://github.com/googleapis/python-ndb/blob/c55ec62b5153787404488b046c4bf6ffa02fee64/google/cloud/ndb/utils.py#L78-L81
NDB_DEBUG: true

6 changes: 3 additions & 3 deletions atproto_hub.yaml
@@ -29,14 +29,14 @@ env_variables:
APPVIEW_HOST: api.bsky.app
BGS_HOST: bsky.network
MOD_SERVICE_HOST: mod.bsky.app
MOD_SERVICE_DID: did:plc:ar7c4by46qjdydhdevvrndac
# ...or test against labeler.dholms.xyz / did:plc:vzxheqfwpbi3lxbgdh22js66
MOD_SERVICE_DID: did:plc:ar7c4by46qjdydhdevvrndac # ...or test against labeler.dholms.xyz / did:plc:vzxheqfwpbi3lxbgdh22js66
# https://console.cloud.google.com/memorystore/memcached/locations/us-central1/instances/bridgy-fed/details?project=bridgy-federated
MEMCACHE_HOST: '10.126.144.3'
ROLLBACK_WINDOW: 50000
SUBSCRIBE_REPOS_BATCH_DELAY: 10

# https://github.com/googleapis/python-ndb/blob/c55ec62b5153787404488b046c4bf6ffa02fee64/google/cloud/ndb/utils.py#L78-L81
NDB_DEBUG: true
# NDB_DEBUG: true

# need only one instance so that new commits can be delivered to subscribeRepos
# subscribers in memory
25 changes: 14 additions & 11 deletions common.py
@@ -105,20 +105,23 @@
# https://github.com/memcached/memcached/wiki/Commands#standard-protocol
MEMCACHE_KEY_MAX_LEN = 250

if appengine_info.DEBUG or appengine_info.LOCAL_SERVER:
logger.info('Using in memory mock memcache')
memcache = MockMemcacheClient(allow_unicode_keys=True)
pickle_memcache = MockMemcacheClient(allow_unicode_keys=True, serde=PickleSerde())
global_cache = _InProcessGlobalCache()
else:
logger.info('Using production Memorystore memcache')
if memcache_host := os.environ.get('MEMCACHE_HOST'):
logger.info(f'Using real memcache at {memcache_host}')
memcache = pymemcache.client.base.PooledClient(
os.environ['MEMCACHE_HOST'], timeout=10, connect_timeout=10, # seconds
memcache_host, timeout=10, connect_timeout=10, # seconds
allow_unicode_keys=True)
pickle_memcache = pymemcache.client.base.PooledClient(
os.environ['MEMCACHE_HOST'], timeout=10, connect_timeout=10, # seconds
memcache_host, timeout=10, connect_timeout=10, # seconds
serde=PickleSerde(), allow_unicode_keys=True)
global_cache = MemcacheCache(memcache)
# ideally we'd use MemcacheCache.from_environment, but it doesn't let us
# pass kwargs like serde to the pymemcache client constructor
global_cache = MemcacheCache(memcache, strict_read=True)
else:
assert appengine_info.DEBUG or appengine_info.LOCAL_SERVER
logger.info('Using in memory mock memcache')
memcache = MockMemcacheClient(allow_unicode_keys=True)
pickle_memcache = MockMemcacheClient(allow_unicode_keys=True, serde=PickleSerde())
global_cache = _InProcessGlobalCache()

_negotiator = ContentNegotiator(acceptable=[
AcceptParameters(ContentType(CONTENT_TYPE_HTML)),
@@ -506,7 +509,7 @@ def memcache_memoize_key(fn, *args, **kwargs):

NONE = () # empty tuple

def memcache_memoize(expire=None, key=None):
def memcache_memoize(expire=0, key=None):
"""Memoize function decorator that stores the cached value in memcache.
Args:
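Note: read end to end, the new client selection in the first hunk keys off the MEMCACHE_HOST env var instead of the DEBUG/LOCAL_SERVER flags, so pointing MEMCACHE_HOST at a local memcached exercises the real-client path. A condensed sketch of the resulting logic (the branch bodies come from the diff above; the imports and comments are assumptions):

    import os

    import pymemcache.client.base
    from pymemcache.serde import PickleSerde
    from pymemcache.test.utils import MockMemcacheClient
    from google.cloud.ndb.global_cache import MemcacheCache, _InProcessGlobalCache

    if memcache_host := os.environ.get('MEMCACHE_HOST'):
        # real memcached: production Memorystore, or eg MEMCACHE_HOST=localhost
        # for a local instance
        memcache = pymemcache.client.base.PooledClient(
            memcache_host, timeout=10, connect_timeout=10, allow_unicode_keys=True)
        pickle_memcache = pymemcache.client.base.PooledClient(
            memcache_host, timeout=10, connect_timeout=10,
            serde=PickleSerde(), allow_unicode_keys=True)
        # ndb global cache backed by the same client; strict_read=True as in the
        # hunk above
        global_cache = MemcacheCache(memcache, strict_read=True)
    else:
        # no MEMCACHE_HOST set: in-memory fakes for local dev and tests
        memcache = MockMemcacheClient(allow_unicode_keys=True)
        pickle_memcache = MockMemcacheClient(allow_unicode_keys=True,
                                             serde=PickleSerde())
        global_cache = _InProcessGlobalCache()

The second hunk's change of memcache_memoize's default expire from None to 0 also matches memcached semantics, where an expiration of 0 means "never expire".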
1 change: 1 addition & 0 deletions config.py
@@ -44,6 +44,7 @@
# https://github.com/googleapis/python-ndb/blob/c55ec62b5153787404488b046c4bf6ffa02fee64/google/cloud/ndb/utils.py#L78-L81
# logging.getLogger('google.cloud.ndb').setLevel(logging.DEBUG)
logging.getLogger('google.cloud.ndb._cache').setLevel(logging.DEBUG)
logging.getLogger('google.cloud.ndb.global_cache').setLevel(logging.DEBUG)

KEYS_ID_RE = re.compile(f'name: "([^"]+)"')

3 changes: 2 additions & 1 deletion requirements.txt
@@ -9,6 +9,7 @@ git+https://github.com/snarfed/lexrpc.git#egg=lexrpc
git+https://github.com/snarfed/mox3.git#egg=mox3
git+https://github.com/snarfed/negotiator.git@py3#egg=negotiator
git+https://github.com/snarfed/oauth-dropins.git#egg=oauth_dropins
git+https://github.com/snarfed/python-ndb.git@global-cache-logging#egg=google-cloud-ndb
# TODO: switch back to pypi as soon as a new release is cut after 4.0.0
# that includes https://github.com/pinterest/pymemcache/pull/471
git+https://github.com/pinterest/pymemcache.git#egg=pymemcache
@@ -49,7 +50,7 @@ google-cloud-datastore==2.20.1
google-cloud-dns==0.35.0
google-cloud-error-reporting==1.11.1
google-cloud-logging==3.11.3
google-cloud-ndb==2.3.2
# google-cloud-ndb==2.3.2
google-cloud-tasks==2.18.0
googleapis-common-protos==1.66.0
grapheme==0.6.0
2 changes: 1 addition & 1 deletion router.yaml
@@ -25,8 +25,8 @@ env_variables:
# https://bsky.app/profile/gargaj.umlaut.hu/post/3kxsvpqiuln26
CHAT_HOST: api.bsky.chat
CHAT_DID: did:web:api.bsky.chat
# https://console.cloud.google.com/memorystore/memcached/locations/us-central1/instances/bridgy-fed/details?project=bridgy-federated
MEMCACHE_HOST: '10.126.144.3'

# https://github.com/googleapis/python-ndb/blob/c55ec62b5153787404488b046c4bf6ffa02fee64/google/cloud/ndb/utils.py#L78-L81
NDB_DEBUG: true

4 changes: 2 additions & 2 deletions tests/test_dms.py
@@ -336,7 +336,7 @@ def test_receive_prompt_request_rate_limit(self):

self.assert_sent(ExplicitFake, [bob, eve], 'request_bridging',
ALICE_REQUEST_CONTENT)
self.assertEqual(2, memcache.get('dm-user-requests-efake-efake:alice'))
self.assertEqual(2, int(memcache.get('dm-user-requests-efake-efake:alice')))

# over the limit
OtherFake.sent = []
@@ -348,7 +348,7 @@ def test_receive_prompt_wrong_protocol(self):
self.assertEqual(('OK', 200), receive(from_user=alice, obj=obj))
self.assertEqual([], OtherFake.sent)
self.assert_replied(OtherFake, alice, '?', "Sorry, you've hit your limit of 2 requests per day. Try again tomorrow!")
self.assertEqual(3, memcache.get('dm-user-requests-efake-efake:alice'))
self.assertEqual(3, int(memcache.get('dm-user-requests-efake-efake:alice')))

def test_receive_prompt_wrong_protocol(self):
self.make_user(id='other.brid.gy', cls=Web)
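The new int() wrappers reflect how a plain pymemcache client (no serde) returns values: counters stored via set/incr come back as ASCII digit bytes such as b'2', not ints. A minimal illustration, assuming a memcached reachable on localhost:11211:

    from pymemcache.client.base import Client

    client = Client('localhost:11211', allow_unicode_keys=True)  # no serde

    client.set('dm-user-requests-efake-efake:alice', 1)   # stored as b'1'
    client.incr('dm-user-requests-efake-efake:alice', 1)  # memcached increments to 2

    raw = client.get('dm-user-requests-efake-efake:alice')  # b'2', not 2
    assert int(raw) == 2  # int() accepts ASCII digit bytes in Python 3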
12 changes: 6 additions & 6 deletions tests/test_models.py
@@ -440,7 +440,7 @@ def test_count_followers(self):
self.assertEqual((0, 0), user.count_followers())

# clear both
common.pickle_memcache.clear()
common.pickle_memcache.flush_all()
user.count_followers.cache.clear()
self.assertEqual((1, 2), user.count_followers())

@@ -1026,7 +1026,7 @@ def test_resolve_ids_copies_follow(self):

models.get_original_user_key.cache_clear()
models.get_original_object_key.cache_clear()
common.pickle_memcache.clear()
common.pickle_memcache.flush_all()

# matching copy users
self.make_user('other:alice', cls=OtherFake,
@@ -1065,7 +1065,7 @@ def test_resolve_ids_copies_reply(self):

models.get_original_user_key.cache_clear()
models.get_original_object_key.cache_clear()
common.pickle_memcache.clear()
common.pickle_memcache.flush_all()

# matching copies
self.make_user('other:alice', cls=OtherFake,
@@ -1107,7 +1107,7 @@ def test_resolve_ids_multiple_in_reply_to(self):

models.get_original_user_key.cache_clear()
models.get_original_object_key.cache_clear()
common.pickle_memcache.clear()
common.pickle_memcache.flush_all()

# matching copies
self.store_object(id='other:a',
@@ -1221,15 +1221,15 @@ def test_normalize_ids_reply(self):
def test_get_original_user_key(self):
self.assertIsNone(models.get_original_user_key('other:user'))
models.get_original_user_key.cache_clear()
common.pickle_memcache.clear()
common.pickle_memcache.flush_all()
user = self.make_user('fake:user', cls=Fake,
copies=[Target(uri='other:user', protocol='other')])
self.assertEqual(user.key, models.get_original_user_key('other:user'))

def test_get_original_object_key(self):
self.assertIsNone(models.get_original_object_key('other:post'))
models.get_original_object_key.cache_clear()
common.pickle_memcache.clear()
common.pickle_memcache.flush_all()
obj = self.store_object(id='fake:post',
copies=[Target(uri='other:post', protocol='other')])
self.assertEqual(obj.key, models.get_original_object_key('other:post'))
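The clear() to flush_all() renames here (and in test_protocol.py and testutil.py below) line the tests up with the pymemcache API: flush_all() is the call that empties a cache, and it exists on both the real clients and MockMemcacheClient, so the same reset works with or without MEMCACHE_HOST set. A small sanity check with the mock client:

    from pymemcache.test.utils import MockMemcacheClient

    memcache = MockMemcacheClient(allow_unicode_keys=True)
    memcache.set('k', 'v')
    memcache.flush_all()              # same call works on PooledClient
    assert memcache.get('k') is None  # cache is empty again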
15 changes: 6 additions & 9 deletions tests/test_protocol.py
@@ -2757,10 +2757,10 @@ def test_resolve_ids_follow(self):
self.alice.copies = [Target(uri='fake:alice', protocol='fake')]
self.alice.put()

common.memcache.clear()
common.memcache.flush_all()
models.get_original_user_key.cache_clear()
models.get_original_object_key.cache_clear()
common.pickle_memcache.clear()
common.pickle_memcache.flush_all()

obj.new = True
Fake.fetchable = {
@@ -2792,10 +2792,10 @@ def test_resolve_ids_share(self):
self.store_object(id='other:post',
copies=[Target(uri='fake:post', protocol='fake')])

common.memcache.clear()
common.memcache.flush_all()
models.get_original_user_key.cache_clear()
models.get_original_object_key.cache_clear()
common.pickle_memcache.clear()
common.pickle_memcache.flush_all()
obj.new = True

_, code = Fake.receive(obj, authed_as='fake:user')
@@ -2847,7 +2847,7 @@ def test_resolve_ids_reply_mentions(self):

models.get_original_user_key.cache_clear()
models.get_original_object_key.cache_clear()
common.pickle_memcache.clear()
common.pickle_memcache.flush_all()

obj.new = True
self.assertEqual(('OK', 202), Fake.receive(obj, authed_as='fake:user'))
@@ -3149,10 +3149,7 @@ def send(*args, **kwargs):

def receive():
with app.test_request_context('/'), \
ndb_client.context(
cache_policy=common.cache_policy,
global_cache=_InProcessGlobalCache(),
global_cache_timeout_policy=common.global_cache_timeout_policy):
ndb_client.context(**common.NDB_CONTEXT_KWARGS):
try:
Fake.receive_as1(post_as1)
except NoContent: # raised by the second thread
13 changes: 4 additions & 9 deletions tests/testutil.py
@@ -35,7 +35,7 @@

# other modules are imported _after_ Fake etc classes is defined so that it's in
# PROTOCOLS when URL routes are registered.
from common import long_to_base64, TASKS_LOCATION
from common import long_to_base64, NDB_CONTEXT_KWARGS, TASKS_LOCATION
import ids
import models
from models import KEY_BITS, Object, PROTOCOLS, Target, User
@@ -320,21 +320,16 @@ def setUp(self):

self.router_client = router.app.test_client()

memcache.clear()
pickle_memcache.clear()
memcache.flush_all()
pickle_memcache.flush_all()
global_cache.clear()
models.get_original_object_key.cache_clear()
models.get_original_user_key.cache_clear()
common.pickle_memcache.clear()
activitypub.WEB_OPT_OUT_DOMAINS = set()

# clear datastore
requests.post(f'http://{ndb_client.host}/reset')
self.ndb_context = ndb_client.context(
cache_policy=common.cache_policy,
global_cache=_InProcessGlobalCache(),
global_cache_policy=common.global_cache_policy,
global_cache_timeout_policy=common.global_cache_timeout_policy)
self.ndb_context = ndb_client.context(**NDB_CONTEXT_KWARGS)
self.ndb_context.__enter__()

util.now = lambda **kwargs: testutil.NOW
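Both here and in test_protocol.py above, the ndb context kwargs collapse into common.NDB_CONTEXT_KWARGS. Its definition isn't part of this commit; judging from the keyword arguments it replaces, a plausible shape in common.py would be something like:

    # Hypothetical reconstruction -- the actual NDB_CONTEXT_KWARGS isn't shown in
    # this diff. It presumably bundles the kwargs the old call sites passed
    # explicitly, referencing the policies and global_cache defined in common.py.
    NDB_CONTEXT_KWARGS = dict(
        cache_policy=cache_policy,
        global_cache=global_cache,
        global_cache_policy=global_cache_policy,
        global_cache_timeout_policy=global_cache_timeout_policy,
    )

Call sites then reduce to ndb_client.context(**common.NDB_CONTEXT_KWARGS), as the hunks above show.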
2 changes: 1 addition & 1 deletion webfinger.py
@@ -74,7 +74,7 @@ def template_vars(self):
cls = Protocol.for_request(fed='web')

if not cls:
error(f"Couldn't determine protocol for f{resource}")
error(f"Couldn't determine protocol for {resource}")

# is this a handle?
if cls.owns_id(id) is False:
