
Synapse on PyPy #2760

Merged 5 commits on Apr 11, 2018.
14 changes: 7 additions & 7 deletions synapse/federation/send_queue.py
@@ -35,7 +35,7 @@
 from synapse.util.metrics import Measure
 import synapse.metrics

-from blist import sorteddict
+from sortedcontainers import SortedDict
 from collections import namedtuple

 import logging
@@ -56,19 +56,19 @@ def __init__(self, hs):
         self.is_mine_id = hs.is_mine_id

         self.presence_map = {}  # Pending presence map user_id -> UserPresenceState
-        self.presence_changed = sorteddict()  # Stream position -> user_id
+        self.presence_changed = SortedDict()  # Stream position -> user_id

         self.keyed_edu = {}  # (destination, key) -> EDU
-        self.keyed_edu_changed = sorteddict()  # stream position -> (destination, key)
+        self.keyed_edu_changed = SortedDict()  # stream position -> (destination, key)

-        self.edus = sorteddict()  # stream position -> Edu
+        self.edus = SortedDict()  # stream position -> Edu

-        self.failures = sorteddict()  # stream position -> (destination, Failure)
+        self.failures = SortedDict()  # stream position -> (destination, Failure)

-        self.device_messages = sorteddict()  # stream position -> destination
+        self.device_messages = SortedDict()  # stream position -> destination

         self.pos = 1
-        self.pos_time = sorteddict()
+        self.pos_time = SortedDict()

         # EVERYTHING IS SAD. In particular, python only makes new scopes when
         # we make a new function, so we need to make a new function so the inner
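For context on the swap above: sortedcontainers' SortedDict is a pure-Python, PyPy-friendly replacement for blist's sorteddict with the same "dict whose keys stay sorted" behaviour. A minimal sketch of how such a map keyed on stream position supports the ordered pruning the send queue does (illustrative only, not code from this PR; the variable names are stand-ins):

```python
# Sketch: a SortedDict keyed by stream position, trimmed up to an acked position.
from sortedcontainers import SortedDict

pos_time = SortedDict()       # stream position -> timestamp, kept ordered by key
pos_time[3] = 1000
pos_time[1] = 900
pos_time[2] = 950

print(list(pos_time.keys()))  # [1, 2, 3] -- iteration is always in key order

# Drop everything up to and including stream position 2, much as the send
# queue discards entries once that position has been acknowledged.
upto = pos_time.bisect_right(2)           # index of the first key greater than 2
for key in list(pos_time.keys())[:upto]:
    del pos_time[key]

print(dict(pos_time))         # {3: 1000}
```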
8 changes: 7 additions & 1 deletion synapse/metrics/__init__.py
@@ -17,6 +17,7 @@
 import functools
 import time
 import gc
+import platform

 from twisted.internet import reactor

@@ -30,6 +31,7 @@
 logger = logging.getLogger(__name__)


+running_on_pypy = platform.python_implementation() == 'PyPy'
 all_metrics = []
 all_collectors = []

@@ -174,6 +176,9 @@ def f(*args, **kwargs):
         tick_time.inc_by(end - start)
         pending_calls_metric.inc_by(num_pending)

+        if running_on_pypy:
+            return ret
+
         # Check if we need to do a manual GC (since its been disabled), and do
         # one if necessary.
         threshold = gc.get_threshold()
@@ -206,6 +211,7 @@ def f(*args, **kwargs):

     # We manually run the GC each reactor tick so that we can get some metrics
     # about time spent doing GC,
-    gc.disable()
+    if not running_on_pypy:
+        gc.disable()
 except AttributeError:
     pass
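The two hunks above disable Synapse's manual-GC machinery on PyPy: the per-tick collection and the initial gc.disable() only make sense for CPython's generational collector, whose counters and thresholds don't describe PyPy's own GC. A rough sketch of the per-tick pattern being guarded (assumed names like tick and do_work; this is not Synapse's actual metrics code):

```python
# Sketch: run CPython's GC by hand once per "tick", skipping it entirely on PyPy.
import gc
import platform
import time

running_on_pypy = platform.python_implementation() == "PyPy"

if not running_on_pypy:
    gc.disable()  # we will trigger collections ourselves

def do_work():
    pass  # placeholder for the reactor's pending calls

def tick():
    do_work()

    if running_on_pypy:
        return  # leave garbage collection to PyPy's own GC

    # gc.get_count() reports allocations per generation since the last collection;
    # collect the oldest generation whose count has crossed its threshold.
    threshold = gc.get_threshold()
    counts = gc.get_count()
    for generation in (2, 1, 0):
        if counts[generation] > threshold[generation]:
            start = time.time()
            unreachable = gc.collect(generation)
            print("gen %d GC took %.3fs, %d unreachable" % (
                generation, time.time() - start, unreachable))
            break

tick()
```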
2 changes: 1 addition & 1 deletion synapse/python_dependencies.py
@@ -34,8 +34,8 @@
     "bcrypt": ["bcrypt>=3.1.0"],
     "pillow": ["PIL"],
     "pydenticon": ["pydenticon"],
-    "blist": ["blist"],
     "pysaml2>=3.0.0": ["saml2>=3.0.0"],
+    "sortedcontainers": ["sortedcontainers"],
     "pymacaroons-pynacl": ["pymacaroons"],
     "msgpack-python>=0.3.0": ["msgpack"],
     "phonenumbers>=8.2.0": ["phonenumbers"],
2 changes: 1 addition & 1 deletion synapse/storage/_base.py
@@ -376,7 +376,7 @@ def cursor_to_dict(cursor):
         Returns:
             A list of dicts where the key is the column header.
         """
-        col_headers = list(intern(column[0]) for column in cursor.description)
+        col_headers = list(intern(str(column[0])) for column in cursor.description)
Member (reviewer), on the line above:

Could you explain why the cast to str is required under psycopg2cffi? This is such a hot path that I'm reluctant to add anything that isn't absolutely vital.

Contributor (author):

I'm not 100% sure why this is necessary: the value returned here by psycopg2cffi comes from ffi.string, which for some reason couldn't be interned as-is. I couldn't find anything on Google regarding ffi.string vs intern, and casting to str was a simple fix. It could be that it returns a unicode object, but PQfname is documented to return char* and not wchar_t*, so if it does I don't know why :)

If this is performance-critical, it might be worth checking the actual runtime type. Although cursor_to_dict might be conceptually suboptimal anyway if this really is on a super hot path?

         results = list(
             dict(zip(col_headers, row)) for row in cursor
         )
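To illustrate the behaviour the thread above is circling around (this is a guess at one plausible cause, not a confirmed diagnosis): on Python 2, which Synapse ran on at the time, intern() only accepts exact str instances and raises TypeError for unicode objects or str subclasses, so wrapping the driver-supplied column name in str() makes it intern-able regardless of the concrete type psycopg2cffi returns.

```python
# Python 2 sketch: intern() rejects str subclasses (and unicode), so cast first.
class ColumnName(str):
    """Stand-in for whatever string-ish type a DB driver might hand back."""
    pass

name = ColumnName("user_id")

try:
    intern(name)                  # TypeError: can't intern subclass of string
except TypeError as e:
    print("intern rejected it: %s" % e)

interned = intern(str(name))      # casting to a plain str first works
print(interned)                   # user_id
```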
5 changes: 5 additions & 0 deletions synapse/storage/engines/__init__.py
@@ -18,6 +18,7 @@
 from .sqlite3 import Sqlite3Engine

 import importlib
+import platform


 SUPPORTED_MODULE = {
@@ -31,6 +32,10 @@ def create_engine(database_config):
     engine_class = SUPPORTED_MODULE.get(name, None)

     if engine_class:
+        # pypy requires psycopg2cffi rather than psycopg2
+        if (name == "psycopg2" and
+                platform.python_implementation() == "PyPy"):
+            name = "psycopg2cffi"
         module = importlib.import_module(name)
         return engine_class(module, database_config)

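The hunk above rewrites the module name before importlib resolves it, so a PyPy deployment loads psycopg2cffi while the rest of Synapse keeps calling the engine "psycopg2". A minimal standalone sketch of the same substitution (the function name load_db_driver is made up for illustration, not Synapse's API):

```python
# Sketch: choose the Postgres driver module at runtime, swapping in the CFFI
# port when running under PyPy (psycopg2 itself is CPython-only).
import importlib
import platform

def load_db_driver(name="psycopg2"):
    if name == "psycopg2" and platform.python_implementation() == "PyPy":
        name = "psycopg2cffi"  # API-compatible port of psycopg2 for PyPy
    return importlib.import_module(name)

if __name__ == "__main__":
    driver = load_db_driver()
    print("loaded %s" % driver.__name__)
```

psycopg2cffi also ships a compat.register() helper that aliases it under the psycopg2 name; the PR instead keeps the explicit swap at the single place the module is imported.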
2 changes: 1 addition & 1 deletion synapse/types.py
@@ -169,7 +169,7 @@ def is_valid(cls, s):
         except Exception:
             return False

-    __str__ = to_string
+    __repr__ = to_string


 class UserID(DomainSpecificString):
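A stripped-down illustration (not from the PR) of what binding to_string to __repr__ rather than __str__ changes observably: str() falls back to __repr__ when no __str__ is defined, so both forms, plus anything that formats with repr() (container printing, %r logging), now yield the readable "@user:domain" string.

```python
# Sketch of the effect of __repr__ = to_string on a namedtuple-based ID class.
from collections import namedtuple

class DomainSpecificString(namedtuple("DomainSpecificString", ("localpart", "domain"))):
    def to_string(self):
        return "%s%s:%s" % (self.SIGIL, self.localpart, self.domain)

    __repr__ = to_string

class UserID(DomainSpecificString):
    SIGIL = "@"

user = UserID("alice", "example.com")
print(str(user))   # @alice:example.com -- str() falls back to __repr__
print(repr(user))  # @alice:example.com
print([user])      # [@alice:example.com] -- containers always format with repr()
```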
4 changes: 2 additions & 2 deletions synapse/util/caches/stream_change_cache.py
@@ -16,7 +16,7 @@
 from synapse.util.caches import register_cache, CACHE_SIZE_FACTOR


-from blist import sorteddict
+from sortedcontainers import SortedDict
 import logging


@@ -35,7 +35,7 @@ class StreamChangeCache(object):
     def __init__(self, name, current_stream_pos, max_size=10000, prefilled_cache={}):
         self._max_size = int(max_size * CACHE_SIZE_FACTOR)
         self._entity_to_key = {}
-        self._cache = sorteddict()
+        self._cache = SortedDict()
         self._earliest_known_stream_pos = current_stream_pos
         self.name = name
         self.metrics = register_cache(self.name, self._cache)