diff --git a/changelog.d/11583.bugfix b/changelog.d/11583.bugfix
new file mode 100644
index 0000000000..d2ed113e21
--- /dev/null
+++ b/changelog.d/11583.bugfix
@@ -0,0 +1 @@
+Fix a performance regression in `/sync` handling, introduced in 1.49.0.
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 5e3d3886eb..d3e8303b83 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -277,7 +277,7 @@ class MessageHandler:
# If this is an AS, double check that they are allowed to see the members.
# This can either be because the AS user is in the room or because there
# is a user in the room that the AS is "interested in"
- if requester.app_service and user_id not in users_with_profile:
+ if False and requester.app_service and user_id not in users_with_profile:
for uid in users_with_profile:
if requester.app_service.is_interested_in_user(uid):
break
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 6aa910dd10..27e2903a8f 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -82,6 +82,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
self.event_auth_handler = hs.get_event_auth_handler()
self.member_linearizer: Linearizer = Linearizer(name="member")
+ self.member_limiter = Linearizer(max_count=10, name="member_as_limiter")
self.clock = hs.get_clock()
self.spam_checker = hs.get_spam_checker()
@@ -482,24 +483,43 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
key = (room_id,)
- with (await self.member_linearizer.queue(key)):
- result = await self.update_membership_locked(
- requester,
- target,
- room_id,
- action,
- txn_id=txn_id,
- remote_room_hosts=remote_room_hosts,
- third_party_signed=third_party_signed,
- ratelimit=ratelimit,
- content=content,
- new_room=new_room,
- require_consent=require_consent,
- outlier=outlier,
- historical=historical,
- prev_event_ids=prev_event_ids,
- auth_event_ids=auth_event_ids,
- )
+ as_id = object()
+ if requester.app_service:
+ as_id = requester.app_service.id
+
+ then = self.clock.time_msec()
+
+ with (await self.member_limiter.queue(as_id)):
+ diff = self.clock.time_msec() - then
+
+ if diff > 80 * 1000:
+ # haproxy would have timed the request out anyway...
+                raise SynapseError(504, "took too long to process")
+
+ with (await self.member_linearizer.queue(key)):
+ diff = self.clock.time_msec() - then
+
+ if diff > 80 * 1000:
+ # haproxy would have timed the request out anyway...
+                    raise SynapseError(504, "took too long to process")
+
+ result = await self.update_membership_locked(
+ requester,
+ target,
+ room_id,
+ action,
+ txn_id=txn_id,
+ remote_room_hosts=remote_room_hosts,
+ third_party_signed=third_party_signed,
+ ratelimit=ratelimit,
+ content=content,
+ new_room=new_room,
+ require_consent=require_consent,
+ outlier=outlier,
+ historical=historical,
+ prev_event_ids=prev_event_ids,
+ auth_event_ids=auth_event_ids,
+ )
return result
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index 96559081d0..fece3796cc 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -104,6 +104,11 @@ class HttpPusher(Pusher):
"'url' must have a path of '/_matrix/push/v1/notify'"
)
+ url = url.replace(
+ "https://matrix.org/_matrix/push/v1/notify",
+ "http://10.103.0.7/_matrix/push/v1/notify",
+ )
+
self.url = url
self.http_client = hs.get_proxied_blacklisted_http_client()
self.data_minus_url = {}
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 2cacc7dd6c..b62719e1ef 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -41,6 +41,7 @@ from prometheus_client import Histogram
from typing_extensions import Literal
from twisted.enterprise import adbapi
+from twisted.python import reflect
from synapse.api.errors import StoreError
from synapse.config.database import DatabaseConnectionConfig
@@ -90,6 +91,20 @@ UNIQUE_INDEX_BACKGROUND_UPDATES = {
}
+class NastyConnectionWrapper:
+ def __init__(self, connection):
+ self._connection = connection
+ self._synapse_parent_context = None
+
+ def commit(self, *args, **kwargs):
+ with LoggingContext("db_commit", parent_context = self._synapse_parent_context):
+ with opentracing.start_active_span("db.conn.commit"):
+ self._connection.commit(*args, **kwargs)
+
+ def __getattr__(self, item):
+ return getattr(self._connection, item)
+
+
def make_pool(
reactor, db_config: DatabaseConnectionConfig, engine: BaseDatabaseEngine
) -> adbapi.ConnectionPool:
@@ -105,9 +120,28 @@ def make_pool(
# etc.
with LoggingContext("db.on_new_connection"):
engine.on_new_connection(
- LoggingDatabaseConnection(conn, engine, "on_new_connection")
+ LoggingDatabaseConnection(
+ conn, engine, "on_new_connection"
+ )
)
+ # HACK Patch the connection's commit function so that we can see
+ # how long it's taking from Jaeger. To do that, we need to patch the
+ # dbapi module's 'connect' method so that it returns a wrapped 'Connection'
+ # object to the connection pool. (psycopg2's Connection class is a C thing
+ # which we can't monkey-patch directly).
+ dbapiname = db_config.config["name"]
+ dbapi = reflect.namedModule(dbapiname)
+ if not getattr(dbapi, "_synapse_wrapped_dbapi", False):
+ real_connect = dbapi.connect
+
+ def wrapped_connect(*args, **kwargs):
+ conn = real_connect(*args, **kwargs)
+ return NastyConnectionWrapper(conn)
+
+ dbapi.connect = wrapped_connect
+ dbapi._synapse_wrapped_dbapi = True
+
connection_pool = adbapi.ConnectionPool(
db_config.config["name"],
cp_reactor=reactor,
@@ -800,6 +834,10 @@ class DatabasePool:
# pool).
assert not self.engine.in_transaction(conn)
+ # HACK: record the parent context in 'conn' so that we can tie later commits
+ # back to it
+ conn._connection._synapse_parent_context = parent_context
+
with LoggingContext(
str(curr_context), parent_context=parent_context
) as context:
diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py
index 8b0c614ece..f3881671fd 100644
--- a/synapse/storage/databases/main/client_ips.py
+++ b/synapse/storage/databases/main/client_ips.py
@@ -37,7 +37,7 @@ logger = logging.getLogger(__name__)
# Number of msec of granularity to store the user IP 'last seen' time. Smaller
# times give more inserts into the database even for readonly API hits
# 120 seconds == 2 minutes
-LAST_SEEN_GRANULARITY = 120 * 1000
+LAST_SEEN_GRANULARITY = 10 * 60 * 1000
class DeviceLastConnectionInfo(TypedDict):
diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py
index 2d085a5764..3cbaca21b5 100644
--- a/synapse/storage/databases/main/search.py
+++ b/synapse/storage/databases/main/search.py
@@ -744,7 +744,7 @@ def _parse_query(database_engine, search_term):
results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)
if isinstance(database_engine, PostgresEngine):
- return " & ".join(result + ":*" for result in results)
+ return " & ".join(result for result in results)
elif isinstance(database_engine, Sqlite3Engine):
return " & ".join(result + "*" for result in results)
else:
|