 changelog.d/11583.bugfix                     |  1 +
 changelog.d/12367.feature                    |  1 +
 synapse/handlers/message.py                  |  4 ++--
 synapse/handlers/room_member.py              | 14 ++++++++++++++
 synapse/push/httppusher.py                   |  5 +++++
 synapse/replication/slave/storage/devices.py | 12 +++++++++++-
 synapse/rest/client/account.py               |  3 +++
 synapse/storage/databases/main/__init__.py   | 12 +++++++++++-
 synapse/storage/databases/main/client_ips.py |  2 +-
 synapse/storage/databases/main/search.py     |  2 +-
 10 files changed, 50 insertions(+), 6 deletions(-)
diff --git a/changelog.d/11583.bugfix b/changelog.d/11583.bugfix
new file mode 100644
index 0000000000..d2ed113e21
--- /dev/null
+++ b/changelog.d/11583.bugfix
@@ -0,0 +1 @@
+Fix a performance regression in `/sync` handling, introduced in 1.49.0.
diff --git a/changelog.d/12367.feature b/changelog.d/12367.feature
new file mode 100644
index 0000000000..34bb60e966
--- /dev/null
+++ b/changelog.d/12367.feature
@@ -0,0 +1 @@
+Reduce overhead of restarting synchrotrons.
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 766f597a55..a3021d4ada 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -277,8 +277,8 @@ class MessageHandler:
         # If this is an AS, double check that they are allowed to see the members.
         # This can either be because the AS user is in the room or because there
         # is a user in the room that the AS is "interested in"
-        if requester.app_service and user_id not in users_with_profile:
-            for uid in users_with_profile:
+        if False and requester.app_service and user_id not in users_with_profile:  # type: ignore[unreachable]
+            for uid in users_with_profile:  # type: ignore[unreachable]
                 if requester.app_service.is_interested_in_user(uid):
                     break
             else:
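
A side note on the two `# type: ignore[unreachable]` comments above: because the condition now starts with a literal `False`, mypy (when configured to warn about unreachable code) reports both the rest of the `and` expression and the guarded loop as unreachable, so each flagged line needs its own ignore. A minimal sketch of the same situation, not part of this diff:

    # illustrative only; check with `mypy --warn-unreachable`
    def demo(flag: bool) -> None:
        if False and flag:  # type: ignore[unreachable]
            print("never runs")  # type: ignore[unreachable]
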
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 0785e31114..938965f303 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -513,10 +513,24 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         if requester.app_service:
             as_id = requester.app_service.id
 
+        then = self.clock.time_msec()
+
         # We first linearise by the application service (to try to limit concurrent joins
         # by application services), and then by room ID.
         with (await self.member_as_limiter.queue(as_id)):
+            diff = self.clock.time_msec() - then
+
+            if diff > 80 * 1000:
+                # haproxy would have timed the request out anyway...
+                raise SynapseError(504, "took too long to process")
+
             with (await self.member_linearizer.queue(key)):
+                diff = self.clock.time_msec() - then
+
+                if diff > 80 * 1000:
+                    # haproxy would have timed the request out anyway...
+                    raise SynapseError(504, "took too long to process")
+
                 result = await self.update_membership_locked(
                     requester,
                     target,
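
The two identical guards above give up with a 504 once a join request has already spent more than 80 seconds queued behind the per-appservice limiter or the per-room linearizer, on the basis that the reverse proxy in front of Synapse will have abandoned the request by then anyway. A rough sketch of the same check factored into a helper (a hypothetical refactor, not part of this diff):

    # illustrative only; `clock` is assumed to be Synapse's Clock (time_msec in ms)
    from synapse.api.errors import SynapseError

    QUEUE_TIMEOUT_MS = 80 * 1000

    def raise_if_timed_out(clock, queue_start_ms: int) -> None:
        """Give up if the request has already waited longer than the proxy's timeout."""
        if clock.time_msec() - queue_start_ms > QUEUE_TIMEOUT_MS:
            # haproxy would have timed the request out anyway
            raise SynapseError(504, "took too long to process")
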
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index 5818344520..f3c4419932 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -104,6 +104,11 @@ class HttpPusher(Pusher):
                 "'url' must have a path of '/_matrix/push/v1/notify'"
             )
 
+        url = url.replace(
+            "https://matrix.org/_matrix/push/v1/notify",
+            "http://10.103.0.7/_matrix/push/v1/notify",
+        )
+
         self.url = url
         self.http_client = hs.get_proxied_blacklisted_http_client()
         self.data_minus_url = {}
diff --git a/synapse/replication/slave/storage/devices.py b/synapse/replication/slave/storage/devices.py
index 0ffd34f1da..a0dd9d2b89 100644
--- a/synapse/replication/slave/storage/devices.py
+++ b/synapse/replication/slave/storage/devices.py
@@ -47,8 +47,18 @@ class SlavedDeviceStore(EndToEndKeyWorkerStore, DeviceWorkerStore, BaseSlavedSto
             ],
         )
         device_list_max = self._device_list_id_gen.get_current_token()
+        device_list_prefill, min_device_list_id = self.db_pool.get_cache_dict(
+            db_conn,
+            "device_lists_stream",
+            entity_column="user_id",
+            stream_column="stream_id",
+            max_value=device_list_max,
+            limit=1000,
+        )
         self._device_list_stream_cache = StreamChangeCache(
-            "DeviceListStreamChangeCache", device_list_max
+            "DeviceListStreamChangeCache",
+            min_device_list_id,
+            prefilled_cache=device_list_prefill,
         )
         self._user_signature_stream_cache = StreamChangeCache(
             "UserSignatureStreamChangeCache", device_list_max
diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py
index 5587cae98a..d7821cbfa5 100644
--- a/synapse/rest/client/account.py
+++ b/synapse/rest/client/account.py
@@ -467,6 +467,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
         next_link = body.get("next_link")  # Optional param
 
         msisdn = phone_number_to_msisdn(country, phone_number)
+        logger.info("Request #%s to verify ownership of %s", send_attempt, msisdn)
 
         if not await check_3pid_allowed(self.hs, "msisdn", msisdn):
             raise SynapseError(
@@ -494,6 +495,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
                 await self.hs.get_clock().sleep(random.randint(1, 10) / 10)
                 return 200, {"sid": random_string(16)}
 
+            logger.info("MSISDN %s is already in use by %s", msisdn, existing_user_id)
             raise SynapseError(400, "MSISDN is already in use", Codes.THREEPID_IN_USE)
 
         if not self.hs.config.registration.account_threepid_delegate_msisdn:
@@ -518,6 +520,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
         threepid_send_requests.labels(type="msisdn", reason="add_threepid").observe(
             send_attempt
         )
+        logger.info("MSISDN %s: got response from identity server: %s", msisdn, ret)
 
         return 200, ret
 
diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py
index f024761ba7..68abf6783f 100644
--- a/synapse/storage/databases/main/__init__.py
+++ b/synapse/storage/databases/main/__init__.py
@@ -183,8 +183,18 @@ class DataStore(
         super().__init__(database, db_conn, hs)
 
         device_list_max = self._device_list_id_gen.get_current_token()
+        device_list_prefill, min_device_list_id = self.db_pool.get_cache_dict(
+            db_conn,
+            "device_lists_stream",
+            entity_column="user_id",
+            stream_column="stream_id",
+            max_value=device_list_max,
+            limit=1000,
+        )
         self._device_list_stream_cache = StreamChangeCache(
-            "DeviceListStreamChangeCache", device_list_max
+            "DeviceListStreamChangeCache",
+            min_device_list_id,
+            prefilled_cache=device_list_prefill,
         )
         self._user_signature_stream_cache = StreamChangeCache(
             "UserSignatureStreamChangeCache", device_list_max
diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py
index 8b0c614ece..f3881671fd 100644
--- a/synapse/storage/databases/main/client_ips.py
+++ b/synapse/storage/databases/main/client_ips.py
@@ -37,7 +37,7 @@ logger = logging.getLogger(__name__)
 # Number of msec of granularity to store the user IP 'last seen' time. Smaller
 # times give more inserts into the database even for readonly API hits
-# 120 seconds == 2 minutes
-LAST_SEEN_GRANULARITY = 120 * 1000
+# 600 seconds == 10 minutes
+LAST_SEEN_GRANULARITY = 10 * 60 * 1000
 
 
 class DeviceLastConnectionInfo(TypedDict):
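
Bumping the granularity from two minutes to ten reduces write load: a client's "last seen" row is only rewritten when the existing one is older than LAST_SEEN_GRANULARITY, so a client that syncs every few seconds now triggers at most one update every ten minutes instead of every two. A minimal sketch of that throttling pattern (illustrative, not Synapse's actual code):

    # illustrative only
    from typing import Optional

    LAST_SEEN_GRANULARITY = 10 * 60 * 1000  # ms

    def should_persist_last_seen(now_ms: int, last_seen_ms: Optional[int]) -> bool:
        """Skip the database write if we recorded this client recently enough."""
        if last_seen_ms is None:
            return True
        return now_ms - last_seen_ms >= LAST_SEEN_GRANULARITY
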
diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py
index d4482c06db..79abe758e6 100644
--- a/synapse/storage/databases/main/search.py
+++ b/synapse/storage/databases/main/search.py
@@ -774,7 +774,7 @@ def _parse_query(database_engine, search_term):
     results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)
 
     if isinstance(database_engine, PostgresEngine):
-        return " & ".join(result + ":*" for result in results)
+        return " & ".join(result for result in results)
     elif isinstance(database_engine, Sqlite3Engine):
         return " & ".join(result + "*" for result in results)
     else:
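
Dropping the `:*` suffix means the Postgres full-text query only matches whole words rather than treating each term as a prefix. For a search of "foo bar" (an example input, not taken from the diff), _parse_query now behaves roughly like this:

    # illustrative only: before/after output of _parse_query on PostgreSQL
    import re

    search_term = "foo bar"
    results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)

    print(" & ".join(result + ":*" for result in results))  # before: foo:* & bar:*  (prefix match)
    print(" & ".join(result for result in results))         # after:  foo & bar      (whole words only)
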