diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 254a9f6856..d7442c62a7 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -133,7 +133,7 @@ class PaginationHandler(object):
include_null = False
logger.info(
- "[purge] Running purge job for %d < max_lifetime <= %d (include NULLs = %s)",
+ "[purge] Running purge job for %s < max_lifetime <= %s (include NULLs = %s)",
min_ms,
max_ms,
include_null,
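A note on the format-specifier change above: presumably min_ms / max_ms can be None for the open-ended purge buckets (hence the include_null handling), and "%d" raises a TypeError on None while "%s" renders it. A minimal illustration of the difference, outside Synapse:

# Why "%s" is safer than "%d" when a value may be None (illustrative only).
min_ms = None  # e.g. no lower bound configured for this purge job

print("purge for %s < max_lifetime" % (min_ms,))  # -> "purge for None < max_lifetime"
try:
    print("purge for %d < max_lifetime" % (min_ms,))
except TypeError as exc:
    print("'%%d' rejects None: %s" % (exc,))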
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 04bc2385a2..042a605198 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -353,10 +353,12 @@ class JsonResource(HttpServer, resource.Resource):
if request.method == b"OPTIONS":
return _options_handler, "options_request_handler", {}
+ request_path = request.path.decode("ascii")
+
# Loop through all the registered callbacks to check if the method
# and path regex match
for path_entry in self.path_regexs.get(request.method, []):
- m = path_entry.pattern.match(request.path.decode("ascii"))
+ m = path_entry.pattern.match(request_path)
if m:
# We found a match!
return path_entry.callback, path_entry.servlet_classname, m.groupdict()
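The change above hoists the path decode out of the per-pattern loop so it happens once per request rather than once per registered servlet pattern. A standalone sketch of the same hoisting pattern (simplified types, not the real JsonResource):

import re
from typing import Iterable, Optional, Pattern

def match_path(raw_path: bytes, patterns: Iterable[Pattern[str]]) -> Optional["re.Match"]:
    path = raw_path.decode("ascii")  # decode once, not inside the loop
    for pattern in patterns:
        m = pattern.match(path)
        if m:
            return m
    return None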
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 143dc738c6..64f51406fb 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -16,6 +16,7 @@
""" This module contains REST servlets to do with rooms: /rooms/<paths> """
import logging
+import re
from typing import List, Optional
from six.moves.urllib import parse as urlparse
@@ -848,7 +849,12 @@ class RoomTypingRestServlet(RestServlet):
class RoomAliasListServlet(RestServlet):
- PATTERNS = client_patterns("/rooms/(?P<room_id>[^/]*)/aliases", unstable=False)
+ PATTERNS = [
+ re.compile(
+ r"^/_matrix/client/unstable/org\.matrix\.msc2432"
+ r"/rooms/(?P<room_id>[^/]*)/aliases"
+ ),
+ ]
def __init__(self, hs: "synapse.server.HomeServer"):
super().__init__()
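The servlet above now registers only on the unstable MSC2432 prefix, rather than the stable release prefix that client_patterns(..., unstable=False) had used. As a quick check that the hand-written regex still captures the room ID (the room ID below is made up):

import re

pattern = re.compile(
    r"^/_matrix/client/unstable/org\.matrix\.msc2432"
    r"/rooms/(?P<room_id>[^/]*)/aliases"
)

m = pattern.match(
    "/_matrix/client/unstable/org.matrix.msc2432/rooms/!abc123:example.org/aliases"
)
assert m is not None and m.group("room_id") == "!abc123:example.org"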
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py
index 3eeb3607f4..d90a6a890b 100644
--- a/synapse/rest/client/versions.py
+++ b/synapse/rest/client/versions.py
@@ -72,6 +72,8 @@ class VersionsRestServlet(RestServlet):
"org.matrix.label_based_filtering": True,
# Implements support for cross signing as described in MSC1756
"org.matrix.e2e_cross_signing": True,
+ # Implements additional endpoints as described in MSC2432
+ "org.matrix.msc2432": True,
},
},
)
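Advertising the flag here lets clients feature-detect the new alias endpoint before calling it. A sketch of such a check from the client side (the homeserver URL is illustrative, and requests is only used for brevity):

import requests

versions = requests.get("https://matrix.example.org/_matrix/client/versions").json()
supports_msc2432 = versions.get("unstable_features", {}).get("org.matrix.msc2432", False)
print(supports_msc2432)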
diff --git a/synapse/storage/data_stores/main/event_federation.py b/synapse/storage/data_stores/main/event_federation.py
index 1746f40adf..dcc375b840 100644
--- a/synapse/storage/data_stores/main/event_federation.py
+++ b/synapse/storage/data_stores/main/event_federation.py
@@ -62,32 +62,37 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
)
def _get_auth_chain_ids_txn(self, txn, event_ids, include_given):
+ if include_given:
+ results = set(event_ids)
+ else:
+ results = set()
+
if isinstance(self.database_engine, PostgresEngine):
# For efficiency we make the database do this if we can.
- sql = """
- WITH RECURSIVE auth_chain(event_id) AS (
- SELECT auth_id FROM event_auth WHERE event_id = ANY(?)
- UNION
- SELECT auth_id FROM event_auth
- INNER JOIN auth_chain USING (event_id)
- )
- SELECT event_id FROM auth_chain
- """
- txn.execute(sql, (list(event_ids),))
-
- results = set(event_id for event_id, in txn)
- if include_given:
- results.update(event_ids)
+ # We need to be careful about querying for large numbers of events at
+ # once, as Postgres handles such queries poorly. We work around this by
+ # only asking for the auth chain of 500 events at a time.
+ event_ids = list(event_ids)
+ chunks = [event_ids[x : x + 500] for x in range(0, len(event_ids), 500)]
+ for chunk in chunks:
+ sql = """
+ WITH RECURSIVE auth_chain(event_id) AS (
+ SELECT auth_id FROM event_auth WHERE event_id = ANY(?)
+ UNION
+ SELECT auth_id FROM event_auth
+ INNER JOIN auth_chain USING (event_id)
+ )
+ SELECT event_id FROM auth_chain
+ """
+ txn.execute(sql, (chunk,))
+
+ results.update(event_id for event_id, in txn)
return list(results)
# Database doesn't necessarily support recursive CTE, so we fall
# back to doing it manually.
- if include_given:
- results = set(event_ids)
- else:
- results = set()
base_sql = "SELECT auth_id FROM event_auth WHERE "
diff --git a/synapse/storage/data_stores/main/roommember.py b/synapse/storage/data_stores/main/roommember.py
index 042289f0e0..d5ced05701 100644
--- a/synapse/storage/data_stores/main/roommember.py
+++ b/synapse/storage/data_stores/main/roommember.py
@@ -868,6 +868,37 @@ class RoomMemberWorkerStore(EventsWorkerStore):
desc="get_membership_from_event_ids",
)
+ async def is_local_host_in_room_ignoring_users(
+ self, room_id: str, ignore_users: Collection[str]
+ ) -> bool:
+ """Check if there are any local users, excluding those in the given
+ list, in the room.
+ """
+
+ clause, args = make_in_list_sql_clause(
+ self.database_engine, "user_id", ignore_users
+ )
+
+ sql = """
+ SELECT 1 FROM local_current_membership
+ WHERE
+ room_id = ? AND membership = ?
+ AND NOT (%s)
+ LIMIT 1
+ """ % (
+ clause,
+ )
+
+ def _is_local_host_in_room_ignoring_users_txn(txn):
+ txn.execute(sql, (room_id, Membership.JOIN, *args))
+
+ return bool(txn.fetchone())
+
+ return await self.db.runInteraction(
+ "is_local_host_in_room_ignoring_users",
+ _is_local_host_in_room_ignoring_users_txn,
+ )
+
class RoomMemberBackgroundUpdateStore(SQLBaseStore):
def __init__(self, database: Database, db_conn, hs):
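For reference, make_in_list_sql_clause expands to (roughly) an IN-list on SQLite and an "= ANY(?)" clause on Postgres, so the new query only needs to find a single joined local user who isn't in ignore_users. A self-contained sketch of that shape against an in-memory SQLite table (the table is simplified and all IDs are made up):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE local_current_membership (room_id TEXT, user_id TEXT, membership TEXT)"
)
conn.execute(
    "INSERT INTO local_current_membership VALUES ('!room:example.org', '@carol:example.org', 'join')"
)

sql = """
    SELECT 1 FROM local_current_membership
    WHERE room_id = ? AND membership = ?
      AND NOT (user_id IN (?, ?))
    LIMIT 1
"""
row = conn.execute(
    sql, ("!room:example.org", "join", "@alice:example.org", "@bob:example.org")
).fetchone()
print(bool(row))  # True: a local user other than alice/bob is still joined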
diff --git a/synapse/storage/persist_events.py b/synapse/storage/persist_events.py
index a5370ed527..b950550f23 100644
--- a/synapse/storage/persist_events.py
+++ b/synapse/storage/persist_events.py
@@ -727,6 +727,7 @@ class EventsPersistenceStorage(object):
# Check if any of the given events are a local join that appear in the
# current state
+ events_to_check = []  # Event IDs that aren't among the events we're persisting
for (typ, state_key), event_id in delta.to_insert.items():
if typ != EventTypes.Member or not self.is_mine_id(state_key):
continue
@@ -736,8 +737,33 @@ class EventsPersistenceStorage(object):
if event.membership == Membership.JOIN:
return True
- # There's been a change of membership but we don't have a local join
- # event in the new events, so we need to check the full state.
+ # The event is not in `ev_ctx_rm`, so we need to pull it out of
+ # the DB.
+ events_to_check.append(event_id)
+
+ # Check if any of the changes that we don't have events for are joins.
+ if events_to_check:
+ rows = await self.main_store.get_membership_from_event_ids(events_to_check)
+ is_still_joined = any(row["membership"] == Membership.JOIN for row in rows)
+ if is_still_joined:
+ return True
+
+ # None of the new state events are local joins, so we check the database
+ # to see if there are any other local users in the room. We ignore users
+ # whose state has changed, as we've already checked their new state above.
+ users_to_ignore = [
+ state_key
+ for _, state_key in itertools.chain(delta.to_insert, delta.to_delete)
+ if self.is_mine_id(state_key)
+ ]
+
+ if await self.main_store.is_local_host_in_room_ignoring_users(
+ room_id, users_to_ignore
+ ):
+ return True
+
+ # The server will leave the room, so we go and find out which remote
+ # users will still be joined when we leave.
if current_state is None:
current_state = await self.main_store.get_current_state_ids(room_id)
current_state = dict(current_state)
@@ -746,19 +772,6 @@ class EventsPersistenceStorage(object):
current_state.update(delta.to_insert)
- event_ids = [
- event_id
- for (typ, state_key,), event_id in current_state.items()
- if typ == EventTypes.Member and self.is_mine_id(state_key)
- ]
-
- rows = await self.main_store.get_membership_from_event_ids(event_ids)
- is_still_joined = any(row["membership"] == Membership.JOIN for row in rows)
- if is_still_joined:
- return True
-
- # The server will leave the room, so we go and find out which remote
- # users will still be joined when we leave.
remote_event_ids = [
event_id
for (typ, state_key,), event_id in current_state.items()
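Taken together, the refactored check answers "does the local server still have a joined user?" in three cheap stages before falling back to scanning the full current state, which the removed block above did up front. A condensed sketch of that ladder (store methods and the delta shape are assumed to match the diff; the remote-user bookkeeping that follows in the real function is omitted):

import itertools

from synapse.api.constants import EventTypes, Membership

async def is_server_still_joined(store, room_id, delta, ev_ctx_rm, is_mine_id):
    # (1) Is a local join among the events currently being persisted?
    events_to_check = []  # membership changes we only know by event ID
    for (typ, state_key), event_id in delta.to_insert.items():
        if typ != EventTypes.Member or not is_mine_id(state_key):
            continue
        for event, _ in ev_ctx_rm:
            if event_id == event.event_id and event.membership == Membership.JOIN:
                return True
        events_to_check.append(event_id)

    # (2) Is any of the remaining membership changes a join?
    if events_to_check:
        rows = await store.get_membership_from_event_ids(events_to_check)
        if any(row["membership"] == Membership.JOIN for row in rows):
            return True

    # (3) Is any other local user, untouched by this delta, still joined?
    users_to_ignore = [
        state_key
        for _, state_key in itertools.chain(delta.to_insert, delta.to_delete)
        if is_mine_id(state_key)
    ]
    return await store.is_local_host_in_room_ignoring_users(room_id, users_to_ignore)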