diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
index 26be377d03..a9888381b4 100644
--- a/synapse/api/filtering.py
+++ b/synapse/api/filtering.py
@@ -43,7 +43,7 @@ if TYPE_CHECKING:
from synapse.server import HomeServer
FILTER_SCHEMA = {
- "additionalProperties": False,
+ "additionalProperties": True, # Allow new fields for forward compatibility
"type": "object",
"properties": {
"limit": {"type": "number"},
@@ -63,7 +63,7 @@ FILTER_SCHEMA = {
}
ROOM_FILTER_SCHEMA = {
- "additionalProperties": False,
+ "additionalProperties": True, # Allow new fields for forward compatibility
"type": "object",
"properties": {
"not_rooms": {"$ref": "#/definitions/room_id_array"},
@@ -77,7 +77,7 @@ ROOM_FILTER_SCHEMA = {
}
ROOM_EVENT_FILTER_SCHEMA = {
- "additionalProperties": False,
+ "additionalProperties": True, # Allow new fields for forward compatibility
"type": "object",
"properties": {
"limit": {"type": "number"},
@@ -143,7 +143,7 @@ USER_FILTER_SCHEMA = {
},
},
},
- "additionalProperties": False,
+ "additionalProperties": True, # Allow new fields for forward compatibility
}
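Note: flipping additionalProperties to True is what makes unknown filter
fields non-fatal. A minimal standalone sketch of the behaviour, using the
jsonschema library that Synapse validates these schemas with (the extra
field name is made up):

    import jsonschema

    schema = {
        "type": "object",
        "properties": {"limit": {"type": "number"}},
        "additionalProperties": True,  # was False
    }

    # A filter from a client that knows about a newer field than this server.
    filter_json = {"limit": 10, "org.example.new_field": 1}

    # Passes now; with additionalProperties False this raised ValidationError.
    jsonschema.validate(filter_json, schema)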
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index bb065f9f2f..6034a0346e 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -43,7 +43,7 @@ class MetricsConfig(Config):
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.enable_metrics = config.get("enable_metrics", False)
- self.enable_legacy_metrics = config.get("enable_legacy_metrics", True)
+ self.enable_legacy_metrics = config.get("enable_legacy_metrics", False)
self.report_stats = config.get("report_stats", None)
self.report_stats_endpoint = config.get(
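Note: with the default flipped, a homeserver.yaml that never mentions
enable_legacy_metrics now has the legacy metric names disabled; deployments
whose dashboards still rely on them must opt in explicitly. A minimal sketch
of the lookup semantics, with a plain dict standing in for the parsed config:

    parsed = {}  # key absent from homeserver.yaml
    assert parsed.get("enable_legacy_metrics", False) is False

    parsed = {"enable_legacy_metrics": True}  # explicit opt-in
    assert parsed.get("enable_legacy_metrics", False) is True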
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 2670e561d7..0066d63987 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -256,7 +256,7 @@ class BasePresenceHandler(abc.ABC):
with the app.
"""
- async def update_external_syncs_row(
+ async def update_external_syncs_row( # noqa: B027 (no-op by design)
self, process_id: str, user_id: str, is_syncing: bool, sync_time_msec: int
) -> None:
"""Update the syncing users for an external process as a delta.
@@ -272,7 +272,9 @@ class BasePresenceHandler(abc.ABC):
sync_time_msec: Time in ms when the user was last syncing
"""
- async def update_external_syncs_clear(self, process_id: str) -> None:
+ async def update_external_syncs_clear( # noqa: B027 (no-op by design)
+ self, process_id: str
+ ) -> None:
"""Marks all users that had been marked as syncing by a given process
as offline.
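Note: B027 is flake8-bugbear's "empty method in an abstract base class has no
@abstractmethod decorator" check, which normally catches a forgotten
decorator. The methods suppressed in this diff are intentional no-op hooks
that only some subclasses override, hence "no-op by design". A standalone
sketch of the pattern (class and method names are made up):

    import abc

    class BasePresence(abc.ABC):
        def on_external_sync(self, process_id: str) -> None:  # noqa: B027
            """Optional hook; the base implementation deliberately does nothing."""

    class WorkerPresence(BasePresence):
        def on_external_sync(self, process_id: str) -> None:
            print(f"recording sync from {process_id}")

    WorkerPresence().on_external_sync("worker-1")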
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 984f4ad5af..7e6d6490aa 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -1077,6 +1077,19 @@ class RoomCreationHandler:
for_batch: bool,
**kwargs: Any,
) -> Tuple[EventBase, synapse.events.snapshot.EventContext]:
+ """
+ Creates an event and its associated event context.
+ Args:
+ etype: the type of event to be created
+ content: content of the event
+ for_batch: whether the event is being created for batch persisting. If
+ True, this will create the event using prev_event_ids, and will build
+ the event context from the parameters state_map and current_state_group;
+ those parameters must therefore be provided when for_batch is True.
+ The resulting event and context are suitable for being batched up
+ and bulk persisted to the database with other similarly created
+ events.
+ """
nonlocal depth
nonlocal prev_event
@@ -1141,6 +1154,14 @@ class RoomCreationHandler:
)
current_state_group = event_to_state_group[member_event_id]
+ # we need the state group of the membership event as it is the current state group
+ event_to_state = (
+ await self._storage_controllers.state.get_state_group_for_events(
+ [member_event_id]
+ )
+ )
+ current_state_group = event_to_state[member_event_id]
+
events_to_send = []
# We treat the power levels override specially as this needs to be one
# of the first events that get sent into a room.
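Note: the docstring added above pins down the for_batch contract. A
self-contained sketch of the calling convention it implies (the stand-in
types, event list, and create_event body are illustrative, not Synapse's
actual call sites):

    import asyncio
    from typing import Any, Dict, List, Tuple

    Event = Dict[str, Any]      # stand-in for EventBase
    Context = Dict[str, Any]    # stand-in for EventContext

    async def create_event(
        etype: str, content: Dict[str, Any], for_batch: bool
    ) -> Tuple[Event, Context]:
        # With for_batch=True, Synapse builds the context from state_map and
        # current_state_group instead of querying the database per event.
        return {"type": etype, "content": content}, {"batched": for_batch}

    async def main() -> None:
        initial_state = [
            ("m.room.name", {"name": "demo"}),
            ("m.room.topic", {"topic": "demo"}),
        ]
        events_to_send: List[Tuple[Event, Context]] = []
        for etype, content in initial_state:
            events_to_send.append(await create_event(etype, content, True))
        # ...the collected pairs are then bulk persisted in one batch.

    asyncio.run(main())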
diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py
index 7774f1967d..05706b598c 100644
--- a/synapse/rest/client/login.py
+++ b/synapse/rest/client/login.py
@@ -536,7 +536,7 @@ def _get_auth_flow_dict_for_idp(idp: SsoIdentityProvider) -> JsonDict:
class RefreshTokenServlet(RestServlet):
- PATTERNS = (re.compile("^/_matrix/client/v1/refresh$"),)
+ PATTERNS = client_patterns("/refresh$")
def __init__(self, hs: "HomeServer"):
self._auth_handler = hs.get_auth_handler()
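Note: the change above swaps a hand-written v1-only pattern for the shared
client_patterns helper. Assuming that helper's usual defaults in
synapse/rest/client/_base.py (the unstable prefix plus the stable release
prefixes), the servlet now registers roughly as:

    from synapse.rest.client._base import client_patterns

    PATTERNS = client_patterns("/refresh$")
    # approximately equivalent to compiling:
    #   ^/_matrix/client/unstable/refresh$
    #   ^/_matrix/client/r0/refresh$
    #   ^/_matrix/client/v3/refresh$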
diff --git a/synapse/server.py b/synapse/server.py
index df3a1cb405..c4e025af22 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -315,7 +315,7 @@ class HomeServer(metaclass=abc.ABCMeta):
if self.config.worker.run_background_tasks:
self.setup_background_tasks()
- def start_listening(self) -> None:
+ def start_listening(self) -> None: # noqa: B027 (no-op by design)
"""Start the HTTP, manhole, metrics, etc listeners
Does nothing in this base class; overridden in derived classes to start the
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index bf42aeb8d1..69abf6fa87 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -50,7 +50,7 @@ class SQLBaseStore(metaclass=ABCMeta):
self.external_cached_functions: Dict[str, CachedFunction] = {}
- def process_replication_rows(
+ def process_replication_rows( # noqa: B027 (no-op by design)
self,
stream_name: str,
instance_name: str,
diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py
index 6e8aeed7b4..9e31798ab1 100644
--- a/synapse/storage/databases/main/events_bg_updates.py
+++ b/synapse/storage/databases/main/events_bg_updates.py
@@ -1435,16 +1435,16 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
),
)
- endpoint = None
row = txn.fetchone()
if row:
endpoint = row[0]
+ else:
+ # if the query didn't return a row, we must be almost done. We just
+ # need to go up to the recorded max_stream_ordering.
+ endpoint = max_stream_ordering_inclusive
- where_clause = "stream_ordering > ?"
- args = [min_stream_ordering_exclusive]
- if endpoint:
- where_clause += " AND stream_ordering <= ?"
- args.append(endpoint)
+ where_clause = "stream_ordering > ? AND stream_ordering <= ?"
+ args = [min_stream_ordering_exclusive, endpoint]
# now do the updates.
txn.execute(
@@ -1458,13 +1458,13 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
)
logger.info(
- "populated new `events` columns up to %s/%i: updated %i rows",
+ "populated new `events` columns up to %i/%i: updated %i rows",
endpoint,
max_stream_ordering_inclusive,
txn.rowcount,
)
- if endpoint is None:
+ if endpoint >= max_stream_ordering_inclusive:
# we're done
return True
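Note: with endpoint now always assigned, every pass of this background update
works on a window (lo, hi] and termination reduces to the plain comparison at
the end. A standalone sketch of the invariant (batch selection simplified to
arithmetic):

    def run_batches(min_exclusive: int, max_inclusive: int, batch: int) -> None:
        lo = min_exclusive
        while True:
            # Stands in for the SELECT that finds the end of the next batch;
            # when that returns no row, we fall back to max_inclusive.
            hi = min(lo + batch, max_inclusive)
            # ... UPDATE ... WHERE stream_ordering > lo AND stream_ordering <= hi
            if hi >= max_inclusive:
                return  # done
            lo = hi

    run_batches(0, 10_000, batch=1_000)  # ten windows of 1,000 rows each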
diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py
index 9bf74bbf59..0c4fd88914 100644
--- a/synapse/storage/engines/postgres.py
+++ b/synapse/storage/engines/postgres.py
@@ -81,8 +81,8 @@ class PostgresEngine(
allow_unsafe_locale = self.config.get("allow_unsafe_locale", False)
# Are we on a supported PostgreSQL version?
- if not allow_outdated_version and self._version < 100000:
- raise RuntimeError("Synapse requires PostgreSQL 10 or above.")
+ if not allow_outdated_version and self._version < 110000:
+ raise RuntimeError("Synapse requires PostgreSQL 11 or above.")
with db_conn.cursor() as txn:
txn.execute("SHOW SERVER_ENCODING")
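Note: the thresholds compare against PostgreSQL's numeric server version; for
PostgreSQL 10 and later the encoding is major * 10000 + minor, so:

    assert 11 * 10000 + 0 == 110000   # 11.0, the new minimum version
    assert 10 * 10000 + 22 == 100022  # a 10.x release, now rejected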
|