diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml
index fed3a41586..8019f4c250 100644
--- a/.github/workflows/release-artifacts.yml
+++ b/.github/workflows/release-artifacts.yml
@@ -130,7 +130,7 @@ jobs:
python-version: "3.x"
- name: Install cibuildwheel
- run: python -m pip install cibuildwheel==2.9.0
+ run: python -m pip install cibuildwheel==2.16.2
- name: Set up QEMU to emulate aarch64
if: matrix.arch == 'aarch64'
diff --git a/CHANGES.md b/CHANGES.md
index 2e7f199299..9358a22525 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,28 @@
+# Synapse 1.96.1 (2023-11-17)
+
+Synapse will soon be forked by Element under an AGPLv3.0 licence (with CLA, for
+proprietary dual licensing). You can read more about this here:
+
+* https://matrix.org/blog/2023/11/06/future-of-synapse-dendrite/
+* https://element.io/blog/element-to-adopt-agplv3/
+
+The Matrix.org Foundation copy of the project will be archived. Any changes needed
+by server administrators will be communicated via our usual
+[announcement channels](https://matrix.to/#/#homeowners:matrix.org), but we are
+striving to make this as seamless as possible.
+
+This minor release was needed only because of CI-related trouble on [v1.96.0](https://github.com/matrix-org/synapse/releases/tag/v1.96.0), which was never released.
+
+### Internal Changes
+
+- Fix building of wheels in CI. ([\#16653](https://github.com/matrix-org/synapse/issues/16653))
+
+# Synapse 1.96.0 (2023-11-16)
+
+### Bugfixes
+
+- Fix "'int' object is not iterable" error in `set_device_id_for_pushers` background update introduced in Synapse 1.95.0. ([\#16594](https://github.com/matrix-org/synapse/issues/16594))
+
# Synapse 1.96.0rc1 (2023-10-31)
### Features
diff --git a/changelog.d/16654.doc b/changelog.d/16654.doc
new file mode 100644
index 0000000000..61019e0367
--- /dev/null
+++ b/changelog.d/16654.doc
@@ -0,0 +1 @@
+Provide an example for a common encrypted media response from the admin user media API and mention possible null values.
diff --git a/changelog.d/16655.misc b/changelog.d/16655.misc
new file mode 100644
index 0000000000..3b1cc2185d
--- /dev/null
+++ b/changelog.d/16655.misc
@@ -0,0 +1 @@
+More efficiently handle no-op `POSITION` over replication.
diff --git a/changelog.d/16656.misc b/changelog.d/16656.misc
new file mode 100644
index 0000000000..6763685b9d
--- /dev/null
+++ b/changelog.d/16656.misc
@@ -0,0 +1 @@
+Reduce max concurrency of background tasks, reducing potential max DB load.
diff --git a/changelog.d/16657.misc b/changelog.d/16657.misc
new file mode 100644
index 0000000000..c444aa15e4
--- /dev/null
+++ b/changelog.d/16657.misc
@@ -0,0 +1 @@
+Speed up purge room by adding an index to `event_push_summary`.
diff --git a/changelog.d/16659.misc b/changelog.d/16659.misc
new file mode 100644
index 0000000000..72079d1f63
--- /dev/null
+++ b/changelog.d/16659.misc
@@ -0,0 +1 @@
+Bump requests-toolbelt from 0.10.1 to 1.0.0.
diff --git a/changelog.d/16660.misc b/changelog.d/16660.misc
new file mode 100644
index 0000000000..6763685b9d
--- /dev/null
+++ b/changelog.d/16660.misc
@@ -0,0 +1 @@
+Reduce max concurrency of background tasks, reducing potential max DB load.
diff --git a/debian/changelog b/debian/changelog
index 5470f3877f..1d9a135d98 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,15 @@
+matrix-synapse-py3 (1.96.1) stable; urgency=medium
+
+ * New synapse release 1.96.1.
+
+ -- Synapse Packaging team <packages@matrix.org> Fri, 17 Nov 2023 12:48:45 +0000
+
+matrix-synapse-py3 (1.96.0) stable; urgency=medium
+
+ * New synapse release 1.96.0.
+
+ -- Synapse Packaging team <packages@matrix.org> Thu, 16 Nov 2023 17:54:26 +0000
+
matrix-synapse-py3 (1.96.0~rc1) stable; urgency=medium
* New Synapse release 1.96.0rc1.
diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md
index 66089c634b..e8e492d095 100644
--- a/docs/admin_api/user_admin_api.md
+++ b/docs/admin_api/user_admin_api.md
@@ -618,6 +618,16 @@ A response body like the following is returned:
"quarantined_by": null,
"safe_from_quarantine": false,
"upload_name": "test2.png"
+ },
+ {
+ "created_ts": 300400,
+ "last_access_ts": 300700,
+ "media_id": "BzYNLRUgGHphBkdKGbzXwbjX",
+ "media_length": 1337,
+ "media_type": "application/octet-stream",
+ "quarantined_by": null,
+ "safe_from_quarantine": false,
+ "upload_name": null
}
],
"next_token": 3,
@@ -679,16 +689,17 @@ The following fields are returned in the JSON response body:
- `media` - An array of objects, each containing information about a media.
Media objects contain the following fields:
- `created_ts` - integer - Timestamp when the content was uploaded in ms.
- - `last_access_ts` - integer - Timestamp when the content was last accessed in ms.
+ - `last_access_ts` - integer or null - Timestamp when the content was last accessed in ms.
+    Null if the media has not been accessed yet.
- `media_id` - string - The id used to refer to the media. Details about the format
are documented under
[media repository](../media_repository.md).
- `media_length` - integer - Length of the media in bytes.
- `media_type` - string - The MIME-type of the media.
- - `quarantined_by` - string - The user ID that initiated the quarantine request
- for this media.
+ - `quarantined_by` - string or null - The user ID that initiated the quarantine request
+ for this media. Null if not quarantined.
- `safe_from_quarantine` - bool - Status if this media is safe from quarantining.
- - `upload_name` - string - The name the media was uploaded with.
+ - `upload_name` - string or null - The name the media was uploaded with. Null if not provided during upload.
- `next_token`: integer - Indication for pagination. See above.
- `total` - integer - Total number of media.
diff --git a/poetry.lock b/poetry.lock
index 32b12d8076..05899b460e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand.
[[package]]
name = "alabaster"
@@ -2273,13 +2273,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-toolbelt"
-version = "0.10.1"
+version = "1.0.0"
description = "A utility belt for advanced users of python-requests"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
- {file = "requests-toolbelt-0.10.1.tar.gz", hash = "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d"},
- {file = "requests_toolbelt-0.10.1-py2.py3-none-any.whl", hash = "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7"},
+ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
+ {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
]
[package.dependencies]
diff --git a/pyproject.toml b/pyproject.toml
index 825ff73f95..47c255e395 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
-version = "1.96.0rc1"
+version = "1.96.1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index 1af6d77545..98e6e42563 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -383,7 +383,7 @@ class DeviceWorkerHandler:
)
DEVICE_MSGS_DELETE_BATCH_LIMIT = 1000
- DEVICE_MSGS_DELETE_SLEEP_MS = 1000
+ DEVICE_MSGS_DELETE_SLEEP_MS = 100
async def _delete_device_messages(
self,
diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py
index cc34dfb322..1f6402c2da 100644
--- a/synapse/replication/tcp/streams/_base.py
+++ b/synapse/replication/tcp/streams/_base.py
@@ -305,6 +305,14 @@ class BackfillStream(Stream):
# which means we need to negate it.
return -self.store._backfill_id_gen.get_minimal_local_current_token()
+ def can_discard_position(
+ self, instance_name: str, prev_token: int, new_token: int
+ ) -> bool:
+ # Backfill stream can't go backwards, so we know we can ignore any
+ # positions where the tokens are from before the current token.
+
+ return new_token <= self.current_token(instance_name)
+
class PresenceStream(_StreamFromIdGen):
@attr.s(slots=True, frozen=True, auto_attribs=True)
@@ -519,6 +527,14 @@ class CachesStream(Stream):
return self.store._cache_id_gen.get_minimal_local_current_token()
return self.current_token(self.local_instance_name)
+ def can_discard_position(
+ self, instance_name: str, prev_token: int, new_token: int
+ ) -> bool:
+ # Caches streams can't go backwards, so we know we can ignore any
+ # positions where the tokens are from before the current token.
+
+ return new_token <= self.current_token(instance_name)
+
class DeviceListsStream(_StreamFromIdGen):
"""Either a user has updated their devices or a remote server needs to be
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py
index 39556481ff..dd8957680a 100644
--- a/synapse/storage/databases/main/event_push_actions.py
+++ b/synapse/storage/databases/main/event_push_actions.py
@@ -311,6 +311,14 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
self._background_drop_null_thread_id_indexes,
)
+ # Add a room ID index to speed up room deletion
+ self.db_pool.updates.register_background_index_update(
+ "event_push_summary_index_room_id",
+ index_name="event_push_summary_index_room_id",
+ table="event_push_summary",
+ columns=["room_id"],
+ )
+
async def _background_drop_null_thread_id_indexes(
self, progress: JsonDict, batch_size: int
) -> int:
diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py
index a6a1671bd6..8f36cfce12 100644
--- a/synapse/storage/databases/main/pusher.py
+++ b/synapse/storage/databases/main/pusher.py
@@ -601,7 +601,7 @@ class PusherBackgroundUpdatesStore(SQLBaseStore):
(last_pusher_id, batch_size),
)
- rows = txn.fetchall()
+ rows = cast(List[Tuple[int, Optional[str], Optional[str]]], txn.fetchall())
if len(rows) == 0:
return 0
@@ -617,7 +617,7 @@ class PusherBackgroundUpdatesStore(SQLBaseStore):
txn=txn,
table="pushers",
key_names=("id",),
- key_values=[row[0] for row in rows],
+ key_values=[(row[0],) for row in rows],
value_names=("device_id", "access_token"),
# If there was already a device_id on the pusher, we only want to clear
# the access_token column, so we keep the existing device_id. Otherwise,
diff --git a/synapse/storage/schema/main/delta/83/06_event_push_summary_room.sql b/synapse/storage/schema/main/delta/83/06_event_push_summary_room.sql
new file mode 100644
index 0000000000..1aae1b7557
--- /dev/null
+++ b/synapse/storage/schema/main/delta/83/06_event_push_summary_room.sql
@@ -0,0 +1,17 @@
+/* Copyright 2023 The Matrix.org Foundation C.I.C
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
+ (8306, 'event_push_summary_index_room_id', '{}');
diff --git a/synapse/util/task_scheduler.py b/synapse/util/task_scheduler.py
index 29c561e555..b254d3f84c 100644
--- a/synapse/util/task_scheduler.py
+++ b/synapse/util/task_scheduler.py
@@ -71,7 +71,7 @@ class TaskScheduler:
# Time before a complete or failed task is deleted from the DB
KEEP_TASKS_FOR_MS = 7 * 24 * 60 * 60 * 1000 # 1 week
# Maximum number of tasks that can run at the same time
- MAX_CONCURRENT_RUNNING_TASKS = 10
+ MAX_CONCURRENT_RUNNING_TASKS = 5
# Time from the last task update after which we will log a warning
LAST_UPDATE_BEFORE_WARNING_MS = 24 * 60 * 60 * 1000 # 24hrs
@@ -377,7 +377,7 @@ class TaskScheduler:
self._running_tasks.remove(task.id)
# Try launch a new task since we've finished with this one.
- self._clock.call_later(1, self._launch_scheduled_tasks)
+ self._clock.call_later(0.1, self._launch_scheduled_tasks)
if len(self._running_tasks) >= TaskScheduler.MAX_CONCURRENT_RUNNING_TASKS:
return
|