author    | Erik Johnston <erik@matrix.org> | 2016-09-08 15:26:26 +0100
committer | Erik Johnston <erik@matrix.org> | 2016-09-08 15:26:26 +0100
commit    | 5834c6178cd8ab8121b0ad87bfe0d3b69a48c21b (patch)
tree      | 3d1fe71fc6a53ea65981f814facd80d79d99bf68 /synapse/replication/resource.py
parent    | Merge branch 'release-v0.17.1' of github.com:matrix-org/synapse (diff)
parent    | Bump version and changelog (diff)
download  | synapse-5834c6178cd8ab8121b0ad87bfe0d3b69a48c21b.tar.xz
Merge branch 'release-v0.17.2' of github.com:matrix-org/synapse v0.17.2
Diffstat (limited to 'synapse/replication/resource.py')
-rw-r--r-- | synapse/replication/resource.py | 43
1 file changed, 19 insertions, 24 deletions
```diff
diff --git a/synapse/replication/resource.py b/synapse/replication/resource.py
index 84993b33b3..1ed9034bcb 100644
--- a/synapse/replication/resource.py
+++ b/synapse/replication/resource.py
@@ -40,8 +40,8 @@ STREAM_NAMES = (
     ("backfill",),
     ("push_rules",),
     ("pushers",),
-    ("state",),
     ("caches",),
+    ("to_device",),
 )
 
 
@@ -130,7 +130,6 @@ class ReplicationResource(Resource):
         backfill_token = yield self.store.get_current_backfill_token()
         push_rules_token, room_stream_token = self.store.get_push_rules_stream_token()
         pushers_token = self.store.get_pushers_stream_token()
-        state_token = self.store.get_state_stream_token()
         caches_token = self.store.get_cache_stream_token()
 
         defer.returnValue(_ReplicationToken(
@@ -142,8 +141,9 @@ class ReplicationResource(Resource):
             backfill_token,
             push_rules_token,
             pushers_token,
-            state_token,
+            0,  # State stream is no longer a thing
             caches_token,
+            int(stream_token.to_device_key),
         ))
 
     @request_handler()
@@ -191,8 +191,8 @@ class ReplicationResource(Resource):
         yield self.receipts(writer, current_token, limit, request_streams)
         yield self.push_rules(writer, current_token, limit, request_streams)
         yield self.pushers(writer, current_token, limit, request_streams)
-        yield self.state(writer, current_token, limit, request_streams)
         yield self.caches(writer, current_token, limit, request_streams)
+        yield self.to_device(writer, current_token, limit, request_streams)
         self.streams(writer, current_token, request_streams)
 
         logger.info("Replicated %d rows", writer.total)
@@ -366,25 +366,6 @@ class ReplicationResource(Resource):
         ))
 
     @defer.inlineCallbacks
-    def state(self, writer, current_token, limit, request_streams):
-        current_position = current_token.state
-
-        state = request_streams.get("state")
-
-        if state is not None:
-            state_groups, state_group_state = (
-                yield self.store.get_all_new_state_groups(
-                    state, current_position, limit
-                )
-            )
-            writer.write_header_and_rows("state_groups", state_groups, (
-                "position", "room_id", "event_id"
-            ))
-            writer.write_header_and_rows("state_group_state", state_group_state, (
-                "position", "type", "state_key", "event_id"
-            ))
-
-    @defer.inlineCallbacks
     def caches(self, writer, current_token, limit, request_streams):
         current_position = current_token.caches
 
@@ -398,6 +379,20 @@ class ReplicationResource(Resource):
             "position", "cache_func", "keys", "invalidation_ts"
         ))
 
+    @defer.inlineCallbacks
+    def to_device(self, writer, current_token, limit, request_streams):
+        current_position = current_token.to_device
+
+        to_device = request_streams.get("to_device")
+
+        if to_device is not None:
+            to_device_rows = yield self.store.get_all_new_device_messages(
+                to_device, current_position, limit
+            )
+            writer.write_header_and_rows("to_device", to_device_rows, (
+                "position", "user_id", "device_id", "message_json"
+            ))
+
 
 class _Writer(object):
     """Writes the streams as a JSON object as the response to the request"""
@@ -426,7 +421,7 @@ class _Writer(object):
 
 class _ReplicationToken(collections.namedtuple("_ReplicationToken", (
     "events", "presence", "typing", "receipts", "account_data", "backfill",
-    "push_rules", "pushers", "state", "caches",
+    "push_rules", "pushers", "state", "caches", "to_device",
 ))):
     __slots__ = []
```
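For context, this merge swaps the removed `state` replication stream for a new `to_device` stream: workers now ask the `ReplicationResource` for `to_device` rows instead of `state_groups`/`state_group_state` rows, and the `state` slot in `_ReplicationToken` is retained (pinned to 0) presumably so the serialised token keeps its field layout. Below is a minimal sketch of how a worker might poll the new stream. The endpoint URL, the `rows` response key, and the token handling are assumptions for illustration; only the stream name, the query-parameter style (`request_streams.get("to_device")`), the `limit` parameter, and the row columns (`position`, `user_id`, `device_id`, `message_json`) come from the diff above.

```python
# Hypothetical polling loop against a replication listener. REPLICATION_URL
# and the "rows" response key are assumptions; the stream name, "limit"
# parameter, and row columns mirror the diff above.
import json
import urllib.request

REPLICATION_URL = "http://localhost:9092/_synapse/replication"  # assumed address


def poll_to_device(token, limit=100):
    """Fetch to-device messages written after `token`; return the next token."""
    url = "%s?to_device=%s&limit=%d" % (REPLICATION_URL, token, limit)
    with urllib.request.urlopen(url) as resp:
        body = json.load(resp)

    stream = body.get("to_device")
    if not stream:
        return token  # nothing new since the last poll

    for position, user_id, device_id, message_json in stream["rows"]:
        # Each row matches the columns written by write_header_and_rows().
        print("deliver to %s/%s: %s" % (user_id, device_id, message_json))

    # Resume from the highest position seen on the next poll.
    return stream["rows"][-1][0]
```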