Diffstat (limited to 'synapse/streams')
-rw-r--r--  synapse/streams/__init__.py    14
-rw-r--r--  synapse/streams/config.py      84
-rw-r--r--  synapse/streams/events.py     180
3 files changed, 278 insertions, 0 deletions
diff --git a/synapse/streams/__init__.py b/synapse/streams/__init__.py
new file mode 100644
index 0000000000..fe8a073cd3
--- /dev/null
+++ b/synapse/streams/__init__.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 matrix.org
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/synapse/streams/config.py b/synapse/streams/config.py
new file mode 100644
index 0000000000..2434844d80
--- /dev/null
+++ b/synapse/streams/config.py
@@ -0,0 +1,84 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 matrix.org
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.api.errors import SynapseError
+from synapse.types import StreamToken
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class PaginationConfig(object):
+
+    """A configuration object which stores pagination parameters."""
+
+    def __init__(self, from_token=None, to_token=None, direction='f',
+                 limit=0):
+        self.from_token = from_token
+        self.to_token = to_token
+        self.direction = 'f' if direction == 'f' else 'b'
+        self.limit = int(limit)
+
+    @classmethod
+    def from_request(cls, request, raise_invalid_params=True):
+        def get_param(name, default=None):
+            lst = request.args.get(name, [])
+            if len(lst) > 1:
+                raise SynapseError(
+                    400, "%s must be specified only once" % (name,)
+                )
+            elif len(lst) == 1:
+                return lst[0]
+            else:
+                return default
+
+        direction = get_param("dir", 'f')
+        if direction not in ['f', 'b']:
+            raise SynapseError(400, "'dir' parameter is invalid.")
+
+        from_tok = get_param("from")
+        to_tok = get_param("to")
+
+        try:
+            if from_tok == "END":
+                from_tok = None  # For backwards compat.
+            elif from_tok:
+                from_tok = StreamToken.from_string(from_tok)
+        except:
+            raise SynapseError(400, "'from' paramater is invalid")
+
+        try:
+            if to_tok:
+                to_tok = StreamToken.from_string(to_tok)
+        except:
+            raise SynapseError(400, "'to' paramater is invalid")
+
+        limit = get_param("limit", "0")
+        if not limit.isdigit():
+            raise SynapseError(400, "'limit' parameter must be an integer.")
+
+        try:
+            return PaginationConfig(from_tok, to_tok, direction, limit)
+        except:
+            logger.exception("Failed to create pagination config")
+            raise SynapseError(400, "Invalid request.")
+
+    def __str__(self):
+        return (
+            "<PaginationConfig from_tok=%s, to_tok=%s, "
+            "direction=%s, limit=%s>"
+        ) % (self.from_token, self.to_token, self.direction, self.limit)
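
For context, a minimal sketch of how PaginationConfig.from_request consumes a
request's query parameters. The FakeRequest stand-in below is illustrative only
and not part of this change; twisted.web requests expose the parsed query
string as the same kind of .args mapping of value lists.

    # Illustrative only: emulating a request for GET /...?dir=b&limit=10
    from synapse.streams.config import PaginationConfig

    class FakeRequest(object):
        # twisted.web requests expose query parameters as a dict of lists
        args = {"dir": ["b"], "limit": ["10"]}

    config = PaginationConfig.from_request(FakeRequest())
    # direction is normalised to 'f'/'b' and limit is coerced to an int;
    # a malformed "from"/"to" token would raise SynapseError(400).
    assert config.direction == 'b' and config.limit == 10

Keeping this validation inside from_request gives every request handler a
single place where duplicate parameters, bad stream tokens and non-integer
limits are rejected with a 400.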
diff --git a/synapse/streams/events.py b/synapse/streams/events.py
new file mode 100644
index 0000000000..2e6ea6ca26
--- /dev/null
+++ b/synapse/streams/events.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 matrix.org
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.constants import Membership
+from synapse.types import StreamToken
+
+
+class RoomEventSource(object):
+    def __init__(self, hs):
+        self.store = hs.get_datastore()
+
+    @defer.inlineCallbacks
+    def get_new_events_for_user(self, user, from_token, limit):
+        # We just ignore the key for now.
+
+        to_key = yield self.get_current_token_part()
+
+        events, end_key = yield self.store.get_room_events_stream(
+            user_id=user.to_string(),
+            from_key=from_token.events_key,
+            to_key=to_key,
+            room_id=None,
+            limit=limit,
+        )
+
+        end_token = from_token.copy_and_replace("events_key", end_key)
+
+        defer.returnValue((events, end_token))
+
+    def get_current_token_part(self):
+        return self.store.get_room_events_max_id()
+
+    @defer.inlineCallbacks
+    def get_pagination_rows(self, user, pagination_config, key):
+        from_token = pagination_config.from_token
+        to_token = pagination_config.to_token
+        limit = pagination_config.limit
+        direction = pagination_config.direction
+
+        to_key = to_token.events_key if to_token else None
+
+        events, next_key = yield self.store.paginate_room_events(
+            room_id=key,
+            from_key=from_token.events_key,
+            to_key=to_key,
+            direction=direction,
+            limit=limit,
+            with_feedback=True
+        )
+
+        next_token = from_token.copy_and_replace("events_key", next_key)
+
+        defer.returnValue((events, next_token))
+
+
+class PresenceSource(object):
+    def __init__(self, hs):
+        self.hs = hs
+        self.clock = hs.get_clock()
+
+    def get_new_events_for_user(self, user, from_token, limit):
+        from_key = int(from_token.presence_key)
+
+        presence = self.hs.get_handlers().presence_handler
+        cachemap = presence._user_cachemap
+
+        # TODO(paul): limit, and filter by visibility
+        updates = [(k, cachemap[k]) for k in cachemap
+                   if from_key < cachemap[k].serial]
+
+        if updates:
+            clock = self.clock
+
+            latest_serial = max([x[1].serial for x in updates])
+            data = [x[1].make_event(user=x[0], clock=clock) for x in updates]
+
+            end_token = from_token.copy_and_replace(
+                "presence_key", latest_serial
+            )
+            return (data, end_token)
+        else:
+            end_token = from_token.copy_and_replace(
+                "presence_key", presence._user_cachemap_latest_serial
+            )
+            return ([], end_token)
+
+    def get_current_token_part(self):
+        presence = self.hs.get_handlers().presence_handler
+        return presence._user_cachemap_latest_serial
+
+    def get_pagination_rows(self, user, pagination_config, key):
+        # TODO (erikj): Does this make sense? Ordering?
+
+        from_token = pagination_config.from_token
+        to_token = pagination_config.to_token
+
+        from_key = int(from_token.presence_key)
+
+        if to_token:
+            to_key = int(to_token.presence_key)
+        else:
+            to_key = -1
+
+        presence = self.hs.get_handlers().presence_handler
+        cachemap = presence._user_cachemap
+
+        # TODO(paul): limit, and filter by visibility
+        updates = [(k, cachemap[k]) for k in cachemap
+                   if to_key < cachemap[k].serial < from_key]
+
+        if updates:
+            clock = self.clock
+
+            earliest_serial = min([x[1].serial for x in updates])
+            data = [x[1].make_event(user=x[0], clock=clock) for x in updates]
+
+            if to_token:
+                next_token = to_token
+            else:
+                next_token = from_token
+
+            next_token = next_token.copy_and_replace(
+                "presence_key", earliest_serial
+            )
+            return (data, next_token)
+        else:
+            if not to_token:
+                to_token = from_token.copy_and_replace(
+                    "presence_key", 0
+                )
+            return ([], to_token)
+
+
+class EventSources(object):
+    SOURCE_TYPES = {
+        "room": RoomEventSource,
+        "presence": PresenceSource,
+    }
+
+    def __init__(self, hs):
+        self.sources = {
+            name: cls(hs)
+            for name, cls in EventSources.SOURCE_TYPES.items()
+        }
+
+    @staticmethod
+    def create_token(events_key, presence_key):
+        return StreamToken(events_key=events_key, presence_key=presence_key)
+
+    @defer.inlineCallbacks
+    def get_current_token(self):
+        events_key = yield self.sources["room"].get_current_token_part()
+        presence_key = yield self.sources["presence"].get_current_token_part()
+        token = EventSources.create_token(events_key, presence_key)
+        defer.returnValue(token)
+
+
+class StreamSource(object):
+    def get_new_events_for_user(self, user, from_token, limit):
+        raise NotImplementedError("get_new_events_for_user")
+
+    def get_current_token_part(self):
+        raise NotImplementedError("get_current_token_part")
+
+    def get_pagination_rows(self, user, pagination_config, key):
+        raise NotImplementedError("get_rows")