Diffstat (limited to 'synapse/storage')
-rw-r--r--  synapse/storage/__init__.py                             9
-rw-r--r--  synapse/storage/_base.py                                1
-rw-r--r--  synapse/storage/event_federation.py                     2
-rw-r--r--  synapse/storage/events.py                              23
-rw-r--r--  synapse/storage/keys.py                                 2
-rw-r--r--  synapse/storage/pusher.py                               2
-rw-r--r--  synapse/storage/registration.py                        74
-rw-r--r--  synapse/storage/schema/delta/23/drop_state_index.sql   16
-rw-r--r--  synapse/storage/schema/delta/23/refresh_tokens.sql     21
-rw-r--r--  synapse/storage/signatures.py                           2
-rw-r--r--  synapse/storage/transactions.py                         2
11 files changed, 126 insertions, 28 deletions
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index f154b1c8ae..77cb1dbd81 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -54,7 +54,7 @@ logger = logging.getLogger(__name__)
 
 # Remember to update this number every time a change is made to database
 # schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 22
+SCHEMA_VERSION = 23
 
 dir_path = os.path.abspath(os.path.dirname(__file__))
 
@@ -94,9 +94,9 @@ class DataStore(RoomMemberStore, RoomStore,
         )
 
     @defer.inlineCallbacks
-    def insert_client_ip(self, user, access_token, device_id, ip, user_agent):
+    def insert_client_ip(self, user, access_token, ip, user_agent):
         now = int(self._clock.time_msec())
-        key = (user.to_string(), access_token, device_id, ip)
+        key = (user.to_string(), access_token, ip)
 
         try:
             last_seen = self.client_ip_last_seen.get(key)
@@ -120,7 +120,6 @@ class DataStore(RoomMemberStore, RoomStore,
                 "user_agent": user_agent,
             },
             values={
-                "device_id": device_id,
                 "last_seen": now,
             },
             desc="insert_client_ip",
@@ -132,7 +131,7 @@ class DataStore(RoomMemberStore, RoomStore,
             table="user_ips",
             keyvalues={"user_id": user.to_string()},
             retcols=[
-                "device_id", "access_token", "ip", "user_agent", "last_seen"
+                "access_token", "ip", "user_agent", "last_seen"
             ],
             desc="get_user_ip_and_agents",
         )
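
The client-IP bookkeeping above no longer records a per-token device_id, so callers now pass only the user, access token, IP and user agent. A minimal sketch of the new call shape, assuming `store` is an initialised DataStore and `user` a synapse UserID (neither is defined in this diff):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def record_client_ip(store, user, access_token, ip, user_agent):
        # Deduplicated by the store on its (user_id, access_token, ip) cache key.
        yield store.insert_client_ip(user, access_token, ip, user_agent)
        # Returned rows now carry access_token, ip, user_agent and last_seen only.
        ips = yield store.get_user_ip_and_agents(user)
        defer.returnValue(ips)
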
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index d976e17786..495ef087c9 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -181,6 +181,7 @@ class SQLBaseStore(object):
         self._transaction_id_gen = IdGenerator("sent_transactions", "id", self)
         self._state_groups_id_gen = IdGenerator("state_groups", "id", self)
         self._access_tokens_id_gen = IdGenerator("access_tokens", "id", self)
+        self._refresh_tokens_id_gen = IdGenerator("refresh_tokens", "id", self)
         self._pushers_id_gen = IdGenerator("pushers", "id", self)
         self._push_rule_id_gen = IdGenerator("push_rules", "id", self)
         self._push_rules_enable_id_gen = IdGenerator("push_rules_enable", "id", self)
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index 7cb314dee8..c1cabbaa60 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -17,7 +17,7 @@ from twisted.internet import defer
 
 from ._base import SQLBaseStore
 from synapse.util.caches.descriptors import cached
-from syutil.base64util import encode_base64
+from unpaddedbase64 import encode_base64
 
 import logging
 from Queue import PriorityQueue, Empty
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 8774b3b388..0a477e3122 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -24,7 +24,7 @@ from synapse.util.logcontext import preserve_context_over_deferred
 from synapse.util.logutils import log_function
 from synapse.api.constants import EventTypes
 
-from syutil.jsonutil import encode_json
+from canonicaljson import encode_canonical_json
 from contextlib import contextmanager
 
 import logging
@@ -33,6 +33,13 @@ import ujson as json
 logger = logging.getLogger(__name__)
 
 
+def encode_json(json_object):
+    if USE_FROZEN_DICTS:
+        # ujson doesn't like frozen_dicts
+        return encode_canonical_json(json_object)
+    else:
+        return json.dumps(json_object, ensure_ascii=False)
+
 # These values are used in the `_enqueue_events` and `_do_fetch` methods to
 # control how we batch/bulk fetch events from the database.
 # The values are plucked out of thin air to make initial sync run faster
@@ -253,8 +260,7 @@ class EventsStore(SQLBaseStore):
                 )
 
                 metadata_json = encode_json(
-                    event.internal_metadata.get_dict(),
-                    using_frozen_dicts=USE_FROZEN_DICTS
+                    event.internal_metadata.get_dict()
                 ).decode("UTF-8")
 
                 sql = (
@@ -331,12 +337,9 @@ class EventsStore(SQLBaseStore):
                     "event_id": event.event_id,
                     "room_id": event.room_id,
                     "internal_metadata": encode_json(
-                        event.internal_metadata.get_dict(),
-                        using_frozen_dicts=USE_FROZEN_DICTS
-                    ).decode("UTF-8"),
-                    "json": encode_json(
-                        event_dict(event), using_frozen_dicts=USE_FROZEN_DICTS
+                        event.internal_metadata.get_dict()
                     ).decode("UTF-8"),
+                    "json": encode_json(event_dict(event)).decode("UTF-8"),
                 }
                 for event, _ in events_and_contexts
             ],
@@ -355,9 +358,7 @@ class EventsStore(SQLBaseStore):
                     "type": event.type,
                     "processed": True,
                     "outlier": event.internal_metadata.is_outlier(),
-                    "content": encode_json(
-                        event.content, using_frozen_dicts=USE_FROZEN_DICTS
-                    ).decode("UTF-8"),
+                    "content": encode_json(event.content).decode("UTF-8"),
                 }
                 for event, _ in events_and_contexts
             ],
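
With the using_frozen_dicts keyword gone, the encoder choice now lives in the module-level encode_json helper above: canonical JSON when frozen dicts are in use (ujson cannot serialise them), plain ujson otherwise. A standalone comparison of the two paths, assuming the canonicaljson and ujson packages are installed:

    from canonicaljson import encode_canonical_json
    import ujson

    content = {"msgtype": "m.text", "body": "hello"}

    # Canonical path: sorted keys, compact and byte-for-byte stable output,
    # and it copes with synapse's frozen dicts.
    print(encode_canonical_json(content))
    # Fast path taken when USE_FROZEN_DICTS is off.
    print(ujson.dumps(content, ensure_ascii=False))
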
diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py
index ffd6daa880..344cacdc75 100644
--- a/synapse/storage/keys.py
+++ b/synapse/storage/keys.py
@@ -19,7 +19,7 @@ from synapse.util.caches.descriptors import cachedInlineCallbacks
 from twisted.internet import defer
 
 import OpenSSL
-from syutil.crypto.signing_key import decode_verify_key_bytes
+from signedjson.key import decode_verify_key_bytes
 import hashlib
 
 
diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py
index 08ea62681b..00b748f131 100644
--- a/synapse/storage/pusher.py
+++ b/synapse/storage/pusher.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
 
 from synapse.api.errors import StoreError
 
-from syutil.jsonutil import encode_canonical_json
+from canonicaljson import encode_canonical_json
 
 import logging
 import simplejson as json
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 586628579d..c9ceb132ae 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -51,6 +51,28 @@ class RegistrationStore(SQLBaseStore):
         )
 
     @defer.inlineCallbacks
+    def add_refresh_token_to_user(self, user_id, token):
+        """Adds a refresh token for the given user.
+
+        Args:
+            user_id (str): The user ID.
+            token (str): The new refresh token to add.
+        Raises:
+            StoreError if there was a problem adding this.
+        """
+        next_id = yield self._refresh_tokens_id_gen.get_next()
+
+        yield self._simple_insert(
+            "refresh_tokens",
+            {
+                "id": next_id,
+                "user_id": user_id,
+                "token": token
+            },
+            desc="add_refresh_token_to_user",
+        )
+
+    @defer.inlineCallbacks
     def register(self, user_id, token, password_hash):
         """Attempts to register an account.
 
@@ -146,26 +168,65 @@ class RegistrationStore(SQLBaseStore):
             user_id
         )
         for r in rows:
-            self.get_user_by_token.invalidate((r,))
+            self.get_user_by_access_token.invalidate((r,))
 
     @cached()
-    def get_user_by_token(self, token):
+    def get_user_by_access_token(self, token):
         """Get a user from the given access token.
 
         Args:
             token (str): The access token of a user.
         Returns:
-            dict: Including the name (user_id), device_id and whether they are
-                an admin.
+            dict: Including the name (user_id) and the ID of their access token.
         Raises:
             StoreError if no user was found.
         """
         return self.runInteraction(
-            "get_user_by_token",
+            "get_user_by_access_token",
             self._query_for_auth,
             token
         )
 
+    def exchange_refresh_token(self, refresh_token, token_generator):
+        """Exchange a refresh token for a new access token and refresh token.
+
+        Doing so invalidates the old refresh token - refresh tokens are single
+        use.
+
+        Args:
+            refresh_token (str): The refresh token of a user.
+            token_generator (fn: str -> str): Function which, when given a
+                user ID, returns a unique refresh token for that user. This
+                function must never return the same value twice.
+        Returns:
+            tuple of (user_id, refresh_token)
+        Raises:
+            StoreError if no user was found with that refresh token.
+        """
+        return self.runInteraction(
+            "exchange_refresh_token",
+            self._exchange_refresh_token,
+            refresh_token,
+            token_generator
+        )
+
+    def _exchange_refresh_token(self, txn, old_token, token_generator):
+        sql = "SELECT user_id FROM refresh_tokens WHERE token = ?"
+        txn.execute(sql, (old_token,))
+        rows = self.cursor_to_dict(txn)
+        if not rows:
+            raise StoreError(403, "Did not recognize refresh token")
+        user_id = rows[0]["user_id"]
+
+        # TODO(danielwh): Maybe perform a validation on the macaroon that
+        # macaroon.user_id == user_id.
+
+        new_token = token_generator(user_id)
+        sql = "UPDATE refresh_tokens SET token = ? WHERE token = ?"
+        txn.execute(sql, (new_token, old_token,))
+
+        return user_id, new_token
+
     @defer.inlineCallbacks
     def is_server_admin(self, user):
         res = yield self._simple_select_one_onecol(
@@ -180,8 +241,7 @@ class RegistrationStore(SQLBaseStore):
 
     def _query_for_auth(self, txn, token):
         sql = (
-            "SELECT users.name, users.admin,"
-            " access_tokens.device_id, access_tokens.id as token_id"
+            "SELECT users.name, access_tokens.id as token_id"
             " FROM users"
             " INNER JOIN access_tokens on users.name = access_tokens.user_id"
             " WHERE token = ?"
diff --git a/synapse/storage/schema/delta/23/drop_state_index.sql b/synapse/storage/schema/delta/23/drop_state_index.sql
new file mode 100644
index 0000000000..07d0ea5cb2
--- /dev/null
+++ b/synapse/storage/schema/delta/23/drop_state_index.sql
@@ -0,0 +1,16 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+DROP INDEX IF EXISTS state_groups_state_tuple;
diff --git a/synapse/storage/schema/delta/23/refresh_tokens.sql b/synapse/storage/schema/delta/23/refresh_tokens.sql
new file mode 100644
index 0000000000..437b1ac1be
--- /dev/null
+++ b/synapse/storage/schema/delta/23/refresh_tokens.sql
@@ -0,0 +1,21 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE IF NOT EXISTS refresh_tokens(
+    id INTEGER PRIMARY KEY,
+    token TEXT NOT NULL,
+    user_id TEXT NOT NULL,
+    UNIQUE (token)
+);
diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py
index 4f15e534b4..ab57b92174 100644
--- a/synapse/storage/signatures.py
+++ b/synapse/storage/signatures.py
@@ -17,7 +17,7 @@ from twisted.internet import defer
 
 from _base import SQLBaseStore
 
-from syutil.base64util import encode_base64
+from unpaddedbase64 import encode_base64
 from synapse.crypto.event_signing import compute_event_reference_hash
 
 
diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py
index c8c7e6591a..15695e9831 100644
--- a/synapse/storage/transactions.py
+++ b/synapse/storage/transactions.py
@@ -18,7 +18,7 @@ from synapse.util.caches.descriptors import cached
 
 from collections import namedtuple
 
-from syutil.jsonutil import encode_canonical_json
+from canonicaljson import encode_canonical_json
 import logging
 
 logger = logging.getLogger(__name__)
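
The remaining hunks are mechanical: the retired syutil package is replaced by the standalone unpaddedbase64, canonicaljson and signedjson distributions, with the call sites keeping the same function names. A quick sanity check of the drop-in imports, assuming those packages are installed:

    from unpaddedbase64 import encode_base64             # was syutil.base64util
    from canonicaljson import encode_canonical_json      # was syutil.jsonutil
    from signedjson.key import decode_verify_key_bytes   # was syutil.crypto.signing_key

    print(encode_base64(b"\x00\x01"))               # "AAE"  (no trailing "=" padding)
    print(encode_canonical_json({"b": 1, "a": 2}))  # '{"a":2,"b":1}'  (sorted keys)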