summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--synapse/handlers/admin.py28
-rw-r--r--synapse/handlers/message.py12
-rw-r--r--synapse/rest/client/v1/admin.py2
-rw-r--r--synapse/rest/client/v1/base.py7
-rw-r--r--synapse/rest/client/v1/login.py2
-rw-r--r--synapse/rest/client/v1/register.py2
-rw-r--r--synapse/rest/client/v1/room.py29
-rw-r--r--synapse/rest/client/v2_alpha/account.py4
-rw-r--r--synapse/rest/client/v2_alpha/keys.py2
-rw-r--r--synapse/storage/schema/delta/15/v15.sql23
-rw-r--r--synapse/storage/search.py47
11 files changed, 82 insertions, 76 deletions
diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py
index d852a18555..5ba3c7039a 100644
--- a/synapse/handlers/admin.py
+++ b/synapse/handlers/admin.py
@@ -30,33 +30,27 @@ class AdminHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def get_whois(self, user):
-        res = yield self.store.get_user_ip_and_agents(user)
-
-        d = {}
-        for r in res:
-            # Note that device_id is always None
-            device = d.setdefault(r["device_id"], {})
-            session = device.setdefault(r["access_token"], [])
-            session.append({
-                "ip": r["ip"],
-                "user_agent": r["user_agent"],
-                "last_seen": r["last_seen"],
+        connections = []
+
+        sessions = yield self.store.get_user_ip_and_agents(user)
+        for session in sessions:
+            connections.append({
+                "ip": session["ip"],
+                "last_seen": session["last_seen"],
+                "user_agent": session["user_agent"],
             })
 
         ret = {
             "user_id": user.to_string(),
             "devices": [
                 {
-                    "device_id": k,
+                    "device_id": None,
                     "sessions": [
                         {
-                            # "access_token": x, TODO (erikj)
-                            "connections": y,
+                            "connections": connections,
                         }
-                        for x, y in v.items()
                     ]
-                }
-                for k, v in d.items()
+                },
             ],
         }
 
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index c972e8cd4c..ccdd3d8473 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -26,6 +26,8 @@ from synapse.types import UserID, RoomStreamToken, StreamToken
 
 from ._base import BaseHandler
 
+from canonicaljson import encode_canonical_json
+
 import logging
 
 logger = logging.getLogger(__name__)
@@ -213,6 +215,16 @@ class MessageHandler(BaseHandler):
             builder=builder,
         )
 
+        if event.is_state():
+            prev_state = context.current_state.get((event.type, event.state_key))
+            if prev_state and event.user_id == prev_state.user_id:
+                prev_content = encode_canonical_json(prev_state.content)
+                next_content = encode_canonical_json(event.content)
+                if prev_content == next_content:
+                    # Duplicate suppression for state updates with same sender
+                    # and content.
+                    defer.returnValue(prev_state)
+
         if event.type == EventTypes.Member:
             member_handler = self.hs.get_handlers().room_member_handler
             yield member_handler.change_membership(event, context, is_guest=is_guest)
diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py
index 0103697889..886199a6da 100644
--- a/synapse/rest/client/v1/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
 
 
 class WhoisRestServlet(ClientV1RestServlet):
-    PATTERNS = client_path_patterns("/admin/whois/(?P<user_id>[^/]*)", releases=())
+    PATTERNS = client_path_patterns("/admin/whois/(?P<user_id>[^/]*)")
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
diff --git a/synapse/rest/client/v1/base.py b/synapse/rest/client/v1/base.py
index 7ae3839a19..6273ce0795 100644
--- a/synapse/rest/client/v1/base.py
+++ b/synapse/rest/client/v1/base.py
@@ -27,7 +27,7 @@ import logging
 logger = logging.getLogger(__name__)
 
 
-def client_path_patterns(path_regex, releases=(0,)):
+def client_path_patterns(path_regex, releases=(0,), include_in_unstable=True):
     """Creates a regex compiled client path with the correct client path
     prefix.
 
@@ -38,8 +38,9 @@ def client_path_patterns(path_regex, releases=(0,)):
         SRE_Pattern
     """
     patterns = [re.compile("^" + CLIENT_PREFIX + path_regex)]
-    unstable_prefix = CLIENT_PREFIX.replace("/api/v1", "/unstable")
-    patterns.append(re.compile("^" + unstable_prefix + path_regex))
+    if include_in_unstable:
+        unstable_prefix = CLIENT_PREFIX.replace("/api/v1", "/unstable")
+        patterns.append(re.compile("^" + unstable_prefix + path_regex))
     for release in releases:
         new_prefix = CLIENT_PREFIX.replace("/api/v1", "/r%d" % release)
         patterns.append(re.compile("^" + new_prefix + path_regex))
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index ad17900c0d..776e1667c1 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -35,7 +35,7 @@ logger = logging.getLogger(__name__)
 
 
 class LoginRestServlet(ClientV1RestServlet):
-    PATTERNS = client_path_patterns("/login$", releases=())
+    PATTERNS = client_path_patterns("/login$", releases=(), include_in_unstable=False)
     PASS_TYPE = "m.login.password"
     SAML2_TYPE = "m.login.saml2"
     CAS_TYPE = "m.login.cas"
diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py
index 5b95d63e25..4b02311e05 100644
--- a/synapse/rest/client/v1/register.py
+++ b/synapse/rest/client/v1/register.py
@@ -48,7 +48,7 @@ class RegisterRestServlet(ClientV1RestServlet):
     handler doesn't have a concept of multi-stages or sessions.
     """
 
-    PATTERNS = client_path_patterns("/register$", releases=())
+    PATTERNS = client_path_patterns("/register$", releases=(), include_in_unstable=False)
 
     def __init__(self, hs):
         super(RegisterRestServlet, self).__init__(hs)
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index d86d266465..53cc29becb 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -110,10 +110,10 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
                                    client_path_patterns(state_key),
                                    self.on_PUT)
         http_server.register_paths("GET",
-                                   client_path_patterns(no_state_key, releases=()),
+                                   client_path_patterns(no_state_key),
                                    self.on_GET_no_state_key)
         http_server.register_paths("PUT",
-                                   client_path_patterns(no_state_key, releases=()),
+                                   client_path_patterns(no_state_key),
                                    self.on_PUT_no_state_key)
 
     def on_GET_no_state_key(self, request, room_id, event_type):
@@ -383,30 +383,6 @@ class RoomInitialSyncRestServlet(ClientV1RestServlet):
         defer.returnValue((200, content))
 
 
-class RoomTriggerBackfill(ClientV1RestServlet):
-    PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/backfill$", releases=())
-
-    def __init__(self, hs):
-        super(RoomTriggerBackfill, self).__init__(hs)
-        self.clock = hs.get_clock()
-
-    @defer.inlineCallbacks
-    def on_GET(self, request, room_id):
-        remote_server = urllib.unquote(
-            request.args["remote"][0]
-        ).decode("UTF-8")
-
-        limit = int(request.args["limit"][0])
-
-        handler = self.handlers.federation_handler
-        events = yield handler.backfill(remote_server, room_id, limit)
-
-        time_now = self.clock.time_msec()
-
-        res = [serialize_event(event, time_now) for event in events]
-        defer.returnValue((200, res))
-
-
 class RoomEventContext(ClientV1RestServlet):
     PATTERNS = client_path_patterns(
         "/rooms/(?P<room_id>[^/]*)/context/(?P<event_id>[^/]*)$"
@@ -679,7 +655,6 @@ def register_servlets(hs, http_server):
     RoomMemberListRestServlet(hs).register(http_server)
     RoomMessageListRestServlet(hs).register(http_server)
     JoinRoomAliasServlet(hs).register(http_server)
-    RoomTriggerBackfill(hs).register(http_server)
     RoomMembershipRestServlet(hs).register(http_server)
     RoomSendEventRestServlet(hs).register(http_server)
     PublicRoomListRestServlet(hs).register(http_server)
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 6f1c33f75b..3e1459d5b9 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -29,7 +29,7 @@ logger = logging.getLogger(__name__)
 
 
 class PasswordRestServlet(RestServlet):
-    PATTERNS = client_v2_patterns("/account/password", releases=())
+    PATTERNS = client_v2_patterns("/account/password")
 
     def __init__(self, hs):
         super(PasswordRestServlet, self).__init__()
@@ -89,7 +89,7 @@ class PasswordRestServlet(RestServlet):
 
 
 class ThreepidRestServlet(RestServlet):
-    PATTERNS = client_v2_patterns("/account/3pid", releases=())
+    PATTERNS = client_v2_patterns("/account/3pid")
 
     def __init__(self, hs):
         super(ThreepidRestServlet, self).__init__()
diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py
index c55e85920f..753f2988a1 100644
--- a/synapse/rest/client/v2_alpha/keys.py
+++ b/synapse/rest/client/v2_alpha/keys.py
@@ -54,7 +54,7 @@ class KeyUploadServlet(RestServlet):
       },
     }
     """
-    PATTERNS = client_v2_patterns("/keys/upload/(?P<device_id>[^/]*)")
+    PATTERNS = client_v2_patterns("/keys/upload/(?P<device_id>[^/]*)", releases=())
 
     def __init__(self, hs):
         super(KeyUploadServlet, self).__init__()
diff --git a/synapse/storage/schema/delta/15/v15.sql b/synapse/storage/schema/delta/15/v15.sql
index f5b2a08ca4..9523d2bcc3 100644
--- a/synapse/storage/schema/delta/15/v15.sql
+++ b/synapse/storage/schema/delta/15/v15.sql
@@ -1,23 +1,22 @@
 -- Drop, copy & recreate pushers table to change unique key
 -- Also add access_token column at the same time
 CREATE TABLE IF NOT EXISTS pushers2 (
-  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  id BIGINT PRIMARY KEY,
   user_name TEXT NOT NULL,
-  access_token INTEGER DEFAULT NULL,
-  profile_tag varchar(32) NOT NULL,
-  kind varchar(8) NOT NULL,
-  app_id varchar(64) NOT NULL,
-  app_display_name varchar(64) NOT NULL,
-  device_display_name varchar(128) NOT NULL,
-  pushkey blob NOT NULL,
+  access_token BIGINT DEFAULT NULL,
+  profile_tag VARCHAR(32) NOT NULL,
+  kind VARCHAR(8) NOT NULL,
+  app_id VARCHAR(64) NOT NULL,
+  app_display_name VARCHAR(64) NOT NULL,
+  device_display_name VARCHAR(128) NOT NULL,
+  pushkey bytea NOT NULL,
   ts BIGINT NOT NULL,
-  lang varchar(8),
-  data blob,
+  lang VARCHAR(8),
+  data bytea,
   last_token TEXT,
   last_success BIGINT,
   failing_since BIGINT,
-  FOREIGN KEY(user_name) REFERENCES users(name),
-  UNIQUE (app_id, pushkey, user_name)
+  UNIQUE (app_id, pushkey)
 );
 INSERT INTO pushers2 (id, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, ts, lang, data, last_token, last_success, failing_since)
   SELECT id, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, ts, lang, data, last_token, last_success, failing_since FROM pushers;
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index 20a62d07ff..39f600f53c 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -140,7 +140,10 @@ class SearchStore(BackgroundUpdateStore):
             list of dicts
         """
         clauses = []
-        args = []
+
+        search_query = _parse_query(self.database_engine, search_term)
+
+        args = [search_query]
 
         # Make sure we don't explode because the person is in too many rooms.
         # We filter the results below regardless.
@@ -162,7 +165,7 @@ class SearchStore(BackgroundUpdateStore):
         if isinstance(self.database_engine, PostgresEngine):
             sql = (
                 "SELECT ts_rank_cd(vector, query) AS rank, room_id, event_id"
-                " FROM plainto_tsquery('english', ?) as query, event_search"
+                " FROM to_tsquery('english', ?) as query, event_search"
                 " WHERE vector @@ query"
             )
         elif isinstance(self.database_engine, Sqlite3Engine):
@@ -183,7 +186,7 @@ class SearchStore(BackgroundUpdateStore):
         sql += " ORDER BY rank DESC LIMIT 500"
 
         results = yield self._execute(
-            "search_msgs", self.cursor_to_dict, sql, *([search_term] + args)
+            "search_msgs", self.cursor_to_dict, sql, *args
         )
 
         results = filter(lambda row: row["room_id"] in room_ids, results)
@@ -197,7 +200,7 @@ class SearchStore(BackgroundUpdateStore):
 
         highlights = None
         if isinstance(self.database_engine, PostgresEngine):
-            highlights = yield self._find_highlights_in_postgres(search_term, events)
+            highlights = yield self._find_highlights_in_postgres(search_query, events)
 
         defer.returnValue({
             "results": [
@@ -226,7 +229,10 @@ class SearchStore(BackgroundUpdateStore):
             list of dicts
         """
         clauses = []
-        args = [search_term]
+
+        search_query = _parse_query(self.database_engine, search_term)
+
+        args = [search_query]
 
         # Make sure we don't explode because the person is in too many rooms.
         # We filter the results below regardless.
@@ -263,7 +269,7 @@ class SearchStore(BackgroundUpdateStore):
             sql = (
                 "SELECT ts_rank_cd(vector, query) as rank,"
                 " origin_server_ts, stream_ordering, room_id, event_id"
-                " FROM plainto_tsquery('english', ?) as query, event_search"
+                " FROM to_tsquery('english', ?) as query, event_search"
                 " NATURAL JOIN events"
                 " WHERE vector @@ query AND "
             )
@@ -313,7 +319,7 @@ class SearchStore(BackgroundUpdateStore):
 
         highlights = None
         if isinstance(self.database_engine, PostgresEngine):
-            highlights = yield self._find_highlights_in_postgres(search_term, events)
+            highlights = yield self._find_highlights_in_postgres(search_query, events)
 
         defer.returnValue({
             "results": [
@@ -330,7 +336,7 @@ class SearchStore(BackgroundUpdateStore):
             "highlights": highlights,
         })
 
-    def _find_highlights_in_postgres(self, search_term, events):
+    def _find_highlights_in_postgres(self, search_query, events):
         """Given a list of events and a search term, return a list of words
         that match from the content of the event.
 
@@ -338,7 +344,7 @@ class SearchStore(BackgroundUpdateStore):
         highlight the matching parts.
 
         Args:
-            search_term (str)
+            search_query (str)
             events (list): A list of events
 
         Returns:
@@ -370,14 +376,14 @@ class SearchStore(BackgroundUpdateStore):
                 while stop_sel in value:
                     stop_sel += ">"
 
-                query = "SELECT ts_headline(?, plainto_tsquery('english', ?), %s)" % (
+                query = "SELECT ts_headline(?, to_tsquery('english', ?), %s)" % (
                     _to_postgres_options({
                         "StartSel": start_sel,
                         "StopSel": stop_sel,
                         "MaxFragments": "50",
                     })
                 )
-                txn.execute(query, (value, search_term,))
+                txn.execute(query, (value, search_query,))
                 headline, = txn.fetchall()[0]
 
                # Now we need to pick the possible highlights out of the headline
@@ -399,3 +405,22 @@ def _to_postgres_options(options_dict):
     return "'%s'" % (
         ",".join("%s=%s" % (k, v) for k, v in options_dict.items()),
     )
+
+
+def _parse_query(database_engine, search_term):
+    """Takes a plain unicode string from the user and converts it into a form
+    that can be passed to database.
+    We use this so that we can add prefix matching, which isn't something
+    that is supported by default.
+    """
+
+    # Pull out the individual words, discarding any non-word characters.
+    results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)
+
+    if isinstance(database_engine, PostgresEngine):
+        return " & ".join(result + ":*" for result in results)
+    elif isinstance(database_engine, Sqlite3Engine):
+        return " & ".join(result + "*" for result in results)
+    else:
+        # This should be unreachable.
+        raise Exception("Unrecognized database engine")