Diffstat (limited to 'synapse')
-rw-r--r-- | synapse/handlers/federation.py       | 12
-rw-r--r-- | synapse/handlers/search.py           |  8
-rw-r--r-- | synapse/rest/client/v2_alpha/sync.py | 17
-rw-r--r-- | synapse/storage/roommember.py        |  2
-rw-r--r-- | synapse/storage/search.py            | 73
5 files changed, 94 insertions, 18 deletions
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 2855f2d7c3..e7ad48c948 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -596,7 +596,7 @@ class FederationHandler(BaseHandler):
         handled_events = set()
 
         try:
-            new_event = self._sign_event(event)
+            event = self._sign_event(event)
             # Try the host we successfully got a response to /make_join/
             # request first.
             try:
@@ -604,7 +604,7 @@ class FederationHandler(BaseHandler):
                 target_hosts.insert(0, origin)
             except ValueError:
                 pass
-            ret = yield self.replication_layer.send_join(target_hosts, new_event)
+            ret = yield self.replication_layer.send_join(target_hosts, event)
 
             origin = ret["origin"]
             state = ret["state"]
@@ -613,12 +613,12 @@ class FederationHandler(BaseHandler):
 
             handled_events.update([s.event_id for s in state])
             handled_events.update([a.event_id for a in auth_chain])
-            handled_events.add(new_event.event_id)
+            handled_events.add(event.event_id)
 
             logger.debug("do_invite_join auth_chain: %s", auth_chain)
             logger.debug("do_invite_join state: %s", state)
 
-            logger.debug("do_invite_join event: %s", new_event)
+            logger.debug("do_invite_join event: %s", event)
 
             try:
                 yield self.store.store_room(
@@ -636,14 +636,14 @@ class FederationHandler(BaseHandler):
 
             with PreserveLoggingContext():
                 d = self.notifier.on_new_room_event(
-                    new_event, event_stream_id, max_stream_id,
+                    event, event_stream_id, max_stream_id,
                     extra_users=[joinee]
                 )
 
                 def log_failure(f):
                     logger.warn(
                         "Failed to notify about %s: %s",
-                        new_event.event_id, f.value
+                        event.event_id, f.value
                     )
 
                 d.addErrback(log_failure)
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index bc79564287..99ef56871c 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -152,11 +152,15 @@ class SearchHandler(BaseHandler):
 
         highlights = set()
 
+        count = None
+
         if order_by == "rank":
             search_result = yield self.store.search_msgs(
                 room_ids, search_term, keys
             )
 
+            count = search_result["count"]
+
             if search_result["highlights"]:
                 highlights.update(search_result["highlights"])
@@ -207,6 +211,8 @@ class SearchHandler(BaseHandler):
             if search_result["highlights"]:
                 highlights.update(search_result["highlights"])
 
+            count = search_result["count"]
+
             results = search_result["results"]
 
             results_map = {r["event"].event_id: r for r in results}
@@ -359,7 +365,7 @@ class SearchHandler(BaseHandler):
 
         rooms_cat_res = {
             "results": results,
-            "count": len(results),
+            "count": count,
             "highlights": list(highlights),
         }
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 3f8ce701dc..73b44e92eb 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -351,14 +351,27 @@ class SyncRestServlet(RestServlet):
                 continue
 
             prev_event_id = timeline_event.unsigned.get("replaces_state", None)
-            logger.debug("Replacing %s with %s in state dict",
-                         timeline_event.event_id, prev_event_id)
 
             prev_content = timeline_event.unsigned.get('prev_content')
             prev_sender = timeline_event.unsigned.get('prev_sender')
+            # Empircally it seems possible for the event to have a
+            # "replaces_state" key but not a prev_content or prev_sender
+            # markjh conjectures that it could be due to the server not
+            # having a copy of that event.
+            # If this is the case the we ignore the previous event. This will
+            # cause the displayname calculations on the client to be incorrect
             if prev_event_id is None or not prev_content or not prev_sender:
+                logger.debug(
+                    "Removing %r from the state dict, as it is missing"
+                    " prev_content (prev_event_id=%r)",
+                    timeline_event.event_id, prev_event_id
+                )
                 del result[event_key]
             else:
+                logger.debug(
+                    "Replacing %r with %r in state dict",
+                    timeline_event.event_id, prev_event_id
+                )
                 result[event_key] = FrozenEvent({
                     "type": timeline_event.type,
                     "state_key": timeline_event.state_key,
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index e1777d7afa..4e0e9ab59a 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -121,7 +121,7 @@ class RoomMemberStore(SQLBaseStore):
         return self.get_rooms_for_user_where_membership_is(
             user_id, [Membership.INVITE]
         ).addCallback(lambda invites: self._get_events([
-            invites.event_id for invite in invites
+            invite.event_id for invite in invites
         ]))
 
     def get_leave_and_ban_events_for_user(self, user_id):
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index 04246101df..57c9cc1c5f 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -148,7 +148,7 @@ class SearchStore(BackgroundUpdateStore):
 
         search_query = search_query = _parse_query(self.database_engine, search_term)
 
-        args = [search_query]
+        args = []
 
         # Make sure we don't explode because the person is in too many rooms.
         # We filter the results below regardless.
@@ -167,18 +167,36 @@ class SearchStore(BackgroundUpdateStore):
                 "(%s)" % (" OR ".join(local_clauses),)
             )
 
+        count_args = args
+        count_clauses = clauses
+
         if isinstance(self.database_engine, PostgresEngine):
             sql = (
-                "SELECT ts_rank_cd(vector, query) AS rank, room_id, event_id"
-                " FROM to_tsquery('english', ?) as query, event_search"
-                " WHERE vector @@ query"
+                "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) AS rank,"
+                " room_id, event_id"
+                " FROM event_search"
+                " WHERE vector @@ to_tsquery('english', ?)"
+            )
+            args = [search_query, search_query] + args
+
+            count_sql = (
+                "SELECT room_id, count(*) as count FROM event_search"
+                " WHERE vector @@ to_tsquery('english', ?)"
             )
+            count_args = [search_query] + count_args
         elif isinstance(self.database_engine, Sqlite3Engine):
             sql = (
                 "SELECT rank(matchinfo(event_search)) as rank, room_id, event_id"
                 " FROM event_search"
                 " WHERE value MATCH ?"
             )
+            args = [search_query] + args
+
+            count_sql = (
+                "SELECT room_id, count(*) as count FROM event_search"
+                " WHERE value MATCH ?"
+            )
+            count_args = [search_term] + count_args
         else:
             # This should be unreachable.
             raise Exception("Unrecognized database engine")
@@ -186,6 +204,9 @@ class SearchStore(BackgroundUpdateStore):
         for clause in clauses:
             sql += " AND " + clause
 
+        for clause in count_clauses:
+            count_sql += " AND " + clause
+
         # We add an arbitrary limit here to ensure we don't try to pull the
         # entire table from the database.
         sql += " ORDER BY rank DESC LIMIT 500"
@@ -207,6 +228,14 @@ class SearchStore(BackgroundUpdateStore):
         if isinstance(self.database_engine, PostgresEngine):
             highlights = yield self._find_highlights_in_postgres(search_query, events)
 
+        count_sql += " GROUP BY room_id"
+
+        count_results = yield self._execute(
+            "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
+        )
+
+        count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
+
         defer.returnValue({
             "results": [
                 {
@@ -217,6 +246,7 @@ class SearchStore(BackgroundUpdateStore):
                 if r["event_id"] in event_map
             ],
             "highlights": highlights,
+            "count": count,
         })
 
     @defer.inlineCallbacks
@@ -237,7 +267,7 @@ class SearchStore(BackgroundUpdateStore):
 
         search_query = search_query = _parse_query(self.database_engine, search_term)
 
-        args = [search_query]
+        args = []
 
         # Make sure we don't explode because the person is in too many rooms.
         # We filter the results below regardless.
@@ -256,6 +286,9 @@ class SearchStore(BackgroundUpdateStore):
                 "(%s)" % (" OR ".join(local_clauses),)
            )
 
+        count_args = args
+        count_clauses = clauses
+
         if pagination_token:
             try:
                 origin_server_ts, stream = pagination_token.split(",")
@@ -272,12 +305,19 @@ class SearchStore(BackgroundUpdateStore):
 
         if isinstance(self.database_engine, PostgresEngine):
             sql = (
-                "SELECT ts_rank_cd(vector, query) as rank,"
+                "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) as rank,"
                 " origin_server_ts, stream_ordering, room_id, event_id"
-                " FROM to_tsquery('english', ?) as query, event_search"
+                " FROM event_search"
                 " NATURAL JOIN events"
-                " WHERE vector @@ query AND "
+                " WHERE vector @@ to_tsquery('english', ?) AND "
             )
+            args = [search_query, search_query] + args
+
+            count_sql = (
+                "SELECT room_id, count(*) as count FROM event_search"
+                " WHERE vector @@ to_tsquery('english', ?) AND "
+            )
+            count_args = [search_query] + count_args
         elif isinstance(self.database_engine, Sqlite3Engine):
             # We use CROSS JOIN here to ensure we use the right indexes.
             # https://sqlite.org/optoverview.html#crossjoin
@@ -297,11 +337,19 @@ class SearchStore(BackgroundUpdateStore):
                 " CROSS JOIN events USING (event_id)"
                 " WHERE "
             )
+            args = [search_query] + args
+
+            count_sql = (
+                "SELECT room_id, count(*) as count FROM event_search"
+                " WHERE value MATCH ? AND "
+            )
+            count_args = [search_term] + count_args
         else:
             # This should be unreachable.
             raise Exception("Unrecognized database engine")
 
         sql += " AND ".join(clauses)
+        count_sql += " AND ".join(count_clauses)
 
         # We add an arbitrary limit here to ensure we don't try to pull the
         # entire table from the database.
@@ -326,6 +374,14 @@ class SearchStore(BackgroundUpdateStore):
         if isinstance(self.database_engine, PostgresEngine):
             highlights = yield self._find_highlights_in_postgres(search_query, events)
 
+        count_sql += " GROUP BY room_id"
+
+        count_results = yield self._execute(
+            "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
+        )
+
+        count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
+
         defer.returnValue({
             "results": [
                 {
@@ -339,6 +395,7 @@ class SearchStore(BackgroundUpdateStore):
                 if r["event_id"] in event_map
             ],
             "highlights": highlights,
+            "count": count,
         })
 
     def _find_highlights_in_postgres(self, search_query, events):