diff --git a/CHANGES.rst b/CHANGES.rst
index 5c38c1915f..6f427f677b 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,48 @@
+Changes in synapse v0.12.0-rc2 (2015-12-14)
+===========================================
+
+* Add caches for whether rooms have been forgotten by a user (PR #434)
+* Remove instructions to use ``--process-dependency-link`` since all of the
+ dependencies of synapse are on PyPI (PR #436)
+* Parallelise the processing of ``/sync`` requests (PR #437)
+* Fix race updating presence in ``/events`` (PR #444)
+* Fix bug back-populating search results (PR #441)
+* Fix bug calculating state in ``/sync`` requests (PR #442)
+
+Changes in synapse v0.12.0-rc1 (2015-12-10)
+===========================================
+
+* Host the client APIs released as r0 by
+ https://matrix.org/docs/spec/r0.0.0/client_server.html
+ on paths prefixed by ``/_matrix/client/r0``. (PR #430, PR #415, PR #400)
+* Updates the client APIs to match r0 of the matrix specification.
+
+ * All APIs return events in the new event format, old APIs also include
+ the fields needed to parse the event using the old format for
+ compatibility. (PR #402)
+ * Search results are now given as a JSON array rather than
+ a JSON object (PR #405)
+ * Miscellaneous changes to search (PR #403, PR #406, PR #412)
+ * Filter JSON objects may now be passed as query parameters to ``/sync``
+ (PR #431)
+ * Fix implementation of ``/admin/whois`` (PR #418)
+ * Only include the rooms that user has left in ``/sync`` if the client
+ requests them in the filter (PR #423)
+ * Don't push for ``m.room.message`` by default (PR #411)
+ * Add API for setting per account user data (PR #392)
+ * Allow users to forget rooms (PR #385)
+
+* Performance improvements and monitoring:
+
+ * Add per-request counters for CPU time spent on the main python thread.
+ (PR #421, PR #420)
+ * Add per-request counters for time spent in the database (PR #429)
+ * Make state updates in the C+S API idempotent (PR #416)
+ * Only fire ``user_joined_room`` if the user has actually joined. (PR #410)
+ * Reuse a single http client, rather than creating new ones (PR #413)
+
+* Fixed a bug upgrading from older versions of synapse on postgresql (PR #417)
+
Changes in synapse v0.11.1 (2015-11-20)
=======================================
diff --git a/README.rst b/README.rst
index 80e1b26e60..06f06fd353 100644
--- a/README.rst
+++ b/README.rst
@@ -130,7 +130,7 @@ To install the synapse homeserver run::
virtualenv -p python2.7 ~/.synapse
source ~/.synapse/bin/activate
pip install --upgrade setuptools
- pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
+ pip install https://github.com/matrix-org/synapse/tarball/master
This installs synapse, along with the libraries it uses, into a virtual
environment under ``~/.synapse``. Feel free to pick a different directory
@@ -235,8 +235,7 @@ pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::
You also may need to explicitly specify python 2.7 again during the install
request::
- pip2.7 install --process-dependency-links \
- https://github.com/matrix-org/synapse/tarball/master
+ pip2.7 install https://github.com/matrix-org/synapse/tarball/master
If you encounter an error with lib bcrypt causing an Wrong ELF Class:
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
@@ -295,8 +294,7 @@ Troubleshooting
Troubleshooting Installation
----------------------------
-Synapse requires pip 1.7 or later, so if your OS provides too old a version and
-you get errors about ``error: no such option: --process-dependency-links`` you
+Synapse requires pip 1.7 or later, so if your OS provides too old a version you
may need to manually upgrade it::
sudo pip install --upgrade pip
diff --git a/jenkins.sh b/jenkins.sh
index 0018ca610a..7075b1a51a 100755
--- a/jenkins.sh
+++ b/jenkins.sh
@@ -5,9 +5,10 @@ export PYTHONDONTWRITEBYTECODE=yep
# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
-
-# Output coverage to coverage.xml
-export DUMP_COVERAGE_COMMAND="coverage xml -o coverage.xml"
+# Write coverage reports to a separate file for each process
+# Include branch coverage
+export COVERAGE_OPTS="-p"
+export DUMP_COVERAGE_COMMAND="coverage help"
# Output flake8 violations to violations.flake8.log
# Don't exit with non-0 status code on Jenkins,
@@ -15,6 +16,8 @@ export DUMP_COVERAGE_COMMAND="coverage xml -o coverage.xml"
# UNSTABLE or FAILURE this build.
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
+rm .coverage.* || echo "No files to remove"
+
tox
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
@@ -45,7 +48,7 @@ export PERL5LIB PERL_MB_OPT PERL_MM_OPT
: ${PORT_BASE:=8000}
echo >&2 "Running sytest with SQLite3";
-./run-tests.pl -O tap --synapse-directory .. --all --port-base $PORT_BASE > results-sqlite3.tap
+./run-tests.pl --coverage -O tap --synapse-directory .. --all --port-base $PORT_BASE > results-sqlite3.tap
RUN_POSTGRES=""
@@ -64,7 +67,15 @@ done
if test $RUN_POSTGRES = ":$(($PORT_BASE + 1)):$(($PORT_BASE + 2))"; then
echo >&2 "Running sytest with PostgreSQL";
pip install psycopg2
- ./run-tests.pl -O tap --synapse-directory .. --all --port-base $PORT_BASE > results-postgresql.tap
+ ./run-tests.pl --coverage -O tap --synapse-directory .. --all --port-base $PORT_BASE > results-postgresql.tap
else
echo >&2 "Skipping running sytest with PostgreSQL, $RUN_POSTGRES"
fi
+
+cd ..
+cp sytest/.coverage.* .
+
+# Combine the coverage reports
+python -m coverage combine
+# Output coverage to coverage.xml
+coverage xml -o coverage.xml
diff --git a/scripts/gen_password b/scripts/gen_password
new file mode 100644
index 0000000000..7afd3a5dfd
--- /dev/null
+++ b/scripts/gen_password
@@ -0,0 +1 @@
+perl -MCrypt::Random -MCrypt::Eksblowfish::Bcrypt -e 'print Crypt::Eksblowfish::Bcrypt::bcrypt("secret", "\$2\$12\$" . Crypt::Eksblowfish::Bcrypt::en_base64(Crypt::Random::makerandom_octet(Length=>16)))."\n"'
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 3e7e26bf60..e07c26ccd0 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
""" This is a reference implementation of a Matrix home server.
"""
-__version__ = "0.11.1"
+__version__ = "0.12.0-rc2"
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index b9c3e6d2c4..adb7d64482 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -778,7 +778,7 @@ class Auth(object):
if "third_party_invite" in event.content:
key = (
EventTypes.ThirdPartyInvite,
- event.content["third_party_invite"]["token"]
+ event.content["third_party_invite"]["signed"]["token"]
)
third_party_invite = current_state.get(key)
if third_party_invite:
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index bc5bb5cdb1..1fea568eed 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -230,7 +230,9 @@ class Keyring(object):
missing_keys = {}
for group in group_id_to_group.values():
- missing_keys.setdefault(group.server_name, set()).union(group.key_ids)
+ missing_keys.setdefault(group.server_name, set()).update(
+ group.key_ids
+ )
for fn in key_fetch_fns:
results = yield fn(missing_keys.items())
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index fe300433e6..576d77e0e7 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -69,7 +69,12 @@ class EventStreamHandler(BaseHandler):
A deferred that completes once their presence has been updated.
"""
if user not in self._streams_per_user:
- self._streams_per_user[user] = 0
+ # Make sure we set the streams per user to 1 here rather than
+ # setting it to zero and incrementing the value below.
+ # Otherwise this may race with stopped_stream causing the
+ # user to be erased from the map before we have a chance
+ # to increment it.
+ self._streams_per_user[user] = 1
if user in self._stop_timer_per_user:
try:
self.clock.cancel_call_later(
@@ -79,8 +84,8 @@ class EventStreamHandler(BaseHandler):
logger.exception("Failed to cancel event timer")
else:
yield started_user_eventstream(self.distributor, user)
-
- self._streams_per_user[user] += 1
+ else:
+ self._streams_per_user[user] += 1
def stopped_stream(self, user):
"""If there are no streams for a user this starts a timer that will
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 6525bde430..0b1221deb5 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -604,7 +604,7 @@ class FederationHandler(BaseHandler):
handled_events = set()
try:
- new_event = self._sign_event(event)
+ event = self._sign_event(event)
# Try the host we successfully got a response to /make_join/
# request first.
try:
@@ -612,7 +612,7 @@ class FederationHandler(BaseHandler):
target_hosts.insert(0, origin)
except ValueError:
pass
- ret = yield self.replication_layer.send_join(target_hosts, new_event)
+ ret = yield self.replication_layer.send_join(target_hosts, event)
origin = ret["origin"]
state = ret["state"]
@@ -621,12 +621,12 @@ class FederationHandler(BaseHandler):
handled_events.update([s.event_id for s in state])
handled_events.update([a.event_id for a in auth_chain])
- handled_events.add(new_event.event_id)
+ handled_events.add(event.event_id)
logger.debug("do_invite_join auth_chain: %s", auth_chain)
logger.debug("do_invite_join state: %s", state)
- logger.debug("do_invite_join event: %s", new_event)
+ logger.debug("do_invite_join event: %s", event)
try:
yield self.store.store_room(
@@ -644,14 +644,14 @@ class FederationHandler(BaseHandler):
with PreserveLoggingContext():
d = self.notifier.on_new_room_event(
- new_event, event_stream_id, max_stream_id,
+ event, event_stream_id, max_stream_id,
extra_users=[joinee]
)
def log_failure(f):
logger.warn(
"Failed to notify about %s: %s",
- new_event.event_id, f.value
+ event.event_id, f.value
)
d.addErrback(log_failure)
@@ -1658,11 +1658,22 @@ class FederationHandler(BaseHandler):
sender = invite["sender"]
room_id = invite["room_id"]
+ if "signed" not in invite or "token" not in invite["signed"]:
+ logger.info(
+ "Discarding received notification of third party invite "
+ "without signed: %s" % (invite,)
+ )
+ return
+
+ third_party_invite = {
+ "signed": invite["signed"],
+ }
+
event_dict = {
"type": EventTypes.Member,
"content": {
"membership": Membership.INVITE,
- "third_party_invite": invite,
+ "third_party_invite": third_party_invite,
},
"room_id": room_id,
"sender": sender,
@@ -1673,6 +1684,11 @@ class FederationHandler(BaseHandler):
builder = self.event_builder_factory.new(event_dict)
EventValidator().validate_new(builder)
event, context = yield self._create_new_client_event(builder=builder)
+
+ event, context = yield self.add_display_name_to_third_party_invite(
+ event_dict, event, context
+ )
+
self.auth.check(event, context.current_state)
yield self._validate_keyserver(event, auth_events=context.current_state)
member_handler = self.hs.get_handlers().room_member_handler
@@ -1694,6 +1710,10 @@ class FederationHandler(BaseHandler):
builder=builder,
)
+ event, context = yield self.add_display_name_to_third_party_invite(
+ event_dict, event, context
+ )
+
self.auth.check(event, auth_events=context.current_state)
yield self._validate_keyserver(event, auth_events=context.current_state)
@@ -1704,6 +1724,27 @@ class FederationHandler(BaseHandler):
yield member_handler.change_membership(event, context)
@defer.inlineCallbacks
+ def add_display_name_to_third_party_invite(self, event_dict, event, context):
+ key = (
+ EventTypes.ThirdPartyInvite,
+ event.content["third_party_invite"]["signed"]["token"]
+ )
+ original_invite = context.current_state.get(key)
+ if not original_invite:
+ logger.info(
+ "Could not find invite event for third_party_invite - "
+ "discarding: %s" % (event_dict,)
+ )
+ return
+
+ display_name = original_invite.content["display_name"]
+ event_dict["content"]["third_party_invite"]["display_name"] = display_name
+ builder = self.event_builder_factory.new(event_dict)
+ EventValidator().validate_new(builder)
+ event, context = yield self._create_new_client_event(builder=builder)
+ defer.returnValue((event, context))
+
+ @defer.inlineCallbacks
def _validate_keyserver(self, event, auth_events):
token = event.content["third_party_invite"]["signed"]["token"]
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index a037da0f70..baf7c14e40 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -42,7 +42,7 @@ class RegistrationHandler(BaseHandler):
self.distributor = hs.get_distributor()
self.distributor.declare("registered_user")
- self.captch_client = CaptchaServerHttpClient(hs)
+ self.captcha_client = CaptchaServerHttpClient(hs)
@defer.inlineCallbacks
def check_username(self, localpart):
@@ -132,25 +132,9 @@ class RegistrationHandler(BaseHandler):
raise RegistrationError(
500, "Cannot generate user ID.")
- # create a default avatar for the user
- # XXX: ideally clients would explicitly specify one, but given they don't
- # and we want consistent and pretty identicons for random users, we'll
- # do it here.
- try:
- auth_user = UserID.from_string(user_id)
- media_repository = self.hs.get_resource_for_media_repository()
- identicon_resource = media_repository.getChildWithDefault("identicon", None)
- upload_resource = media_repository.getChildWithDefault("upload", None)
- identicon_bytes = identicon_resource.generate_identicon(user_id, 320, 320)
- content_uri = yield upload_resource.create_content(
- "image/png", None, identicon_bytes, len(identicon_bytes), auth_user
- )
- profile_handler = self.hs.get_handlers().profile_handler
- profile_handler.set_avatar_url(
- auth_user, auth_user, ("%s#auto" % (content_uri,))
- )
- except NotImplementedError:
- pass # make tests pass without messing around creating default avatars
+ # We used to generate default identicons here, but nowadays
+ # we want clients to generate their own as part of their branding
+ # rather than there being consistent matrix-wide ones, so we don't.
defer.returnValue((user_id, token))
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 116a998c42..6a482dacc9 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -704,13 +704,48 @@ class RoomMemberHandler(BaseHandler):
token_id,
txn_id
):
+ room_state = yield self.hs.get_state_handler().get_current_state(room_id)
+
+ inviter_display_name = ""
+ inviter_avatar_url = ""
+ member_event = room_state.get((EventTypes.Member, user.to_string()))
+ if member_event:
+ inviter_display_name = member_event.content.get("displayname", "")
+ inviter_avatar_url = member_event.content.get("avatar_url", "")
+
+ canonical_room_alias = ""
+ canonical_alias_event = room_state.get((EventTypes.CanonicalAlias, ""))
+ if canonical_alias_event:
+ canonical_room_alias = canonical_alias_event.content.get("alias", "")
+
+ room_name = ""
+ room_name_event = room_state.get((EventTypes.Name, ""))
+ if room_name_event:
+ room_name = room_name_event.content.get("name", "")
+
+ room_join_rules = ""
+ join_rules_event = room_state.get((EventTypes.JoinRules, ""))
+ if join_rules_event:
+ room_join_rules = join_rules_event.content.get("join_rule", "")
+
+ room_avatar_url = ""
+ room_avatar_event = room_state.get((EventTypes.RoomAvatar, ""))
+ if room_avatar_event:
+ room_avatar_url = room_avatar_event.content.get("url", "")
+
token, public_key, key_validity_url, display_name = (
yield self._ask_id_server_for_third_party_invite(
- id_server,
- medium,
- address,
- room_id,
- user.to_string()
+ id_server=id_server,
+ medium=medium,
+ address=address,
+ room_id=room_id,
+ inviter_user_id=user.to_string(),
+ room_alias=canonical_room_alias,
+ room_avatar_url=room_avatar_url,
+ room_join_rules=room_join_rules,
+ room_name=room_name,
+ inviter_display_name=inviter_display_name,
+ inviter_avatar_url=inviter_avatar_url
)
)
msg_handler = self.hs.get_handlers().message_handler
@@ -732,7 +767,19 @@ class RoomMemberHandler(BaseHandler):
@defer.inlineCallbacks
def _ask_id_server_for_third_party_invite(
- self, id_server, medium, address, room_id, sender):
+ self,
+ id_server,
+ medium,
+ address,
+ room_id,
+ inviter_user_id,
+ room_alias,
+ room_avatar_url,
+ room_join_rules,
+ room_name,
+ inviter_display_name,
+ inviter_avatar_url
+ ):
is_url = "%s%s/_matrix/identity/api/v1/store-invite" % (
id_server_scheme, id_server,
)
@@ -742,7 +789,13 @@ class RoomMemberHandler(BaseHandler):
"medium": medium,
"address": address,
"room_id": room_id,
- "sender": sender,
+ "room_alias": room_alias,
+ "room_avatar_url": room_avatar_url,
+ "room_join_rules": room_join_rules,
+ "room_name": room_name,
+ "sender": inviter_user_id,
+ "sender_display_name": inviter_display_name,
+ "sender_avatar_url": inviter_avatar_url,
}
)
# TODO: Check for success
@@ -755,7 +808,7 @@ class RoomMemberHandler(BaseHandler):
defer.returnValue((token, public_key, key_validity_url, display_name))
def forget(self, user, room_id):
- self.store.forget(user.to_string(), room_id)
+ return self.store.forget(user.to_string(), room_id)
class RoomListHandler(BaseHandler):
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index bc79564287..99ef56871c 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -152,11 +152,15 @@ class SearchHandler(BaseHandler):
highlights = set()
+ count = None
+
if order_by == "rank":
search_result = yield self.store.search_msgs(
room_ids, search_term, keys
)
+ count = search_result["count"]
+
if search_result["highlights"]:
highlights.update(search_result["highlights"])
@@ -207,6 +211,8 @@ class SearchHandler(BaseHandler):
if search_result["highlights"]:
highlights.update(search_result["highlights"])
+ count = search_result["count"]
+
results = search_result["results"]
results_map = {r["event"].event_id: r for r in results}
@@ -359,7 +365,7 @@ class SearchHandler(BaseHandler):
rooms_cat_res = {
"results": results,
- "count": len(results),
+ "count": count,
"highlights": list(highlights),
}
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 20b2a2595a..4cbb43a31b 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -17,6 +17,7 @@ from ._base import BaseHandler
from synapse.streams.config import PaginationConfig
from synapse.api.constants import Membership, EventTypes
+from synapse.util import unwrapFirstError
from twisted.internet import defer
@@ -224,9 +225,10 @@ class SyncHandler(BaseHandler):
joined = []
invited = []
archived = []
+ deferreds = []
for event in room_list:
if event.membership == Membership.JOIN:
- room_sync = yield self.full_state_sync_for_joined_room(
+ room_sync_deferred = self.full_state_sync_for_joined_room(
room_id=event.room_id,
sync_config=sync_config,
now_token=now_token,
@@ -235,7 +237,8 @@ class SyncHandler(BaseHandler):
tags_by_room=tags_by_room,
account_data_by_room=account_data_by_room,
)
- joined.append(room_sync)
+ room_sync_deferred.addCallback(joined.append)
+ deferreds.append(room_sync_deferred)
elif event.membership == Membership.INVITE:
invite = yield self.store.get_event(event.event_id)
invited.append(InvitedSyncResult(
@@ -246,7 +249,7 @@ class SyncHandler(BaseHandler):
leave_token = now_token.copy_and_replace(
"room_key", "s%d" % (event.stream_ordering,)
)
- room_sync = yield self.full_state_sync_for_archived_room(
+ room_sync_deferred = self.full_state_sync_for_archived_room(
sync_config=sync_config,
room_id=event.room_id,
leave_event_id=event.event_id,
@@ -255,7 +258,12 @@ class SyncHandler(BaseHandler):
tags_by_room=tags_by_room,
account_data_by_room=account_data_by_room,
)
- archived.append(room_sync)
+ room_sync_deferred.addCallback(archived.append)
+ deferreds.append(room_sync_deferred)
+
+ yield defer.gatherResults(
+ deferreds, consumeErrors=True
+ ).addErrback(unwrapFirstError)
defer.returnValue(SyncResult(
presence=presence,
diff --git a/synapse/http/server.py b/synapse/http/server.py
index c44bdfc888..682b6b379b 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -15,7 +15,7 @@
from synapse.api.errors import (
- cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError
+ cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError, Codes
)
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
import synapse.metrics
@@ -127,7 +127,10 @@ def request_handler(request_handler):
respond_with_json(
request,
500,
- {"error": "Internal server error"},
+ {
+ "error": "Internal server error",
+ "errcode": Codes.UNKNOWN,
+ },
send_cors=True
)
return wrapped_request_handler
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 53cc29becb..6fe53f70e5 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -490,7 +490,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
)
if membership_action == "forget":
- self.handlers.room_member_handler.forget(user, room_id)
+ yield self.handlers.room_member_handler.forget(user, room_id)
defer.returnValue((200, {}))
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 4ca10732c1..93e607f9ec 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -104,7 +104,6 @@ class SyncRestServlet(RestServlet):
)
if filter_id and filter_id.startswith('{'):
- logging.error("MJH %r", filter_id)
try:
filter_object = json.loads(filter_id)
except:
@@ -352,20 +351,36 @@ class SyncRestServlet(RestServlet):
continue
prev_event_id = timeline_event.unsigned.get("replaces_state", None)
- logger.debug("Replacing %s with %s in state dict",
- timeline_event.event_id, prev_event_id)
- if prev_event_id is None:
+ prev_content = timeline_event.unsigned.get('prev_content')
+ prev_sender = timeline_event.unsigned.get('prev_sender')
+            # Empirically it seems possible for the event to have a
+ # "replaces_state" key but not a prev_content or prev_sender
+ # markjh conjectures that it could be due to the server not
+ # having a copy of that event.
+            # If this is the case then we ignore the previous event. This will
+ # cause the displayname calculations on the client to be incorrect
+ if prev_event_id is None or not prev_content or not prev_sender:
+ logger.debug(
+ "Removing %r from the state dict, as it is missing"
+ " prev_content (prev_event_id=%r)",
+ timeline_event.event_id, prev_event_id
+ )
del result[event_key]
else:
+ logger.debug(
+ "Replacing %r with %r in state dict",
+ timeline_event.event_id, prev_event_id
+ )
result[event_key] = FrozenEvent({
"type": timeline_event.type,
"state_key": timeline_event.state_key,
- "content": timeline_event.unsigned['prev_content'],
- "sender": timeline_event.unsigned['prev_sender'],
+ "content": prev_content,
+ "sender": prev_sender,
"event_id": prev_event_id,
"room_id": timeline_event.room_id,
})
+
logger.debug("New value: %r", result.get(event_key))
return result
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index f230faa25e..4676f225b9 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -258,10 +258,10 @@ class RegistrationStore(SQLBaseStore):
@defer.inlineCallbacks
def user_add_threepid(self, user_id, medium, address, validated_at, added_at):
yield self._simple_upsert("user_threepids", {
- "user_id": user_id,
"medium": medium,
"address": address,
}, {
+ "user_id": user_id,
"validated_at": validated_at,
"added_at": added_at,
})
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index 69398b7c8e..4e0e9ab59a 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
from collections import namedtuple
from ._base import SQLBaseStore
-from synapse.util.caches.descriptors import cached
+from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
from synapse.api.constants import Membership
from synapse.types import UserID
@@ -121,7 +121,7 @@ class RoomMemberStore(SQLBaseStore):
return self.get_rooms_for_user_where_membership_is(
user_id, [Membership.INVITE]
).addCallback(lambda invites: self._get_events([
- invites.event_id for invite in invites
+ invite.event_id for invite in invites
]))
def get_leave_and_ban_events_for_user(self, user_id):
@@ -270,6 +270,7 @@ class RoomMemberStore(SQLBaseStore):
defer.returnValue(ret)
+ @defer.inlineCallbacks
def forget(self, user_id, room_id):
"""Indicate that user_id wishes to discard history for room_id."""
def f(txn):
@@ -284,9 +285,11 @@ class RoomMemberStore(SQLBaseStore):
" room_id = ?"
)
txn.execute(sql, (user_id, room_id))
- self.runInteraction("forget_membership", f)
+ yield self.runInteraction("forget_membership", f)
+ self.was_forgotten_at.invalidate_all()
+ self.did_forget.invalidate((user_id, room_id))
- @defer.inlineCallbacks
+ @cachedInlineCallbacks(num_args=2)
def did_forget(self, user_id, room_id):
"""Returns whether user_id has elected to discard history for room_id.
@@ -310,7 +313,7 @@ class RoomMemberStore(SQLBaseStore):
count = yield self.runInteraction("did_forget_membership", f)
defer.returnValue(count == 0)
- @defer.inlineCallbacks
+ @cachedInlineCallbacks(num_args=3)
def was_forgotten_at(self, user_id, room_id, event_id):
"""Returns whether user_id has elected to discard history for room_id at event_id.
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index 39f600f53c..6cb5e73b6e 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -85,6 +85,11 @@ class SearchStore(BackgroundUpdateStore):
# skip over it.
continue
+ if not isinstance(value, basestring):
+ # If the event body, name or topic isn't a string
+ # then skip over it
+ continue
+
event_search_rows.append((event_id, room_id, key, value))
if isinstance(self.database_engine, PostgresEngine):
@@ -143,7 +148,7 @@ class SearchStore(BackgroundUpdateStore):
search_query = search_query = _parse_query(self.database_engine, search_term)
- args = [search_query]
+ args = []
# Make sure we don't explode because the person is in too many rooms.
# We filter the results below regardless.
@@ -162,18 +167,36 @@ class SearchStore(BackgroundUpdateStore):
"(%s)" % (" OR ".join(local_clauses),)
)
+ count_args = args
+ count_clauses = clauses
+
if isinstance(self.database_engine, PostgresEngine):
sql = (
- "SELECT ts_rank_cd(vector, query) AS rank, room_id, event_id"
- " FROM to_tsquery('english', ?) as query, event_search"
- " WHERE vector @@ query"
+ "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) AS rank,"
+ " room_id, event_id"
+ " FROM event_search"
+ " WHERE vector @@ to_tsquery('english', ?)"
+ )
+ args = [search_query, search_query] + args
+
+ count_sql = (
+ "SELECT room_id, count(*) as count FROM event_search"
+ " WHERE vector @@ to_tsquery('english', ?)"
)
+ count_args = [search_query] + count_args
elif isinstance(self.database_engine, Sqlite3Engine):
sql = (
"SELECT rank(matchinfo(event_search)) as rank, room_id, event_id"
" FROM event_search"
" WHERE value MATCH ?"
)
+ args = [search_query] + args
+
+ count_sql = (
+ "SELECT room_id, count(*) as count FROM event_search"
+ " WHERE value MATCH ?"
+ )
+ count_args = [search_term] + count_args
else:
# This should be unreachable.
raise Exception("Unrecognized database engine")
@@ -181,6 +204,9 @@ class SearchStore(BackgroundUpdateStore):
for clause in clauses:
sql += " AND " + clause
+ for clause in count_clauses:
+ count_sql += " AND " + clause
+
# We add an arbitrary limit here to ensure we don't try to pull the
# entire table from the database.
sql += " ORDER BY rank DESC LIMIT 500"
@@ -202,6 +228,14 @@ class SearchStore(BackgroundUpdateStore):
if isinstance(self.database_engine, PostgresEngine):
highlights = yield self._find_highlights_in_postgres(search_query, events)
+ count_sql += " GROUP BY room_id"
+
+ count_results = yield self._execute(
+ "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
+ )
+
+ count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
+
defer.returnValue({
"results": [
{
@@ -212,6 +246,7 @@ class SearchStore(BackgroundUpdateStore):
if r["event_id"] in event_map
],
"highlights": highlights,
+ "count": count,
})
@defer.inlineCallbacks
@@ -232,7 +267,7 @@ class SearchStore(BackgroundUpdateStore):
search_query = search_query = _parse_query(self.database_engine, search_term)
- args = [search_query]
+ args = []
# Make sure we don't explode because the person is in too many rooms.
# We filter the results below regardless.
@@ -251,6 +286,11 @@ class SearchStore(BackgroundUpdateStore):
"(%s)" % (" OR ".join(local_clauses),)
)
+ # take copies of the current args and clauses lists, before adding
+ # pagination clauses to main query.
+ count_args = list(args)
+ count_clauses = list(clauses)
+
if pagination_token:
try:
origin_server_ts, stream = pagination_token.split(",")
@@ -267,12 +307,19 @@ class SearchStore(BackgroundUpdateStore):
if isinstance(self.database_engine, PostgresEngine):
sql = (
- "SELECT ts_rank_cd(vector, query) as rank,"
+ "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) as rank,"
" origin_server_ts, stream_ordering, room_id, event_id"
- " FROM to_tsquery('english', ?) as query, event_search"
+ " FROM event_search"
" NATURAL JOIN events"
- " WHERE vector @@ query AND "
+ " WHERE vector @@ to_tsquery('english', ?) AND "
+ )
+ args = [search_query, search_query] + args
+
+ count_sql = (
+ "SELECT room_id, count(*) as count FROM event_search"
+ " WHERE vector @@ to_tsquery('english', ?) AND "
)
+ count_args = [search_query] + count_args
elif isinstance(self.database_engine, Sqlite3Engine):
# We use CROSS JOIN here to ensure we use the right indexes.
# https://sqlite.org/optoverview.html#crossjoin
@@ -292,11 +339,19 @@ class SearchStore(BackgroundUpdateStore):
" CROSS JOIN events USING (event_id)"
" WHERE "
)
+ args = [search_query] + args
+
+ count_sql = (
+ "SELECT room_id, count(*) as count FROM event_search"
+ " WHERE value MATCH ? AND "
+ )
+ count_args = [search_term] + count_args
else:
# This should be unreachable.
raise Exception("Unrecognized database engine")
sql += " AND ".join(clauses)
+ count_sql += " AND ".join(count_clauses)
# We add an arbitrary limit here to ensure we don't try to pull the
# entire table from the database.
@@ -321,6 +376,14 @@ class SearchStore(BackgroundUpdateStore):
if isinstance(self.database_engine, PostgresEngine):
highlights = yield self._find_highlights_in_postgres(search_query, events)
+ count_sql += " GROUP BY room_id"
+
+ count_results = yield self._execute(
+ "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
+ )
+
+ count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
+
defer.returnValue({
"results": [
{
@@ -334,6 +397,7 @@ class SearchStore(BackgroundUpdateStore):
if r["event_id"] in event_map
],
"highlights": highlights,
+ "count": count,
})
def _find_highlights_in_postgres(self, search_query, events):
diff --git a/tox.ini b/tox.ini
index 95424765c3..bd313a4f36 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,7 +11,8 @@ deps =
setenv =
PYTHONDONTWRITEBYTECODE = no_byte_code
commands =
- /bin/bash -c "coverage run --source=synapse {envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}"
+ /bin/bash -c "coverage run {env:COVERAGE_OPTS:} --source={toxinidir}/synapse \
+ {envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}"
{env:DUMP_COVERAGE_COMMAND:coverage report -m}
[testenv:packaging]
|