diff --git a/AUTHORS.rst b/AUTHORS.rst
index 58a67c6b12..f19d17d24f 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -48,3 +48,6 @@ Muthu Subramanian <muthu.subramanian.karunanidhi at ericsson.com>
Steven Hammerton <steven.hammerton at openmarket.com>
* Add CAS support for registration and login.
+
+Mads Robin Christensen <mads at v42 dot dk>
+ * CentOS 7 installation instructions.
diff --git a/CHANGES.rst b/CHANGES.rst
index 5c38c1915f..1d43fd3604 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,77 @@
+Changes in synapse v0.12.0 (2016-01-04)
+=======================================
+
+* Expose ``/login`` under ``r0`` (PR #459)
+
+Changes in synapse v0.12.0-rc3 (2015-12-23)
+===========================================
+
+* Allow guest accounts access to ``/sync`` (PR #455)
+* Allow filters to include/exclude rooms at the room level
+  rather than just from the components of the sync for each
+  room; see the example below. (PR #454)
+* Include urls for room avatars in the response to ``/publicRooms`` (PR #453)
+* Don't set an identicon as the avatar for a user when they register (PR #450)
+* Add a ``display_name`` to third-party invites (PR #449)
+* Send more information to the identity server for third-party invites so that
+ it can send richer messages to the invitee (PR #446)
+
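For illustration, a room-level filter of this kind might look like the following (a hypothetical filter; the ``rooms``/``not_rooms`` keys are the ones handled in ``synapse/api/filtering.py`` further down in this patch)::

    filter_json = {
        "room": {
            # Applied to every component of the sync for the room
            # (timeline, state, ephemeral, account_data), instead of
            # repeating the lists inside each component.
            "rooms": ["!keep:example.com"],
            "not_rooms": ["!drop:example.com"],
            "timeline": {"limit": 10},
        }
    }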
+* Cache the responses to ``/initialSync`` for 5 minutes. If a client
+  retries a request to ``/initialSync`` before a response to the first
+  request has been computed, the same response is used for both requests
+  (PR #457)
+
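A minimal sketch of the coalescing idea (an illustration only, not synapse's actual ``SnapshotCache`` used in ``synapse/handlers/message.py`` below)::

    import time

    CACHE_MS = 5 * 60 * 1000
    _cache = {}  # key -> (expiry_ms, response)

    def get_or_compute(key, compute):
        now = time.time() * 1000
        hit = _cache.get(key)
        if hit is not None and hit[0] > now:
            return hit[1]  # reuse the in-flight or recent response
        response = compute()  # may be a Deferred that is still running
        _cache[key] = (now + CACHE_MS, response)
        return response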
+* Fix a bug where synapse would always request the signing keys of
+ remote servers even when the key was cached locally (PR #452)
+* Fix 500 when paginating search results (PR #447)
+* Fix a bug where synapse was leaking raw email addresses in third-party invites
+ (PR #448)
+
+Changes in synapse v0.12.0-rc2 (2015-12-14)
+===========================================
+
+* Add caches for whether rooms have been forgotten by a user (PR #434)
+* Remove instructions to use ``--process-dependency-links`` since all of the
+ dependencies of synapse are on PyPI (PR #436)
+* Parallelise the processing of ``/sync`` requests (PR #437)
+* Fix race updating presence in ``/events`` (PR #444)
+* Fix bug back-populating search results (PR #441)
+* Fix bug calculating state in ``/sync`` requests (PR #442)
+
+Changes in synapse v0.12.0-rc1 (2015-12-10)
+===========================================
+
+* Host the client APIs released as r0 by
+ https://matrix.org/docs/spec/r0.0.0/client_server.html
+ on paths prefixed by ``/_matrix/client/r0``. (PR #430, PR #415, PR #400)
+* Update the client APIs to match r0 of the matrix specification.
+
+ * All APIs return events in the new event format, old APIs also include
+ the fields needed to parse the event using the old format for
+ compatibility. (PR #402)
+ * Search results are now given as a JSON array rather than
+ a JSON object (PR #405)
+ * Miscellaneous changes to search (PR #403, PR #406, PR #412)
+    * Filter JSON objects may now be passed as query parameters to ``/sync``
+      (PR #431; see the example after this list)
+ * Fix implementation of ``/admin/whois`` (PR #418)
+    * Only include the rooms that the user has left in ``/sync`` if the client
+      requests them in the filter (PR #423)
+ * Don't push for ``m.room.message`` by default (PR #411)
+ * Add API for setting per account user data (PR #392)
+ * Allow users to forget rooms (PR #385)
+
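For illustration, such a request might look like this (the filter value must be URL-encoded in practice)::

    GET /_matrix/client/r0/sync?filter={"room":{"timeline":{"limit":10}}}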
+* Performance improvements and monitoring:
+
+ * Add per-request counters for CPU time spent on the main python thread.
+ (PR #421, PR #420)
+ * Add per-request counters for time spent in the database (PR #429)
+ * Make state updates in the C+S API idempotent (PR #416)
+ * Only fire ``user_joined_room`` if the user has actually joined. (PR #410)
+ * Reuse a single http client, rather than creating new ones (PR #413)
+
+* Fix a bug upgrading from older versions of synapse on PostgreSQL (PR #417)
+
Changes in synapse v0.11.1 (2015-11-20)
=======================================
diff --git a/README.rst b/README.rst
index 8c1864b6ed..446cc6e7c1 100644
--- a/README.rst
+++ b/README.rst
@@ -111,6 +111,14 @@ Installing prerequisites on ArchLinux::
sudo pacman -S base-devel python2 python-pip \
python-setuptools python-virtualenv sqlite3
+Installing prerequisites on CentOS 7::
+
+ sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
+ lcms2-devel libwebp-devel tcl-devel tk-devel \
+ python-virtualenv libffi-devel openssl-devel
+ sudo yum groupinstall "Development Tools"
+
+
Installing prerequisites on Mac OS X::
xcode-select --install
@@ -122,7 +130,7 @@ To install the synapse homeserver run::
virtualenv -p python2.7 ~/.synapse
source ~/.synapse/bin/activate
pip install --upgrade setuptools
- pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
+ pip install https://github.com/matrix-org/synapse/tarball/master
This installs synapse, along with the libraries it uses, into a virtual
environment under ``~/.synapse``. Feel free to pick a different directory
@@ -148,9 +156,10 @@ To set up your homeserver, run (in your virtualenv, as before)::
python -m synapse.app.homeserver \
--server-name machine.my.domain.name \
--config-path homeserver.yaml \
- --generate-config
+ --generate-config \
+ --report-stats=[yes|no]
-Substituting your host and domain name as appropriate.
+...substituting your host and domain name as appropriate.
This will generate you a config file that you can then customise, but it will
also generate a set of keys for you. These keys will allow your Home Server to
@@ -163,10 +172,11 @@ key in the <server name>.signing.key file (the second word, which by default is
By default, registration of new users is disabled. You can either enable
registration in the config by specifying ``enable_registration: true``
-(it is then recommended to also set up CAPTCHA), or
+(it is then recommended to also set up CAPTCHA - see docs/CAPTCHA_SETUP), or
you can use the command line to register new users::
$ source ~/.synapse/bin/activate
+ $ synctl start # if not already running
$ register_new_matrix_user -c homeserver.yaml https://localhost:8448
New user localpart: erikj
Password:
@@ -176,6 +186,16 @@ you can use the command line to register new users::
For reliable VoIP calls to be routed via this homeserver, you MUST configure
a TURN server. See docs/turn-howto.rst for details.
+Running Synapse
+===============
+
+To actually run your new homeserver, pick a working directory for Synapse to
+run in (e.g. ``~/.synapse``), and::
+
+ cd ~/.synapse
+ source ./bin/activate
+ synctl start
+
Using PostgreSQL
================
@@ -198,16 +218,6 @@ may have a few regressions relative to SQLite.
For information on how to install and use PostgreSQL, please see
`docs/postgres.rst <docs/postgres.rst>`_.
-Running Synapse
-===============
-
-To actually run your new homeserver, pick a working directory for Synapse to
-run (e.g. ``~/.synapse``), and::
-
- cd ~/.synapse
- source ./bin/activate
- synctl start
-
Platform Specific Instructions
==============================
@@ -229,8 +239,7 @@ pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::
You also may need to explicitly specify python 2.7 again during the install
request::
- pip2.7 install --process-dependency-links \
- https://github.com/matrix-org/synapse/tarball/master
+ pip2.7 install https://github.com/matrix-org/synapse/tarball/master
If you encounter an error with lib bcrypt causing an Wrong ELF Class:
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
@@ -289,8 +298,7 @@ Troubleshooting
Troubleshooting Installation
----------------------------
-Synapse requires pip 1.7 or later, so if your OS provides too old a version and
-you get errors about ``error: no such option: --process-dependency-links`` you
+Synapse requires pip 1.7 or later, so if your OS provides too old a version, you
may need to manually upgrade it::
sudo pip install --upgrade pip
@@ -434,6 +442,10 @@ SRV record, as that is the name other machines will expect it to have::
python -m synapse.app.homeserver --config-path homeserver.yaml
+If you've already generated the config file, you need to edit the ``server_name``
+in your ``homeserver.yaml`` file. If you've already started Synapse and a
+database has been created, you will have to recreate the database.
+
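The relevant line in the generated config looks like this (substitute your own domain)::

    server_name: "machine.my.domain.name"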
You may additionally want to pass one or more "-v" options, in order to
increase the verbosity of logging output; at least for initial testing.
diff --git a/docs/postgres.rst b/docs/postgres.rst
index b5027fefb0..402ff9a4de 100644
--- a/docs/postgres.rst
+++ b/docs/postgres.rst
@@ -18,8 +18,8 @@ encoding use, e.g.::
This would create an appropriate database named ``synapse`` owned by the
``synapse_user`` user (which must already exist).
-Set up client
-=============
+Set up client in Debian/Ubuntu
+==============================
Postgres support depends on the postgres python connector ``psycopg2``. In the
virtual env::
@@ -27,6 +27,19 @@ virtual env::
sudo apt-get install libpq-dev
pip install psycopg2
+Set up client in RHEL/CentOS 7
+==============================
+
+Make sure you have the appropriate version of postgres-devel installed. For
+postgres 9.4, use the postgres 9.4 packages from the
+`PostgreSQL wiki <https://wiki.postgresql.org/wiki/YUM_Installation>`_.
+
+As with Debian/Ubuntu, postgres support depends on the postgres python connector
+``psycopg2``. In the virtual env::
+
+ sudo yum install postgresql-devel libpqxx-devel.x86_64
+ export PATH=/usr/pgsql-9.4/bin/:$PATH
+ pip install psycopg2
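A quick way to check that ``psycopg2`` built correctly is to import it from within the virtualenv; the import only succeeds if the C extension compiled and linked::

    import psycopg2
    print psycopg2.__version__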
Synapse config
==============
diff --git a/jenkins.sh b/jenkins.sh
index 8d2ac63c56..e2bb706c7f 100755
--- a/jenkins.sh
+++ b/jenkins.sh
@@ -5,9 +5,9 @@ export PYTHONDONTWRITEBYTECODE=yep
# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
-
-# Output coverage to coverage.xml
-export DUMP_COVERAGE_COMMAND="coverage xml -o coverage.xml"
+# Write coverage reports to a separate file for each process
+export COVERAGE_OPTS="-p"
+export DUMP_COVERAGE_COMMAND="coverage help"
# Output flake8 violations to violations.flake8.log
# Don't exit with non-0 status code on Jenkins,
@@ -15,13 +15,13 @@ export DUMP_COVERAGE_COMMAND="coverage xml -o coverage.xml"
# UNSTABLE or FAILURE this build.
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
+rm .coverage* || echo "No coverage files to remove"
+
tox
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
-set +u
-. .tox/py27/bin/activate
-set -u
+TOX_BIN=$WORKSPACE/.tox/py27/bin
if [[ ! -e .sytest-base ]]; then
git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
@@ -42,4 +42,40 @@ export PERL5LIB PERL_MB_OPT PERL_MM_OPT
./install-deps.pl
-./run-tests.pl -O tap --synapse-directory .. --all > results.tap
+: ${PORT_BASE:=8000}
+
+echo >&2 "Running sytest with SQLite3";
+./run-tests.pl --coverage -O tap --synapse-directory $WORKSPACE \
+ --python $TOX_BIN/python --all --port-base $PORT_BASE > results-sqlite3.tap
+
+RUN_POSTGRES=""
+
+for port in $(($PORT_BASE + 1)) $(($PORT_BASE + 2)); do
+ if psql synapse_jenkins_$port <<< ""; then
+ RUN_POSTGRES=$RUN_POSTGRES:$port
+ cat > localhost-$port/database.yaml << EOF
+name: psycopg2
+args:
+ database: synapse_jenkins_$port
+EOF
+ fi
+done
+
+# Run if both postgresql databases exist
+if test $RUN_POSTGRES = ":$(($PORT_BASE + 1)):$(($PORT_BASE + 2))"; then
+ echo >&2 "Running sytest with PostgreSQL";
+ $TOX_BIN/pip install psycopg2
+ ./run-tests.pl --coverage -O tap --synapse-directory $WORKSPACE \
+ --python $TOX_BIN/python --all --port-base $PORT_BASE > results-postgresql.tap
+else
+ echo >&2 "Skipping running sytest with PostgreSQL, $RUN_POSTGRES"
+fi
+
+cd ..
+cp sytest/.coverage.* .
+
+# Combine the coverage reports
+echo "Combining:" .coverage.*
+$TOX_BIN/python -m coverage combine
+# Output coverage to coverage.xml
+$TOX_BIN/coverage xml -o coverage.xml
diff --git a/scripts-dev/definitions.py b/scripts-dev/definitions.py
index f0d0cd8a3f..8340c72618 100755
--- a/scripts-dev/definitions.py
+++ b/scripts-dev/definitions.py
@@ -79,16 +79,16 @@ def defined_names(prefix, defs, names):
defined_names(prefix + name + ".", funcs, names)
-def used_names(prefix, defs, names):
+def used_names(prefix, item, defs, names):
for name, funcs in defs.get('def', {}).items():
- used_names(prefix + name + ".", funcs, names)
+ used_names(prefix + name + ".", name, funcs, names)
for name, funcs in defs.get('class', {}).items():
- used_names(prefix + name + ".", funcs, names)
+ used_names(prefix + name + ".", name, funcs, names)
for used in defs.get('uses', ()):
if used in names:
- names[used].setdefault('used', []).append(prefix.rstrip('.'))
+ names[used].setdefault('used', {}).setdefault(item, []).append(prefix.rstrip('.'))
if __name__ == '__main__':
@@ -109,6 +109,14 @@ if __name__ == '__main__':
"directories", nargs='+', metavar="DIR",
help="Directories to search for definitions"
)
+ parser.add_argument(
+ "--referrers", default=0, type=int,
+ help="Include referrers up to the given depth"
+ )
+ parser.add_argument(
+ "--format", default="yaml",
+ help="Output format, one of 'yaml' or 'dot'"
+ )
args = parser.parse_args()
definitions = {}
@@ -124,7 +132,7 @@ if __name__ == '__main__':
defined_names(filepath + ":", defs, names)
for filepath, defs in definitions.items():
- used_names(filepath + ":", defs, names)
+ used_names(filepath + ":", None, defs, names)
patterns = [re.compile(pattern) for pattern in args.pattern or ()]
ignore = [re.compile(pattern) for pattern in args.ignore or ()]
@@ -139,4 +147,29 @@ if __name__ == '__main__':
continue
result[name] = definition
- yaml.dump(result, sys.stdout, default_flow_style=False)
+ referrer_depth = args.referrers
+ referrers = set()
+ while referrer_depth:
+ referrer_depth -= 1
+ for entry in result.values():
+ for used_by in entry.get("used", ()):
+ referrers.add(used_by)
+ for name, definition in names.items():
+        if name not in referrers:
+ continue
+ if ignore and any(pattern.match(name) for pattern in ignore):
+ continue
+ result[name] = definition
+
+ if args.format == 'yaml':
+ yaml.dump(result, sys.stdout, default_flow_style=False)
+ elif args.format == 'dot':
+ print "digraph {"
+ for name, entry in result.items():
+ print name
+ for used_by in entry.get("used", ()):
+ if used_by in result:
+ print used_by, "->", name
+ print "}"
+ else:
+ raise ValueError("Unknown format %r" % (args.format))
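For example, the new options might be combined like this (an illustrative invocation, writing the dependency graph in dot format)::

    python scripts-dev/definitions.py --referrers 1 --format dot synapse > definitions.dot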
diff --git a/scripts/gen_password b/scripts/gen_password
new file mode 100644
index 0000000000..7afd3a5dfd
--- /dev/null
+++ b/scripts/gen_password
@@ -0,0 +1 @@
+perl -MCrypt::Random -MCrypt::Eksblowfish::Bcrypt -e 'print Crypt::Eksblowfish::Bcrypt::bcrypt("secret", "\$2\$12\$" . Crypt::Eksblowfish::Bcrypt::en_base64(Crypt::Random::makerandom_octet(Length=>16)))."\n"'
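A rough Python equivalent of the one-liner above, assuming the ``py-bcrypt`` package mentioned in the README rather than the Perl modules::

    import bcrypt
    # Hash the literal password "secret" with a random salt at 12 rounds.
    print bcrypt.hashpw("secret", bcrypt.gensalt(12))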
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 3e7e26bf60..5db4eae354 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
""" This is a reference implementation of a Matrix home server.
"""
-__version__ = "0.11.1"
+__version__ = "0.12.0"
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 4a13f7e2e1..adb7d64482 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -207,6 +207,13 @@ class Auth(object):
user_id, room_id
))
+ if membership == Membership.LEAVE:
+ forgot = yield self.store.did_forget(user_id, room_id)
+ if forgot:
+ raise AuthError(403, "User %s not in room %s" % (
+ user_id, room_id
+ ))
+
defer.returnValue(member)
@defer.inlineCallbacks
@@ -771,7 +778,7 @@ class Auth(object):
if "third_party_invite" in event.content:
key = (
EventTypes.ThirdPartyInvite,
- event.content["third_party_invite"]["token"]
+ event.content["third_party_invite"]["signed"]["token"]
)
third_party_invite = current_state.get(key)
if third_party_invite:
@@ -853,7 +860,7 @@ class Auth(object):
redact_level = self._get_named_level(auth_events, "redact", 50)
- if user_level > redact_level:
+ if user_level >= redact_level:
return False
redacter_domain = EventID.from_string(event.event_id).domain
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index d4037b3d55..8bc7b9e6db 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -120,6 +120,22 @@ class AuthError(SynapseError):
super(AuthError, self).__init__(*args, **kwargs)
+class GuestAccessError(AuthError):
+ """An error raised when a there is a problem with a guest user accessing
+ a room"""
+
+ def __init__(self, rooms, *args, **kwargs):
+ self.rooms = rooms
+ super(GuestAccessError, self).__init__(*args, **kwargs)
+
+ def error_dict(self):
+ return cs_error(
+ self.msg,
+ self.errcode,
+ rooms=self.rooms,
+ )
+
+
class EventSizeError(SynapseError):
"""An error raised when an event is too big."""
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
index 18f2ec3ae8..5287aaa757 100644
--- a/synapse/api/filtering.py
+++ b/synapse/api/filtering.py
@@ -50,7 +50,7 @@ class Filtering(object):
# many definitions.
top_level_definitions = [
- "presence"
+ "presence", "account_data"
]
room_level_definitions = [
@@ -62,10 +62,29 @@ class Filtering(object):
self._check_definition(user_filter_json[key])
if "room" in user_filter_json:
+ self._check_definition_room_lists(user_filter_json["room"])
for key in room_level_definitions:
if key in user_filter_json["room"]:
self._check_definition(user_filter_json["room"][key])
+ def _check_definition_room_lists(self, definition):
+ """Check that "rooms" and "not_rooms" are lists of room ids if they
+ are present
+
+ Args:
+ definition(dict): The filter definition
+ Raises:
+ SynapseError: If there was a problem with this definition.
+ """
+ # check rooms are valid room IDs
+ room_id_keys = ["rooms", "not_rooms"]
+ for key in room_id_keys:
+ if key in definition:
+ if type(definition[key]) != list:
+ raise SynapseError(400, "Expected %s to be a list." % key)
+ for room_id in definition[key]:
+ RoomID.from_string(room_id)
+
def _check_definition(self, definition):
"""Check if the provided definition is valid.
@@ -85,14 +104,7 @@ class Filtering(object):
400, "Expected JSON object, not %s" % (definition,)
)
- # check rooms are valid room IDs
- room_id_keys = ["rooms", "not_rooms"]
- for key in room_id_keys:
- if key in definition:
- if type(definition[key]) != list:
- raise SynapseError(400, "Expected %s to be a list." % key)
- for room_id in definition[key]:
- RoomID.from_string(room_id)
+ self._check_definition_room_lists(definition)
# check senders are valid user IDs
user_id_keys = ["senders", "not_senders"]
@@ -119,25 +131,26 @@ class FilterCollection(object):
def __init__(self, filter_json):
self.filter_json = filter_json
- self.room_timeline_filter = Filter(
- self.filter_json.get("room", {}).get("timeline", {})
- )
+ room_filter_json = self.filter_json.get("room", {})
- self.room_state_filter = Filter(
- self.filter_json.get("room", {}).get("state", {})
- )
+ self.room_filter = Filter({
+ k: v for k, v in room_filter_json.items()
+ if k in ("rooms", "not_rooms")
+ })
- self.room_ephemeral_filter = Filter(
- self.filter_json.get("room", {}).get("ephemeral", {})
- )
+ self.room_timeline_filter = Filter(room_filter_json.get("timeline", {}))
+ self.room_state_filter = Filter(room_filter_json.get("state", {}))
+ self.room_ephemeral_filter = Filter(room_filter_json.get("ephemeral", {}))
+ self.room_account_data = Filter(room_filter_json.get("account_data", {}))
+ self.presence_filter = Filter(self.filter_json.get("presence", {}))
+ self.account_data = Filter(self.filter_json.get("account_data", {}))
- self.room_account_data = Filter(
- self.filter_json.get("room", {}).get("account_data", {})
+ self.include_leave = self.filter_json.get("room", {}).get(
+ "include_leave", False
)
- self.presence_filter = Filter(
- self.filter_json.get("presence", {})
- )
+ def list_rooms(self):
+ return self.room_filter.list_rooms()
def timeline_limit(self):
return self.room_timeline_filter.limit()
@@ -151,23 +164,35 @@ class FilterCollection(object):
def filter_presence(self, events):
return self.presence_filter.filter(events)
+ def filter_account_data(self, events):
+ return self.account_data.filter(events)
+
def filter_room_state(self, events):
- return self.room_state_filter.filter(events)
+ return self.room_state_filter.filter(self.room_filter.filter(events))
def filter_room_timeline(self, events):
- return self.room_timeline_filter.filter(events)
+ return self.room_timeline_filter.filter(self.room_filter.filter(events))
def filter_room_ephemeral(self, events):
- return self.room_ephemeral_filter.filter(events)
+ return self.room_ephemeral_filter.filter(self.room_filter.filter(events))
def filter_room_account_data(self, events):
- return self.room_account_data.filter(events)
+ return self.room_account_data.filter(self.room_filter.filter(events))
class Filter(object):
def __init__(self, filter_json):
self.filter_json = filter_json
+ def list_rooms(self):
+ """The list of room_id strings this filter restricts the output to
+        or None if this filter doesn't list the room ids.
+ """
+ if "rooms" in self.filter_json:
+ return list(set(self.filter_json["rooms"]))
+ else:
+ return None
+
def check(self, event):
"""Checks whether the filter matches the given event.
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index cd7a52ec07..0807def6ca 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -15,6 +15,8 @@
# limitations under the License.
import sys
+from synapse.rest import ClientRestResource
+
sys.dont_write_bytecode = True
from synapse.python_dependencies import (
check_requirements, DEPENDENCY_LINKS, MissingRequirementError
@@ -53,15 +55,13 @@ from synapse.rest.key.v1.server_key_resource import LocalKey
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.api.urls import (
- CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
- SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX,
+ FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
+ SERVER_KEY_PREFIX, MEDIA_PREFIX, STATIC_PREFIX,
SERVER_KEY_V2_PREFIX,
)
from synapse.config.homeserver import HomeServerConfig
from synapse.crypto import context_factory
from synapse.util.logcontext import LoggingContext
-from synapse.rest.client.v1 import ClientV1RestResource
-from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
from synapse import events
@@ -92,11 +92,8 @@ class SynapseHomeServer(HomeServer):
def build_http_client(self):
return MatrixFederationHttpClient(self)
- def build_resource_for_client(self):
- return ClientV1RestResource(self)
-
- def build_resource_for_client_v2_alpha(self):
- return ClientV2AlphaRestResource(self)
+ def build_client_resource(self):
+ return ClientRestResource(self)
def build_resource_for_federation(self):
return JsonResource(self)
@@ -179,16 +176,15 @@ class SynapseHomeServer(HomeServer):
for res in listener_config["resources"]:
for name in res["names"]:
if name == "client":
+ client_resource = self.get_client_resource()
if res["compress"]:
- client_v1 = gz_wrap(self.get_resource_for_client())
- client_v2 = gz_wrap(self.get_resource_for_client_v2_alpha())
- else:
- client_v1 = self.get_resource_for_client()
- client_v2 = self.get_resource_for_client_v2_alpha()
+ client_resource = gz_wrap(client_resource)
resources.update({
- CLIENT_PREFIX: client_v1,
- CLIENT_V2_ALPHA_PREFIX: client_v2,
+ "/_matrix/client/api/v1": client_resource,
+ "/_matrix/client/r0": client_resource,
+ "/_matrix/client/unstable": client_resource,
+ "/_matrix/client/v2_alpha": client_resource,
})
if name == "federation":
@@ -499,13 +495,28 @@ class SynapseRequest(Request):
self.start_time = int(time.time() * 1000)
def finished_processing(self):
+
+ try:
+ context = LoggingContext.current_context()
+ ru_utime, ru_stime = context.get_resource_usage()
+ db_txn_count = context.db_txn_count
+ db_txn_duration = context.db_txn_duration
+ except:
+ ru_utime, ru_stime = (0, 0)
+ db_txn_count, db_txn_duration = (0, 0)
+
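+        # Log format below: total wall-clock ms, then (user CPU ms,
+        # system CPU ms), then (DB time in ms / DB transaction count).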
self.site.access_logger.info(
"%s - %s - {%s}"
- " Processed request: %dms %sB %s \"%s %s %s\" \"%s\"",
+ " Processed request: %dms (%dms, %dms) (%dms/%d)"
+ " %sB %s \"%s %s %s\" \"%s\"",
self.getClientIP(),
self.site.site_tag,
self.authenticated_entity,
int(time.time() * 1000) - self.start_time,
+ int(ru_utime * 1000),
+ int(ru_stime * 1000),
+ int(db_txn_duration * 1000),
+ int(db_txn_count),
self.sentLength,
self.code,
self.method,
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 5c2d6bfeab..187edd516b 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -133,6 +133,7 @@ class ServerConfig(Config):
# The domain name of the server, with optional explicit port.
# This is used by remote servers to connect to this server,
# e.g. matrix.org, localhost:8080, etc.
+ # This is also the last part of your UserID.
server_name: "%(server_name)s"
# When running as a daemon, the file to store the pid in
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index bc5bb5cdb1..1fea568eed 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -230,7 +230,9 @@ class Keyring(object):
missing_keys = {}
for group in group_id_to_group.values():
- missing_keys.setdefault(group.server_name, set()).union(group.key_ids)
+ missing_keys.setdefault(group.server_name, set()).update(
+ group.key_ids
+ )
for fn in key_fetch_fns:
results = yield fn(missing_keys.items())
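The fix above matters because ``set.union`` returns a new set and leaves the original untouched, so the old code always left ``missing_keys`` empty; ``set.update`` mutates in place::

    s = set()
    s.union({"ed25519:auto"})   # returns a new set; s is still empty
    s.update({"ed25519:auto"})  # mutates s; s now contains the key id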
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 44cc1ef132..e634b149ba 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -100,22 +100,20 @@ def format_event_raw(d):
def format_event_for_client_v1(d):
- d["user_id"] = d.pop("sender", None)
+ d = format_event_for_client_v2(d)
+
+ sender = d.get("sender")
+ if sender is not None:
+ d["user_id"] = sender
- move_keys = (
+ copy_keys = (
"age", "redacted_because", "replaces_state", "prev_content",
"invite_room_state",
)
- for key in move_keys:
+ for key in copy_keys:
if key in d["unsigned"]:
d[key] = d["unsigned"][key]
- drop_keys = (
- "auth_events", "prev_events", "hashes", "signatures", "depth",
- "unsigned", "origin", "prev_state"
- )
- for key in drop_keys:
- d.pop(key, None)
return d
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 127b4da4f8..6b164fd2d1 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -165,7 +165,7 @@ class BaseFederationServlet(object):
if code is None:
continue
- server.register_path(method, pattern, self._wrap(code))
+ server.register_paths(method, (pattern,), self._wrap(code))
class FederationSendServlet(BaseFederationServlet):
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index 6519f183df..5fd20285d2 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -92,7 +92,15 @@ class BaseHandler(object):
membership_event = state.get((EventTypes.Member, user_id), None)
if membership_event:
- membership = membership_event.membership
+ was_forgotten_at_event = yield self.store.was_forgotten_at(
+ membership_event.state_key,
+ membership_event.room_id,
+ membership_event.event_id
+ )
+ if was_forgotten_at_event:
+ membership = None
+ else:
+ membership = membership_event.membership
else:
membership = None
diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py
index 1d35d3b7dc..fe773bee9b 100644
--- a/synapse/handlers/account_data.py
+++ b/synapse/handlers/account_data.py
@@ -29,9 +29,10 @@ class AccountDataEventSource(object):
last_stream_id = from_key
current_stream_id = yield self.store.get_max_account_data_stream_id()
- tags = yield self.store.get_updated_tags(user_id, last_stream_id)
results = []
+ tags = yield self.store.get_updated_tags(user_id, last_stream_id)
+
for room_id, room_tags in tags.items():
results.append({
"type": "m.tag",
@@ -39,6 +40,24 @@ class AccountDataEventSource(object):
"room_id": room_id,
})
+ account_data, room_account_data = (
+ yield self.store.get_updated_account_data_for_user(user_id, last_stream_id)
+ )
+
+ for account_data_type, content in account_data.items():
+ results.append({
+ "type": account_data_type,
+ "content": content,
+ })
+
+ for room_id, account_data in room_account_data.items():
+ for account_data_type, content in account_data.items():
+ results.append({
+ "type": account_data_type,
+ "content": content,
+ "room_id": room_id,
+ })
+
defer.returnValue((results, current_stream_id))
@defer.inlineCallbacks
diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py
index d852a18555..04fa58df65 100644
--- a/synapse/handlers/admin.py
+++ b/synapse/handlers/admin.py
@@ -30,34 +30,27 @@ class AdminHandler(BaseHandler):
@defer.inlineCallbacks
def get_whois(self, user):
- res = yield self.store.get_user_ip_and_agents(user)
-
- d = {}
- for r in res:
- # Note that device_id is always None
- device = d.setdefault(r["device_id"], {})
- session = device.setdefault(r["access_token"], [])
- session.append({
- "ip": r["ip"],
- "user_agent": r["user_agent"],
- "last_seen": r["last_seen"],
+ connections = []
+
+ sessions = yield self.store.get_user_ip_and_agents(user)
+ for session in sessions:
+ connections.append({
+ "ip": session["ip"],
+ "last_seen": session["last_seen"],
+ "user_agent": session["user_agent"],
})
ret = {
"user_id": user.to_string(),
- "devices": [
- {
- "device_id": k,
+ "devices": {
+ "": {
"sessions": [
{
- # "access_token": x, TODO (erikj)
- "connections": y,
+ "connections": connections,
}
- for x, y in v.items()
]
- }
- for k, v in d.items()
- ],
+ },
+ },
}
defer.returnValue(ret)
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 0e4c0d4d06..576d77e0e7 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -28,6 +28,18 @@ import random
logger = logging.getLogger(__name__)
+def started_user_eventstream(distributor, user):
+ return distributor.fire("started_user_eventstream", user)
+
+
+def stopped_user_eventstream(distributor, user):
+ return distributor.fire("stopped_user_eventstream", user)
+
+
+def user_joined_room(distributor, user, room_id):
+ return distributor.fire("user_joined_room", user, room_id)
+
+
class EventStreamHandler(BaseHandler):
def __init__(self, hs):
@@ -57,7 +69,12 @@ class EventStreamHandler(BaseHandler):
A deferred that completes once their presence has been updated.
"""
if user not in self._streams_per_user:
- self._streams_per_user[user] = 0
+ # Make sure we set the streams per user to 1 here rather than
+ # setting it to zero and incrementing the value below.
+ # Otherwise this may race with stopped_stream causing the
+ # user to be erased from the map before we have a chance
+ # to increment it.
+ self._streams_per_user[user] = 1
if user in self._stop_timer_per_user:
try:
self.clock.cancel_call_later(
@@ -66,9 +83,9 @@ class EventStreamHandler(BaseHandler):
except:
logger.exception("Failed to cancel event timer")
else:
- yield self.distributor.fire("started_user_eventstream", user)
-
- self._streams_per_user[user] += 1
+ yield started_user_eventstream(self.distributor, user)
+ else:
+ self._streams_per_user[user] += 1
def stopped_stream(self, user):
"""If there are no streams for a user this starts a timer that will
@@ -89,7 +106,7 @@ class EventStreamHandler(BaseHandler):
self._stop_timer_per_user.pop(user, None)
- return self.distributor.fire("stopped_user_eventstream", user)
+ return stopped_user_eventstream(self.distributor, user)
logger.debug("Scheduling _later: for %s", user)
self._stop_timer_per_user[user] = (
@@ -120,9 +137,7 @@ class EventStreamHandler(BaseHandler):
timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
if is_guest:
- yield self.distributor.fire(
- "user_joined_room", user=auth_user, room_id=room_id
- )
+ yield user_joined_room(self.distributor, auth_user, room_id)
events, tokens = yield self.notifier.get_events_for(
auth_user, pagin_config, timeout,
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index c1bce07e31..28f2ff68d6 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -44,6 +44,10 @@ import logging
logger = logging.getLogger(__name__)
+def user_joined_room(distributor, user, room_id):
+ return distributor.fire("user_joined_room", user, room_id)
+
+
class FederationHandler(BaseHandler):
"""Handles events that originated from federation.
Responsible for:
@@ -60,10 +64,7 @@ class FederationHandler(BaseHandler):
self.hs = hs
- self.distributor.observe(
- "user_joined_room",
- self._on_user_joined
- )
+ self.distributor.observe("user_joined_room", self.user_joined_room)
self.waiting_for_join_list = {}
@@ -176,7 +177,7 @@ class FederationHandler(BaseHandler):
)
try:
- _, event_stream_id, max_stream_id = yield self._handle_new_event(
+ context, event_stream_id, max_stream_id = yield self._handle_new_event(
origin,
event,
state=state,
@@ -233,10 +234,13 @@ class FederationHandler(BaseHandler):
if event.type == EventTypes.Member:
if event.membership == Membership.JOIN:
- user = UserID.from_string(event.state_key)
- yield self.distributor.fire(
- "user_joined_room", user=user, room_id=event.room_id
- )
+ prev_state = context.current_state.get((event.type, event.state_key))
+ if not prev_state or prev_state.membership != Membership.JOIN:
+                # Only fire user_joined_room if the user has actually
+ # joined the room. Don't bother if the user is just
+ # changing their profile info.
+ user = UserID.from_string(event.state_key)
+ yield user_joined_room(self.distributor, user, event.room_id)
@defer.inlineCallbacks
def _filter_events_for_server(self, server_name, room_id, events):
@@ -592,7 +596,7 @@ class FederationHandler(BaseHandler):
handled_events = set()
try:
- new_event = self._sign_event(event)
+ event = self._sign_event(event)
# Try the host we successfully got a response to /make_join/
# request first.
try:
@@ -600,7 +604,7 @@ class FederationHandler(BaseHandler):
target_hosts.insert(0, origin)
except ValueError:
pass
- ret = yield self.replication_layer.send_join(target_hosts, new_event)
+ ret = yield self.replication_layer.send_join(target_hosts, event)
origin = ret["origin"]
state = ret["state"]
@@ -609,12 +613,12 @@ class FederationHandler(BaseHandler):
handled_events.update([s.event_id for s in state])
handled_events.update([a.event_id for a in auth_chain])
- handled_events.add(new_event.event_id)
+ handled_events.add(event.event_id)
logger.debug("do_invite_join auth_chain: %s", auth_chain)
logger.debug("do_invite_join state: %s", state)
- logger.debug("do_invite_join event: %s", new_event)
+ logger.debug("do_invite_join event: %s", event)
try:
yield self.store.store_room(
@@ -632,14 +636,14 @@ class FederationHandler(BaseHandler):
with PreserveLoggingContext():
d = self.notifier.on_new_room_event(
- new_event, event_stream_id, max_stream_id,
+ event, event_stream_id, max_stream_id,
extra_users=[joinee]
)
def log_failure(f):
logger.warn(
"Failed to notify about %s: %s",
- new_event.event_id, f.value
+ event.event_id, f.value
)
d.addErrback(log_failure)
@@ -733,9 +737,7 @@ class FederationHandler(BaseHandler):
if event.type == EventTypes.Member:
if event.content["membership"] == Membership.JOIN:
user = UserID.from_string(event.state_key)
- yield self.distributor.fire(
- "user_joined_room", user=user, room_id=event.room_id
- )
+ yield user_joined_room(self.distributor, user, event.room_id)
new_pdu = event
@@ -1082,7 +1084,7 @@ class FederationHandler(BaseHandler):
return self.store.get_min_depth(context)
@log_function
- def _on_user_joined(self, user, room_id):
+ def user_joined_room(self, user, room_id):
waiters = self.waiting_for_join_list.get(
(user.to_string(), room_id),
[]
@@ -1648,11 +1650,22 @@ class FederationHandler(BaseHandler):
sender = invite["sender"]
room_id = invite["room_id"]
+ if "signed" not in invite or "token" not in invite["signed"]:
+ logger.info(
+ "Discarding received notification of third party invite "
+ "without signed: %s" % (invite,)
+ )
+ return
+
+ third_party_invite = {
+ "signed": invite["signed"],
+ }
+
event_dict = {
"type": EventTypes.Member,
"content": {
"membership": Membership.INVITE,
- "third_party_invite": invite,
+ "third_party_invite": third_party_invite,
},
"room_id": room_id,
"sender": sender,
@@ -1663,6 +1676,11 @@ class FederationHandler(BaseHandler):
builder = self.event_builder_factory.new(event_dict)
EventValidator().validate_new(builder)
event, context = yield self._create_new_client_event(builder=builder)
+
+ event, context = yield self.add_display_name_to_third_party_invite(
+ event_dict, event, context
+ )
+
self.auth.check(event, context.current_state)
yield self._validate_keyserver(event, auth_events=context.current_state)
member_handler = self.hs.get_handlers().room_member_handler
@@ -1684,6 +1702,10 @@ class FederationHandler(BaseHandler):
builder=builder,
)
+ event, context = yield self.add_display_name_to_third_party_invite(
+ event_dict, event, context
+ )
+
self.auth.check(event, auth_events=context.current_state)
yield self._validate_keyserver(event, auth_events=context.current_state)
@@ -1694,6 +1716,27 @@ class FederationHandler(BaseHandler):
yield member_handler.change_membership(event, context)
@defer.inlineCallbacks
+ def add_display_name_to_third_party_invite(self, event_dict, event, context):
+ key = (
+ EventTypes.ThirdPartyInvite,
+ event.content["third_party_invite"]["signed"]["token"]
+ )
+ original_invite = context.current_state.get(key)
+ if not original_invite:
+            logger.info(
+                "Could not find invite event for third_party_invite - "
+                "proceeding without a display_name: %s" % (event_dict,)
+            )
+            defer.returnValue((event, context))
+
+ display_name = original_invite.content["display_name"]
+ event_dict["content"]["third_party_invite"]["display_name"] = display_name
+ builder = self.event_builder_factory.new(event_dict)
+ EventValidator().validate_new(builder)
+ event, context = yield self._create_new_client_event(builder=builder)
+ defer.returnValue((event, context))
+
+ @defer.inlineCallbacks
def _validate_keyserver(self, event, auth_events):
token = event.content["third_party_invite"]["signed"]["token"]
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index 2a99921d5f..f1fa562fff 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -20,7 +20,6 @@ from synapse.api.errors import (
CodeMessageException
)
from ._base import BaseHandler
-from synapse.http.client import SimpleHttpClient
from synapse.util.async import run_on_reactor
from synapse.api.errors import SynapseError
@@ -35,13 +34,12 @@ class IdentityHandler(BaseHandler):
def __init__(self, hs):
super(IdentityHandler, self).__init__(hs)
+ self.http_client = hs.get_simple_http_client()
+
@defer.inlineCallbacks
def threepid_from_creds(self, creds):
yield run_on_reactor()
- # TODO: get this from the homeserver rather than creating a new one for
- # each request
- http_client = SimpleHttpClient(self.hs)
# XXX: make this configurable!
# trustedIdServers = ['matrix.org', 'localhost:8090']
trustedIdServers = ['matrix.org', 'vector.im']
@@ -67,7 +65,7 @@ class IdentityHandler(BaseHandler):
data = {}
try:
- data = yield http_client.get_json(
+ data = yield self.http_client.get_json(
"https://%s%s" % (
id_server,
"/_matrix/identity/api/v1/3pid/getValidated3pid"
@@ -85,7 +83,6 @@ class IdentityHandler(BaseHandler):
def bind_threepid(self, creds, mxid):
yield run_on_reactor()
logger.debug("binding threepid %r to %s", creds, mxid)
- http_client = SimpleHttpClient(self.hs)
data = None
if 'id_server' in creds:
@@ -103,7 +100,7 @@ class IdentityHandler(BaseHandler):
raise SynapseError(400, "No client_secret in creds")
try:
- data = yield http_client.post_urlencoded_get_json(
+ data = yield self.http_client.post_urlencoded_get_json(
"https://%s%s" % (
id_server, "/_matrix/identity/api/v1/3pid/bind"
),
@@ -121,7 +118,6 @@ class IdentityHandler(BaseHandler):
@defer.inlineCallbacks
def requestEmailToken(self, id_server, email, client_secret, send_attempt, **kwargs):
yield run_on_reactor()
- http_client = SimpleHttpClient(self.hs)
params = {
'email': email,
@@ -131,7 +127,7 @@ class IdentityHandler(BaseHandler):
params.update(kwargs)
try:
- data = yield http_client.post_urlencoded_get_json(
+ data = yield self.http_client.post_urlencoded_get_json(
"https://%s%s" % (
id_server,
"/_matrix/identity/api/v1/validate/email/requestToken"
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 2e7d0d7f82..a1bed9b0dc 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -22,15 +22,22 @@ from synapse.events.utils import serialize_event
from synapse.events.validator import EventValidator
from synapse.util import unwrapFirstError
from synapse.util.logcontext import PreserveLoggingContext
+from synapse.util.caches.snapshot_cache import SnapshotCache
from synapse.types import UserID, RoomStreamToken, StreamToken
from ._base import BaseHandler
+from canonicaljson import encode_canonical_json
+
import logging
logger = logging.getLogger(__name__)
+def collect_presencelike_data(distributor, user, content):
+ return distributor.fire("collect_presencelike_data", user, content)
+
+
class MessageHandler(BaseHandler):
def __init__(self, hs):
@@ -39,6 +46,7 @@ class MessageHandler(BaseHandler):
self.state = hs.get_state_handler()
self.clock = hs.get_clock()
self.validator = EventValidator()
+ self.snapshot_cache = SnapshotCache()
@defer.inlineCallbacks
def get_message(self, msg_id=None, room_id=None, sender_id=None,
@@ -195,10 +203,8 @@ class MessageHandler(BaseHandler):
if membership == Membership.JOIN:
joinee = UserID.from_string(builder.state_key)
# If event doesn't include a display name, add one.
- yield self.distributor.fire(
- "collect_presencelike_data",
- joinee,
- builder.content
+ yield collect_presencelike_data(
+ self.distributor, joinee, builder.content
)
if token_id is not None:
@@ -211,6 +217,16 @@ class MessageHandler(BaseHandler):
builder=builder,
)
+ if event.is_state():
+ prev_state = context.current_state.get((event.type, event.state_key))
+ if prev_state and event.user_id == prev_state.user_id:
+ prev_content = encode_canonical_json(prev_state.content)
+ next_content = encode_canonical_json(event.content)
+ if prev_content == next_content:
+ # Duplicate suppression for state updates with same sender
+ # and content.
+ defer.returnValue(prev_state)
+
if event.type == EventTypes.Member:
member_handler = self.hs.get_handlers().room_member_handler
yield member_handler.change_membership(event, context, is_guest=is_guest)
@@ -312,7 +328,6 @@ class MessageHandler(BaseHandler):
[serialize_event(c, now) for c in room_state.values()]
)
- @defer.inlineCallbacks
def snapshot_all_rooms(self, user_id=None, pagin_config=None,
as_client_event=True, include_archived=False):
"""Retrieve a snapshot of all rooms the user is invited or has joined.
@@ -332,6 +347,28 @@ class MessageHandler(BaseHandler):
is joined on, may return a "messages" key with messages, depending
on the specified PaginationConfig.
"""
+ key = (
+ user_id,
+ pagin_config.from_token,
+ pagin_config.to_token,
+ pagin_config.direction,
+ pagin_config.limit,
+ as_client_event,
+ include_archived,
+ )
+ now_ms = self.clock.time_msec()
+ result = self.snapshot_cache.get(now_ms, key)
+ if result is not None:
+ return result
+
+ return self.snapshot_cache.set(now_ms, key, self._snapshot_all_rooms(
+ user_id, pagin_config, as_client_event, include_archived
+ ))
+
+ @defer.inlineCallbacks
+ def _snapshot_all_rooms(self, user_id=None, pagin_config=None,
+ as_client_event=True, include_archived=False):
+
memberships = [Membership.INVITE, Membership.JOIN]
if include_archived:
memberships.append(Membership.LEAVE)
@@ -359,6 +396,10 @@ class MessageHandler(BaseHandler):
tags_by_room = yield self.store.get_tags_for_user(user_id)
+ account_data, account_data_by_room = (
+ yield self.store.get_account_data_for_user(user_id)
+ )
+
public_room_ids = yield self.store.get_public_room_ids()
limit = pagin_config.limit
@@ -436,14 +477,22 @@ class MessageHandler(BaseHandler):
for c in current_state.values()
]
- account_data = []
+ account_data_events = []
tags = tags_by_room.get(event.room_id)
if tags:
- account_data.append({
+ account_data_events.append({
"type": "m.tag",
"content": {"tags": tags},
})
- d["account_data"] = account_data
+
+ account_data = account_data_by_room.get(event.room_id, {})
+ for account_data_type, content in account_data.items():
+ account_data_events.append({
+ "type": account_data_type,
+ "content": content,
+ })
+
+ d["account_data"] = account_data_events
except:
logger.exception("Failed to get snapshot")
@@ -456,9 +505,17 @@ class MessageHandler(BaseHandler):
consumeErrors=True
).addErrback(unwrapFirstError)
+ account_data_events = []
+ for account_data_type, content in account_data.items():
+ account_data_events.append({
+ "type": account_data_type,
+ "content": content,
+ })
+
ret = {
"rooms": rooms_ret,
"presence": presence,
+ "account_data": account_data_events,
"receipts": receipt,
"end": now_token.to_string(),
}
@@ -498,14 +555,22 @@ class MessageHandler(BaseHandler):
user_id, room_id, pagin_config, membership, member_event_id, is_guest
)
- account_data = []
+ account_data_events = []
tags = yield self.store.get_tags_for_room(user_id, room_id)
if tags:
- account_data.append({
+ account_data_events.append({
"type": "m.tag",
"content": {"tags": tags},
})
- result["account_data"] = account_data
+
+ account_data = yield self.store.get_account_data_for_room(user_id, room_id)
+ for account_data_type, content in account_data.items():
+ account_data_events.append({
+ "type": account_data_type,
+ "content": content,
+ })
+
+ result["account_data"] = account_data_events
defer.returnValue(result)
@@ -588,23 +653,28 @@ class MessageHandler(BaseHandler):
@defer.inlineCallbacks
def get_presence():
- states = {}
- if not is_guest:
- states = yield presence_handler.get_states(
- target_users=[UserID.from_string(m.user_id) for m in room_members],
- auth_user=auth_user,
- as_event=True,
- check_auth=False,
- )
+ states = yield presence_handler.get_states(
+ target_users=[UserID.from_string(m.user_id) for m in room_members],
+ auth_user=auth_user,
+ as_event=True,
+ check_auth=False,
+ )
defer.returnValue(states.values())
- receipts_handler = self.hs.get_handlers().receipts_handler
+ @defer.inlineCallbacks
+ def get_receipts():
+ receipts_handler = self.hs.get_handlers().receipts_handler
+ receipts = yield receipts_handler.get_receipts_for_room(
+ room_id,
+ now_token.receipt_key
+ )
+ defer.returnValue(receipts)
presence, receipts, (messages, token) = yield defer.gatherResults(
[
get_presence(),
- receipts_handler.get_receipts_for_room(room_id, now_token.receipt_key),
+ get_receipts(),
self.store.get_recent_events_for_room(
room_id,
limit=limit,
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index aca65096fc..63d6f30a7b 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -62,6 +62,14 @@ def partitionbool(l, func):
return ret.get(True, []), ret.get(False, [])
+def user_presence_changed(distributor, user, statuscache):
+ return distributor.fire("user_presence_changed", user, statuscache)
+
+
+def collect_presencelike_data(distributor, user, content):
+ return distributor.fire("collect_presencelike_data", user, content)
+
+
class PresenceHandler(BaseHandler):
STATE_LEVELS = {
@@ -361,9 +369,7 @@ class PresenceHandler(BaseHandler):
yield self.store.set_presence_state(
target_user.localpart, state_to_store
)
- yield self.distributor.fire(
- "collect_presencelike_data", target_user, state
- )
+ yield collect_presencelike_data(self.distributor, target_user, state)
if now_level > was_level:
state["last_active"] = self.clock.time_msec()
@@ -467,7 +473,7 @@ class PresenceHandler(BaseHandler):
)
@defer.inlineCallbacks
- def send_invite(self, observer_user, observed_user):
+ def send_presence_invite(self, observer_user, observed_user):
"""Request the presence of a local or remote user for a local user"""
if not self.hs.is_mine(observer_user):
raise SynapseError(400, "User is not hosted on this Home Server")
@@ -878,7 +884,7 @@ class PresenceHandler(BaseHandler):
room_ids=room_ids,
statuscache=statuscache,
)
- yield self.distributor.fire("user_presence_changed", user, statuscache)
+ yield user_presence_changed(self.distributor, user, statuscache)
@defer.inlineCallbacks
def incoming_presence(self, origin, content):
@@ -1116,9 +1122,7 @@ class PresenceHandler(BaseHandler):
self._user_cachemap[user].get_state()["last_active"]
)
- yield self.distributor.fire(
- "collect_presencelike_data", user, state
- )
+ yield collect_presencelike_data(self.distributor, user, state)
if "last_active" in state:
state = dict(state)
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 799faffe53..576c6f09b4 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -28,6 +28,14 @@ import logging
logger = logging.getLogger(__name__)
+def changed_presencelike_data(distributor, user, state):
+ return distributor.fire("changed_presencelike_data", user, state)
+
+
+def collect_presencelike_data(distributor, user, content):
+ return distributor.fire("collect_presencelike_data", user, content)
+
+
class ProfileHandler(BaseHandler):
def __init__(self, hs):
@@ -95,11 +103,9 @@ class ProfileHandler(BaseHandler):
target_user.localpart, new_displayname
)
- yield self.distributor.fire(
- "changed_presencelike_data", target_user, {
- "displayname": new_displayname,
- }
- )
+ yield changed_presencelike_data(self.distributor, target_user, {
+ "displayname": new_displayname,
+ })
yield self._update_join_states(target_user)
@@ -144,11 +150,9 @@ class ProfileHandler(BaseHandler):
target_user.localpart, new_avatar_url
)
- yield self.distributor.fire(
- "changed_presencelike_data", target_user, {
- "avatar_url": new_avatar_url,
- }
- )
+ yield changed_presencelike_data(self.distributor, target_user, {
+ "avatar_url": new_avatar_url,
+ })
yield self._update_join_states(target_user)
@@ -208,9 +212,7 @@ class ProfileHandler(BaseHandler):
"membership": Membership.JOIN,
}
- yield self.distributor.fire(
- "collect_presencelike_data", user, content
- )
+ yield collect_presencelike_data(self.distributor, user, content)
msg_handler = self.hs.get_handlers().message_handler
try:
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 493a087031..baf7c14e40 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -31,6 +31,10 @@ import urllib
logger = logging.getLogger(__name__)
+def registered_user(distributor, user):
+ return distributor.fire("registered_user", user)
+
+
class RegistrationHandler(BaseHandler):
def __init__(self, hs):
@@ -38,6 +42,7 @@ class RegistrationHandler(BaseHandler):
self.distributor = hs.get_distributor()
self.distributor.declare("registered_user")
+ self.captcha_client = CaptchaServerHttpClient(hs)
@defer.inlineCallbacks
def check_username(self, localpart):
@@ -98,7 +103,7 @@ class RegistrationHandler(BaseHandler):
password_hash=password_hash
)
- yield self.distributor.fire("registered_user", user)
+ yield registered_user(self.distributor, user)
else:
# autogen a random user ID
attempts = 0
@@ -117,7 +122,7 @@ class RegistrationHandler(BaseHandler):
token=token,
password_hash=password_hash)
- self.distributor.fire("registered_user", user)
+ yield registered_user(self.distributor, user)
except SynapseError:
# if user id is taken, just generate another
user_id = None
@@ -127,25 +132,9 @@ class RegistrationHandler(BaseHandler):
raise RegistrationError(
500, "Cannot generate user ID.")
- # create a default avatar for the user
- # XXX: ideally clients would explicitly specify one, but given they don't
- # and we want consistent and pretty identicons for random users, we'll
- # do it here.
- try:
- auth_user = UserID.from_string(user_id)
- media_repository = self.hs.get_resource_for_media_repository()
- identicon_resource = media_repository.getChildWithDefault("identicon", None)
- upload_resource = media_repository.getChildWithDefault("upload", None)
- identicon_bytes = identicon_resource.generate_identicon(user_id, 320, 320)
- content_uri = yield upload_resource.create_content(
- "image/png", None, identicon_bytes, len(identicon_bytes), auth_user
- )
- profile_handler = self.hs.get_handlers().profile_handler
- profile_handler.set_avatar_url(
- auth_user, auth_user, ("%s#auto" % (content_uri,))
- )
- except NotImplementedError:
- pass # make tests pass without messing around creating default avatars
+ # We used to generate default identicons here, but nowadays
+ # we want clients to generate their own as part of their branding
+ # rather than there being consistent matrix-wide ones, so we don't.
defer.returnValue((user_id, token))
@@ -167,7 +156,7 @@ class RegistrationHandler(BaseHandler):
token=token,
password_hash=""
)
- self.distributor.fire("registered_user", user)
+ registered_user(self.distributor, user)
defer.returnValue((user_id, token))
@defer.inlineCallbacks
@@ -215,7 +204,7 @@ class RegistrationHandler(BaseHandler):
token=token,
password_hash=None
)
- yield self.distributor.fire("registered_user", user)
+ yield registered_user(self.distributor, user)
except Exception, e:
yield self.store.add_access_token_to_user(user_id, token)
# Ignore Registration errors
@@ -302,10 +291,7 @@ class RegistrationHandler(BaseHandler):
"""
Used only by c/s api v1
"""
- # TODO: get this from the homeserver rather than creating a new one for
- # each request
- client = CaptchaServerHttpClient(self.hs)
- data = yield client.post_urlencoded_get_raw(
+ data = yield self.captcha_client.post_urlencoded_get_raw(
"http://www.google.com:80/recaptcha/api/verify",
args={
'privatekey': private_key,
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 3f04752581..13f66e0df0 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -41,6 +41,18 @@ logger = logging.getLogger(__name__)
id_server_scheme = "https://"
+def collect_presencelike_data(distributor, user, content):
+ return distributor.fire("collect_presencelike_data", user, content)
+
+
+def user_left_room(distributor, user, room_id):
+ return distributor.fire("user_left_room", user=user, room_id=room_id)
+
+
+def user_joined_room(distributor, user, room_id):
+ return distributor.fire("user_joined_room", user=user, room_id=room_id)
+
+
class RoomCreationHandler(BaseHandler):
PRESETS_DICT = {
@@ -438,9 +450,7 @@ class RoomMemberHandler(BaseHandler):
if prev_state and prev_state.membership == Membership.JOIN:
user = UserID.from_string(event.user_id)
- self.distributor.fire(
- "user_left_room", user=user, room_id=event.room_id
- )
+ user_left_room(self.distributor, user, event.room_id)
defer.returnValue({"room_id": room_id})
@@ -458,9 +468,7 @@ class RoomMemberHandler(BaseHandler):
raise SynapseError(404, "No known servers")
# If event doesn't include a display name, add one.
- yield self.distributor.fire(
- "collect_presencelike_data", joinee, content
- )
+ yield collect_presencelike_data(self.distributor, joinee, content)
content.update({"membership": Membership.JOIN})
builder = self.event_builder_factory.new({
@@ -517,10 +525,13 @@ class RoomMemberHandler(BaseHandler):
do_auth=do_auth,
)
- user = UserID.from_string(event.user_id)
- yield self.distributor.fire(
- "user_joined_room", user=user, room_id=room_id
- )
+ prev_state = context.current_state.get((event.type, event.state_key))
+ if not prev_state or prev_state.membership != Membership.JOIN:
+            # Only fire user_joined_room if the user has actually joined the
+ # room. Don't bother if the user is just changing their profile
+ # info.
+ user = UserID.from_string(event.user_id)
+ yield user_joined_room(self.distributor, user, room_id)
@defer.inlineCallbacks
def get_inviter(self, event):
@@ -693,13 +704,48 @@ class RoomMemberHandler(BaseHandler):
token_id,
txn_id
):
+ room_state = yield self.hs.get_state_handler().get_current_state(room_id)
+
+ inviter_display_name = ""
+ inviter_avatar_url = ""
+ member_event = room_state.get((EventTypes.Member, user.to_string()))
+ if member_event:
+ inviter_display_name = member_event.content.get("displayname", "")
+ inviter_avatar_url = member_event.content.get("avatar_url", "")
+
+ canonical_room_alias = ""
+ canonical_alias_event = room_state.get((EventTypes.CanonicalAlias, ""))
+ if canonical_alias_event:
+ canonical_room_alias = canonical_alias_event.content.get("alias", "")
+
+ room_name = ""
+ room_name_event = room_state.get((EventTypes.Name, ""))
+ if room_name_event:
+ room_name = room_name_event.content.get("name", "")
+
+ room_join_rules = ""
+ join_rules_event = room_state.get((EventTypes.JoinRules, ""))
+ if join_rules_event:
+ room_join_rules = join_rules_event.content.get("join_rule", "")
+
+ room_avatar_url = ""
+ room_avatar_event = room_state.get((EventTypes.RoomAvatar, ""))
+ if room_avatar_event:
+ room_avatar_url = room_avatar_event.content.get("url", "")
+
token, public_key, key_validity_url, display_name = (
yield self._ask_id_server_for_third_party_invite(
- id_server,
- medium,
- address,
- room_id,
- user.to_string()
+ id_server=id_server,
+ medium=medium,
+ address=address,
+ room_id=room_id,
+ inviter_user_id=user.to_string(),
+ room_alias=canonical_room_alias,
+ room_avatar_url=room_avatar_url,
+ room_join_rules=room_join_rules,
+ room_name=room_name,
+ inviter_display_name=inviter_display_name,
+ inviter_avatar_url=inviter_avatar_url
)
)
msg_handler = self.hs.get_handlers().message_handler
@@ -721,7 +767,19 @@ class RoomMemberHandler(BaseHandler):
@defer.inlineCallbacks
def _ask_id_server_for_third_party_invite(
- self, id_server, medium, address, room_id, sender):
+ self,
+ id_server,
+ medium,
+ address,
+ room_id,
+ inviter_user_id,
+ room_alias,
+ room_avatar_url,
+ room_join_rules,
+ room_name,
+ inviter_display_name,
+ inviter_avatar_url
+ ):
is_url = "%s%s/_matrix/identity/api/v1/store-invite" % (
id_server_scheme, id_server,
)
@@ -731,7 +789,13 @@ class RoomMemberHandler(BaseHandler):
"medium": medium,
"address": address,
"room_id": room_id,
- "sender": sender,
+ "room_alias": room_alias,
+ "room_avatar_url": room_avatar_url,
+ "room_join_rules": room_join_rules,
+ "room_name": room_name,
+ "sender": inviter_user_id,
+ "sender_display_name": inviter_display_name,
+ "sender_avatar_url": inviter_avatar_url,
}
)
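+ # Illustrative request body (example values only):
+ #   {"medium": "email", "address": "invitee@example.com",
+ #    "room_id": "!abc:example.org", "room_alias": "#room:example.org",
+ #    "room_name": "Example Room", "room_join_rules": "public",
+ #    "sender": "@inviter:example.org", "sender_display_name": "Inviter"}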
# TODO: Check for success
@@ -743,13 +807,17 @@ class RoomMemberHandler(BaseHandler):
)
defer.returnValue((token, public_key, key_validity_url, display_name))
+ def forget(self, user, room_id):
+ return self.store.forget(user.to_string(), room_id)
+
class RoomListHandler(BaseHandler):
@defer.inlineCallbacks
def get_public_room_list(self):
chunk = yield self.store.get_rooms(is_public=True)
- results = yield defer.gatherResults(
+
+ room_members = yield defer.gatherResults(
[
self.store.get_users_in_room(room["room_id"])
for room in chunk
@@ -757,12 +825,30 @@ class RoomListHandler(BaseHandler):
consumeErrors=True,
).addErrback(unwrapFirstError)
+ avatar_urls = yield defer.gatherResults(
+ [
+ self.get_room_avatar_url(room["room_id"])
+ for room in chunk
+ ],
+ consumeErrors=True,
+ ).addErrback(unwrapFirstError)
+
for i, room in enumerate(chunk):
- room["num_joined_members"] = len(results[i])
+ room["num_joined_members"] = len(room_members[i])
+ if avatar_urls[i]:
+ room["avatar_url"] = avatar_urls[i]
# FIXME (erikj): START is no longer a valid value
defer.returnValue({"start": "START", "end": "END", "chunk": chunk})
+ @defer.inlineCallbacks
+ def get_room_avatar_url(self, room_id):
+ event = yield self.hs.get_state_handler().get_current_state(
+ room_id, "m.room.avatar"
+ )
+ if event and "url" in event.content:
+ defer.returnValue(event.content["url"])
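+ # (falls through and implicitly returns None if the room has no avatar)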
+
class RoomContextHandler(BaseHandler):
@defer.inlineCallbacks
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 50688e51a8..99ef56871c 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -131,6 +131,17 @@ class SearchHandler(BaseHandler):
if batch_group == "room_id":
room_ids.intersection_update({batch_group_key})
+ if not room_ids:
+ defer.returnValue({
+ "search_categories": {
+ "room_events": {
+ "results": [],
+ "count": 0,
+ "highlights": [],
+ }
+ }
+ })
+
rank_map = {} # event_id -> rank of event
allowed_events = []
room_groups = {} # Holds result of grouping by room, if applicable
@@ -139,11 +150,22 @@ class SearchHandler(BaseHandler):
# Holds the next_batch for the entire result set if one of those exists
global_next_batch = None
+ highlights = set()
+
+ count = None
+
if order_by == "rank":
- results = yield self.store.search_msgs(
+ search_result = yield self.store.search_msgs(
room_ids, search_term, keys
)
+ count = search_result["count"]
+
+ if search_result["highlights"]:
+ highlights.update(search_result["highlights"])
+
+ results = search_result["results"]
+
results_map = {r["event"].event_id: r for r in results}
rank_map.update({r["event"].event_id: r["rank"] for r in results})
@@ -171,80 +193,78 @@ class SearchHandler(BaseHandler):
s["results"].append(e.event_id)
elif order_by == "recent":
- # In this case we specifically loop through each room as the given
- # limit applies to each room, rather than a global list.
- # This is not necessarilly a good idea.
- for room_id in room_ids:
- room_events = []
- if batch_group == "room_id" and batch_group_key == room_id:
- pagination_token = batch_token
- else:
- pagination_token = None
- i = 0
-
- # We keep looping and we keep filtering until we reach the limit
- # or we run out of things.
- # But only go around 5 times since otherwise synapse will be sad.
- while len(room_events) < search_filter.limit() and i < 5:
- i += 1
- results = yield self.store.search_room(
- room_id, search_term, keys, search_filter.limit() * 2,
- pagination_token=pagination_token,
- )
+ room_events = []
+ i = 0
+
+ pagination_token = batch_token
+
+ # Keep fetching and filtering until we reach the limit or run out of
+ # results, but cap it at 5 iterations so a single search request can't
+ # do unbounded work.
+ while len(room_events) < search_filter.limit() and i < 5:
+ i += 1
+ search_result = yield self.store.search_rooms(
+ room_ids, search_term, keys, search_filter.limit() * 2,
+ pagination_token=pagination_token,
+ )
- results_map = {r["event"].event_id: r for r in results}
+ if search_result["highlights"]:
+ highlights.update(search_result["highlights"])
- rank_map.update({r["event"].event_id: r["rank"] for r in results})
+ count = search_result["count"]
- filtered_events = search_filter.filter([
- r["event"] for r in results
- ])
+ results = search_result["results"]
- events = yield self._filter_events_for_client(
- user.to_string(), filtered_events
- )
+ results_map = {r["event"].event_id: r for r in results}
- room_events.extend(events)
- room_events = room_events[:search_filter.limit()]
+ rank_map.update({r["event"].event_id: r["rank"] for r in results})
- if len(results) < search_filter.limit() * 2:
- pagination_token = None
- break
- else:
- pagination_token = results[-1]["pagination_token"]
-
- if room_events:
- res = results_map[room_events[-1].event_id]
- pagination_token = res["pagination_token"]
-
- group = room_groups.setdefault(room_id, {})
- if pagination_token:
- next_batch = encode_base64("%s\n%s\n%s" % (
- "room_id", room_id, pagination_token
- ))
- group["next_batch"] = next_batch
-
- if batch_token:
- global_next_batch = next_batch
-
- group["results"] = [e.event_id for e in room_events]
- group["order"] = max(
- e.origin_server_ts/1000 for e in room_events
- if hasattr(e, "origin_server_ts")
- )
+ filtered_events = search_filter.filter([
+ r["event"] for r in results
+ ])
+
+ events = yield self._filter_events_for_client(
+ user.to_string(), filtered_events
+ )
- allowed_events.extend(room_events)
+ room_events.extend(events)
+ room_events = room_events[:search_filter.limit()]
- # Normalize the group orders
- if room_groups:
- if len(room_groups) > 1:
- mx = max(g["order"] for g in room_groups.values())
- mn = min(g["order"] for g in room_groups.values())
+ if len(results) < search_filter.limit() * 2:
+ pagination_token = None
+ break
+ else:
+ pagination_token = results[-1]["pagination_token"]
- for g in room_groups.values():
- g["order"] = (g["order"] - mn) * 1.0 / (mx - mn)
+ for event in room_events:
+ group = room_groups.setdefault(event.room_id, {
+ "results": [],
+ })
+ group["results"].append(event.event_id)
+
+ if room_events and len(room_events) >= search_filter.limit():
+ last_event_id = room_events[-1].event_id
+ pagination_token = results_map[last_event_id]["pagination_token"]
+
+ # We want to respect the given batch group and group keys so
+ # that if people blindly use the top level `next_batch` token
+ # it returns more from the same group (if applicable) rather
+ # than reverting to searching all results again.
+ if batch_group and batch_group_key:
+ global_next_batch = encode_base64("%s\n%s\n%s" % (
+ batch_group, batch_group_key, pagination_token
+ ))
else:
- room_groups.values()[0]["order"] = 1
+ global_next_batch = encode_base64("%s\n%s\n%s" % (
+ "all", "", pagination_token
+ ))
+
+ for room_id, group in room_groups.items():
+ group["next_batch"] = encode_base64("%s\n%s\n%s" % (
+ "room_id", room_id, pagination_token
+ ))
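+ # These next_batch tokens are opaque to clients: e.g. (illustrative)
+ # encode_base64("room_id\n!abc:example.org\n<pagination_token>") encodes
+ # the group, group key and stream position to resume from.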
+
+ allowed_events.extend(room_events)
else:
# We should never get here due to the guard earlier.
@@ -334,20 +354,19 @@ class SearchHandler(BaseHandler):
# We're now about to serialize the events. We should not make any
# blocking calls after this. Otherwise the 'age' will be wrong
- results = {
- e.event_id: {
+ results = [
+ {
"rank": rank_map[e.event_id],
"result": serialize_event(e, time_now),
"context": contexts.get(e.event_id, {}),
}
for e in allowed_events
- }
-
- logger.info("Found %d results", len(results))
+ ]
rooms_cat_res = {
"results": results,
- "count": len(results)
+ "count": count,
+ "highlights": list(highlights),
}
if state_results:
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 877328b29e..feea407ea2 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -15,8 +15,9 @@
from ._base import BaseHandler
-from synapse.streams.config import PaginationConfig
from synapse.api.constants import Membership, EventTypes
+from synapse.api.errors import GuestAccessError
+from synapse.util import unwrapFirstError
from twisted.internet import defer
@@ -28,6 +29,7 @@ logger = logging.getLogger(__name__)
SyncConfig = collections.namedtuple("SyncConfig", [
"user",
+ "is_guest",
"filter",
])
@@ -100,6 +102,7 @@ class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [
class SyncResult(collections.namedtuple("SyncResult", [
"next_batch", # Token for the next sync
"presence", # List of presence events for the user.
+ "account_data", # List of account_data events for the user.
"joined", # JoinedSyncResult for each joined room.
"invited", # InvitedSyncResult for each invited room.
"archived", # ArchivedSyncResult for each archived room.
@@ -115,6 +118,8 @@ class SyncResult(collections.namedtuple("SyncResult", [
self.presence or self.joined or self.invited
)
+GuestRoom = collections.namedtuple("GuestRoom", ("room_id", "membership"))
+
class SyncHandler(BaseHandler):
@@ -133,6 +138,18 @@ class SyncHandler(BaseHandler):
A Deferred SyncResult.
"""
+ if sync_config.is_guest:
+ bad_rooms = []
+ for room_id in sync_config.filter.list_rooms():
+ world_readable = yield self._is_world_readable(room_id)
+ if not world_readable:
+ bad_rooms.append(room_id)
+
+ if bad_rooms:
+ raise GuestAccessError(
+ bad_rooms, 403, "Guest access not allowed"
+ )
+
if timeout == 0 or since_token is None or full_state:
# we are going to return immediately, so don't bother calling
# notifier.wait_for_events.
@@ -149,6 +166,17 @@ class SyncHandler(BaseHandler):
)
defer.returnValue(result)
+ @defer.inlineCallbacks
+ def _is_world_readable(self, room_id):
+ state = yield self.hs.get_state_handler().get_current_state(
+ room_id,
+ EventTypes.RoomHistoryVisibility
+ )
+ if state and "history_visibility" in state.content:
+ defer.returnValue(state.content["history_visibility"] == "world_readable")
+ else:
+ defer.returnValue(False)
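+ # i.e. guests may only sync rooms whose m.room.history_visibility
+ # state is set to "world_readable".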
+
def current_sync_for_user(self, sync_config, since_token=None,
full_state=False):
"""Get the sync for client needed to match what the server has now.
@@ -172,47 +200,71 @@ class SyncHandler(BaseHandler):
"""
now_token = yield self.event_sources.get_current_token()
- now_token, ephemeral_by_room = yield self.ephemeral_by_room(
- sync_config, now_token
- )
+ if sync_config.is_guest:
+ room_list = [
+ GuestRoom(room_id, Membership.JOIN)
+ for room_id in sync_config.filter.list_rooms()
+ ]
+
+ account_data = {}
+ account_data_by_room = {}
+ tags_by_room = {}
+
+ else:
+ membership_list = (Membership.INVITE, Membership.JOIN)
+ if sync_config.filter.include_leave:
+ membership_list += (Membership.LEAVE, Membership.BAN)
+
+ room_list = yield self.store.get_rooms_for_user_where_membership_is(
+ user_id=sync_config.user.to_string(),
+ membership_list=membership_list
+ )
+
+ account_data, account_data_by_room = (
+ yield self.store.get_account_data_for_user(
+ sync_config.user.to_string()
+ )
+ )
+
+ tags_by_room = yield self.store.get_tags_for_user(
+ sync_config.user.to_string()
+ )
presence_stream = self.event_sources.sources["presence"]
- # TODO (mjark): This looks wrong, shouldn't we be getting the presence
- # UP to the present rather than after the present?
- pagination_config = PaginationConfig(from_token=now_token)
- presence, _ = yield presence_stream.get_pagination_rows(
+
+ joined_room_ids = [
+ room.room_id for room in room_list
+ if room.membership == Membership.JOIN
+ ]
+
+ presence, _ = yield presence_stream.get_new_events(
+ from_key=0,
user=sync_config.user,
- pagination_config=pagination_config.get_source_config("presence"),
- key=None
- )
- room_list = yield self.store.get_rooms_for_user_where_membership_is(
- user_id=sync_config.user.to_string(),
- membership_list=(
- Membership.INVITE,
- Membership.JOIN,
- Membership.LEAVE,
- Membership.BAN
- )
+ room_ids=joined_room_ids,
+ is_guest=sync_config.is_guest,
)
- tags_by_room = yield self.store.get_tags_for_user(
- sync_config.user.to_string()
+ now_token, ephemeral_by_room = yield self.ephemeral_by_room(
+ sync_config, now_token, joined_room_ids
)
joined = []
invited = []
archived = []
+ deferreds = []
for event in room_list:
if event.membership == Membership.JOIN:
- room_sync = yield self.full_state_sync_for_joined_room(
+ room_sync_deferred = self.full_state_sync_for_joined_room(
room_id=event.room_id,
sync_config=sync_config,
now_token=now_token,
timeline_since_token=timeline_since_token,
ephemeral_by_room=ephemeral_by_room,
tags_by_room=tags_by_room,
+ account_data_by_room=account_data_by_room,
)
- joined.append(room_sync)
+ room_sync_deferred.addCallback(joined.append)
+ deferreds.append(room_sync_deferred)
elif event.membership == Membership.INVITE:
invite = yield self.store.get_event(event.event_id)
invited.append(InvitedSyncResult(
@@ -223,18 +275,25 @@ class SyncHandler(BaseHandler):
leave_token = now_token.copy_and_replace(
"room_key", "s%d" % (event.stream_ordering,)
)
- room_sync = yield self.full_state_sync_for_archived_room(
+ room_sync_deferred = self.full_state_sync_for_archived_room(
sync_config=sync_config,
room_id=event.room_id,
leave_event_id=event.event_id,
leave_token=leave_token,
timeline_since_token=timeline_since_token,
tags_by_room=tags_by_room,
+ account_data_by_room=account_data_by_room,
)
- archived.append(room_sync)
+ room_sync_deferred.addCallback(archived.append)
+ deferreds.append(room_sync_deferred)
+
+ yield defer.gatherResults(
+ deferreds, consumeErrors=True
+ ).addErrback(unwrapFirstError)
defer.returnValue(SyncResult(
presence=presence,
+ account_data=self.account_data_for_user(account_data),
joined=joined,
invited=invited,
archived=archived,
@@ -244,7 +303,8 @@ class SyncHandler(BaseHandler):
@defer.inlineCallbacks
def full_state_sync_for_joined_room(self, room_id, sync_config,
now_token, timeline_since_token,
- ephemeral_by_room, tags_by_room):
+ ephemeral_by_room, tags_by_room,
+ account_data_by_room):
"""Sync a room for a client which is starting without any state
Returns:
A Deferred JoinedSyncResult.
@@ -262,26 +322,47 @@ class SyncHandler(BaseHandler):
state=current_state,
ephemeral=ephemeral_by_room.get(room_id, []),
account_data=self.account_data_for_room(
- room_id, tags_by_room
+ room_id, tags_by_room, account_data_by_room
),
))
- def account_data_for_room(self, room_id, tags_by_room):
- account_data = []
+ def account_data_for_user(self, account_data):
+ account_data_events = []
+
+ for account_data_type, content in account_data.items():
+ account_data_events.append({
+ "type": account_data_type,
+ "content": content,
+ })
+
+ return account_data_events
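+ # e.g. an entry {"org.example.setting": {"a": 1}} (illustrative type)
+ # becomes [{"type": "org.example.setting", "content": {"a": 1}}].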
+
+ def account_data_for_room(self, room_id, tags_by_room, account_data_by_room):
+ account_data_events = []
tags = tags_by_room.get(room_id)
if tags is not None:
- account_data.append({
+ account_data_events.append({
"type": "m.tag",
"content": {"tags": tags},
})
- return account_data
+
+ account_data = account_data_by_room.get(room_id, {})
+ for account_data_type, content in account_data.items():
+ account_data_events.append({
+ "type": account_data_type,
+ "content": content,
+ })
+
+ return account_data_events
@defer.inlineCallbacks
- def ephemeral_by_room(self, sync_config, now_token, since_token=None):
+ def ephemeral_by_room(self, sync_config, now_token, room_ids,
+ since_token=None):
"""Get the ephemeral events for each room the user is in
Args:
sync_config (SyncConfig): The flags, filters and user for the sync.
now_token (StreamToken): Where the server is currently up to.
+ room_ids (list): List of room id strings to get data for.
since_token (StreamToken): Where the server was when the client
last synced.
Returns:
@@ -292,9 +373,6 @@ class SyncHandler(BaseHandler):
typing_key = since_token.typing_key if since_token else "0"
- rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
- room_ids = [room.room_id for room in rooms]
-
typing_source = self.event_sources.sources["typing"]
typing, typing_key = yield typing_source.get_new_events(
user=sync_config.user,
@@ -341,7 +419,8 @@ class SyncHandler(BaseHandler):
@defer.inlineCallbacks
def full_state_sync_for_archived_room(self, room_id, sync_config,
leave_event_id, leave_token,
- timeline_since_token, tags_by_room):
+ timeline_since_token, tags_by_room,
+ account_data_by_room):
"""Sync a room for a client which is starting without any state
Returns:
- A Deferred JoinedSyncResult.
+ A Deferred ArchivedSyncResult.
@@ -358,7 +437,7 @@ class SyncHandler(BaseHandler):
timeline=batch,
state=leave_state,
account_data=self.account_data_for_room(
- room_id, tags_by_room
+ room_id, tags_by_room, account_data_by_room
),
))
@@ -371,8 +450,38 @@ class SyncHandler(BaseHandler):
"""
now_token = yield self.event_sources.get_current_token()
- rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
- room_ids = [room.room_id for room in rooms]
+ if sync_config.is_guest:
+ room_ids = sync_config.filter.list_rooms()
+
+ tags_by_room = {}
+ account_data = {}
+ account_data_by_room = {}
+
+ else:
+ rooms = yield self.store.get_rooms_for_user(
+ sync_config.user.to_string()
+ )
+ room_ids = [room.room_id for room in rooms]
+
+ tags_by_room = yield self.store.get_updated_tags(
+ sync_config.user.to_string(),
+ since_token.account_data_key,
+ )
+
+ account_data, account_data_by_room = (
+ yield self.store.get_updated_account_data_for_user(
+ sync_config.user.to_string(),
+ since_token.account_data_key,
+ )
+ )
+
+ now_token, ephemeral_by_room = yield self.ephemeral_by_room(
+ sync_config, now_token, room_ids, since_token
+ )
presence_source = self.event_sources.sources["presence"]
presence, presence_key = yield presence_source.get_new_events(
@@ -380,15 +489,10 @@ class SyncHandler(BaseHandler):
from_key=since_token.presence_key,
limit=sync_config.filter.presence_limit(),
room_ids=room_ids,
- # /sync doesn't support guest access, they can't get to this point in code
- is_guest=False,
+ is_guest=sync_config.is_guest,
)
now_token = now_token.copy_and_replace("presence_key", presence_key)
- now_token, ephemeral_by_room = yield self.ephemeral_by_room(
- sync_config, now_token, since_token
- )
-
rm_handler = self.hs.get_handlers().room_member_handler
app_service = yield self.store.get_app_service_by_user_id(
sync_config.user.to_string()
@@ -408,11 +512,8 @@ class SyncHandler(BaseHandler):
from_key=since_token.room_key,
to_key=now_token.room_key,
limit=timeline_limit + 1,
- )
-
- tags_by_room = yield self.store.get_updated_tags(
- sync_config.user.to_string(),
- since_token.account_data_key,
+ room_ids=room_ids if sync_config.is_guest else (),
+ is_guest=sync_config.is_guest,
)
joined = []
@@ -469,7 +570,7 @@ class SyncHandler(BaseHandler):
state=state,
ephemeral=ephemeral_by_room.get(room_id, []),
account_data=self.account_data_for_room(
- room_id, tags_by_room
+ room_id, tags_by_room, account_data_by_room
),
)
logger.debug("Result for room %s: %r", room_id, room_sync)
@@ -492,14 +593,15 @@ class SyncHandler(BaseHandler):
for room_id in joined_room_ids:
room_sync = yield self.incremental_sync_with_gap_for_room(
room_id, sync_config, since_token, now_token,
- ephemeral_by_room, tags_by_room
+ ephemeral_by_room, tags_by_room, account_data_by_room
)
if room_sync:
joined.append(room_sync)
for leave_event in leave_events:
room_sync = yield self.incremental_sync_for_archived_room(
- sync_config, leave_event, since_token, tags_by_room
+ sync_config, leave_event, since_token, tags_by_room,
+ account_data_by_room
)
archived.append(room_sync)
@@ -510,6 +612,7 @@ class SyncHandler(BaseHandler):
defer.returnValue(SyncResult(
presence=presence,
+ account_data=self.account_data_for_user(account_data),
joined=joined,
invited=invited,
archived=archived,
@@ -542,7 +645,10 @@ class SyncHandler(BaseHandler):
end_key = "s" + room_key.split('-')[-1]
loaded_recents = sync_config.filter.filter_room_timeline(events)
loaded_recents = yield self._filter_events_for_client(
- sync_config.user.to_string(), loaded_recents,
+ sync_config.user.to_string(),
+ loaded_recents,
+ is_guest=sync_config.is_guest,
+ require_all_visible_for_guests=False
)
loaded_recents.extend(recents)
recents = loaded_recents
@@ -566,7 +672,8 @@ class SyncHandler(BaseHandler):
@defer.inlineCallbacks
def incremental_sync_with_gap_for_room(self, room_id, sync_config,
since_token, now_token,
- ephemeral_by_room, tags_by_room):
+ ephemeral_by_room, tags_by_room,
+ account_data_by_room):
""" Get the incremental delta needed to bring the client up to date for
the room. Gives the client the most recent events and the changes to
state.
@@ -606,7 +713,7 @@ class SyncHandler(BaseHandler):
state=state,
ephemeral=ephemeral_by_room.get(room_id, []),
account_data=self.account_data_for_room(
- room_id, tags_by_room
+ room_id, tags_by_room, account_data_by_room
),
)
@@ -616,7 +723,8 @@ class SyncHandler(BaseHandler):
@defer.inlineCallbacks
def incremental_sync_for_archived_room(self, sync_config, leave_event,
- since_token, tags_by_room):
+ since_token, tags_by_room,
+ account_data_by_room):
""" Get the incremental delta needed to bring the client up to date for
the archived room.
Returns:
@@ -654,7 +762,7 @@ class SyncHandler(BaseHandler):
timeline=batch,
state=state_events_delta,
account_data=self.account_data_for_room(
- leave_event.room_id, tags_by_room
+ leave_event.room_id, tags_by_room, account_data_by_room
),
)
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 50feea6f1c..682b6b379b 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -15,7 +15,7 @@
from synapse.api.errors import (
- cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError
+ cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError, Codes
)
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
import synapse.metrics
@@ -53,6 +53,23 @@ response_timer = metrics.register_distribution(
labels=["method", "servlet"]
)
+response_ru_utime = metrics.register_distribution(
+ "response_ru_utime", labels=["method", "servlet"]
+)
+
+response_ru_stime = metrics.register_distribution(
+ "response_ru_stime", labels=["method", "servlet"]
+)
+
+response_db_txn_count = metrics.register_distribution(
+ "response_db_txn_count", labels=["method", "servlet"]
+)
+
+response_db_txn_duration = metrics.register_distribution(
+ "response_db_txn_duration", labels=["method", "servlet"]
+)
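+# Like response_timer above, each distribution is keyed on (method, servlet);
+# e.g. (illustrative call) response_ru_utime.inc_by(0.02, "GET", "SyncServlet")
+# records 20ms of user-mode CPU time against a hypothetical servlet name.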
+
+
_next_request_id = 0
@@ -110,7 +127,10 @@ def request_handler(request_handler):
respond_with_json(
request,
500,
- {"error": "Internal server error"},
+ {
+ "error": "Internal server error",
+ "errcode": Codes.UNKNOWN,
+ },
send_cors=True
)
return wrapped_request_handler
@@ -120,7 +140,7 @@ class HttpServer(object):
""" Interface for registering callbacks on a HTTP server
"""
- def register_path(self, method, path_pattern, callback):
+ def register_paths(self, method, path_patterns, callback):
""" Register a callback that gets fired if we receive a http request
with the given method for a path that matches the given regex.
@@ -129,7 +149,7 @@ class HttpServer(object):
Args:
method (str): The method to listen to.
- path_pattern (str): The regex used to match requests.
+ path_patterns (list<SRE_Pattern>): The regexes used to match requests.
callback (function): The function to fire if we receive a matched
request. The first argument will be the request object and
subsequent arguments will be any matched groups from the regex.
@@ -165,10 +185,11 @@ class JsonResource(HttpServer, resource.Resource):
self.version_string = hs.version_string
self.hs = hs
- def register_path(self, method, path_pattern, callback):
- self.path_regexs.setdefault(method, []).append(
- self._PathEntry(path_pattern, callback)
- )
+ def register_paths(self, method, path_patterns, callback):
+ for path_pattern in path_patterns:
+ self.path_regexs.setdefault(method, []).append(
+ self._PathEntry(path_pattern, callback)
+ )
def render(self, request):
""" This gets called by twisted every time someone sends us a request.
@@ -220,6 +241,21 @@ class JsonResource(HttpServer, resource.Resource):
self.clock.time_msec() - start, request.method, servlet_classname
)
+ try:
+ context = LoggingContext.current_context()
+ ru_utime, ru_stime = context.get_resource_usage()
+
+ response_ru_utime.inc_by(ru_utime, request.method, servlet_classname)
+ response_ru_stime.inc_by(ru_stime, request.method, servlet_classname)
+ response_db_txn_count.inc_by(
+ context.db_txn_count, request.method, servlet_classname
+ )
+ response_db_txn_duration.inc_by(
+ context.db_txn_duration, request.method, servlet_classname
+ )
+ except:
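+ # Metrics are best-effort: never fail the request because of them.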
+ pass
+
return
# Huh. No one wanted to handle that? Fiiiiiine. Send 400.
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
index 9cda17fcf8..32b6d6cd72 100644
--- a/synapse/http/servlet.py
+++ b/synapse/http/servlet.py
@@ -19,7 +19,6 @@ from synapse.api.errors import SynapseError
import logging
-
logger = logging.getLogger(__name__)
@@ -102,12 +101,13 @@ class RestServlet(object):
def register(self, http_server):
""" Register this servlet with the given HTTP server. """
- if hasattr(self, "PATTERN"):
- pattern = self.PATTERN
+ if hasattr(self, "PATTERNS"):
+ patterns = self.PATTERNS
for method in ("GET", "PUT", "POST", "OPTIONS", "DELETE"):
if hasattr(self, "on_%s" % (method,)):
method_handler = getattr(self, "on_%s" % (method,))
- http_server.register_path(method, pattern, method_handler)
+ http_server.register_paths(method, patterns, method_handler)
+
else:
raise NotImplementedError("RestServlet must register something.")
diff --git a/synapse/notifier.py b/synapse/notifier.py
index e3b42e2331..fd52578325 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -349,7 +349,7 @@ class Notifier(object):
room_ids = []
if is_guest:
if guest_room_id:
- if not self._is_world_readable(guest_room_id):
+ if not (yield self._is_world_readable(guest_room_id)):
raise AuthError(403, "Guest access not allowed")
room_ids = [guest_room_id]
else:
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
index 0e0c61dec8..e7c964bcd2 100644
--- a/synapse/push/__init__.py
+++ b/synapse/push/__init__.py
@@ -16,14 +16,12 @@
from twisted.internet import defer
from synapse.streams.config import PaginationConfig
-from synapse.types import StreamToken, UserID
+from synapse.types import StreamToken
import synapse.util.async
-import baserules
+import push_rule_evaluator
import logging
-import simplejson as json
-import re
import random
logger = logging.getLogger(__name__)
@@ -33,9 +31,6 @@ class Pusher(object):
INITIAL_BACKOFF = 1000
MAX_BACKOFF = 60 * 60 * 1000
GIVE_UP_AFTER = 24 * 60 * 60 * 1000
- DEFAULT_ACTIONS = ['dont_notify']
-
- INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
def __init__(self, _hs, profile_tag, user_name, app_id,
app_display_name, device_display_name, pushkey, pushkey_ts,
@@ -63,161 +58,6 @@ class Pusher(object):
self.has_unread = True
@defer.inlineCallbacks
- def _actions_for_event(self, ev):
- """
- This should take into account notification settings that the user
- has configured both globally and per-room when we have the ability
- to do such things.
- """
- if ev['user_id'] == self.user_name:
- # let's assume you probably know about messages you sent yourself
- defer.returnValue(['dont_notify'])
-
- rawrules = yield self.store.get_push_rules_for_user(self.user_name)
-
- rules = []
- for rawrule in rawrules:
- rule = dict(rawrule)
- rule['conditions'] = json.loads(rawrule['conditions'])
- rule['actions'] = json.loads(rawrule['actions'])
- rules.append(rule)
-
- enabled_map = yield self.store.get_push_rules_enabled_for_user(self.user_name)
-
- user = UserID.from_string(self.user_name)
-
- rules = baserules.list_with_base_rules(rules, user)
-
- room_id = ev['room_id']
-
- # get *our* member event for display name matching
- my_display_name = None
- our_member_event = yield self.store.get_current_state(
- room_id=room_id,
- event_type='m.room.member',
- state_key=self.user_name,
- )
- if our_member_event:
- my_display_name = our_member_event[0].content.get("displayname")
-
- room_members = yield self.store.get_users_in_room(room_id)
- room_member_count = len(room_members)
-
- for r in rules:
- if r['rule_id'] in enabled_map:
- r['enabled'] = enabled_map[r['rule_id']]
- elif 'enabled' not in r:
- r['enabled'] = True
- if not r['enabled']:
- continue
- matches = True
-
- conditions = r['conditions']
- actions = r['actions']
-
- for c in conditions:
- matches &= self._event_fulfills_condition(
- ev, c, display_name=my_display_name,
- room_member_count=room_member_count
- )
- logger.debug(
- "Rule %s %s",
- r['rule_id'], "matches" if matches else "doesn't match"
- )
- # ignore rules with no actions (we have an explict 'dont_notify')
- if len(actions) == 0:
- logger.warn(
- "Ignoring rule id %s with no actions for user %s",
- r['rule_id'], self.user_name
- )
- continue
- if matches:
- logger.info(
- "%s matches for user %s, event %s",
- r['rule_id'], self.user_name, ev['event_id']
- )
- defer.returnValue(actions)
-
- logger.info(
- "No rules match for user %s, event %s",
- self.user_name, ev['event_id']
- )
- defer.returnValue(Pusher.DEFAULT_ACTIONS)
-
- @staticmethod
- def _glob_to_regexp(glob):
- r = re.escape(glob)
- r = re.sub(r'\\\*', r'.*?', r)
- r = re.sub(r'\\\?', r'.', r)
-
- # handle [abc], [a-z] and [!a-z] style ranges.
- r = re.sub(r'\\\[(\\\!|)(.*)\\\]',
- lambda x: ('[%s%s]' % (x.group(1) and '^' or '',
- re.sub(r'\\\-', '-', x.group(2)))), r)
- return r
-
- def _event_fulfills_condition(self, ev, condition, display_name, room_member_count):
- if condition['kind'] == 'event_match':
- if 'pattern' not in condition:
- logger.warn("event_match condition with no pattern")
- return False
- # XXX: optimisation: cache our pattern regexps
- if condition['key'] == 'content.body':
- r = r'\b%s\b' % self._glob_to_regexp(condition['pattern'])
- else:
- r = r'^%s$' % self._glob_to_regexp(condition['pattern'])
- val = _value_for_dotted_key(condition['key'], ev)
- if val is None:
- return False
- return re.search(r, val, flags=re.IGNORECASE) is not None
-
- elif condition['kind'] == 'device':
- if 'profile_tag' not in condition:
- return True
- return condition['profile_tag'] == self.profile_tag
-
- elif condition['kind'] == 'contains_display_name':
- # This is special because display names can be different
- # between rooms and so you can't really hard code it in a rule.
- # Optimisation: we should cache these names and update them from
- # the event stream.
- if 'content' not in ev or 'body' not in ev['content']:
- return False
- if not display_name:
- return False
- return re.search(
- r"\b%s\b" % re.escape(display_name), ev['content']['body'],
- flags=re.IGNORECASE
- ) is not None
-
- elif condition['kind'] == 'room_member_count':
- if 'is' not in condition:
- return False
- m = Pusher.INEQUALITY_EXPR.match(condition['is'])
- if not m:
- return False
- ineq = m.group(1)
- rhs = m.group(2)
- if not rhs.isdigit():
- return False
- rhs = int(rhs)
-
- if ineq == '' or ineq == '==':
- return room_member_count == rhs
- elif ineq == '<':
- return room_member_count < rhs
- elif ineq == '>':
- return room_member_count > rhs
- elif ineq == '>=':
- return room_member_count >= rhs
- elif ineq == '<=':
- return room_member_count <= rhs
- else:
- return False
- else:
- return True
-
- @defer.inlineCallbacks
def get_context_for_event(self, ev):
name_aliases = yield self.store.get_room_name_and_aliases(
ev['room_id']
@@ -308,8 +148,14 @@ class Pusher(object):
return
processed = False
- actions = yield self._actions_for_event(single_event)
- tweaks = _tweaks_for_actions(actions)
+
+ rule_evaluator = yield \
+ push_rule_evaluator.evaluator_for_user_name_and_profile_tag(
+ self.user_name, self.profile_tag, single_event['room_id'], self.store
+ )
+
+ actions = yield rule_evaluator.actions_for_event(single_event)
+ tweaks = rule_evaluator.tweaks_for_actions(actions)
if len(actions) == 0:
logger.warn("Empty actions! Using default action.")
@@ -448,27 +294,6 @@ class Pusher(object):
self.has_unread = False
-def _value_for_dotted_key(dotted_key, event):
- parts = dotted_key.split(".")
- val = event
- while len(parts) > 0:
- if parts[0] not in val:
- return None
- val = val[parts[0]]
- parts = parts[1:]
- return val
-
-
-def _tweaks_for_actions(actions):
- tweaks = {}
- for a in actions:
- if not isinstance(a, dict):
- continue
- if 'set_tweak' in a and 'value' in a:
- tweaks[a['set_tweak']] = a['value']
- return tweaks
-
-
class PusherConfigException(Exception):
def __init__(self, msg):
super(PusherConfigException, self).__init__(msg)
diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py
index 1f015a7f2e..7f76382a17 100644
--- a/synapse/push/baserules.py
+++ b/synapse/push/baserules.py
@@ -247,6 +247,7 @@ def make_base_append_underride_rules(user):
},
{
'rule_id': 'global/underride/.m.rule.message',
+ 'enabled': False,
'conditions': [
{
'kind': 'event_match',
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index a02fed57b4..5160775e59 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -14,7 +14,6 @@
# limitations under the License.
from synapse.push import Pusher, PusherConfigException
-from synapse.http.client import SimpleHttpClient
from twisted.internet import defer
@@ -46,7 +45,7 @@ class HttpPusher(Pusher):
"'url' required in data for HTTP pusher"
)
self.url = data['url']
- self.httpCli = SimpleHttpClient(self.hs)
+ self.http_client = _hs.get_simple_http_client()
self.data_minus_url = {}
self.data_minus_url.update(self.data)
del self.data_minus_url['url']
@@ -107,7 +106,7 @@ class HttpPusher(Pusher):
if not notification_dict:
defer.returnValue([])
try:
- resp = yield self.httpCli.post_json_get_json(self.url, notification_dict)
+ resp = yield self.http_client.post_json_get_json(self.url, notification_dict)
except:
logger.warn("Failed to push %s ", self.url)
defer.returnValue(False)
@@ -138,7 +137,7 @@ class HttpPusher(Pusher):
}
}
try:
- resp = yield self.httpCli.post_json_get_json(self.url, d)
+ resp = yield self.http_client.post_json_get_json(self.url, d)
except:
logger.exception("Failed to push %s ", self.url)
defer.returnValue(False)
diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py
new file mode 100644
index 0000000000..92c7fd048f
--- /dev/null
+++ b/synapse/push/push_rule_evaluator.py
@@ -0,0 +1,224 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.types import UserID
+
+import baserules
+
+import logging
+import simplejson as json
+import re
+
+logger = logging.getLogger(__name__)
+
+
+@defer.inlineCallbacks
+def evaluator_for_user_name_and_profile_tag(user_name, profile_tag, room_id, store):
+ rawrules = yield store.get_push_rules_for_user(user_name)
+ enabled_map = yield store.get_push_rules_enabled_for_user(user_name)
+ our_member_event = yield store.get_current_state(
+ room_id=room_id,
+ event_type='m.room.member',
+ state_key=user_name,
+ )
+
+ defer.returnValue(PushRuleEvaluator(
+ user_name, profile_tag, rawrules, enabled_map,
+ room_id, our_member_event, store
+ ))
+
+
+class PushRuleEvaluator(object):
+ DEFAULT_ACTIONS = ['dont_notify']
+ INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
+
+ def __init__(self, user_name, profile_tag, raw_rules, enabled_map, room_id,
+ our_member_event, store):
+ self.user_name = user_name
+ self.profile_tag = profile_tag
+ self.room_id = room_id
+ self.our_member_event = our_member_event
+ self.store = store
+
+ rules = []
+ for raw_rule in raw_rules:
+ rule = dict(raw_rule)
+ rule['conditions'] = json.loads(raw_rule['conditions'])
+ rule['actions'] = json.loads(raw_rule['actions'])
+ rules.append(rule)
+
+ user = UserID.from_string(self.user_name)
+ self.rules = baserules.list_with_base_rules(rules, user)
+
+ self.enabled_map = enabled_map
+
+ @staticmethod
+ def tweaks_for_actions(actions):
+ tweaks = {}
+ for a in actions:
+ if not isinstance(a, dict):
+ continue
+ if 'set_tweak' in a and 'value' in a:
+ tweaks[a['set_tweak']] = a['value']
+ return tweaks
+
+ @defer.inlineCallbacks
+ def actions_for_event(self, ev):
+ """
+ This should take into account notification settings that the user
+ has configured both globally and per-room when we have the ability
+ to do such things.
+ """
+ if ev['user_id'] == self.user_name:
+ # let's assume you probably know about messages you sent yourself
+ defer.returnValue(['dont_notify'])
+
+ room_id = ev['room_id']
+
+ # get *our* member event for display name matching
+ my_display_name = None
+
+ if self.our_member_event:
+ my_display_name = self.our_member_event[0].content.get("displayname")
+
+ room_members = yield self.store.get_users_in_room(room_id)
+ room_member_count = len(room_members)
+
+ for r in self.rules:
+ if r['rule_id'] in self.enabled_map:
+ r['enabled'] = self.enabled_map[r['rule_id']]
+ elif 'enabled' not in r:
+ r['enabled'] = True
+ if not r['enabled']:
+ continue
+ matches = True
+
+ conditions = r['conditions']
+ actions = r['actions']
+
+ for c in conditions:
+ matches &= self._event_fulfills_condition(
+ ev, c, display_name=my_display_name,
+ room_member_count=room_member_count
+ )
+ logger.debug(
+ "Rule %s %s",
+ r['rule_id'], "matches" if matches else "doesn't match"
+ )
+ # ignore rules with no actions (we have an explicit 'dont_notify')
+ if len(actions) == 0:
+ logger.warn(
+ "Ignoring rule id %s with no actions for user %s",
+ r['rule_id'], self.user_name
+ )
+ continue
+ if matches:
+ logger.info(
+ "%s matches for user %s, event %s",
+ r['rule_id'], self.user_name, ev['event_id']
+ )
+ defer.returnValue(actions)
+
+ logger.info(
+ "No rules match for user %s, event %s",
+ self.user_name, ev['event_id']
+ )
+ defer.returnValue(PushRuleEvaluator.DEFAULT_ACTIONS)
+
+ @staticmethod
+ def _glob_to_regexp(glob):
+ r = re.escape(glob)
+ r = re.sub(r'\\\*', r'.*?', r)
+ r = re.sub(r'\\\?', r'.', r)
+
+ # handle [abc], [a-z] and [!a-z] style ranges.
+ r = re.sub(r'\\\[(\\\!|)(.*)\\\]',
+ lambda x: ('[%s%s]' % (x.group(1) and '^' or '',
+ re.sub(r'\\\-', '-', x.group(2)))), r)
+ return r
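+ # e.g. "ca*e" -> "ca.*?e", "ca?e" -> "ca.e", "[!a-z]" -> "[^a-z]"
+ # (illustrative; callers match the result with re.IGNORECASE).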
+
+ def _event_fulfills_condition(self, ev, condition, display_name, room_member_count):
+ if condition['kind'] == 'event_match':
+ if 'pattern' not in condition:
+ logger.warn("event_match condition with no pattern")
+ return False
+ # XXX: optimisation: cache our pattern regexps
+ if condition['key'] == 'content.body':
+ r = r'\b%s\b' % self._glob_to_regexp(condition['pattern'])
+ else:
+ r = r'^%s$' % self._glob_to_regexp(condition['pattern'])
+ val = _value_for_dotted_key(condition['key'], ev)
+ if val is None:
+ return False
+ return re.search(r, val, flags=re.IGNORECASE) is not None
+
+ elif condition['kind'] == 'device':
+ if 'profile_tag' not in condition:
+ return True
+ return condition['profile_tag'] == self.profile_tag
+
+ elif condition['kind'] == 'contains_display_name':
+ # This is special because display names can be different
+ # between rooms and so you can't really hard code it in a rule.
+ # Optimisation: we should cache these names and update them from
+ # the event stream.
+ if 'content' not in ev or 'body' not in ev['content']:
+ return False
+ if not display_name:
+ return False
+ return re.search(
+ r"\b%s\b" % re.escape(display_name), ev['content']['body'],
+ flags=re.IGNORECASE
+ ) is not None
+
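+ # e.g. a condition {"kind": "room_member_count", "is": "2"} matches only
+ # rooms with exactly two members, while "is": ">=10" matches rooms with
+ # ten or more members (illustrative values).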
+ elif condition['kind'] == 'room_member_count':
+ if 'is' not in condition:
+ return False
+ m = PushRuleEvaluator.INEQUALITY_EXPR.match(condition['is'])
+ if not m:
+ return False
+ ineq = m.group(1)
+ rhs = m.group(2)
+ if not rhs.isdigit():
+ return False
+ rhs = int(rhs)
+
+ if ineq == '' or ineq == '==':
+ return room_member_count == rhs
+ elif ineq == '<':
+ return room_member_count < rhs
+ elif ineq == '>':
+ return room_member_count > rhs
+ elif ineq == '>=':
+ return room_member_count >= rhs
+ elif ineq == '<=':
+ return room_member_count <= rhs
+ else:
+ return False
+ else:
+ return True
+
+
+def _value_for_dotted_key(dotted_key, event):
+ parts = dotted_key.split(".")
+ val = event
+ while len(parts) > 0:
+ if parts[0] not in val:
+ return None
+ val = val[parts[0]]
+ parts = parts[1:]
+ return val
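+# e.g. (illustrative) _value_for_dotted_key("content.body", event) returns
+# event["content"]["body"], or None if any component is missing.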
diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py
index 1a84d94cd9..7b67e96204 100644
--- a/synapse/rest/__init__.py
+++ b/synapse/rest/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2015 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,3 +12,69 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
+from synapse.rest.client.v1 import (
+ room,
+ events,
+ profile,
+ presence,
+ initial_sync,
+ directory,
+ voip,
+ admin,
+ pusher,
+ push_rule,
+ register as v1_register,
+ login as v1_login,
+)
+
+from synapse.rest.client.v2_alpha import (
+ sync,
+ filter,
+ account,
+ register,
+ auth,
+ receipts,
+ keys,
+ tokenrefresh,
+ tags,
+ account_data,
+)
+
+from synapse.http.server import JsonResource
+
+
+class ClientRestResource(JsonResource):
+ """A resource for version 1 of the matrix client API."""
+
+ def __init__(self, hs):
+ JsonResource.__init__(self, hs, canonical_json=False)
+ self.register_servlets(self, hs)
+
+ @staticmethod
+ def register_servlets(client_resource, hs):
+ # "v1"
+ room.register_servlets(hs, client_resource)
+ events.register_servlets(hs, client_resource)
+ v1_register.register_servlets(hs, client_resource)
+ v1_login.register_servlets(hs, client_resource)
+ profile.register_servlets(hs, client_resource)
+ presence.register_servlets(hs, client_resource)
+ initial_sync.register_servlets(hs, client_resource)
+ directory.register_servlets(hs, client_resource)
+ voip.register_servlets(hs, client_resource)
+ admin.register_servlets(hs, client_resource)
+ pusher.register_servlets(hs, client_resource)
+ push_rule.register_servlets(hs, client_resource)
+
+ # "v2"
+ sync.register_servlets(hs, client_resource)
+ filter.register_servlets(hs, client_resource)
+ account.register_servlets(hs, client_resource)
+ register.register_servlets(hs, client_resource)
+ auth.register_servlets(hs, client_resource)
+ receipts.register_servlets(hs, client_resource)
+ keys.register_servlets(hs, client_resource)
+ tokenrefresh.register_servlets(hs, client_resource)
+ tags.register_servlets(hs, client_resource)
+ account_data.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/v1/__init__.py b/synapse/rest/client/v1/__init__.py
index cc9b49d539..c488b10d3c 100644
--- a/synapse/rest/client/v1/__init__.py
+++ b/synapse/rest/client/v1/__init__.py
@@ -12,33 +12,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-from . import (
- room, events, register, login, profile, presence, initial_sync, directory,
- voip, admin, pusher, push_rule
-)
-
-from synapse.http.server import JsonResource
-
-
-class ClientV1RestResource(JsonResource):
- """A resource for version 1 of the matrix client API."""
-
- def __init__(self, hs):
- JsonResource.__init__(self, hs, canonical_json=False)
- self.register_servlets(self, hs)
-
- @staticmethod
- def register_servlets(client_resource, hs):
- room.register_servlets(hs, client_resource)
- events.register_servlets(hs, client_resource)
- register.register_servlets(hs, client_resource)
- login.register_servlets(hs, client_resource)
- profile.register_servlets(hs, client_resource)
- presence.register_servlets(hs, client_resource)
- initial_sync.register_servlets(hs, client_resource)
- directory.register_servlets(hs, client_resource)
- voip.register_servlets(hs, client_resource)
- admin.register_servlets(hs, client_resource)
- pusher.register_servlets(hs, client_resource)
- push_rule.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py
index bdde43864c..886199a6da 100644
--- a/synapse/rest/client/v1/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
from synapse.api.errors import AuthError, SynapseError
from synapse.types import UserID
-from base import ClientV1RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_patterns
import logging
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
class WhoisRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/admin/whois/(?P<user_id>[^/]*)")
+ PATTERNS = client_path_patterns("/admin/whois/(?P<user_id>[^/]*)")
@defer.inlineCallbacks
def on_GET(self, request, user_id):
diff --git a/synapse/rest/client/v1/base.py b/synapse/rest/client/v1/base.py
index 504a5e432f..6273ce0795 100644
--- a/synapse/rest/client/v1/base.py
+++ b/synapse/rest/client/v1/base.py
@@ -27,7 +27,7 @@ import logging
logger = logging.getLogger(__name__)
-def client_path_pattern(path_regex):
+def client_path_patterns(path_regex, releases=(0,), include_in_unstable=True):
"""Creates a regex compiled client path with the correct client path
prefix.
@@ -37,7 +37,14 @@ def client_path_pattern(path_regex):
Returns:
- SRE_Pattern
+ list<SRE_Pattern>
"""
- return re.compile("^" + CLIENT_PREFIX + path_regex)
+ patterns = [re.compile("^" + CLIENT_PREFIX + path_regex)]
+ if include_in_unstable:
+ unstable_prefix = CLIENT_PREFIX.replace("/api/v1", "/unstable")
+ patterns.append(re.compile("^" + unstable_prefix + path_regex))
+ for release in releases:
+ new_prefix = CLIENT_PREFIX.replace("/api/v1", "/r%d" % release)
+ patterns.append(re.compile("^" + new_prefix + path_regex))
+ return patterns
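+ # For example (illustrative, assuming CLIENT_PREFIX is
+ # "/_matrix/client/api/v1"), client_path_patterns("/login$") returns
+ # patterns matching "/_matrix/client/api/v1/login",
+ # "/_matrix/client/unstable/login" and "/_matrix/client/r0/login".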
class ClientV1RestServlet(RestServlet):
diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py
index 240eedac75..f488e2dd41 100644
--- a/synapse/rest/client/v1/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
from synapse.api.errors import AuthError, SynapseError, Codes
from synapse.types import RoomAlias
-from .base import ClientV1RestServlet, client_path_pattern
+from .base import ClientV1RestServlet, client_path_patterns
import simplejson as json
import logging
@@ -32,7 +32,7 @@ def register_servlets(hs, http_server):
class ClientDirectoryServer(ClientV1RestServlet):
- PATTERN = client_path_pattern("/directory/room/(?P<room_alias>[^/]*)$")
+ PATTERNS = client_path_patterns("/directory/room/(?P<room_alias>[^/]*)$")
@defer.inlineCallbacks
def on_GET(self, request, room_alias):
diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py
index 3e1750d1a1..41b97e7d15 100644
--- a/synapse/rest/client/v1/events.py
+++ b/synapse/rest/client/v1/events.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
from synapse.api.errors import SynapseError
from synapse.streams.config import PaginationConfig
-from .base import ClientV1RestServlet, client_path_pattern
+from .base import ClientV1RestServlet, client_path_patterns
from synapse.events.utils import serialize_event
import logging
@@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)
class EventStreamRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/events$")
+ PATTERNS = client_path_patterns("/events$")
DEFAULT_LONGPOLL_TIME_MS = 30000
@@ -72,7 +72,7 @@ class EventStreamRestServlet(ClientV1RestServlet):
# TODO: Unit test gets, with and without auth, with different kinds of events.
class EventRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/events/(?P<event_id>[^/]*)$")
+ PATTERNS = client_path_patterns("/events/(?P<event_id>[^/]*)$")
def __init__(self, hs):
super(EventRestServlet, self).__init__(hs)
diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py
index 856a70f297..9ad3df8a9f 100644
--- a/synapse/rest/client/v1/initial_sync.py
+++ b/synapse/rest/client/v1/initial_sync.py
@@ -16,12 +16,12 @@
from twisted.internet import defer
from synapse.streams.config import PaginationConfig
-from base import ClientV1RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_patterns
# TODO: Needs unit testing
class InitialSyncRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/initialSync$")
+ PATTERNS = client_path_patterns("/initialSync$")
@defer.inlineCallbacks
def on_GET(self, request):
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 720d6358e7..e8c35508cd 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -16,9 +16,8 @@
from twisted.internet import defer
from synapse.api.errors import SynapseError, LoginError, Codes
-from synapse.http.client import SimpleHttpClient
from synapse.types import UserID
-from base import ClientV1RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_patterns
import simplejson as json
import urllib
@@ -36,7 +35,7 @@ logger = logging.getLogger(__name__)
class LoginRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/login$")
+ PATTERNS = client_path_patterns("/login$")
PASS_TYPE = "m.login.password"
SAML2_TYPE = "m.login.saml2"
CAS_TYPE = "m.login.cas"
@@ -51,6 +50,7 @@ class LoginRestServlet(ClientV1RestServlet):
self.cas_server_url = hs.config.cas_server_url
self.cas_required_attributes = hs.config.cas_required_attributes
self.servername = hs.config.server_name
+ self.http_client = hs.get_simple_http_client()
def on_GET(self, request):
flows = []
@@ -98,15 +98,12 @@ class LoginRestServlet(ClientV1RestServlet):
# TODO Delete this after all CAS clients switch to token login instead
elif self.cas_enabled and (login_submission["type"] ==
LoginRestServlet.CAS_TYPE):
- # TODO: get this from the homeserver rather than creating a new one for
- # each request
- http_client = SimpleHttpClient(self.hs)
uri = "%s/proxyValidate" % (self.cas_server_url,)
args = {
"ticket": login_submission["ticket"],
"service": login_submission["service"]
}
- body = yield http_client.get_raw(uri, args)
+ body = yield self.http_client.get_raw(uri, args)
result = yield self.do_cas_login(body)
defer.returnValue(result)
elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE:
@@ -238,7 +235,7 @@ class LoginRestServlet(ClientV1RestServlet):
class SAML2RestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/login/saml2")
+ PATTERNS = client_path_patterns("/login/saml2", releases=())
def __init__(self, hs):
super(SAML2RestServlet, self).__init__(hs)
@@ -282,7 +279,7 @@ class SAML2RestServlet(ClientV1RestServlet):
# TODO Delete this after all CAS clients switch to token login instead
class CasRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/login/cas")
+ PATTERNS = client_path_patterns("/login/cas", releases=())
def __init__(self, hs):
super(CasRestServlet, self).__init__(hs)
@@ -293,7 +290,7 @@ class CasRestServlet(ClientV1RestServlet):
class CasRedirectServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/login/cas/redirect")
+ PATTERNS = client_path_patterns("/login/cas/redirect", releases=())
def __init__(self, hs):
super(CasRedirectServlet, self).__init__(hs)
@@ -316,7 +313,7 @@ class CasRedirectServlet(ClientV1RestServlet):
class CasTicketServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/login/cas/ticket")
+ PATTERNS = client_path_patterns("/login/cas/ticket", releases=())
def __init__(self, hs):
super(CasTicketServlet, self).__init__(hs)
diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py
index 6fe5d19a22..e0949fe4bb 100644
--- a/synapse/rest/client/v1/presence.py
+++ b/synapse/rest/client/v1/presence.py
@@ -19,7 +19,7 @@ from twisted.internet import defer
from synapse.api.errors import SynapseError
from synapse.types import UserID
-from .base import ClientV1RestServlet, client_path_pattern
+from .base import ClientV1RestServlet, client_path_patterns
import simplejson as json
import logging
@@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)
class PresenceStatusRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/presence/(?P<user_id>[^/]*)/status")
+ PATTERNS = client_path_patterns("/presence/(?P<user_id>[^/]*)/status")
@defer.inlineCallbacks
def on_GET(self, request, user_id):
@@ -73,7 +73,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):
class PresenceListRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/presence/list/(?P<user_id>[^/]*)")
+ PATTERNS = client_path_patterns("/presence/list/(?P<user_id>[^/]*)")
@defer.inlineCallbacks
def on_GET(self, request, user_id):
@@ -120,7 +120,7 @@ class PresenceListRestServlet(ClientV1RestServlet):
if len(u) == 0:
continue
invited_user = UserID.from_string(u)
- yield self.handlers.presence_handler.send_invite(
+ yield self.handlers.presence_handler.send_presence_invite(
observer_user=user, observed_user=invited_user
)
diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py
index 3218e47025..e6c6e5d024 100644
--- a/synapse/rest/client/v1/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -16,14 +16,14 @@
""" This module contains REST servlets to do with profile: /profile/<paths> """
from twisted.internet import defer
-from .base import ClientV1RestServlet, client_path_pattern
+from .base import ClientV1RestServlet, client_path_patterns
from synapse.types import UserID
import simplejson as json
class ProfileDisplaynameRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)/displayname")
+ PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)/displayname")
@defer.inlineCallbacks
def on_GET(self, request, user_id):
@@ -56,7 +56,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
class ProfileAvatarURLRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)/avatar_url")
+ PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)/avatar_url")
@defer.inlineCallbacks
def on_GET(self, request, user_id):
@@ -89,7 +89,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
class ProfileRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)")
+ PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)")
@defer.inlineCallbacks
def on_GET(self, request, user_id):
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index b0870db1ac..9270bdd079 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
from synapse.api.errors import (
SynapseError, Codes, UnrecognizedRequestError, NotFoundError, StoreError
)
-from .base import ClientV1RestServlet, client_path_pattern
+from .base import ClientV1RestServlet, client_path_patterns
from synapse.storage.push_rule import (
InconsistentRuleException, RuleNotFoundException
)
@@ -31,7 +31,7 @@ import simplejson as json
class PushRuleRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/pushrules/.*$")
+ PATTERNS = client_path_patterns("/pushrules/.*$")
SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR = (
"Unrecognised request: You probably wanted a trailing slash")
@@ -207,7 +207,12 @@ class PushRuleRestServlet(ClientV1RestServlet):
def set_rule_attr(self, user_name, spec, val):
if spec['attr'] == 'enabled':
+ # This should *actually* take a dict, but many clients still pass
+ # bools directly, so accept both: unwrap {"enabled": <bool>} and
+ # fall back to the bare boolean.
+ if isinstance(val, dict) and "enabled" in val:
+ val = val["enabled"]
if not isinstance(val, bool):
raise SynapseError(400, "Value for 'enabled' must be boolean")
namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
self.hs.get_datastore().set_push_rule_enabled(
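
Taken together, the hunk above means the enabled attribute can now arrive in either of two shapes. A minimal standalone sketch of the accepted payloads (illustrative only, not the servlet code itself):

def normalise_enabled(val):
    """Sketch of the unwrapping applied before validation: accept the
    dict form {"enabled": <bool>} or a legacy bare boolean."""
    if isinstance(val, dict) and "enabled" in val:
        val = val["enabled"]
    if not isinstance(val, bool):
        raise ValueError("Value for 'enabled' must be boolean")
    return val

assert normalise_enabled({"enabled": True}) is True   # spec-shaped body
assert normalise_enabled(False) is False              # legacy bare bool
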
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index a110c0a4f0..d6d1ad528e 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -17,13 +17,16 @@ from twisted.internet import defer
from synapse.api.errors import SynapseError, Codes
from synapse.push import PusherConfigException
-from .base import ClientV1RestServlet, client_path_pattern
+from .base import ClientV1RestServlet, client_path_patterns
import simplejson as json
+import logging
+
+logger = logging.getLogger(__name__)
class PusherRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/pushers/set$")
+ PATTERNS = client_path_patterns("/pushers/set$")
@defer.inlineCallbacks
def on_POST(self, request):
@@ -51,6 +54,9 @@ class PusherRestServlet(ClientV1RestServlet):
raise SynapseError(400, "Missing parameters: "+','.join(missing),
errcode=Codes.MISSING_PARAM)
+ logger.debug("set pushkey %s to kind %s", content['pushkey'], content['kind'])
+ logger.debug("Got pushers request with body: %r", content)
+
append = False
if 'append' in content:
append = content['append']
diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py
index a56834e365..4b02311e05 100644
--- a/synapse/rest/client/v1/register.py
+++ b/synapse/rest/client/v1/register.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
from synapse.api.errors import SynapseError, Codes
from synapse.api.constants import LoginType
-from base import ClientV1RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_patterns
import synapse.util.stringutils as stringutils
from synapse.util.async import run_on_reactor
@@ -48,7 +48,7 @@ class RegisterRestServlet(ClientV1RestServlet):
handler doesn't have a concept of multi-stages or sessions.
"""
- PATTERN = client_path_pattern("/register$")
+ PATTERNS = client_path_patterns("/register$", releases=(), include_in_unstable=False)
def __init__(self, hs):
super(RegisterRestServlet, self).__init__(hs)
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 139dac1cc3..6fe53f70e5 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -16,7 +16,7 @@
""" This module contains REST servlets to do with rooms: /rooms/<paths> """
from twisted.internet import defer
-from base import ClientV1RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_patterns
from synapse.api.errors import SynapseError, Codes, AuthError
from synapse.streams.config import PaginationConfig
from synapse.api.constants import EventTypes, Membership
@@ -34,16 +34,16 @@ class RoomCreateRestServlet(ClientV1RestServlet):
# No PATTERN; we have custom dispatch rules here
def register(self, http_server):
- PATTERN = "/createRoom"
- register_txn_path(self, PATTERN, http_server)
+ PATTERNS = "/createRoom"
+ register_txn_path(self, PATTERNS, http_server)
# define CORS for all of /rooms in RoomCreateRestServlet for simplicity
- http_server.register_path("OPTIONS",
- client_path_pattern("/rooms(?:/.*)?$"),
- self.on_OPTIONS)
+ http_server.register_paths("OPTIONS",
+ client_path_patterns("/rooms(?:/.*)?$"),
+ self.on_OPTIONS)
# define CORS for /createRoom[/txnid]
- http_server.register_path("OPTIONS",
- client_path_pattern("/createRoom(?:/.*)?$"),
- self.on_OPTIONS)
+ http_server.register_paths("OPTIONS",
+ client_path_patterns("/createRoom(?:/.*)?$"),
+ self.on_OPTIONS)
@defer.inlineCallbacks
def on_PUT(self, request, txn_id):
@@ -103,18 +103,18 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
state_key = ("/rooms/(?P<room_id>[^/]*)/state/"
"(?P<event_type>[^/]*)/(?P<state_key>[^/]*)$")
- http_server.register_path("GET",
- client_path_pattern(state_key),
- self.on_GET)
- http_server.register_path("PUT",
- client_path_pattern(state_key),
- self.on_PUT)
- http_server.register_path("GET",
- client_path_pattern(no_state_key),
- self.on_GET_no_state_key)
- http_server.register_path("PUT",
- client_path_pattern(no_state_key),
- self.on_PUT_no_state_key)
+ http_server.register_paths("GET",
+ client_path_patterns(state_key),
+ self.on_GET)
+ http_server.register_paths("PUT",
+ client_path_patterns(state_key),
+ self.on_PUT)
+ http_server.register_paths("GET",
+ client_path_patterns(no_state_key),
+ self.on_GET_no_state_key)
+ http_server.register_paths("PUT",
+ client_path_patterns(no_state_key),
+ self.on_PUT_no_state_key)
def on_GET_no_state_key(self, request, room_id, event_type):
return self.on_GET(request, room_id, event_type, "")
@@ -170,8 +170,8 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
def register(self, http_server):
# /rooms/$roomid/send/$event_type[/$txn_id]
- PATTERN = ("/rooms/(?P<room_id>[^/]*)/send/(?P<event_type>[^/]*)")
- register_txn_path(self, PATTERN, http_server, with_get=True)
+ PATTERNS = ("/rooms/(?P<room_id>[^/]*)/send/(?P<event_type>[^/]*)")
+ register_txn_path(self, PATTERNS, http_server, with_get=True)
@defer.inlineCallbacks
def on_POST(self, request, room_id, event_type, txn_id=None):
@@ -215,8 +215,8 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
def register(self, http_server):
# /join/$room_identifier[/$txn_id]
- PATTERN = ("/join/(?P<room_identifier>[^/]*)")
- register_txn_path(self, PATTERN, http_server)
+ PATTERNS = ("/join/(?P<room_identifier>[^/]*)")
+ register_txn_path(self, PATTERNS, http_server)
@defer.inlineCallbacks
def on_POST(self, request, room_identifier, txn_id=None):
@@ -280,7 +280,7 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
# TODO: Needs unit testing
class PublicRoomListRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/publicRooms$")
+ PATTERNS = client_path_patterns("/publicRooms$")
@defer.inlineCallbacks
def on_GET(self, request):
@@ -291,7 +291,7 @@ class PublicRoomListRestServlet(ClientV1RestServlet):
# TODO: Needs unit testing
class RoomMemberListRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/members$")
+ PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/members$")
@defer.inlineCallbacks
def on_GET(self, request, room_id):
@@ -328,7 +328,7 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
# TODO: Needs better unit testing
class RoomMessageListRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/messages$")
+ PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/messages$")
@defer.inlineCallbacks
def on_GET(self, request, room_id):
@@ -351,7 +351,7 @@ class RoomMessageListRestServlet(ClientV1RestServlet):
# TODO: Needs unit testing
class RoomStateRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/state$")
+ PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/state$")
@defer.inlineCallbacks
def on_GET(self, request, room_id):
@@ -368,7 +368,7 @@ class RoomStateRestServlet(ClientV1RestServlet):
# TODO: Needs unit testing
class RoomInitialSyncRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/initialSync$")
+ PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/initialSync$")
@defer.inlineCallbacks
def on_GET(self, request, room_id):
@@ -383,32 +383,8 @@ class RoomInitialSyncRestServlet(ClientV1RestServlet):
defer.returnValue((200, content))
-class RoomTriggerBackfill(ClientV1RestServlet):
- PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/backfill$")
-
- def __init__(self, hs):
- super(RoomTriggerBackfill, self).__init__(hs)
- self.clock = hs.get_clock()
-
- @defer.inlineCallbacks
- def on_GET(self, request, room_id):
- remote_server = urllib.unquote(
- request.args["remote"][0]
- ).decode("UTF-8")
-
- limit = int(request.args["limit"][0])
-
- handler = self.handlers.federation_handler
- events = yield handler.backfill(remote_server, room_id, limit)
-
- time_now = self.clock.time_msec()
-
- res = [serialize_event(event, time_now) for event in events]
- defer.returnValue((200, res))
-
-
class RoomEventContext(ClientV1RestServlet):
- PATTERN = client_path_pattern(
+ PATTERNS = client_path_patterns(
"/rooms/(?P<room_id>[^/]*)/context/(?P<event_id>[^/]*)$"
)
@@ -447,9 +423,9 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
def register(self, http_server):
# /rooms/$roomid/[invite|join|leave]
- PATTERN = ("/rooms/(?P<room_id>[^/]*)/"
- "(?P<membership_action>join|invite|leave|ban|kick)")
- register_txn_path(self, PATTERN, http_server)
+ PATTERNS = ("/rooms/(?P<room_id>[^/]*)/"
+ "(?P<membership_action>join|invite|leave|ban|kick|forget)")
+ register_txn_path(self, PATTERNS, http_server)
@defer.inlineCallbacks
def on_POST(self, request, room_id, membership_action, txn_id=None):
@@ -458,6 +434,8 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
allow_guest=True
)
+ effective_membership_action = membership_action
+
if is_guest and membership_action not in {Membership.JOIN, Membership.LEAVE}:
raise AuthError(403, "Guest access not allowed")
@@ -488,11 +466,13 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
UserID.from_string(state_key)
if membership_action == "kick":
- membership_action = "leave"
+ effective_membership_action = "leave"
+ elif membership_action == "forget":
+ effective_membership_action = "leave"
msg_handler = self.handlers.message_handler
- content = {"membership": unicode(membership_action)}
+ content = {"membership": unicode(effective_membership_action)}
if is_guest:
content["kind"] = "guest"
@@ -509,6 +489,9 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
is_guest=is_guest,
)
+ if membership_action == "forget":
+ yield self.handlers.room_member_handler.forget(user, room_id)
+
defer.returnValue((200, {}))
def _has_3pid_invite_keys(self, content):
@@ -536,8 +519,8 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
class RoomRedactEventRestServlet(ClientV1RestServlet):
def register(self, http_server):
- PATTERN = ("/rooms/(?P<room_id>[^/]*)/redact/(?P<event_id>[^/]*)")
- register_txn_path(self, PATTERN, http_server)
+ PATTERNS = ("/rooms/(?P<room_id>[^/]*)/redact/(?P<event_id>[^/]*)")
+ register_txn_path(self, PATTERNS, http_server)
@defer.inlineCallbacks
def on_POST(self, request, room_id, event_id, txn_id=None):
@@ -575,7 +558,7 @@ class RoomRedactEventRestServlet(ClientV1RestServlet):
class RoomTypingRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern(
+ PATTERNS = client_path_patterns(
"/rooms/(?P<room_id>[^/]*)/typing/(?P<user_id>[^/]*)$"
)
@@ -608,7 +591,7 @@ class RoomTypingRestServlet(ClientV1RestServlet):
class SearchRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern(
+ PATTERNS = client_path_patterns(
"/search$"
)
@@ -648,20 +631,20 @@ def register_txn_path(servlet, regex_string, http_server, with_get=False):
http_server : The http_server to register paths with.
with_get: True to also register respective GET paths for the PUTs.
"""
- http_server.register_path(
+ http_server.register_paths(
"POST",
- client_path_pattern(regex_string + "$"),
+ client_path_patterns(regex_string + "$"),
servlet.on_POST
)
- http_server.register_path(
+ http_server.register_paths(
"PUT",
- client_path_pattern(regex_string + "/(?P<txn_id>[^/]*)$"),
+ client_path_patterns(regex_string + "/(?P<txn_id>[^/]*)$"),
servlet.on_PUT
)
if with_get:
- http_server.register_path(
+ http_server.register_paths(
"GET",
- client_path_pattern(regex_string + "/(?P<txn_id>[^/]*)$"),
+ client_path_patterns(regex_string + "/(?P<txn_id>[^/]*)$"),
servlet.on_GET
)
@@ -672,7 +655,6 @@ def register_servlets(hs, http_server):
RoomMemberListRestServlet(hs).register(http_server)
RoomMessageListRestServlet(hs).register(http_server)
JoinRoomAliasServlet(hs).register(http_server)
- RoomTriggerBackfill(hs).register(http_server)
RoomMembershipRestServlet(hs).register(http_server)
RoomSendEventRestServlet(hs).register(http_server)
PublicRoomListRestServlet(hs).register(http_server)
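
For orientation, register_txn_path wires each of these regexes up twice: a POST on the bare path and a PUT (optionally a GET) on the path plus a trailing transaction id. A standalone sketch of the expansion for the membership pattern above, with the client prefix omitted:

import re

def txn_paths(regex_string):
    # Mirrors the POST/PUT pairing that register_txn_path produces.
    return {
        "POST": re.compile(regex_string + "$"),
        "PUT": re.compile(regex_string + "/(?P<txn_id>[^/]*)$"),
    }

paths = txn_paths("/rooms/(?P<room_id>[^/]*)/"
                  "(?P<membership_action>join|invite|leave|ban|kick|forget)")
m = paths["PUT"].match("/rooms/!abc:example.com/forget/txn1")
assert m.group("membership_action") == "forget"
assert m.group("txn_id") == "txn1"

Note that the new "forget" action rides the same route: it is sent out as a leave membership event, then the room is marked forgotten via room_member_handler.forget().
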
diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py
index eb7c57cade..1567a03c89 100644
--- a/synapse/rest/client/v1/voip.py
+++ b/synapse/rest/client/v1/voip.py
@@ -15,7 +15,7 @@
from twisted.internet import defer
-from base import ClientV1RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_patterns
import hmac
@@ -24,7 +24,7 @@ import base64
class VoipRestServlet(ClientV1RestServlet):
- PATTERN = client_path_pattern("/voip/turnServer$")
+ PATTERNS = client_path_patterns("/voip/turnServer$")
@defer.inlineCallbacks
def on_GET(self, request):
diff --git a/synapse/rest/client/v2_alpha/__init__.py b/synapse/rest/client/v2_alpha/__init__.py
index a108132346..c488b10d3c 100644
--- a/synapse/rest/client/v2_alpha/__init__.py
+++ b/synapse/rest/client/v2_alpha/__init__.py
@@ -12,37 +12,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-from . import (
- sync,
- filter,
- account,
- register,
- auth,
- receipts,
- keys,
- tokenrefresh,
- tags,
-)
-
-from synapse.http.server import JsonResource
-
-
-class ClientV2AlphaRestResource(JsonResource):
- """A resource for version 2 alpha of the matrix client API."""
-
- def __init__(self, hs):
- JsonResource.__init__(self, hs, canonical_json=False)
- self.register_servlets(self, hs)
-
- @staticmethod
- def register_servlets(client_resource, hs):
- sync.register_servlets(hs, client_resource)
- filter.register_servlets(hs, client_resource)
- account.register_servlets(hs, client_resource)
- register.register_servlets(hs, client_resource)
- auth.register_servlets(hs, client_resource)
- receipts.register_servlets(hs, client_resource)
- keys.register_servlets(hs, client_resource)
- tokenrefresh.register_servlets(hs, client_resource)
- tags.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/v2_alpha/_base.py b/synapse/rest/client/v2_alpha/_base.py
index 4540e8dcf7..7b8b879c03 100644
--- a/synapse/rest/client/v2_alpha/_base.py
+++ b/synapse/rest/client/v2_alpha/_base.py
@@ -27,7 +27,7 @@ import simplejson
logger = logging.getLogger(__name__)
-def client_v2_pattern(path_regex):
+def client_v2_patterns(path_regex, releases=(0,)):
"""Creates a regex compiled client path with the correct client path
prefix.
@@ -37,7 +37,13 @@ def client_v2_pattern(path_regex):
Returns:
SRE_Pattern
"""
- return re.compile("^" + CLIENT_V2_ALPHA_PREFIX + path_regex)
+ patterns = [re.compile("^" + CLIENT_V2_ALPHA_PREFIX + path_regex)]
+ unstable_prefix = CLIENT_V2_ALPHA_PREFIX.replace("/v2_alpha", "/unstable")
+ patterns.append(re.compile("^" + unstable_prefix + path_regex))
+ for release in releases:
+ new_prefix = CLIENT_V2_ALPHA_PREFIX.replace("/v2_alpha", "/r%d" % release)
+ patterns.append(re.compile("^" + new_prefix + path_regex))
+ return patterns
def parse_request_allow_empty(request):
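
The effect of the new helper is easiest to see by expanding a pattern by hand. A self-contained sketch, assuming CLIENT_V2_ALPHA_PREFIX is "/_matrix/client/v2_alpha" (its conventional value):

import re

CLIENT_V2_ALPHA_PREFIX = "/_matrix/client/v2_alpha"  # assumed value

def client_v2_patterns(path_regex, releases=(0,)):
    # Mirrors the helper above: v2_alpha, unstable, then one per release.
    patterns = [re.compile("^" + CLIENT_V2_ALPHA_PREFIX + path_regex)]
    unstable_prefix = CLIENT_V2_ALPHA_PREFIX.replace("/v2_alpha", "/unstable")
    patterns.append(re.compile("^" + unstable_prefix + path_regex))
    for release in releases:
        new_prefix = CLIENT_V2_ALPHA_PREFIX.replace("/v2_alpha", "/r%d" % release)
        patterns.append(re.compile("^" + new_prefix + path_regex))
    return patterns

assert [p.pattern for p in client_v2_patterns("/sync$")] == [
    "^/_matrix/client/v2_alpha/sync$",
    "^/_matrix/client/unstable/sync$",
    "^/_matrix/client/r0/sync$",
]
# releases=() keeps an endpoint off r0, as done for /keys/* below.
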
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 1970ad3458..3e1459d5b9 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -20,7 +20,7 @@ from synapse.api.errors import LoginError, SynapseError, Codes
from synapse.http.servlet import RestServlet
from synapse.util.async import run_on_reactor
-from ._base import client_v2_pattern, parse_json_dict_from_request
+from ._base import client_v2_patterns, parse_json_dict_from_request
import logging
@@ -29,7 +29,7 @@ logger = logging.getLogger(__name__)
class PasswordRestServlet(RestServlet):
- PATTERN = client_v2_pattern("/account/password")
+ PATTERNS = client_v2_patterns("/account/password")
def __init__(self, hs):
super(PasswordRestServlet, self).__init__()
@@ -89,7 +89,7 @@ class PasswordRestServlet(RestServlet):
class ThreepidRestServlet(RestServlet):
- PATTERN = client_v2_pattern("/account/3pid")
+ PATTERNS = client_v2_patterns("/account/3pid")
def __init__(self, hs):
super(ThreepidRestServlet, self).__init__()
diff --git a/synapse/rest/client/v2_alpha/account_data.py b/synapse/rest/client/v2_alpha/account_data.py
new file mode 100644
index 0000000000..5b8f454bf1
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/account_data.py
@@ -0,0 +1,111 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import client_v2_patterns
+
+from synapse.http.servlet import RestServlet
+from synapse.api.errors import AuthError, SynapseError
+
+from twisted.internet import defer
+
+import logging
+
+import simplejson as json
+
+logger = logging.getLogger(__name__)
+
+
+class AccountDataServlet(RestServlet):
+ """
+ PUT /user/{user_id}/account_data/{account_data_type} HTTP/1.1
+ """
+ PATTERNS = client_v2_patterns(
+ "/user/(?P<user_id>[^/]*)/account_data/(?P<account_data_type>[^/]*)"
+ )
+
+ def __init__(self, hs):
+ super(AccountDataServlet, self).__init__()
+ self.auth = hs.get_auth()
+ self.store = hs.get_datastore()
+ self.notifier = hs.get_notifier()
+
+ @defer.inlineCallbacks
+ def on_PUT(self, request, user_id, account_data_type):
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
+ if user_id != auth_user.to_string():
+ raise AuthError(403, "Cannot add account data for other users.")
+
+ try:
+ content_bytes = request.content.read()
+ body = json.loads(content_bytes)
+ except:
+ raise SynapseError(400, "Invalid JSON")
+
+ max_id = yield self.store.add_account_data_for_user(
+ user_id, account_data_type, body
+ )
+
+ yield self.notifier.on_new_event(
+ "account_data_key", max_id, users=[user_id]
+ )
+
+ defer.returnValue((200, {}))
+
+
+class RoomAccountDataServlet(RestServlet):
+ """
+ PUT /user/{user_id}/rooms/{room_id}/account_data/{account_data_type} HTTP/1.1
+ """
+ PATTERNS = client_v2_patterns(
+ "/user/(?P<user_id>[^/]*)"
+ "/rooms/(?P<room_id>[^/]*)"
+ "/account_data/(?P<account_data_type>[^/]*)"
+ )
+
+ def __init__(self, hs):
+ super(RoomAccountDataServlet, self).__init__()
+ self.auth = hs.get_auth()
+ self.store = hs.get_datastore()
+ self.notifier = hs.get_notifier()
+
+ @defer.inlineCallbacks
+ def on_PUT(self, request, user_id, room_id, account_data_type):
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
+ if user_id != auth_user.to_string():
+ raise AuthError(403, "Cannot add account data for other users.")
+
+ try:
+ content_bytes = request.content.read()
+ body = json.loads(content_bytes)
+ except:
+ raise SynapseError(400, "Invalid JSON")
+
+ if not isinstance(body, dict):
+ raise SynapseError(400, "Expected a JSON object")
+
+ max_id = yield self.store.add_account_data_to_room(
+ user_id, room_id, account_data_type, body
+ )
+
+ yield self.notifier.on_new_event(
+ "account_data_key", max_id, users=[user_id]
+ )
+
+ defer.returnValue((200, {}))
+
+
+def register_servlets(hs, http_server):
+ AccountDataServlet(hs).register(http_server)
+ RoomAccountDataServlet(hs).register(http_server)
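
As a usage sketch, the two servlets give clients a per-user (and per-room) key/value store keyed on an event type string. A hypothetical python 2 client helper for the global variant; the URL, token, and type name are illustrative, not part of the change:

import json
import urllib
import urllib2  # the codebase targets python 2 at this point

def put_account_data(base_url, token, user_id, data_type, content):
    """Sketch of a client-side helper for the new endpoint."""
    url = "%s/_matrix/client/v2_alpha/user/%s/account_data/%s?access_token=%s" % (
        base_url, urllib.quote(user_id, safe=""), data_type, token,
    )
    req = urllib2.Request(url, data=json.dumps(content),
                          headers={"Content-Type": "application/json"})
    req.get_method = lambda: "PUT"  # urllib2 defaults to POST when data is set
    return json.loads(urllib2.urlopen(req).read())

# e.g. put_account_data("https://hs.example.com", "TOKEN",
#                       "@alice:example.com", "org.example.prefs",
#                       {"theme": "dark"})
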
diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py
index 4c726f05f5..fb5947a141 100644
--- a/synapse/rest/client/v2_alpha/auth.py
+++ b/synapse/rest/client/v2_alpha/auth.py
@@ -20,7 +20,7 @@ from synapse.api.errors import SynapseError
from synapse.api.urls import CLIENT_V2_ALPHA_PREFIX
from synapse.http.servlet import RestServlet
-from ._base import client_v2_pattern
+from ._base import client_v2_patterns
import logging
@@ -97,7 +97,7 @@ class AuthRestServlet(RestServlet):
cannot be handled in the normal flow (with requests to the same endpoint).
Current use is for web fallback auth.
"""
- PATTERN = client_v2_pattern("/auth/(?P<stagetype>[\w\.]*)/fallback/web")
+ PATTERNS = client_v2_patterns("/auth/(?P<stagetype>[\w\.]*)/fallback/web")
def __init__(self, hs):
super(AuthRestServlet, self).__init__()
diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py
index 97956a4b91..3cd0364b56 100644
--- a/synapse/rest/client/v2_alpha/filter.py
+++ b/synapse/rest/client/v2_alpha/filter.py
@@ -19,7 +19,7 @@ from synapse.api.errors import AuthError, SynapseError
from synapse.http.servlet import RestServlet
from synapse.types import UserID
-from ._base import client_v2_pattern
+from ._base import client_v2_patterns
import simplejson as json
import logging
@@ -29,7 +29,7 @@ logger = logging.getLogger(__name__)
class GetFilterRestServlet(RestServlet):
- PATTERN = client_v2_pattern("/user/(?P<user_id>[^/]*)/filter/(?P<filter_id>[^/]*)")
+ PATTERNS = client_v2_patterns("/user/(?P<user_id>[^/]*)/filter/(?P<filter_id>[^/]*)")
def __init__(self, hs):
super(GetFilterRestServlet, self).__init__()
@@ -65,7 +65,7 @@ class GetFilterRestServlet(RestServlet):
class CreateFilterRestServlet(RestServlet):
- PATTERN = client_v2_pattern("/user/(?P<user_id>[^/]*)/filter")
+ PATTERNS = client_v2_patterns("/user/(?P<user_id>[^/]*)/filter")
def __init__(self, hs):
super(CreateFilterRestServlet, self).__init__()
diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py
index 820d33336f..753f2988a1 100644
--- a/synapse/rest/client/v2_alpha/keys.py
+++ b/synapse/rest/client/v2_alpha/keys.py
@@ -21,7 +21,7 @@ from synapse.types import UserID
from canonicaljson import encode_canonical_json
-from ._base import client_v2_pattern
+from ._base import client_v2_patterns
import simplejson as json
import logging
@@ -54,7 +54,7 @@ class KeyUploadServlet(RestServlet):
},
}
"""
- PATTERN = client_v2_pattern("/keys/upload/(?P<device_id>[^/]*)")
+ PATTERNS = client_v2_patterns("/keys/upload/(?P<device_id>[^/]*)", releases=())
def __init__(self, hs):
super(KeyUploadServlet, self).__init__()
@@ -154,12 +154,13 @@ class KeyQueryServlet(RestServlet):
} } } } } }
"""
- PATTERN = client_v2_pattern(
+ PATTERNS = client_v2_patterns(
"/keys/query(?:"
"/(?P<user_id>[^/]*)(?:"
"/(?P<device_id>[^/]*)"
")?"
- ")?"
+ ")?",
+ releases=()
)
def __init__(self, hs):
@@ -245,10 +246,11 @@ class OneTimeKeyServlet(RestServlet):
} } } }
"""
- PATTERN = client_v2_pattern(
+ PATTERNS = client_v2_patterns(
"/keys/claim(?:/?|(?:/"
"(?P<user_id>[^/]*)/(?P<device_id>[^/]*)/(?P<algorithm>[^/]*)"
- ")?)"
+ ")?)",
+ releases=()
)
def __init__(self, hs):
diff --git a/synapse/rest/client/v2_alpha/receipts.py b/synapse/rest/client/v2_alpha/receipts.py
index 788acd4adb..aa214e13b6 100644
--- a/synapse/rest/client/v2_alpha/receipts.py
+++ b/synapse/rest/client/v2_alpha/receipts.py
@@ -17,7 +17,7 @@ from twisted.internet import defer
from synapse.api.errors import SynapseError
from synapse.http.servlet import RestServlet
-from ._base import client_v2_pattern
+from ._base import client_v2_patterns
import logging
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
class ReceiptRestServlet(RestServlet):
- PATTERN = client_v2_pattern(
+ PATTERNS = client_v2_patterns(
"/rooms/(?P<room_id>[^/]*)"
"/receipt/(?P<receipt_type>[^/]*)"
"/(?P<event_id>[^/]*)$"
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index f899376311..b2b89652c6 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -19,7 +19,7 @@ from synapse.api.constants import LoginType
from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError
from synapse.http.servlet import RestServlet
-from ._base import client_v2_pattern, parse_json_dict_from_request
+from ._base import client_v2_patterns, parse_json_dict_from_request
import logging
import hmac
@@ -41,7 +41,7 @@ logger = logging.getLogger(__name__)
class RegisterRestServlet(RestServlet):
- PATTERN = client_v2_pattern("/register")
+ PATTERNS = client_v2_patterns("/register")
def __init__(self, hs):
super(RegisterRestServlet, self).__init__()
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 775f49885b..35a70ffad1 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -25,11 +25,14 @@ from synapse.events.utils import (
serialize_event, format_event_for_client_v2_without_room_id,
)
from synapse.api.filtering import FilterCollection
-from ._base import client_v2_pattern
+from synapse.api.errors import SynapseError
+from ._base import client_v2_patterns
import copy
import logging
+import ujson as json
+
logger = logging.getLogger(__name__)
@@ -48,7 +51,7 @@ class SyncRestServlet(RestServlet):
"next_batch": // batch token for the next /sync
"presence": // presence data for the user.
"rooms": {
- "joined": { // Joined rooms being updated.
+ "join": { // Joined rooms being updated.
"${room_id}": { // Id of the room being updated
"event_map": // Map of EventID -> event JSON.
"timeline": { // The recent events in the room if gap is "true"
@@ -63,13 +66,13 @@ class SyncRestServlet(RestServlet):
"ephemeral": {"events": []} // list of event objects
}
},
- "invited": {}, // Invited rooms being updated.
- "archived": {} // Archived rooms being updated.
+ "invite": {}, // Invited rooms being updated.
+ "leave": {} // Archived rooms being updated.
}
}
"""
- PATTERN = client_v2_pattern("/sync$")
+ PATTERNS = client_v2_patterns("/sync$")
ALLOWED_PRESENCE = set(["online", "offline"])
def __init__(self, hs):
@@ -82,7 +85,9 @@ class SyncRestServlet(RestServlet):
@defer.inlineCallbacks
def on_GET(self, request):
- user, token_id, _ = yield self.auth.get_user_by_req(request)
+ user, token_id, is_guest = yield self.auth.get_user_by_req(
+ request, allow_guest=True
+ )
timeout = parse_integer(request, "timeout", default=0)
since = parse_string(request, "since")
@@ -100,15 +105,29 @@ class SyncRestServlet(RestServlet):
)
)
- try:
- filter = yield self.filtering.get_user_filter(
- user.localpart, filter_id
+ if filter_id and filter_id.startswith('{'):
+ try:
+ filter_object = json.loads(filter_id)
+ except:
+ raise SynapseError(400, "Invalid filter JSON")
+ self.filtering._check_valid_filter(filter_object)
+ filter = FilterCollection(filter_object)
+ else:
+ try:
+ filter = yield self.filtering.get_user_filter(
+ user.localpart, filter_id
+ )
+ except:
+ filter = FilterCollection({})
+
+ if is_guest and filter.list_rooms() is None:
+ raise SynapseError(
+ 400, "Guest users must provide a list of rooms in the filter"
)
- except:
- filter = FilterCollection({})
sync_config = SyncConfig(
user=user,
+ is_guest=is_guest,
filter=filter,
)
@@ -144,6 +163,9 @@ class SyncRestServlet(RestServlet):
)
response_content = {
+ "account_data": self.encode_account_data(
+ sync_result.account_data, filter, time_now
+ ),
"presence": self.encode_presence(
sync_result.presence, filter, time_now
),
@@ -165,6 +187,9 @@ class SyncRestServlet(RestServlet):
formatted.append(event)
return {"events": filter.filter_presence(formatted)}
+ def encode_account_data(self, events, filter, time_now):
+ return {"events": filter.filter_account_data(events)}
+
def encode_joined(self, rooms, filter, time_now, token_id):
"""
Encode the joined rooms in a sync result
@@ -333,20 +358,36 @@ class SyncRestServlet(RestServlet):
continue
prev_event_id = timeline_event.unsigned.get("replaces_state", None)
- logger.debug("Replacing %s with %s in state dict",
- timeline_event.event_id, prev_event_id)
- if prev_event_id is None:
+ prev_content = timeline_event.unsigned.get('prev_content')
+ prev_sender = timeline_event.unsigned.get('prev_sender')
+ # Empirically it seems possible for the event to have a
+ # "replaces_state" key but no prev_content or prev_sender;
+ # markjh conjectures that it could be due to the server not
+ # having a copy of that event.
+ # If this is the case then we ignore the previous event. This will
+ # cause the displayname calculations on the client to be incorrect.
+ if prev_event_id is None or not prev_content or not prev_sender:
+ logger.debug(
+ "Removing %r from the state dict, as it is missing"
+ " prev_content (prev_event_id=%r)",
+ timeline_event.event_id, prev_event_id
+ )
del result[event_key]
else:
+ logger.debug(
+ "Replacing %r with %r in state dict",
+ timeline_event.event_id, prev_event_id
+ )
result[event_key] = FrozenEvent({
"type": timeline_event.type,
"state_key": timeline_event.state_key,
- "content": timeline_event.unsigned['prev_content'],
- "sender": timeline_event.unsigned['prev_sender'],
+ "content": prev_content,
+ "sender": prev_sender,
"event_id": prev_event_id,
"room_id": timeline_event.room_id,
})
+
logger.debug("New value: %r", result.get(event_key))
return result
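
A consequence of the filter handling above: the filter query parameter may now carry either a stored filter id or, when it starts with '{', a literal JSON filter object; guest users must supply a filter that names rooms or the request is rejected. A sketch of building both call shapes (the filter contents are illustrative):

import json
import urllib

def sync_qs(filter_value, since=None):
    # Stored-id form: sync_qs("42"); inline form: sync_qs({...}) --
    # the server detects the latter by the leading '{'.
    if isinstance(filter_value, dict):
        filter_value = json.dumps(filter_value)
    params = {"filter": filter_value}
    if since is not None:
        params["since"] = since
    return urllib.urlencode(params)

qs = sync_qs({"room": {"rooms": ["!abc:example.com"]}})
assert qs.startswith("filter=%7B")  # '{' is percent-encoded
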
diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py
index ba7223be11..b5d0db5569 100644
--- a/synapse/rest/client/v2_alpha/tags.py
+++ b/synapse/rest/client/v2_alpha/tags.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from ._base import client_v2_pattern
+from ._base import client_v2_patterns
from synapse.http.servlet import RestServlet
from synapse.api.errors import AuthError, SynapseError
@@ -31,7 +31,7 @@ class TagListServlet(RestServlet):
"""
GET /user/{user_id}/rooms/{room_id}/tags HTTP/1.1
"""
- PATTERN = client_v2_pattern(
+ PATTERNS = client_v2_patterns(
"/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags"
)
@@ -56,7 +56,7 @@ class TagServlet(RestServlet):
PUT /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
DELETE /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
"""
- PATTERN = client_v2_pattern(
+ PATTERNS = client_v2_patterns(
"/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags/(?P<tag>[^/]*)"
)
diff --git a/synapse/rest/client/v2_alpha/tokenrefresh.py b/synapse/rest/client/v2_alpha/tokenrefresh.py
index 901e777983..5a63afd51e 100644
--- a/synapse/rest/client/v2_alpha/tokenrefresh.py
+++ b/synapse/rest/client/v2_alpha/tokenrefresh.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
from synapse.api.errors import AuthError, StoreError, SynapseError
from synapse.http.servlet import RestServlet
-from ._base import client_v2_pattern, parse_json_dict_from_request
+from ._base import client_v2_patterns, parse_json_dict_from_request
class TokenRefreshRestServlet(RestServlet):
@@ -26,7 +26,7 @@ class TokenRefreshRestServlet(RestServlet):
Exchanges refresh tokens for a pair of an access token and a new refresh
token.
"""
- PATTERN = client_v2_pattern("/tokenrefresh")
+ PATTERNS = client_v2_patterns("/tokenrefresh")
def __init__(self, hs):
super(TokenRefreshRestServlet, self).__init__()
diff --git a/synapse/server.py b/synapse/server.py
index f75d5358b2..f5c8329873 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -71,8 +71,7 @@ class BaseHomeServer(object):
'state_handler',
'notifier',
'distributor',
- 'resource_for_client',
- 'resource_for_client_v2_alpha',
+ 'client_resource',
'resource_for_federation',
'resource_for_static_content',
'resource_for_web_client',
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index e7443f2838..c46b653f11 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -42,6 +42,7 @@ from .end_to_end_keys import EndToEndKeyStore
from .receipts import ReceiptsStore
from .search import SearchStore
from .tags import TagsStore
+from .account_data import AccountDataStore
import logging
@@ -73,6 +74,7 @@ class DataStore(RoomMemberStore, RoomStore,
EndToEndKeyStore,
SearchStore,
TagsStore,
+ AccountDataStore,
):
def __init__(self, hs):
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 218e708054..17a14e001c 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -214,7 +214,8 @@ class SQLBaseStore(object):
self._clock.looping_call(loop, 10000)
- def _new_transaction(self, conn, desc, after_callbacks, func, *args, **kwargs):
+ def _new_transaction(self, conn, desc, after_callbacks, logging_context,
+ func, *args, **kwargs):
start = time.time() * 1000
txn_id = self._TXN_ID
@@ -277,6 +278,9 @@ class SQLBaseStore(object):
end = time.time() * 1000
duration = end - start
+ if logging_context is not None:
+ logging_context.add_database_transaction(duration)
+
transaction_logger.debug("[TXN END] {%s} %f", name, duration)
self._current_txn_total_time += duration
@@ -302,7 +306,8 @@ class SQLBaseStore(object):
current_context.copy_to(context)
return self._new_transaction(
- conn, desc, after_callbacks, func, *args, **kwargs
+ conn, desc, after_callbacks, current_context,
+ func, *args, **kwargs
)
result = yield preserve_context_over_fn(
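
The extra logging_context argument exists so time spent inside database transactions can be credited to the originating request; callers with no request context (such as the event-fetch path patched below) pass None. A toy model of the accounting side, with hypothetical attribute names:

class RequestContext(object):
    """Toy stand-in for the logging context now threaded through
    _new_transaction (real attribute names may differ)."""
    def __init__(self):
        self.db_txn_count = 0
        self.db_txn_duration_ms = 0.0

    def add_database_transaction(self, duration_ms):
        self.db_txn_count += 1
        self.db_txn_duration_ms += duration_ms

ctx = RequestContext()
ctx.add_database_transaction(12.5)
ctx.add_database_transaction(3.0)
assert (ctx.db_txn_count, ctx.db_txn_duration_ms) == (2, 15.5)
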
diff --git a/synapse/storage/account_data.py b/synapse/storage/account_data.py
new file mode 100644
index 0000000000..d1829f84e8
--- /dev/null
+++ b/synapse/storage/account_data.py
@@ -0,0 +1,211 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import SQLBaseStore
+from twisted.internet import defer
+
+import ujson as json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class AccountDataStore(SQLBaseStore):
+
+ def get_account_data_for_user(self, user_id):
+ """Get all the client account_data for a user.
+
+ Args:
+ user_id(str): The user to get the account_data for.
+ Returns:
+ A deferred pair of a dict of global account_data and a dict
+ mapping from room_id string to per room account_data dicts.
+ """
+
+ def get_account_data_for_user_txn(txn):
+ rows = self._simple_select_list_txn(
+ txn, "account_data", {"user_id": user_id},
+ ["account_data_type", "content"]
+ )
+
+ global_account_data = {
+ row["account_data_type"]: json.loads(row["content"]) for row in rows
+ }
+
+ rows = self._simple_select_list_txn(
+ txn, "room_account_data", {"user_id": user_id},
+ ["room_id", "account_data_type", "content"]
+ )
+
+ by_room = {}
+ for row in rows:
+ room_data = by_room.setdefault(row["room_id"], {})
+ room_data[row["account_data_type"]] = json.loads(row["content"])
+
+ return (global_account_data, by_room)
+
+ return self.runInteraction(
+ "get_account_data_for_user", get_account_data_for_user_txn
+ )
+
+ def get_account_data_for_room(self, user_id, room_id):
+ """Get all the client account_data for a user for a room.
+
+ Args:
+ user_id(str): The user to get the account_data for.
+ room_id(str): The room to get the account_data for.
+ Returns:
+ A deferred dict of the room account_data
+ """
+ def get_account_data_for_room_txn(txn):
+ rows = self._simple_select_list_txn(
+ txn, "room_account_data", {"user_id": user_id, "room_id": room_id},
+ ["account_data_type", "content"]
+ )
+
+ return {
+ row["account_data_type"]: json.loads(row["content"]) for row in rows
+ }
+
+ return self.runInteraction(
+ "get_account_data_for_room", get_account_data_for_room_txn
+ )
+
+ def get_updated_account_data_for_user(self, user_id, stream_id):
+ """Get all the client account_data for a that's changed.
+
+ Args:
+ user_id(str): The user to get the account_data for.
+ stream_id(int): The point in the stream since which to get updates
+ Returns:
+ A deferred pair of a dict of global account_data and a dict
+ mapping from room_id string to per room account_data dicts.
+ """
+
+ def get_updated_account_data_for_user_txn(txn):
+ sql = (
+ "SELECT account_data_type, content FROM account_data"
+ " WHERE user_id = ? AND stream_id > ?"
+ )
+
+ txn.execute(sql, (user_id, stream_id))
+
+ global_account_data = {
+ row[0]: json.loads(row[1]) for row in txn.fetchall()
+ }
+
+ sql = (
+ "SELECT room_id, account_data_type, content FROM room_account_data"
+ " WHERE user_id = ? AND stream_id > ?"
+ )
+
+ txn.execute(sql, (user_id, stream_id))
+
+ account_data_by_room = {}
+ for row in txn.fetchall():
+ room_account_data = account_data_by_room.setdefault(row[0], {})
+ room_account_data[row[1]] = json.loads(row[2])
+
+ return (global_account_data, account_data_by_room)
+
+ return self.runInteraction(
+ "get_updated_account_data_for_user", get_updated_account_data_for_user_txn
+ )
+
+ @defer.inlineCallbacks
+ def add_account_data_to_room(self, user_id, room_id, account_data_type, content):
+ """Add some account_data to a room for a user.
+ Args:
+ user_id(str): The user to add the account_data for.
+ room_id(str): The room to add the account_data to.
+ account_data_type(str): The type of account_data to add.
+ content(dict): A json object to associate with the account_data.
+ Returns:
+ A deferred that completes once the account_data has been added.
+ """
+ content_json = json.dumps(content)
+
+ def add_account_data_txn(txn, next_id):
+ self._simple_upsert_txn(
+ txn,
+ table="room_account_data",
+ keyvalues={
+ "user_id": user_id,
+ "room_id": room_id,
+ "account_data_type": account_data_type,
+ },
+ values={
+ "stream_id": next_id,
+ "content": content_json,
+ }
+ )
+ self._update_max_stream_id(txn, next_id)
+
+ with (yield self._account_data_id_gen.get_next(self)) as next_id:
+ yield self.runInteraction(
+ "add_room_account_data", add_account_data_txn, next_id
+ )
+
+ result = yield self._account_data_id_gen.get_max_token(self)
+ defer.returnValue(result)
+
+ @defer.inlineCallbacks
+ def add_account_data_for_user(self, user_id, account_data_type, content):
+ """Add some account_data to a room for a user.
+ Args:
+ user_id(str): The user to add a tag for.
+ account_data_type(str): The type of account_data to add.
+ content(dict): A json object to associate with the tag.
+ Returns:
+ A deferred that completes once the account_data has been added.
+ """
+ content_json = json.dumps(content)
+
+ def add_account_data_txn(txn, next_id):
+ self._simple_upsert_txn(
+ txn,
+ table="account_data",
+ keyvalues={
+ "user_id": user_id,
+ "account_data_type": account_data_type,
+ },
+ values={
+ "stream_id": next_id,
+ "content": content_json,
+ }
+ )
+ self._update_max_stream_id(txn, next_id)
+
+ with (yield self._account_data_id_gen.get_next(self)) as next_id:
+ yield self.runInteraction(
+ "add_user_account_data", add_account_data_txn, next_id
+ )
+
+ result = yield self._account_data_id_gen.get_max_token(self)
+ defer.returnValue(result)
+
+ def _update_max_stream_id(self, txn, next_id):
+ """Update the max stream_id
+
+ Args:
+ txn: The database cursor
+ next_id(int): The revision to advance to.
+ """
+ update_max_id_sql = (
+ "UPDATE account_data_max_stream_id"
+ " SET stream_id = ?"
+ " WHERE stream_id < ?"
+ )
+ txn.execute(update_max_id_sql, (next_id, next_id))
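
The store follows synapse's usual stream-id pattern: every write upserts the row with a fresh stream_id and advances account_data_max_stream_id, so an incremental /sync can fetch only rows newer than its token. A pure-python model of that contract (not the real store):

class StreamModel(object):
    """Toy model of the stream_id bookkeeping above."""
    def __init__(self):
        self.max_stream_id = 0
        self.rows = {}  # (user_id, data_type) -> (stream_id, content)

    def add_account_data_for_user(self, user_id, data_type, content):
        next_id = self.max_stream_id + 1          # _account_data_id_gen
        self.rows[(user_id, data_type)] = (next_id, content)  # upsert
        self.max_stream_id = next_id              # _update_max_stream_id
        return self.max_stream_id

    def get_updated_account_data_for_user(self, user_id, since_id):
        return dict(
            (dt, content)
            for (uid, dt), (sid, content) in self.rows.items()
            if uid == user_id and sid > since_id
        )

m = StreamModel()
first = m.add_account_data_for_user("@alice:hs", "m.direct", {"a": 1})
m.add_account_data_for_user("@alice:hs", "org.example.x", {"b": 2})
updated = m.get_updated_account_data_for_user("@alice:hs", first)
assert updated == {"org.example.x": {"b": 2}}
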
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 5d35ca90b9..fc5725097c 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -51,6 +51,14 @@ EVENT_QUEUE_TIMEOUT_S = 0.1 # Timeout when waiting for requests for events
class EventsStore(SQLBaseStore):
+ EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts"
+
+ def __init__(self, hs):
+ super(EventsStore, self).__init__(hs)
+ self.register_background_update_handler(
+ self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts
+ )
+
@defer.inlineCallbacks
def persist_events(self, events_and_contexts, backfilled=False,
is_new_state=True):
@@ -365,6 +373,7 @@ class EventsStore(SQLBaseStore):
"processed": True,
"outlier": event.internal_metadata.is_outlier(),
"content": encode_json(event.content).decode("UTF-8"),
+ "origin_server_ts": int(event.origin_server_ts),
}
for event, _ in events_and_contexts
],
@@ -640,7 +649,7 @@ class EventsStore(SQLBaseStore):
]
rows = self._new_transaction(
- conn, "do_fetch", [], self._fetch_event_rows, event_ids
+ conn, "do_fetch", [], None, self._fetch_event_rows, event_ids
)
row_dict = {
@@ -964,3 +973,71 @@ class EventsStore(SQLBaseStore):
ret = yield self.runInteraction("count_messages", _count_messages)
defer.returnValue(ret)
+
+ @defer.inlineCallbacks
+ def _background_reindex_origin_server_ts(self, progress, batch_size):
+ target_min_stream_id = progress["target_min_stream_id_inclusive"]
+ max_stream_id = progress["max_stream_id_exclusive"]
+ rows_inserted = progress.get("rows_inserted", 0)
+
+ INSERT_CLUMP_SIZE = 1000
+
+ def reindex_search_txn(txn):
+ sql = (
+ "SELECT stream_ordering, event_id FROM events"
+ " WHERE ? <= stream_ordering AND stream_ordering < ?"
+ " ORDER BY stream_ordering DESC"
+ " LIMIT ?"
+ )
+
+ txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
+
+ rows = txn.fetchall()
+ if not rows:
+ return 0
+
+ min_stream_id = rows[-1][0]
+ event_ids = [row[1] for row in rows]
+
+ events = self._get_events_txn(txn, event_ids)
+
+ rows = []
+ for event in events:
+ try:
+ event_id = event.event_id
+ origin_server_ts = event.origin_server_ts
+ except (KeyError, AttributeError):
+ # If the event is missing a necessary field then
+ # skip over it.
+ continue
+
+ rows.append((origin_server_ts, event_id))
+
+ sql = (
+ "UPDATE events SET origin_server_ts = ? WHERE event_id = ?"
+ )
+
+ for index in range(0, len(rows), INSERT_CLUMP_SIZE):
+ clump = rows[index:index + INSERT_CLUMP_SIZE]
+ txn.executemany(sql, clump)
+
+ progress = {
+ "target_min_stream_id_inclusive": target_min_stream_id,
+ "max_stream_id_exclusive": min_stream_id,
+ "rows_inserted": rows_inserted + len(rows)
+ }
+
+ self._background_update_progress_txn(
+ txn, self.EVENT_ORIGIN_SERVER_TS_NAME, progress
+ )
+
+ return len(rows)
+
+ result = yield self.runInteraction(
+ self.EVENT_ORIGIN_SERVER_TS_NAME, reindex_search_txn
+ )
+
+ if not result:
+ yield self._end_background_update(self.EVENT_ORIGIN_SERVER_TS_NAME)
+
+ defer.returnValue(result)
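
The reindex above follows the standard background-update shape: select a batch walking stream_ordering downwards, persist progress after each batch, and report 0 rows to signal completion, at which point the update is ended. A stripped-down model of that driver loop (names hypothetical):

def run_background_update(handler, progress, batch_size=100):
    """Toy driver mirroring how handlers like the one above are invoked:
    call repeatedly until the handler reports an empty batch."""
    total = 0
    while True:
        done = handler(progress, batch_size)
        if not done:
            return total
        total += done

def reindex_batch(progress, batch_size):
    # Stand-in for _background_reindex_origin_server_ts: consume the
    # [target_min, max) window from the top, shrinking max each pass.
    lo = progress["target_min_stream_id_inclusive"]
    hi = progress["max_stream_id_exclusive"]
    if hi <= lo:
        return 0
    batch = range(max(lo, hi - batch_size), hi)
    progress["max_stream_id_exclusive"] = batch[0]
    return len(batch)

state = {"target_min_stream_id_inclusive": 0, "max_stream_id_exclusive": 1000}
assert run_background_update(reindex_batch, state) == 1000
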
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index 9800fd4203..16eff62544 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
# Remember to update this number every time a change is made to database
# schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 26
+SCHEMA_VERSION = 27
dir_path = os.path.abspath(os.path.dirname(__file__))
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 2e5eddd259..09a05b08ef 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -258,10 +258,10 @@ class RegistrationStore(SQLBaseStore):
@defer.inlineCallbacks
def user_add_threepid(self, user_id, medium, address, validated_at, added_at):
yield self._simple_upsert("user_threepids", {
- "user_id": user_id,
"medium": medium,
"address": address,
}, {
+ "user_id": user_id,
"validated_at": validated_at,
"added_at": added_at,
})
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index ae1ad56d9a..4e0e9ab59a 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
from collections import namedtuple
from ._base import SQLBaseStore
-from synapse.util.caches.descriptors import cached
+from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
from synapse.api.constants import Membership
from synapse.types import UserID
@@ -121,7 +121,7 @@ class RoomMemberStore(SQLBaseStore):
return self.get_rooms_for_user_where_membership_is(
user_id, [Membership.INVITE]
).addCallback(lambda invites: self._get_events([
- invites.event_id for invite in invites
+ invite.event_id for invite in invites
]))
def get_leave_and_ban_events_for_user(self, user_id):
@@ -160,7 +160,7 @@ class RoomMemberStore(SQLBaseStore):
def _get_rooms_for_user_where_membership_is_txn(self, txn, user_id,
membership_list):
- where_clause = "user_id = ? AND (%s)" % (
+ where_clause = "user_id = ? AND (%s) AND forgotten = 0" % (
" OR ".join(["membership = ?" for _ in membership_list]),
)
@@ -269,3 +269,70 @@ class RoomMemberStore(SQLBaseStore):
ret = len(room_id_lists.pop(0).intersection(*room_id_lists)) > 0
defer.returnValue(ret)
+
+ @defer.inlineCallbacks
+ def forget(self, user_id, room_id):
+ """Indicate that user_id wishes to discard history for room_id."""
+ def f(txn):
+ sql = (
+ "UPDATE"
+ " room_memberships"
+ " SET"
+ " forgotten = 1"
+ " WHERE"
+ " user_id = ?"
+ " AND"
+ " room_id = ?"
+ )
+ txn.execute(sql, (user_id, room_id))
+ yield self.runInteraction("forget_membership", f)
+ self.was_forgotten_at.invalidate_all()
+ self.did_forget.invalidate((user_id, room_id))
+
+ @cachedInlineCallbacks(num_args=2)
+ def did_forget(self, user_id, room_id):
+ """Returns whether user_id has elected to discard history for room_id.
+
+ Returns False if they have since re-joined."""
+ def f(txn):
+ sql = (
+ "SELECT"
+ " COUNT(*)"
+ " FROM"
+ " room_memberships"
+ " WHERE"
+ " user_id = ?"
+ " AND"
+ " room_id = ?"
+ " AND"
+ " forgotten = 0"
+ )
+ txn.execute(sql, (user_id, room_id))
+ rows = txn.fetchall()
+ return rows[0][0]
+ count = yield self.runInteraction("did_forget_membership", f)
+ defer.returnValue(count == 0)
+
+ @cachedInlineCallbacks(num_args=3)
+ def was_forgotten_at(self, user_id, room_id, event_id):
+ """Returns whether user_id has elected to discard history for room_id at event_id.
+
+ event_id must be a membership event."""
+ def f(txn):
+ sql = (
+ "SELECT"
+ " forgotten"
+ " FROM"
+ " room_memberships"
+ " WHERE"
+ " user_id = ?"
+ " AND"
+ " room_id = ?"
+ " AND"
+ " event_id = ?"
+ )
+ txn.execute(sql, (user_id, room_id, event_id))
+ rows = txn.fetchall()
+ return rows[0][0]
+ forgot = yield self.runInteraction("did_forget_membership_at", f)
+ defer.returnValue(forgot == 1)
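
Note that did_forget counts rows where forgotten = 0, so a later re-join (which inserts a fresh membership row) automatically flips the answer back to False. A minimal model of the predicate:

def did_forget(membership_rows):
    """Model of the query above: rows are dicts for one (user, room)
    pair; forgotten only holds while every row is marked forgotten."""
    return sum(1 for r in membership_rows if r["forgotten"] == 0) == 0

rows = [{"event_id": "$join1", "forgotten": 1}]
assert did_forget(rows)          # left and forgot
rows.append({"event_id": "$join2", "forgotten": 0})
assert not did_forget(rows)      # re-joined: forget no longer applies
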
diff --git a/synapse/storage/schema/delta/15/v15.sql b/synapse/storage/schema/delta/15/v15.sql
index f5b2a08ca4..9523d2bcc3 100644
--- a/synapse/storage/schema/delta/15/v15.sql
+++ b/synapse/storage/schema/delta/15/v15.sql
@@ -1,23 +1,22 @@
-- Drop, copy & recreate pushers table to change unique key
-- Also add access_token column at the same time
CREATE TABLE IF NOT EXISTS pushers2 (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
+ id BIGINT PRIMARY KEY,
user_name TEXT NOT NULL,
- access_token INTEGER DEFAULT NULL,
- profile_tag varchar(32) NOT NULL,
- kind varchar(8) NOT NULL,
- app_id varchar(64) NOT NULL,
- app_display_name varchar(64) NOT NULL,
- device_display_name varchar(128) NOT NULL,
- pushkey blob NOT NULL,
+ access_token BIGINT DEFAULT NULL,
+ profile_tag VARCHAR(32) NOT NULL,
+ kind VARCHAR(8) NOT NULL,
+ app_id VARCHAR(64) NOT NULL,
+ app_display_name VARCHAR(64) NOT NULL,
+ device_display_name VARCHAR(128) NOT NULL,
+ pushkey bytea NOT NULL,
ts BIGINT NOT NULL,
- lang varchar(8),
- data blob,
+ lang VARCHAR(8),
+ data bytea,
last_token TEXT,
last_success BIGINT,
failing_since BIGINT,
- FOREIGN KEY(user_name) REFERENCES users(name),
- UNIQUE (app_id, pushkey, user_name)
+ UNIQUE (app_id, pushkey)
);
INSERT INTO pushers2 (id, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, ts, lang, data, last_token, last_success, failing_since)
SELECT id, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, ts, lang, data, last_token, last_success, failing_since FROM pushers;
diff --git a/synapse/storage/schema/delta/25/fts.py b/synapse/storage/schema/delta/25/fts.py
index 5239d69073..ba48e43792 100644
--- a/synapse/storage/schema/delta/25/fts.py
+++ b/synapse/storage/schema/delta/25/fts.py
@@ -38,7 +38,7 @@ CREATE INDEX event_search_ev_ridx ON event_search(room_id);
SQLITE_TABLE = (
- "CREATE VIRTUAL TABLE IF NOT EXISTS event_search"
+ "CREATE VIRTUAL TABLE event_search"
" USING fts4 ( event_id, room_id, sender, key, value )"
)
diff --git a/synapse/storage/schema/delta/27/account_data.sql b/synapse/storage/schema/delta/27/account_data.sql
new file mode 100644
index 0000000000..9f25416005
--- /dev/null
+++ b/synapse/storage/schema/delta/27/account_data.sql
@@ -0,0 +1,36 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE IF NOT EXISTS account_data(
+ user_id TEXT NOT NULL,
+ account_data_type TEXT NOT NULL, -- The type of the account_data.
+ stream_id BIGINT NOT NULL, -- The version of the account_data.
+ content TEXT NOT NULL, -- The JSON content of the account_data
+ CONSTRAINT account_data_uniqueness UNIQUE (user_id, account_data_type)
+);
+
+
+CREATE TABLE IF NOT EXISTS room_account_data(
+ user_id TEXT NOT NULL,
+ room_id TEXT NOT NULL,
+ account_data_type TEXT NOT NULL, -- The type of the account_data.
+ stream_id BIGINT NOT NULL, -- The version of the account_data.
+ content TEXT NOT NULL, -- The JSON content of the account_data
+ CONSTRAINT room_account_data_uniqueness UNIQUE (user_id, room_id, account_data_type)
+);
+
+
+CREATE INDEX account_data_stream_id on account_data(user_id, stream_id);
+CREATE INDEX room_account_data_stream_id on room_account_data(user_id, stream_id);
diff --git a/synapse/storage/schema/delta/27/forgotten_memberships.sql b/synapse/storage/schema/delta/27/forgotten_memberships.sql
new file mode 100644
index 0000000000..beeb8a288b
--- /dev/null
+++ b/synapse/storage/schema/delta/27/forgotten_memberships.sql
@@ -0,0 +1,26 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Keeps track of what rooms users have left and don't want to be able to
+ * access again.
+ *
+ * If all users on this server have left a room, we can delete the room
+ * entirely.
+ *
+ * This column should always contain either 0 or 1.
+ */
+
+ ALTER TABLE room_memberships ADD COLUMN forgotten INTEGER DEFAULT 0;
diff --git a/synapse/storage/schema/delta/27/ts.py b/synapse/storage/schema/delta/27/ts.py
new file mode 100644
index 0000000000..8d4a981975
--- /dev/null
+++ b/synapse/storage/schema/delta/27/ts.py
@@ -0,0 +1,57 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from synapse.storage.prepare_database import get_statements
+
+import ujson
+
+logger = logging.getLogger(__name__)
+
+
+ALTER_TABLE = (
+ "ALTER TABLE events ADD COLUMN origin_server_ts BIGINT;"
+ "CREATE INDEX events_ts ON events(origin_server_ts, stream_ordering);"
+)
+
+
+def run_upgrade(cur, database_engine, *args, **kwargs):
+ for statement in get_statements(ALTER_TABLE.splitlines()):
+ cur.execute(statement)
+
+ cur.execute("SELECT MIN(stream_ordering) FROM events")
+ rows = cur.fetchall()
+ min_stream_id = rows[0][0]
+
+ cur.execute("SELECT MAX(stream_ordering) FROM events")
+ rows = cur.fetchall()
+ max_stream_id = rows[0][0]
+
+ if min_stream_id is not None and max_stream_id is not None:
+ progress = {
+ "target_min_stream_id_inclusive": min_stream_id,
+ "max_stream_id_exclusive": max_stream_id + 1,
+ "rows_inserted": 0,
+ }
+ progress_json = ujson.dumps(progress)
+
+ sql = (
+ "INSERT into background_updates (update_name, progress_json)"
+ " VALUES (?, ?)"
+ )
+
+ sql = database_engine.convert_param_style(sql)
+
+ cur.execute(sql, ("event_origin_server_ts", progress_json))
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index 380270b009..6cb5e73b6e 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -20,6 +20,7 @@ from synapse.api.errors import SynapseError
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
import logging
+import re
logger = logging.getLogger(__name__)
@@ -84,6 +85,11 @@ class SearchStore(BackgroundUpdateStore):
# skip over it.
continue
+ if not isinstance(value, basestring):
+ # If the event body, name or topic isn't a string
+ # then skip over it
+ continue
+
event_search_rows.append((event_id, room_id, key, value))
if isinstance(self.database_engine, PostgresEngine):
@@ -139,6 +145,9 @@ class SearchStore(BackgroundUpdateStore):
list of dicts
"""
clauses = []
+
+ search_query = _parse_query(self.database_engine, search_term)
+
args = []
# Make sure we don't explode because the person is in too many rooms.
@@ -158,18 +167,36 @@ class SearchStore(BackgroundUpdateStore):
"(%s)" % (" OR ".join(local_clauses),)
)
+ count_args = args
+ count_clauses = clauses
+
if isinstance(self.database_engine, PostgresEngine):
sql = (
- "SELECT ts_rank_cd(vector, query) AS rank, room_id, event_id"
- " FROM plainto_tsquery('english', ?) as query, event_search"
- " WHERE vector @@ query"
+ "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) AS rank,"
+ " room_id, event_id"
+ " FROM event_search"
+ " WHERE vector @@ to_tsquery('english', ?)"
+ )
+ args = [search_query, search_query] + args
+
+ count_sql = (
+ "SELECT room_id, count(*) as count FROM event_search"
+ " WHERE vector @@ to_tsquery('english', ?)"
)
+ count_args = [search_query] + count_args
elif isinstance(self.database_engine, Sqlite3Engine):
sql = (
"SELECT rank(matchinfo(event_search)) as rank, room_id, event_id"
" FROM event_search"
" WHERE value MATCH ?"
)
+ args = [search_query] + args
+
+ count_sql = (
+ "SELECT room_id, count(*) as count FROM event_search"
+ " WHERE value MATCH ?"
+ )
+ count_args = [search_query] + count_args
else:
# This should be unreachable.
raise Exception("Unrecognized database engine")
@@ -177,12 +204,15 @@ class SearchStore(BackgroundUpdateStore):
for clause in clauses:
sql += " AND " + clause
+ for clause in count_clauses:
+ count_sql += " AND " + clause
+
# We add an arbitrary limit here to ensure we don't try to pull the
# entire table from the database.
sql += " ORDER BY rank DESC LIMIT 500"
results = yield self._execute(
- "search_msgs", self.cursor_to_dict, sql, *([search_term] + args)
+ "search_msgs", self.cursor_to_dict, sql, *args
)
results = filter(lambda row: row["room_id"] in room_ids, results)
@@ -194,21 +224,37 @@ class SearchStore(BackgroundUpdateStore):
for ev in events
}
- defer.returnValue([
- {
- "event": event_map[r["event_id"]],
- "rank": r["rank"],
- }
- for r in results
- if r["event_id"] in event_map
- ])
+ highlights = None
+ if isinstance(self.database_engine, PostgresEngine):
+ highlights = yield self._find_highlights_in_postgres(search_query, events)
+
+ count_sql += " GROUP BY room_id"
+
+ count_results = yield self._execute(
+ "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
+ )
+
+ count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
+
+ defer.returnValue({
+ "results": [
+ {
+ "event": event_map[r["event_id"]],
+ "rank": r["rank"],
+ }
+ for r in results
+ if r["event_id"] in event_map
+ ],
+ "highlights": highlights,
+ "count": count,
+ })
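The value search_msgs now resolves to is a dict rather than a bare list of results. Roughly, a caller should expect something of this shape (the values here are made up; highlights is None on SQLite, since only the PostgreSQL path computes them):

    # Hypothetical illustration of the new response shape.
    response = {
        "results": [
            {"event": "<event object>", "rank": 0.42},
        ],
        "highlights": set(["foo", "bar"]),  # None when running on SQLite
        "count": 1,
    }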
@defer.inlineCallbacks
- def search_room(self, room_id, search_term, keys, limit, pagination_token=None):
+ def search_rooms(self, room_ids, search_term, keys, limit, pagination_token=None):
"""Performs a full text search over events with given keys.
Args:
- room_id (str): The room_id to search in
+ room_ids (list): The room_ids to search in
search_term (str): Search term to search for
keys (list): List of keys to search in, currently supports
"content.body", "content.name", "content.topic"
@@ -218,7 +264,18 @@ class SearchStore(BackgroundUpdateStore):
list of dicts
"""
clauses = []
- args = [search_term, room_id]
+
+ search_query = _parse_query(self.database_engine, search_term)
+
+ args = []
+
+ # Make sure we don't explode because the person is in too many rooms.
+ # We filter the results below regardless.
+ if len(room_ids) < 500:
+ clauses.append(
+ "room_id IN (%s)" % (",".join(["?"] * len(room_ids)),)
+ )
+ args.extend(room_ids)
local_clauses = []
for key in keys:
@@ -229,28 +286,40 @@ class SearchStore(BackgroundUpdateStore):
"(%s)" % (" OR ".join(local_clauses),)
)
+ # take copies of the current args and clauses lists, before adding
+ # pagination clauses to the main query.
+ count_args = list(args)
+ count_clauses = list(clauses)
+
if pagination_token:
try:
- topo, stream = pagination_token.split(",")
- topo = int(topo)
+ origin_server_ts, stream = pagination_token.split(",")
+ origin_server_ts = int(origin_server_ts)
stream = int(stream)
except:
raise SynapseError(400, "Invalid pagination token")
clauses.append(
- "(topological_ordering < ?"
- " OR (topological_ordering = ? AND stream_ordering < ?))"
+ "(origin_server_ts < ?"
+ " OR (origin_server_ts = ? AND stream_ordering < ?))"
)
- args.extend([topo, topo, stream])
+ args.extend([origin_server_ts, origin_server_ts, stream])
if isinstance(self.database_engine, PostgresEngine):
sql = (
- "SELECT ts_rank_cd(vector, query) as rank,"
- " topological_ordering, stream_ordering, room_id, event_id"
- " FROM plainto_tsquery('english', ?) as query, event_search"
+ "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) as rank,"
+ " origin_server_ts, stream_ordering, room_id, event_id"
+ " FROM event_search"
" NATURAL JOIN events"
- " WHERE vector @@ query AND room_id = ?"
+ " WHERE vector @@ to_tsquery('english', ?) AND "
+ )
+ args = [search_query, search_query] + args
+
+ count_sql = (
+ "SELECT room_id, count(*) as count FROM event_search"
+ " WHERE vector @@ to_tsquery('english', ?) AND "
)
+ count_args = [search_query] + count_args
elif isinstance(self.database_engine, Sqlite3Engine):
# We use CROSS JOIN here to ensure we use the right indexes.
# https://sqlite.org/optoverview.html#crossjoin
@@ -262,24 +331,31 @@ class SearchStore(BackgroundUpdateStore):
# MATCH unless it uses the full text search index
sql = (
"SELECT rank(matchinfo) as rank, room_id, event_id,"
- " topological_ordering, stream_ordering"
+ " origin_server_ts, stream_ordering"
" FROM (SELECT key, event_id, matchinfo(event_search) as matchinfo"
" FROM event_search"
" WHERE value MATCH ?"
" )"
" CROSS JOIN events USING (event_id)"
- " WHERE room_id = ?"
+ " WHERE "
)
+ args = [search_query] + args
+
+ count_sql = (
+ "SELECT room_id, count(*) as count FROM event_search"
+ " WHERE value MATCH ? AND "
+ )
+ count_args = [search_query] + count_args
else:
# This should be unreachable.
raise Exception("Unrecognized database engine")
- for clause in clauses:
- sql += " AND " + clause
+ sql += " AND ".join(clauses)
+ count_sql += " AND ".join(count_clauses)
# We add an arbitrary limit here to ensure we don't try to pull the
# entire table from the database.
- sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"
+ sql += " ORDER BY origin_server_ts DESC, stream_ordering DESC LIMIT ?"
args.append(limit)
@@ -287,6 +363,8 @@ class SearchStore(BackgroundUpdateStore):
"search_rooms", self.cursor_to_dict, sql, *args
)
+ results = filter(lambda row: row["room_id"] in room_ids, results)
+
events = yield self._get_events([r["event_id"] for r in results])
event_map = {
@@ -294,14 +372,119 @@ class SearchStore(BackgroundUpdateStore):
for ev in events
}
- defer.returnValue([
- {
- "event": event_map[r["event_id"]],
- "rank": r["rank"],
- "pagination_token": "%s,%s" % (
- r["topological_ordering"], r["stream_ordering"]
- ),
- }
- for r in results
- if r["event_id"] in event_map
- ])
+ highlights = None
+ if isinstance(self.database_engine, PostgresEngine):
+ highlights = yield self._find_highlights_in_postgres(search_query, events)
+
+ count_sql += " GROUP BY room_id"
+
+ count_results = yield self._execute(
+ "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
+ )
+
+ count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
+
+ defer.returnValue({
+ "results": [
+ {
+ "event": event_map[r["event_id"]],
+ "rank": r["rank"],
+ "pagination_token": "%s,%s" % (
+ r["origin_server_ts"], r["stream_ordering"]
+ ),
+ }
+ for r in results
+ if r["event_id"] in event_map
+ ],
+ "highlights": highlights,
+ "count": count,
+ })
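The pagination_token emitted above is simply the two orderings joined by a comma, matching what the parsing branch near the top of this function expects back. For illustration (the values are made up):

    # Hypothetical values for illustration.
    origin_server_ts = 1451606400000
    stream_ordering = 1234

    token = "%s,%s" % (origin_server_ts, stream_ordering)  # "1451606400000,1234"

    # Round-tripping it, as the pagination_token branch above does:
    ts_part, stream_part = token.split(",")
    assert int(ts_part) == origin_server_ts
    assert int(stream_part) == stream_ordering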
+
+ def _find_highlights_in_postgres(self, search_query, events):
+ """Given a list of events and a search term, return a list of words
+ that match from the content of the event.
+
+ This is used to give a list of words that clients can match against to
+ highlight the matching parts.
+
+ Args:
+ search_query (str)
+ events (list): A list of events
+
+ Returns:
+ deferred: A set of strings.
+ """
+ def f(txn):
+ highlight_words = set()
+ for event in events:
+ # As a hack we simply join values of all possible keys. This is
+ # fine since we're only using them to find possible highlights.
+ values = []
+ for key in ("body", "name", "topic"):
+ v = event.content.get(key, None)
+ if v:
+ values.append(v)
+
+ if not values:
+ continue
+
+ value = " ".join(values)
+
+ # We need to find some values for StartSel and StopSel that
+ # aren't in the value so that we can pick results out.
+ start_sel = "<"
+ stop_sel = ">"
+
+ while start_sel in value:
+ start_sel += "<"
+ while stop_sel in value:
+ stop_sel += ">"
+
+ query = "SELECT ts_headline(?, to_tsquery('english', ?), %s)" % (
+ _to_postgres_options({
+ "StartSel": start_sel,
+ "StopSel": stop_sel,
+ "MaxFragments": "50",
+ })
+ )
+ txn.execute(query, (value, search_query,))
+ headline, = txn.fetchall()[0]
+
+ # Now we need to pick the possible highlights out of the headline
+ # result.
+ matcher_regex = "%s(.*?)%s" % (
+ re.escape(start_sel),
+ re.escape(stop_sel),
+ )
+
+ res = re.findall(matcher_regex, headline)
+ highlight_words.update([r.lower() for r in res])
+
+ return highlight_words
+
+ return self.runInteraction("_find_highlights", f)
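The delimiter-and-regex trick above can be exercised without a database: given a ts_headline result wrapped in the chosen StartSel/StopSel markers, the highlight words fall out of a non-greedy match. A self-contained sketch with a made-up headline:

    import re

    start_sel, stop_sel = "<", ">"

    # A made-up example of what ts_headline might return with these markers.
    headline = "the <quick> brown fox <jumps> over the lazy dog"

    matcher_regex = "%s(.*?)%s" % (re.escape(start_sel), re.escape(stop_sel))
    highlight_words = set(r.lower() for r in re.findall(matcher_regex, headline))
    # highlight_words is now set(['quick', 'jumps'])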
+
+
+def _to_postgres_options(options_dict):
+ return "'%s'" % (
+ ",".join("%s=%s" % (k, v) for k, v in options_dict.items()),
+ )
+
+
+def _parse_query(database_engine, search_term):
+ """Takes a plain unicode string from the user and converts it into a form
+ that can be passed to the database.
+ We use this so that we can add prefix matching, which isn't something
+ that is supported by default.
+ """
+
+ # Pull out the individual words, discarding any non-word characters.
+ results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)
+
+ if isinstance(database_engine, PostgresEngine):
+ return " & ".join(result + ":*" for result in results)
+ elif isinstance(database_engine, Sqlite3Engine):
+ return " & ".join(result + "*" for result in results)
+ else:
+ # This should be unreachable.
+ raise Exception("Unrecognized database engine")
diff --git a/synapse/storage/tags.py b/synapse/storage/tags.py
index f6d826cc59..f520f60c6c 100644
--- a/synapse/storage/tags.py
+++ b/synapse/storage/tags.py
@@ -48,8 +48,8 @@ class TagsStore(SQLBaseStore):
Args:
user_id(str): The user to get the tags for.
Returns:
- A deferred dict mapping from room_id strings to lists of tag
- strings.
+ A deferred dict mapping from room_id strings to dicts mapping from
+ tag strings to tag content.
"""
deferred = self._simple_select_list(
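The corrected docstring describes a structure along these lines (the room id and tag content are hypothetical; m.favourite and its order field are tag conventions from the Matrix specification):

    # Hypothetical example of the deferred dict described above.
    tags_by_room = {
        "!abcdefgh:example.com": {
            "m.favourite": {"order": 0.1},
        },
    }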
diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index d69c7cb991..2170746025 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -64,8 +64,7 @@ class Clock(object):
current_context = LoggingContext.current_context()
def wrapped_callback(*args, **kwargs):
- with PreserveLoggingContext():
- LoggingContext.thread_local.current_context = current_context
+ with PreserveLoggingContext(current_context):
callback(*args, **kwargs)
with PreserveLoggingContext():
diff --git a/synapse/util/caches/snapshot_cache.py b/synapse/util/caches/snapshot_cache.py
new file mode 100644
index 0000000000..09f00afbc5
--- /dev/null
+++ b/synapse/util/caches/snapshot_cache.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.util.async import ObservableDeferred
+
+
+class SnapshotCache(object):
+ """Cache for snapshots like the response of /initialSync.
+ The response of initialSync only has to be a recent snapshot of the
+ server state. It shouldn't matter to clients if it is a few minutes out
+ of date.
+
+ This caches a deferred response. Until the deferred completes it will be
+ returned from the cache. This means that if the client retries the request
+ while the response is still being computed, that original response will be
+ used rather than trying to compute a new response.
+
+ Once the deferred completes it will be removed from the cache after 5 minutes.
+ We delay removing it from the cache because a client retrying its request
+ could race with us finishing computing the response.
+
+ Rather than tracking precisely how long something has been in the cache we
+ keep two generations of completed responses. Every 5 minutes discard the
+ old generation, move the new generation to the old generation, and set the
+ new generation to be empty. This means that a result will be in the cache
+ somewhere between 5 and 10 minutes.
+ """
+
+ DURATION_MS = 5 * 60 * 1000 # Cache results for 5 minutes.
+
+ def __init__(self):
+ self.pending_result_cache = {} # Requests that haven't finished yet.
+ self.prev_result_cache = {} # The older requests that have finished.
+ self.next_result_cache = {} # The newer requests that have finished.
+ self.time_last_rotated_ms = 0
+
+ def rotate(self, time_now_ms):
+ # Rotate once if the cache duration has passed since the last rotation.
+ if time_now_ms - self.time_last_rotated_ms >= self.DURATION_MS:
+ self.prev_result_cache = self.next_result_cache
+ self.next_result_cache = {}
+ self.time_last_rotated_ms += self.DURATION_MS
+
+ # Rotate again if the cache duration has passed twice since the last
+ # rotation.
+ if time_now_ms - self.time_last_rotated_ms >= self.DURATION_MS:
+ self.prev_result_cache = self.next_result_cache
+ self.next_result_cache = {}
+ self.time_last_rotated_ms = time_now_ms
+
+ def get(self, time_now_ms, key):
+ self.rotate(time_now_ms)
+ # This cache is intended to deduplicate requests, so we expect it to be
+ # missed most of the time. We therefore just look up the key in all of
+ # the dictionaries rather than short-circuiting the lookup once the
+ # key is found.
+ result = self.prev_result_cache.get(key)
+ result = self.next_result_cache.get(key, result)
+ result = self.pending_result_cache.get(key, result)
+ if result is not None:
+ return result.observe()
+ else:
+ return None
+
+ def set(self, time_now_ms, key, deferred):
+ self.rotate(time_now_ms)
+
+ result = ObservableDeferred(deferred)
+
+ self.pending_result_cache[key] = result
+
+ def shuffle_along(r):
+ # When the deferred completes we shuffle it along to the first
+ # generation of the result cache, so that it will eventually
+ # expire from the rotation of that cache.
+ self.next_result_cache[key] = result
+ self.pending_result_cache.pop(key, None)
+
+ result.observe().addBoth(shuffle_along)
+
+ return result.observe()
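Usage follows the pattern the new unit test further down exercises: the first request stores a pending deferred, retries observe the same computation, and rotation eventually drops the completed entry. A minimal sketch (the cache key is hypothetical):

    from twisted.internet.defer import Deferred

    from synapse.util.caches.snapshot_cache import SnapshotCache

    cache = SnapshotCache()
    key = ("@user:example.com", "since_token")  # hypothetical cache key

    d = Deferred()
    first = cache.set(0, key, d)  # the first request starts the computation

    # A retry arriving while the computation is pending shares the result.
    retry = cache.get(1000, key)
    assert retry is not None and not retry.called

    d.callback("snapshot")  # computation finishes; both observers fire
    assert first.called and retry.called

    # After one to two full rotations the completed entry is discarded.
    assert cache.get(2 * SnapshotCache.DURATION_MS + 1, key) is None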
diff --git a/synapse/util/debug.py b/synapse/util/debug.py
index f6a5a841a4..b2bee7958f 100644
--- a/synapse/util/debug.py
+++ b/synapse/util/debug.py
@@ -30,8 +30,7 @@ def debug_deferreds():
context = LoggingContext.current_context()
def restore_context_callback(x):
- with PreserveLoggingContext():
- LoggingContext.thread_local.current_context = context
+ with PreserveLoggingContext(context):
return fn(x)
return restore_context_callback
diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py
index 7e6062c1b8..d528ced55a 100644
--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -19,6 +19,25 @@ import logging
logger = logging.getLogger(__name__)
+try:
+ import resource
+
+ # Python doesn't ship with a definition of RUSAGE_THREAD but it's defined
+ # to be 1 on Linux, so we hard-code it.
+ RUSAGE_THREAD = 1
+
+ # If the system doesn't support RUSAGE_THREAD then this should throw an
+ # exception.
+ resource.getrusage(RUSAGE_THREAD)
+
+ def get_thread_resource_usage():
+ return resource.getrusage(RUSAGE_THREAD)
+except:
+ # If the system doesn't support resource.getrusage(RUSAGE_THREAD) then we
+ # can't track resource usage, so we return None instead.
+ def get_thread_resource_usage():
+ return None
+
class LoggingContext(object):
"""Additional context for log formatting. Contexts are scoped within a
@@ -27,7 +46,9 @@ class LoggingContext(object):
name (str): Name for the context for debugging.
"""
- __slots__ = ["parent_context", "name", "__dict__"]
+ __slots__ = [
+ "parent_context", "name", "usage_start", "usage_end", "main_thread", "__dict__"
+ ]
thread_local = threading.local()
@@ -42,11 +63,26 @@ class LoggingContext(object):
def copy_to(self, record):
pass
+ def start(self):
+ pass
+
+ def stop(self):
+ pass
+
+ def add_database_transaction(self, duration_ms):
+ pass
+
sentinel = Sentinel()
def __init__(self, name=None):
self.parent_context = None
self.name = name
+ self.ru_stime = 0.
+ self.ru_utime = 0.
+ self.db_txn_count = 0
+ self.db_txn_duration = 0.
+ self.usage_start = None
+ self.usage_end = None
+ self.main_thread = threading.current_thread()
def __str__(self):
return "%s@%x" % (self.name, id(self))
@@ -56,12 +92,26 @@ class LoggingContext(object):
"""Get the current logging context from thread local storage"""
return getattr(cls.thread_local, "current_context", cls.sentinel)
+ @classmethod
+ def set_current_context(cls, context):
+ """Set the current logging context in thread local storage
+ Args:
+ context(LoggingContext): The context to activate.
+ Returns:
+ The context that was previously active
+ """
+ current = cls.current_context()
+ if current is not context:
+ current.stop()
+ cls.thread_local.current_context = context
+ context.start()
+ return current
+
def __enter__(self):
"""Enters this logging context into thread local storage"""
if self.parent_context is not None:
raise Exception("Attempt to enter logging context multiple times")
- self.parent_context = self.current_context()
- self.thread_local.current_context = self
+ self.parent_context = self.set_current_context(self)
return self
def __exit__(self, type, value, traceback):
@@ -70,16 +120,16 @@ class LoggingContext(object):
Returns:
None to avoid suppressing any exceptions that were thrown.
"""
- if self.thread_local.current_context is not self:
- if self.thread_local.current_context is self.sentinel:
+ current = self.set_current_context(self.parent_context)
+ if current is not self:
+ if current is self.sentinel:
logger.debug("Expected logging context %s has been lost", self)
else:
logger.warn(
"Current logging context %s is not expected context %s",
- self.thread_local.current_context,
+ current,
self
)
- self.thread_local.current_context = self.parent_context
self.parent_context = None
def __getattr__(self, name):
@@ -93,6 +143,43 @@ class LoggingContext(object):
for key, value in self.__dict__.items():
setattr(record, key, value)
+ record.ru_utime, record.ru_stime = self.get_resource_usage()
+
+ def start(self):
+ if threading.current_thread() is not self.main_thread:
+ return
+
+ if self.usage_start and self.usage_end:
+ self.ru_utime += self.usage_end.ru_utime - self.usage_start.ru_utime
+ self.ru_stime += self.usage_end.ru_stime - self.usage_start.ru_stime
+ self.usage_start = None
+ self.usage_end = None
+
+ if not self.usage_start:
+ self.usage_start = get_thread_resource_usage()
+
+ def stop(self):
+ if threading.current_thread() is not self.main_thread:
+ return
+
+ if self.usage_start:
+ self.usage_end = get_thread_resource_usage()
+
+ def get_resource_usage(self):
+ ru_utime = self.ru_utime
+ ru_stime = self.ru_stime
+
+ if self.usage_start and threading.current_thread() is self.main_thread:
+ current = get_thread_resource_usage()
+ ru_utime += current.ru_utime - self.usage_start.ru_utime
+ ru_stime += current.ru_stime - self.usage_start.ru_stime
+
+ return ru_utime, ru_stime
+
+ def add_database_transaction(self, duration_ms):
+ self.db_txn_count += 1
+ self.db_txn_duration += duration_ms / 1000.
+
class LoggingContextFilter(logging.Filter):
"""Logging filter that adds values from the current logging context to each
@@ -121,17 +208,20 @@ class PreserveLoggingContext(object):
exited. Used to restore the context after a function using
@defer.inlineCallbacks is resumed by a callback from the reactor."""
- __slots__ = ["current_context"]
+ __slots__ = ["current_context", "new_context"]
+
+ def __init__(self, new_context=LoggingContext.sentinel):
+ self.new_context = new_context
def __enter__(self):
"""Captures the current logging context"""
- self.current_context = LoggingContext.current_context()
- LoggingContext.thread_local.current_context = LoggingContext.sentinel
+ self.current_context = LoggingContext.set_current_context(
+ self.new_context
+ )
def __exit__(self, type, value, traceback):
"""Restores the current logging context"""
- LoggingContext.thread_local.current_context = self.current_context
-
+ LoggingContext.set_current_context(self.current_context)
if self.current_context is not LoggingContext.sentinel:
if self.current_context.parent_context is None:
logger.warn(
@@ -164,8 +254,7 @@ class _PreservingContextDeferred(defer.Deferred):
def _wrap_callback(self, f):
def g(res, *args, **kwargs):
- with PreserveLoggingContext():
- LoggingContext.thread_local.current_context = self._log_context
+ with PreserveLoggingContext(self._log_context):
res = f(res, *args, **kwargs)
return res
return g
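Putting the logcontext changes together: a context started on the main thread accumulates thread CPU time across start()/stop() transitions, plus any database time reported to it, and PreserveLoggingContext(new_context) is how Clock and debug_deferreds now reinstate a captured context around callbacks. A sketch of the intended use:

    from synapse.util.logcontext import LoggingContext, PreserveLoggingContext

    context = LoggingContext("request-1234")
    with context:
        # Stand-in for real request handling on the main thread.
        total = sum(i * i for i in range(100000))

        # Report a database transaction that took 12 ms against this context.
        context.add_database_transaction(12)

        # CPU seconds used on the main thread while this context was active,
        # split into user and system time; both are 0.0 if RUSAGE_THREAD is
        # unavailable on this platform.
        ru_utime, ru_stime = context.get_resource_usage()

    # Later (e.g. in a reactor callback) the captured context can be
    # reinstated for the duration of the callback:
    with PreserveLoggingContext(context):
        pass  # the callback body would run here with `context` active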
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 1172ceae8b..c42b5b80d7 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -365,7 +365,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
# TODO(paul): This test will likely break if/when real auth permissions
# are added; for now the HS will always accept any invite
- yield self.handler.send_invite(
+ yield self.handler.send_presence_invite(
observer_user=self.u_apple, observed_user=self.u_banana)
self.assertEquals(
@@ -384,7 +384,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
@defer.inlineCallbacks
def test_invite_local_nonexistant(self):
- yield self.handler.send_invite(
+ yield self.handler.send_presence_invite(
observer_user=self.u_apple, observed_user=self.u_durian)
self.assertEquals(
@@ -414,7 +414,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
defer.succeed((200, "OK"))
)
- yield self.handler.send_invite(
+ yield self.handler.send_presence_invite(
observer_user=self.u_apple, observed_user=u_rocket)
self.assertEquals(
diff --git a/tests/util/test_snapshot_cache.py b/tests/util/test_snapshot_cache.py
new file mode 100644
index 0000000000..f58576c941
--- /dev/null
+++ b/tests/util/test_snapshot_cache.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .. import unittest
+
+from synapse.util.caches.snapshot_cache import SnapshotCache
+from twisted.internet.defer import Deferred
+
+class SnapshotCacheTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.cache = SnapshotCache()
+ self.cache.DURATION_MS = 1
+
+ def test_get_set(self):
+ # Check that getting a missing key returns None
+ self.assertEquals(self.cache.get(0, "key"), None)
+
+ # Check that setting a key with a deferred returns
+ # a deferred that resolves when the initial deferred does
+ d = Deferred()
+ set_result = self.cache.set(0, "key", d)
+ self.assertIsNotNone(set_result)
+ self.assertFalse(set_result.called)
+
+ # Check that getting the key before the deferred has resolved
+ # returns a deferred that resolves when the initial deferred does.
+ get_result_at_10 = self.cache.get(10, "key")
+ self.assertIsNotNone(get_result_at_10)
+ self.assertFalse(get_result_at_10.called)
+
+ # Check that the returned deferreds resolve when the initial deferred
+ # does.
+ d.callback("v")
+ self.assertTrue(set_result.called)
+ self.assertTrue(get_result_at_10.called)
+
+ # Check that getting the key after the deferred has resolved
+ # before the cache expires returns a resolved deferred.
+ get_result_at_11 = self.cache.get(11, "key")
+ self.assertIsNotNone(get_result_at_11)
+ self.assertTrue(get_result_at_11.called)
+
+ # Check that getting the key after the deferred has resolved
+ # after the cache expires returns None
+ get_result_at_12 = self.cache.get(12, "key")
+ self.assertIsNone(get_result_at_12)
diff --git a/tests/utils.py b/tests/utils.py
index 91040c2efd..aee69b1caa 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -168,8 +168,9 @@ class MockHttpResource(HttpServer):
raise KeyError("No event can handle %s" % path)
- def register_path(self, method, path_pattern, callback):
- self.callbacks.append((method, path_pattern, callback))
+ def register_paths(self, method, path_patterns, callback):
+ for path_pattern in path_patterns:
+ self.callbacks.append((method, path_pattern, callback))
class MockKey(object):
diff --git a/tox.ini b/tox.ini
index 95424765c3..bd313a4f36 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,7 +11,8 @@ deps =
setenv =
PYTHONDONTWRITEBYTECODE = no_byte_code
commands =
- /bin/bash -c "coverage run --source=synapse {envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}"
+ /bin/bash -c "coverage run {env:COVERAGE_OPTS:} --source={toxinidir}/synapse \
+ {envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}"
{env:DUMP_COVERAGE_COMMAND:coverage report -m}
[testenv:packaging]
|