author     Erik Johnston <erik@matrix.org>  2015-11-17 15:45:43 +0000
committer  Erik Johnston <erik@matrix.org>  2015-11-17 15:45:43 +0000
commit     d3861b44424aa6f03cc65719bb1527330157abea (patch)
tree       4377eb0dc5e221862489bdcc802e50e2f1f41cb1
parent     Merge branch 'hotfixes-v0.10.0-r2' of github.com:matrix-org/synapse (diff)
parent     Slightly more aggressive retry timers at HTTP level (diff)
download   synapse-d3861b44424aa6f03cc65719bb1527330157abea.tar.xz

    Merge branch 'release-v0.11.0' of github.com:matrix-org/synapse (v0.11.0)
-rw-r--r--  .gitignore | 4
-rw-r--r--  AUTHORS.rst | 5
-rw-r--r--  CHANGES.rst | 46
-rw-r--r--  MANIFEST.in | 16
-rw-r--r--  README.rst | 14
-rwxr-xr-x  demo/start.sh | 8
-rwxr-xr-x  jenkins.sh | 39
-rw-r--r--  scripts-dev/check_auth.py | 3
-rw-r--r--  scripts-dev/check_event_hash.py | 2
-rw-r--r--  scripts-dev/check_signature.py | 8
-rw-r--r--  scripts-dev/convert_server_keys.py | 8
-rwxr-xr-x  scripts-dev/definitions.py | 142
-rw-r--r--  scripts-dev/hash_history.py | 4
-rwxr-xr-x  scripts/synapse_port_db | 4
-rw-r--r--  setup.cfg | 3
-rwxr-xr-x  setup.py | 42
-rw-r--r--  synapse/__init__.py | 2
-rw-r--r--  synapse/api/auth.py | 354
-rw-r--r--  synapse/api/constants.py | 14
-rw-r--r--  synapse/api/errors.py | 16
-rw-r--r--  synapse/api/filtering.py | 200
-rwxr-xr-x  synapse/app/homeserver.py | 82
-rwxr-xr-x  synapse/app/synctl.py | 64
-rw-r--r--  synapse/appservice/scheduler.py | 4
-rw-r--r--  synapse/config/_base.py | 51
-rw-r--r--  synapse/config/appservice.py | 2
-rw-r--r--  synapse/config/captcha.py | 2
-rw-r--r--  synapse/config/cas.py | 47
-rw-r--r--  synapse/config/database.py | 2
-rw-r--r--  synapse/config/homeserver.py | 5
-rw-r--r--  synapse/config/key.py | 37
-rw-r--r--  synapse/config/logger.py | 24
-rw-r--r--  synapse/config/metrics.py | 8
-rw-r--r--  synapse/config/password.py | 32
-rw-r--r--  synapse/config/ratelimiting.py | 2
-rw-r--r--  synapse/config/registration.py | 18
-rw-r--r--  synapse/config/repository.py | 2
-rw-r--r--  synapse/config/saml2.py | 5
-rw-r--r--  synapse/config/server.py | 3
-rw-r--r--  synapse/config/tls.py | 10
-rw-r--r--  synapse/config/voip.py | 2
-rw-r--r--  synapse/crypto/event_signing.py | 9
-rw-r--r--  synapse/crypto/keyring.py | 29
-rw-r--r--  synapse/events/utils.py | 9
-rw-r--r--  synapse/federation/federation_client.py | 99
-rw-r--r--  synapse/federation/federation_server.py | 26
-rw-r--r--  synapse/federation/transaction_queue.py | 28
-rw-r--r--  synapse/federation/transport/client.py | 39
-rw-r--r--  synapse/federation/transport/server.py | 57
-rw-r--r--  synapse/handlers/__init__.py | 5
-rw-r--r--  synapse/handlers/_base.py | 178
-rw-r--r--  synapse/handlers/admin.py | 1
-rw-r--r--  synapse/handlers/auth.py | 149
-rw-r--r--  synapse/handlers/events.py | 110
-rw-r--r--  synapse/handlers/federation.py | 546
-rw-r--r--  synapse/handlers/message.py | 363
-rw-r--r--  synapse/handlers/presence.py | 18
-rw-r--r--  synapse/handlers/private_user_data.py | 46
-rw-r--r--  synapse/handlers/receipts.py | 14
-rw-r--r--  synapse/handlers/register.py | 30
-rw-r--r--  synapse/handlers/room.py | 425
-rw-r--r--  synapse/handlers/search.py | 319
-rw-r--r--  synapse/handlers/sync.py | 705
-rw-r--r--  synapse/handlers/typing.py | 11
-rw-r--r--  synapse/http/client.py | 97
-rw-r--r--  synapse/http/matrixfederationclient.py | 19
-rw-r--r--  synapse/http/server.py | 12
-rw-r--r--  synapse/metrics/__init__.py | 11
-rw-r--r--  synapse/notifier.py | 60
-rw-r--r--  synapse/push/__init__.py | 2
-rw-r--r--  synapse/python_dependencies.py | 38
-rw-r--r--  synapse/rest/client/v1/admin.py | 2
-rw-r--r--  synapse/rest/client/v1/directory.py | 4
-rw-r--r--  synapse/rest/client/v1/events.py | 15
-rw-r--r--  synapse/rest/client/v1/initial_sync.py | 8
-rw-r--r--  synapse/rest/client/v1/login.py | 278
-rw-r--r--  synapse/rest/client/v1/presence.py | 8
-rw-r--r--  synapse/rest/client/v1/profile.py | 4
-rw-r--r--  synapse/rest/client/v1/push_rule.py | 6
-rw-r--r--  synapse/rest/client/v1/pusher.py | 4
-rw-r--r--  synapse/rest/client/v1/room.py | 167
-rw-r--r--  synapse/rest/client/v1/voip.py | 4
-rw-r--r--  synapse/rest/client/v2_alpha/__init__.py | 4
-rw-r--r--  synapse/rest/client/v2_alpha/account.py | 6
-rw-r--r--  synapse/rest/client/v2_alpha/filter.py | 4
-rw-r--r--  synapse/rest/client/v2_alpha/keys.py | 9
-rw-r--r--  synapse/rest/client/v2_alpha/receipts.py | 6
-rw-r--r--  synapse/rest/client/v2_alpha/register.py | 27
-rw-r--r--  synapse/rest/client/v2_alpha/sync.py | 322
-rw-r--r--  synapse/rest/client/v2_alpha/tags.py | 106
-rw-r--r--  synapse/rest/client/v2_alpha/tokenrefresh.py | 56
-rw-r--r--  synapse/rest/key/v1/server_key_resource.py | 6
-rw-r--r--  synapse/rest/key/v2/local_key_resource.py | 6
-rw-r--r--  synapse/rest/media/v0/content_repository.py | 2
-rw-r--r--  synapse/rest/media/v1/upload_resource.py | 2
-rw-r--r--  synapse/server.py | 20
-rw-r--r--  synapse/state.py | 39
-rw-r--r--  synapse/static/client/login/index.html | 50
-rw-r--r--  synapse/static/client/login/js/jquery-2.1.3.min.js (renamed from static/client/register/js/jquery-2.1.3.min.js) | 0
-rw-r--r--  synapse/static/client/login/js/login.js | 153
-rw-r--r--  synapse/static/client/login/spinner.gif | bin 0 -> 1849 bytes
-rw-r--r--  synapse/static/client/login/style.css | 57
-rw-r--r--  synapse/static/client/register/index.html (renamed from static/client/register/index.html) | 0
-rw-r--r--  synapse/static/client/register/js/jquery-2.1.3.min.js | 4
-rw-r--r--  synapse/static/client/register/js/recaptcha_ajax.js (renamed from static/client/register/js/recaptcha_ajax.js) | 0
-rw-r--r--  synapse/static/client/register/js/register.js (renamed from static/client/register/js/register.js) | 0
-rw-r--r--  synapse/static/client/register/register_config.sample.js (renamed from static/client/register/register_config.sample.js) | 0
-rw-r--r--  synapse/static/client/register/style.css (renamed from static/client/register/style.css) | 0
-rw-r--r--  synapse/storage/__init__.py | 407
-rw-r--r--  synapse/storage/_base.py | 193
-rw-r--r--  synapse/storage/background_updates.py | 256
-rw-r--r--  synapse/storage/engines/postgres.py | 2
-rw-r--r--  synapse/storage/engines/sqlite3.py | 31
-rw-r--r--  synapse/storage/event_federation.py | 94
-rw-r--r--  synapse/storage/events.py | 108
-rw-r--r--  synapse/storage/filtering.py | 4
-rw-r--r--  synapse/storage/keys.py | 2
-rw-r--r--  synapse/storage/prepare_database.py | 395
-rw-r--r--  synapse/storage/pusher.py | 6
-rw-r--r--  synapse/storage/registration.py | 102
-rw-r--r--  synapse/storage/room.py | 106
-rw-r--r--  synapse/storage/roommember.py | 47
-rw-r--r--  synapse/storage/schema/delta/23/drop_state_index.sql | 16
-rw-r--r--  synapse/storage/schema/delta/23/refresh_tokens.sql | 21
-rw-r--r--  synapse/storage/schema/delta/24/stats_reporting.sql | 22
-rw-r--r--  synapse/storage/schema/delta/25/00background_updates.sql | 21
-rw-r--r--  synapse/storage/schema/delta/25/fts.py | 78
-rw-r--r--  synapse/storage/schema/delta/25/guest_access.sql | 25
-rw-r--r--  synapse/storage/schema/delta/25/history_visibility.sql | 25
-rw-r--r--  synapse/storage/schema/delta/25/tags.sql | 38
-rw-r--r--  synapse/storage/search.py | 307
-rw-r--r--  synapse/storage/signatures.py | 114
-rw-r--r--  synapse/storage/state.py | 31
-rw-r--r--  synapse/storage/stream.py | 224
-rw-r--r--  synapse/storage/tags.py | 216
-rw-r--r--  synapse/storage/transactions.py | 52
-rw-r--r--  synapse/streams/config.py | 11
-rw-r--r--  synapse/streams/events.py | 33
-rw-r--r--  synapse/types.py | 30
-rw-r--r--  synapse/util/__init__.py | 36
-rw-r--r--  synapse/util/debug.py | 72
-rw-r--r--  synapse/util/emailutils.py | 71
-rw-r--r--  synapse/util/lockutils.py | 74
-rw-r--r--  synapse/util/retryutils.py | 7
-rw-r--r--  tests/api/test_auth.py | 190
-rw-r--r--  tests/api/test_filtering.py | 69
-rw-r--r--  tests/crypto/__init__.py | 15
-rw-r--r--  tests/crypto/test_event_signing.py | 114
-rw-r--r--  tests/events/__init__.py | 0
-rw-r--r--  tests/events/test_utils.py | 115
-rw-r--r--  tests/handlers/test_auth.py | 70
-rw-r--r--  tests/handlers/test_presence.py | 71
-rw-r--r--  tests/handlers/test_typing.py | 30
-rw-r--r--  tests/rest/client/v1/test_presence.py | 50
-rw-r--r--  tests/rest/client/v1/test_profile.py | 4
-rw-r--r--  tests/rest/client/v1/test_rooms.py | 123
-rw-r--r--  tests/rest/client/v1/test_typing.py | 12
-rw-r--r--  tests/rest/client/v1/utils.py | 3
-rw-r--r--  tests/rest/client/v2_alpha/__init__.py | 7
-rw-r--r--  tests/storage/event_injector.py | 81
-rw-r--r--  tests/storage/test_background_update.py | 76
-rw-r--r--  tests/storage/test_base.py | 20
-rw-r--r--  tests/storage/test_events.py | 116
-rw-r--r--  tests/storage/test_redaction.py | 4
-rw-r--r--  tests/storage/test_registration.py | 67
-rw-r--r--  tests/storage/test_room.py | 4
-rw-r--r--  tests/storage/test_stream.py | 72
-rw-r--r--  tests/test_state.py | 132
-rw-r--r--  tests/test_types.py | 9
-rw-r--r--  tests/util/test_lock.py | 108
-rw-r--r--  tests/utils.py | 28

172 files changed, 8255 insertions(+), 2810 deletions(-)
diff --git a/.gitignore b/.gitignore
index 960183a794..f8c4000134 100644
--- a/.gitignore
+++ b/.gitignore
@@ -42,3 +42,7 @@ build/
 
 localhost-800*/
 static/client/register/register_config.js
+.tox
+
+env/
+*.config
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 54ced67000..58a67c6b12 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -44,4 +44,7 @@ Eric Myhre <hash at exultant.us>
    repository API.
 
 Muthu Subramanian <muthu.subramanian.karunanidhi at ericsson.com>
- * Add SAML2 support for registration and logins.
+ * Add SAML2 support for registration and login.
+
+Steven Hammerton <steven.hammerton at openmarket.com>
+ * Add CAS support for registration and login.
diff --git a/CHANGES.rst b/CHANGES.rst
index f1d2c7a765..068ed9f2ff 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,49 @@
+Changes in synapse v0.11.0 (2015-11-17)
+=======================================
+
+* Change CAS login API (PR #349)
+
+Changes in synapse v0.11.0-rc2 (2015-11-13)
+===========================================
+
+* Various changes to /sync API response format (PR #373)
+* Fix regression when setting display name in newly joined room over
+  federation (PR #368)
+* Fix problem where /search was slow when using SQLite (PR #366)
+
+Changes in synapse v0.11.0-rc1 (2015-11-11)
+===========================================
+
+* Add Search API (PR #307, #324, #327, #336, #350, #359)
+* Add 'archived' state to v2 /sync API (PR #316)
+* Add ability to reject invites (PR #317)
+* Add config option to disable password login (PR #322)
+* Add the login fallback API (PR #330)
+* Add room context API (PR #334)
+* Add room tagging support (PR #335)
+* Update v2 /sync API to match spec (PR #305, #316, #321, #332, #337, #341)
+* Change retry schedule for application services (PR #320)
+* Change retry schedule for remote servers (PR #340)
+* Fix bug where we hosted static content in the incorrect place (PR #329)
+* Fix bug where we didn't increment retry interval for remote servers (PR #343)
+
+Changes in synapse v0.10.1-rc1 (2015-10-15)
+===========================================
+
+* Add support for CAS, thanks to Steven Hammerton (PR #295, #296)
+* Add support for using macaroons for ``access_token`` (PR #256, #229)
+* Add support for ``m.room.canonical_alias`` (PR #287)
+* Add support for viewing the history of rooms that the user has left. (PR #276,
+  #294)
+* Add support for refresh tokens (PR #240)
+* Add flag on creation which disables federation of the room (PR #279)
+* Add some room state to invites. (PR #275)
+* Atomically persist events when joining a room over federation (PR #283)
+* Change default history visibility for private rooms (PR #271)
+* Allow users to redact their own sent events (PR #262)
+* Use tox for tests (PR #247)
+* Split up syutil into separate libraries (PR #243)
+
 Changes in synapse v0.10.0-r2 (2015-09-16)
 ==========================================
 
diff --git a/MANIFEST.in b/MANIFEST.in
index a9b543af82..5668665db7 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -3,13 +3,23 @@ include LICENSE
 include VERSION
 include *.rst
 include demo/README
+include demo/demo.tls.dh
+include demo/*.py
+include demo/*.sh
 
 recursive-include synapse/storage/schema *.sql
 recursive-include synapse/storage/schema *.py
 
-recursive-include demo *.dh
-recursive-include demo *.py
-recursive-include demo *.sh
 recursive-include docs *
 recursive-include scripts *
+recursive-include scripts-dev *
 recursive-include tests *.py
+
+recursive-include synapse/static *.css
+recursive-include synapse/static *.gif
+recursive-include synapse/static *.html
+recursive-include synapse/static *.js
+
+exclude jenkins.sh
+
+prune demo/etc
diff --git a/README.rst b/README.rst
index 6c8431aa86..a8f0c62158 100644
--- a/README.rst
+++ b/README.rst
@@ -20,8 +20,8 @@ The overall architecture is::
              https://somewhere.org/_matrix      https://elsewhere.net/_matrix
 
 ``#matrix:matrix.org`` is the official support room for Matrix, and can be
-accessed by the web client at http://matrix.org/beta or via an IRC bridge at
-irc://irc.freenode.net/matrix.
+accessed by any client from https://matrix.org/blog/try-matrix-now or via an
+IRC bridge at irc://irc.freenode.net/matrix.
 
 Synapse is currently in rapid development, but as of version 0.5 we believe it
 is sufficiently stable to be run as an internet-facing service for real usage!
@@ -77,14 +77,14 @@ Meanwhile, iOS and Android SDKs and clients are available from:
 - https://github.com/matrix-org/matrix-android-sdk
 
 We'd like to invite you to join #matrix:matrix.org (via
-https://matrix.org/beta), run a homeserver, take a look at the Matrix spec at
-https://matrix.org/docs/spec and API docs at https://matrix.org/docs/api,
-experiment with the APIs and the demo clients, and report any bugs via
-https://matrix.org/jira.
+https://matrix.org/blog/try-matrix-now), run a homeserver, take a look at the
+Matrix spec at https://matrix.org/docs/spec and API docs at
+https://matrix.org/docs/api, experiment with the APIs and the demo clients, and
+report any bugs via https://matrix.org/jira.
 
 Thanks for using Matrix!
 
-[1] End-to-end encryption is currently in development
+[1] End-to-end encryption is currently in development - see https://matrix.org/git/olm
 
 Synapse Installation
 ====================
diff --git a/demo/start.sh b/demo/start.sh
index 572dbfab0b..dcc4d6f4fa 100755
--- a/demo/start.sh
+++ b/demo/start.sh
@@ -25,6 +25,7 @@ for port in 8080 8081 8082; do
         --generate-config \
         -H "localhost:$https_port" \
         --config-path "$DIR/etc/$port.config" \
+        --report-stats no
 
     # Check script parameters
     if [ $# -eq 1 ]; then
@@ -37,6 +38,13 @@ for port in 8080 8081 8082; do
 
     perl -p -i -e 's/^enable_registration:.*/enable_registration: true/g' $DIR/etc/$port.config
 
+    if ! grep -F "full_twisted_stacktraces" -q  $DIR/etc/$port.config; then
+        echo "full_twisted_stacktraces: true" >> $DIR/etc/$port.config
+    fi
+    if ! grep -F "report_stats" -q  $DIR/etc/$port.config ; then
+        echo "report_stats: false" >> $DIR/etc/$port.config
+    fi
+
     python -m synapse.app.homeserver \
         --config-path "$DIR/etc/$port.config" \
         -D \
diff --git a/jenkins.sh b/jenkins.sh
new file mode 100755
index 0000000000..4804022e80
--- /dev/null
+++ b/jenkins.sh
@@ -0,0 +1,39 @@
+#!/bin/bash -eu
+
+export PYTHONDONTWRITEBYTECODE=yep
+
+# Output test results as junit xml
+export TRIAL_FLAGS="--reporter=subunit"
+export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
+
+# Output coverage to coverage.xml
+export DUMP_COVERAGE_COMMAND="coverage xml -o coverage.xml"
+
+# Output flake8 violations to violations.flake8.log
+# Don't exit with non-0 status code on Jenkins,
+# so that the build steps continue and a later step can decide whether to
+# mark this build UNSTABLE or FAILURE.
+export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
+
+tox
+
+: ${GIT_BRANCH:="$(git rev-parse --abbrev-ref HEAD)"}
+
+set +u
+. .tox/py27/bin/activate
+set -u
+
+rm -rf sytest
+git clone https://github.com/matrix-org/sytest.git sytest
+cd sytest
+
+git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
+
+: ${PERL5LIB:=$WORKSPACE/perl5/lib/perl5}
+: ${PERL_MB_OPT:=--install_base=$WORKSPACE/perl5}
+: ${PERL_MM_OPT:=INSTALL_BASE=$WORKSPACE/perl5}
+export PERL5LIB PERL_MB_OPT PERL_MM_OPT
+
+./install-deps.pl
+
+./run-tests.pl -O tap --synapse-directory .. --all > results.tap
diff --git a/scripts-dev/check_auth.py b/scripts-dev/check_auth.py
index b889ac7fa7..4fa8792a5f 100644
--- a/scripts-dev/check_auth.py
+++ b/scripts-dev/check_auth.py
@@ -56,10 +56,9 @@ if __name__ == '__main__':
 
     js = json.load(args.json)
 
-
     auth = Auth(Mock())
     check_auth(
         auth,
         [FrozenEvent(d) for d in js["auth_chain"]],
-        [FrozenEvent(d) for d in js["pdus"]],
+        [FrozenEvent(d) for d in js.get("pdus", [])],
     )
diff --git a/scripts-dev/check_event_hash.py b/scripts-dev/check_event_hash.py
index 679afbd268..7ccae34d48 100644
--- a/scripts-dev/check_event_hash.py
+++ b/scripts-dev/check_event_hash.py
@@ -1,5 +1,5 @@
 from synapse.crypto.event_signing import *
-from syutil.base64util import encode_base64
+from unpaddedbase64 import encode_base64
 
 import argparse
 import hashlib
diff --git a/scripts-dev/check_signature.py b/scripts-dev/check_signature.py
index 59e3d603ac..079577908a 100644
--- a/scripts-dev/check_signature.py
+++ b/scripts-dev/check_signature.py
@@ -1,9 +1,7 @@
 
-from syutil.crypto.jsonsign import verify_signed_json
-from syutil.crypto.signing_key import (
-    decode_verify_key_bytes, write_signing_keys
-)
-from syutil.base64util import decode_base64
+from signedjson.sign import verify_signed_json
+from signedjson.key import decode_verify_key_bytes, write_signing_keys
+from unpaddedbase64 import decode_base64
 
 import urllib2
 import json
diff --git a/scripts-dev/convert_server_keys.py b/scripts-dev/convert_server_keys.py
index a1ee39059c..151551f22c 100644
--- a/scripts-dev/convert_server_keys.py
+++ b/scripts-dev/convert_server_keys.py
@@ -4,10 +4,10 @@ import sys
 import json
 import time
 import hashlib
-from syutil.base64util import encode_base64
-from syutil.crypto.signing_key import read_signing_keys
-from syutil.crypto.jsonsign import sign_json
-from syutil.jsonutil import encode_canonical_json
+from unpaddedbase64 import encode_base64
+from signedjson.key import read_signing_keys
+from signedjson.sign import sign_json
+from canonicaljson import encode_canonical_json
 
 
 def select_v1_keys(connection):
diff --git a/scripts-dev/definitions.py b/scripts-dev/definitions.py
new file mode 100755
index 0000000000..f0d0cd8a3f
--- /dev/null
+++ b/scripts-dev/definitions.py
@@ -0,0 +1,142 @@
+#! /usr/bin/python
+
+import ast
+import yaml
+
+class DefinitionVisitor(ast.NodeVisitor):
+    def __init__(self):
+        super(DefinitionVisitor, self).__init__()
+        self.functions = {}
+        self.classes = {}
+        self.names = {}
+        self.attrs = set()
+        self.definitions = {
+            'def': self.functions,
+            'class': self.classes,
+            'names': self.names,
+            'attrs': self.attrs,
+        }
+
+    def visit_Name(self, node):
+        self.names.setdefault(type(node.ctx).__name__, set()).add(node.id)
+
+    def visit_Attribute(self, node):
+        self.attrs.add(node.attr)
+        for child in ast.iter_child_nodes(node):
+            self.visit(child)
+
+    def visit_ClassDef(self, node):
+        visitor = DefinitionVisitor()
+        self.classes[node.name] = visitor.definitions
+        for child in ast.iter_child_nodes(node):
+            visitor.visit(child)
+
+    def visit_FunctionDef(self, node):
+        visitor = DefinitionVisitor()
+        self.functions[node.name] = visitor.definitions
+        for child in ast.iter_child_nodes(node):
+            visitor.visit(child)
+
+
+def non_empty(defs):
+    functions = {name: non_empty(f) for name, f in defs['def'].items()}
+    classes = {name: non_empty(f) for name, f in defs['class'].items()}
+    result = {}
+    if functions: result['def'] = functions
+    if classes: result['class'] = classes
+    names = defs['names']
+    uses = []
+    for name in names.get('Load', ()):
+        if name not in names.get('Param', ()) and name not in names.get('Store', ()):
+            uses.append(name)
+    uses.extend(defs['attrs'])
+    if uses: result['uses'] = uses
+    result['names'] = names
+    result['attrs'] = defs['attrs']
+    return result
+
+
+def definitions_in_code(input_code):
+    input_ast = ast.parse(input_code)
+    visitor = DefinitionVisitor()
+    visitor.visit(input_ast)
+    definitions = non_empty(visitor.definitions)
+    return definitions
+
+
+def definitions_in_file(filepath):
+    with open(filepath) as f:
+        return definitions_in_code(f.read())
+
+
+def defined_names(prefix, defs, names):
+    for name, funcs in defs.get('def', {}).items():
+        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
+        defined_names(prefix + name + ".", funcs, names)
+
+    for name, funcs in defs.get('class', {}).items():
+        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
+        defined_names(prefix + name + ".", funcs, names)
+
+
+def used_names(prefix, defs, names):
+    for name, funcs in defs.get('def', {}).items():
+        used_names(prefix + name + ".", funcs, names)
+
+    for name, funcs in defs.get('class', {}).items():
+        used_names(prefix + name + ".", funcs, names)
+
+    for used in defs.get('uses', ()):
+        if used in names:
+            names[used].setdefault('used', []).append(prefix.rstrip('.'))
+
+
+if __name__ == '__main__':
+    import sys, os, argparse, re
+
+    parser = argparse.ArgumentParser(description='Find definitions.')
+    parser.add_argument(
+        "--unused", action="store_true", help="Only list unused definitions"
+    )
+    parser.add_argument(
+        "--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
+    )
+    parser.add_argument(
+        "--pattern", action="append", metavar="REGEXP",
+        help="Search for a pattern"
+    )
+    parser.add_argument(
+        "directories", nargs='+', metavar="DIR",
+        help="Directories to search for definitions"
+    )
+    args = parser.parse_args()
+
+    definitions = {}
+    for directory in args.directories:
+        for root, dirs, files in os.walk(directory):
+            for filename in files:
+                if filename.endswith(".py"):
+                    filepath = os.path.join(root, filename)
+                    definitions[filepath] = definitions_in_file(filepath)
+
+    names = {}
+    for filepath, defs in definitions.items():
+        defined_names(filepath + ":", defs, names)
+
+    for filepath, defs in definitions.items():
+        used_names(filepath + ":", defs, names)
+
+    patterns = [re.compile(pattern) for pattern in args.pattern or ()]
+    ignore = [re.compile(pattern) for pattern in args.ignore or ()]
+
+    result = {}
+    for name, definition in names.items():
+        if patterns and not any(pattern.match(name) for pattern in patterns):
+            continue
+        if ignore and any(pattern.match(name) for pattern in ignore):
+            continue
+        if args.unused and definition.get('used'):
+            continue
+        result[name] = definition
+
+    yaml.dump(result, sys.stdout, default_flow_style=False)
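
The script above builds, per file, a nested map of definitions (functions, classes, names, attributes) and then cross-references which defined names are ever actually used. A rough sketch of what the core helper returns, assuming scripts-dev is on sys.path so the script imports as a plain module (hypothetical usage, not part of the patch):

    # Hypothetical usage of definitions_in_code() from the script above.
    from definitions import definitions_in_code

    source = (
        "def foo():\n"
        "    return bar()\n"
    )

    defs = definitions_in_code(source)
    # 'def' maps each function to its own nested definitions, while 'uses'
    # lists names that are loaded without being stored or bound as
    # parameters, so 'bar' is recorded as a use inside 'foo'.
    print(defs["def"]["foo"]["uses"])  # ['bar']

Run as intended (e.g. "definitions.py --unused synapse"), the __main__ block applies the same analysis across whole directory trees and dumps the cross-referenced result as YAML.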
diff --git a/scripts-dev/hash_history.py b/scripts-dev/hash_history.py
index bdad530af8..616d6a10e7 100644
--- a/scripts-dev/hash_history.py
+++ b/scripts-dev/hash_history.py
@@ -6,8 +6,8 @@ from synapse.crypto.event_signing import (
     add_event_pdu_content_hash, compute_pdu_event_reference_hash
 )
 from synapse.api.events.utils import prune_pdu
-from syutil.base64util import encode_base64, decode_base64
-from syutil.jsonutil import encode_canonical_json
+from unpaddedbase64 import encode_base64, decode_base64
+from canonicaljson import encode_canonical_json
 import sqlite3
 import sys
 
diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db
index c02dff5ba4..62515997b1 100755
--- a/scripts/synapse_port_db
+++ b/scripts/synapse_port_db
@@ -29,7 +29,7 @@ import traceback
 import yaml
 
 
-logger = logging.getLogger("port_from_sqlite_to_postgres")
+logger = logging.getLogger("synapse_port_db")
 
 
 BOOLEAN_COLUMNS = {
@@ -95,8 +95,6 @@ class Store(object):
     _simple_update_one = SQLBaseStore.__dict__["_simple_update_one"]
     _simple_update_one_txn = SQLBaseStore.__dict__["_simple_update_one_txn"]
 
-    _execute_and_decode = SQLBaseStore.__dict__["_execute_and_decode"]
-
     def runInteraction(self, desc, func, *args, **kwargs):
         def r(conn):
             try:
diff --git a/setup.cfg b/setup.cfg
index abb649958e..ba027c7d13 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,9 +3,6 @@ source-dir = docs/sphinx
 build-dir  = docs/build
 all_files  = 1
 
-[aliases]
-test = trial
-
 [trial]
 test_suite = tests
 
diff --git a/setup.py b/setup.py
index 60ab8c7893..9d24761d44 100755
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,8 @@
 
 import glob
 import os
-from setuptools import setup, find_packages
+from setuptools import setup, find_packages, Command
+import sys
 
 
 here = os.path.abspath(os.path.dirname(__file__))
@@ -37,6 +38,39 @@ def exec_file(path_segments):
     exec(code, result)
     return result
 
+
+class Tox(Command):
+    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
+
+    def initialize_options(self):
+        self.tox_args = None
+
+    def finalize_options(self):
+        self.test_args = []
+        self.test_suite = True
+
+    def run(self):
+        # import here, because outside of run() the eggs aren't loaded yet
+        try:
+            import tox
+        except ImportError:
+            try:
+                self.distribution.fetch_build_eggs("tox")
+                import tox
+            except:
+                raise RuntimeError(
+                    "The tests need 'tox' to run. Please install 'tox'."
+                )
+        import shlex
+        args = self.tox_args
+        if args:
+            args = shlex.split(self.tox_args)
+        else:
+            args = []
+        errno = tox.cmdline(args=args)
+        sys.exit(errno)
+
+
 version = exec_file(("synapse", "__init__.py"))["__version__"]
 dependencies = exec_file(("synapse", "python_dependencies.py"))
 long_description = read_file(("README.rst",))
@@ -47,14 +81,10 @@ setup(
     packages=find_packages(exclude=["tests", "tests.*"]),
     description="Reference Synapse Home Server",
     install_requires=dependencies['requirements'](include_conditional=True).keys(),
-    setup_requires=[
-        "Twisted>=15.1.0", # Here to override setuptools_trial's dependency on Twisted>=2.4.0
-        "setuptools_trial",
-        "mock"
-    ],
     dependency_links=dependencies["DEPENDENCY_LINKS"].values(),
     include_package_data=True,
     zip_safe=False,
     long_description=long_description,
     scripts=["synctl"] + glob.glob("scripts/*"),
+    cmdclass={'test': Tox},
 )
diff --git a/synapse/__init__.py b/synapse/__init__.py
index d62294e6bb..f0eac97bab 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.10.0-r2"
+__version__ = "0.11.0"
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 1e3b0fbfb7..8111b34428 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -14,15 +14,20 @@
 # limitations under the License.
 
 """This module contains classes for authenticating the user."""
+from canonicaljson import encode_canonical_json
+from signedjson.key import decode_verify_key_bytes
+from signedjson.sign import verify_signed_json, SignatureVerifyException
 
 from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership, JoinRules
-from synapse.api.errors import AuthError, Codes, SynapseError
+from synapse.api.errors import AuthError, Codes, SynapseError, EventSizeError
+from synapse.types import RoomID, UserID, EventID
 from synapse.util.logutils import log_function
-from synapse.types import UserID, ClientInfo
+from unpaddedbase64 import decode_base64
 
 import logging
+import pymacaroons
 
 logger = logging.getLogger(__name__)
 
@@ -30,6 +35,7 @@ logger = logging.getLogger(__name__)
 AuthEventTypes = (
     EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,
     EventTypes.JoinRules, EventTypes.RoomHistoryVisibility,
+    EventTypes.ThirdPartyInvite,
 )
 
 
@@ -40,6 +46,13 @@ class Auth(object):
         self.store = hs.get_datastore()
         self.state = hs.get_state_handler()
         self.TOKEN_NOT_FOUND_HTTP_STATUS = 401
+        self._KNOWN_CAVEAT_PREFIXES = set([
+            "gen = ",
+            "guest = ",
+            "type = ",
+            "time < ",
+            "user_id = ",
+        ])
 
     def check(self, event, auth_events):
         """ Checks if this event is correctly authed.
@@ -52,6 +65,8 @@ class Auth(object):
         Returns:
             True if the auth checks pass.
         """
+        self.check_size_limits(event)
+
         try:
             if not hasattr(event, "room_id"):
                 raise AuthError(500, "Event has no room_id: %s" % event)
@@ -65,6 +80,23 @@ class Auth(object):
                 # FIXME
                 return True
 
+            creation_event = auth_events.get((EventTypes.Create, ""), None)
+
+            if not creation_event:
+                raise SynapseError(
+                    403,
+                    "Room %r does not exist" % (event.room_id,)
+                )
+
+            creating_domain = RoomID.from_string(event.room_id).domain
+            originating_domain = UserID.from_string(event.sender).domain
+            if creating_domain != originating_domain:
+                if not self.can_federate(event, auth_events):
+                    raise AuthError(
+                        403,
+                        "This room has been marked as unfederatable."
+                    )
+
             # FIXME: Temp hack
             if event.type == EventTypes.Aliases:
                 return True
@@ -91,7 +123,7 @@ class Auth(object):
                 self._check_power_levels(event, auth_events)
 
             if event.type == EventTypes.Redaction:
-                self._check_redaction(event, auth_events)
+                self.check_redaction(event, auth_events)
 
             logger.debug("Allowing! %s", event)
         except AuthError as e:
@@ -102,8 +134,39 @@ class Auth(object):
             logger.info("Denying! %s", event)
             raise
 
+    def check_size_limits(self, event):
+        def too_big(field):
+            raise EventSizeError("%s too large" % (field,))
+
+        if len(event.user_id) > 255:
+            too_big("user_id")
+        if len(event.room_id) > 255:
+            too_big("room_id")
+        if event.is_state() and len(event.state_key) > 255:
+            too_big("state_key")
+        if len(event.type) > 255:
+            too_big("type")
+        if len(event.event_id) > 255:
+            too_big("event_id")
+        if len(encode_canonical_json(event.get_pdu_json())) > 65536:
+            too_big("event")
+
     @defer.inlineCallbacks
     def check_joined_room(self, room_id, user_id, current_state=None):
+        """Check if the user is currently joined in the room
+        Args:
+            room_id(str): The room to check.
+            user_id(str): The user to check.
+            current_state(dict): Optional map of the current state of the room.
+                If provided then that map is used to check whether they are a
+                member of the room. Otherwise the current membership is
+                loaded from the database.
+        Raises:
+            AuthError if the user is not in the room.
+        Returns:
+            A deferred membership event for the user if the user is in
+            the room.
+        """
         if current_state:
             member = current_state.get(
                 (EventTypes.Member, user_id),
@@ -120,6 +183,33 @@ class Auth(object):
         defer.returnValue(member)
 
     @defer.inlineCallbacks
+    def check_user_was_in_room(self, room_id, user_id):
+        """Check if the user was in the room at some point.
+        Args:
+            room_id(str): The room to check.
+            user_id(str): The user to check.
+        Raises:
+            AuthError if the user was never in the room.
+        Returns:
+            A deferred membership event for the user if the user was in the
+            room. This will be the join event if they are currently joined to
+            the room. This will be the leave event if they have left the room.
+        """
+        member = yield self.state.get_current_state(
+            room_id=room_id,
+            event_type=EventTypes.Member,
+            state_key=user_id
+        )
+        membership = member.membership if member else None
+
+        if membership not in (Membership.JOIN, Membership.LEAVE):
+            raise AuthError(403, "User %s not in room %s" % (
+                user_id, room_id
+            ))
+
+        defer.returnValue(member)
+
+    @defer.inlineCallbacks
     def check_host_in_room(self, room_id, host):
         curr_state = yield self.state.get_current_state(room_id)
 
@@ -153,6 +243,11 @@ class Auth(object):
                 user_id, room_id, repr(member)
             ))
 
+    def can_federate(self, event, auth_events):
+        creation_event = auth_events.get((EventTypes.Create, ""))
+
+        return creation_event.content.get("m.federate", True) is True
+
     @log_function
     def is_membership_change_allowed(self, event, auth_events):
         membership = event.content["membership"]
@@ -168,6 +263,15 @@ class Auth(object):
 
         target_user_id = event.state_key
 
+        creating_domain = RoomID.from_string(event.room_id).domain
+        target_domain = UserID.from_string(target_user_id).domain
+        if creating_domain != target_domain:
+            if not self.can_federate(event, auth_events):
+                raise AuthError(
+                    403,
+                    "This room has been marked as unfederatable."
+                )
+
         # get info about the caller
         key = (EventTypes.Member, event.user_id, )
         caller = auth_events.get(key)
@@ -213,8 +317,17 @@ class Auth(object):
             }
         )
 
+        if Membership.INVITE == membership and "third_party_invite" in event.content:
+            if not self._verify_third_party_invite(event, auth_events):
+                raise AuthError(403, "You are not invited to this room.")
+            return True
+
         if Membership.JOIN != membership:
-            # JOIN is the only action you can perform if you're not in the room
+            if (caller_invited
+                    and Membership.LEAVE == membership
+                    and target_user_id == event.user_id):
+                return True
+
             if not caller_in_room:  # caller isn't joined
                 raise AuthError(
                     403,
@@ -278,6 +391,66 @@ class Auth(object):
 
         return True
 
+    def _verify_third_party_invite(self, event, auth_events):
+        """
+        Validates that the invite event is authorized by a previous third-party invite.
+
+        Checks that the public key, and keyserver, match those in the third party invite,
+        and that the invite event has a signature issued using that public key.
+
+        Args:
+            event: The m.room.member join event being validated.
+            auth_events: All relevant previous context events which may be used
+                for authorization decisions.
+
+        Returns:
+            True if the event fulfills the expectations of a previous third party
+            invite event.
+        """
+        if "third_party_invite" not in event.content:
+            return False
+        if "signed" not in event.content["third_party_invite"]:
+            return False
+        signed = event.content["third_party_invite"]["signed"]
+        for key in {"mxid", "token"}:
+            if key not in signed:
+                return False
+
+        token = signed["token"]
+
+        invite_event = auth_events.get(
+            (EventTypes.ThirdPartyInvite, token,)
+        )
+        if not invite_event:
+            return False
+
+        if event.user_id != invite_event.user_id:
+            return False
+        try:
+            public_key = invite_event.content["public_key"]
+            if signed["mxid"] != event.state_key:
+                return False
+            if signed["token"] != token:
+                return False
+            for server, signature_block in signed["signatures"].items():
+                for key_name, encoded_signature in signature_block.items():
+                    if not key_name.startswith("ed25519:"):
+                        return False
+                    verify_key = decode_verify_key_bytes(
+                        key_name,
+                        decode_base64(public_key)
+                    )
+                    verify_signed_json(signed, server, verify_key)
+
+                    # We got the public key from the invite, so we know that the
+                    # correct server signed the signed bundle.
+                    # The caller is responsible for checking that the signing
+                    # server has not revoked that public key.
+                    return True
+            return False
+        except (KeyError, SignatureVerifyException,):
+            return False
+
     def _get_power_level_event(self, auth_events):
         key = (EventTypes.PowerLevels, "", )
         return auth_events.get(key)
@@ -316,15 +489,15 @@ class Auth(object):
             return default
 
     @defer.inlineCallbacks
-    def get_user_by_req(self, request):
+    def get_user_by_req(self, request, allow_guest=False):
         """ Get a registered user's ID.
 
         Args:
             request - An HTTP request with an access_token query parameter.
         Returns:
-            tuple : of UserID and device string:
-                User ID object of the user making the request
-                ClientInfo object of the client instance the user is using
+            tuple of:
+                UserID object of the user making the request
+                Access token ID (str), and whether the user is a guest (bool)
         Raises:
             AuthError if no user by that token exists or the token is invalid.
         """
@@ -354,17 +527,15 @@ class Auth(object):
 
                 request.authenticated_entity = user_id
 
-                defer.returnValue(
-                    (UserID.from_string(user_id), ClientInfo("", ""))
-                )
+                defer.returnValue((UserID.from_string(user_id), "", False))
                 return
             except KeyError:
                 pass  # normal users won't have the user_id query parameter set.
 
-            user_info = yield self.get_user_by_token(access_token)
+            user_info = yield self._get_user_by_access_token(access_token)
             user = user_info["user"]
-            device_id = user_info["device_id"]
             token_id = user_info["token_id"]
+            is_guest = user_info["is_guest"]
 
             ip_addr = self.hs.get_ip_from_request(request)
             user_agent = request.requestHeaders.getRawHeaders(
@@ -375,14 +546,18 @@ class Auth(object):
                 self.store.insert_client_ip(
                     user=user,
                     access_token=access_token,
-                    device_id=user_info["device_id"],
                     ip=ip_addr,
                     user_agent=user_agent
                 )
 
+            if is_guest and not allow_guest:
+                raise AuthError(
+                    403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
+                )
+
             request.authenticated_entity = user.to_string()
 
-            defer.returnValue((user, ClientInfo(device_id, token_id)))
+            defer.returnValue((user, token_id, is_guest,))
         except KeyError:
             raise AuthError(
                 self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
@@ -390,30 +565,124 @@ class Auth(object):
             )
 
     @defer.inlineCallbacks
-    def get_user_by_token(self, token):
+    def _get_user_by_access_token(self, token):
         """ Get a registered user's ID.
 
         Args:
             token (str): The access token to get the user by.
         Returns:
-            dict : dict that includes the user, device_id, and whether the
-                user is a server admin.
+            dict : dict that includes the user and the ID of their access token.
         Raises:
             AuthError if no user by that token exists or the token is invalid.
         """
-        ret = yield self.store.get_user_by_token(token)
+        try:
+            ret = yield self._get_user_from_macaroon(token)
+        except AuthError:
+            # TODO(daniel): Remove this fallback when all existing access tokens
+            # have been re-issued as macaroons.
+            ret = yield self._look_up_user_by_access_token(token)
+        defer.returnValue(ret)
+
+    @defer.inlineCallbacks
+    def _get_user_from_macaroon(self, macaroon_str):
+        try:
+            macaroon = pymacaroons.Macaroon.deserialize(macaroon_str)
+            self.validate_macaroon(
+                macaroon, "access",
+                [lambda c: c.startswith("time < ")]
+            )
+
+            user_prefix = "user_id = "
+            user = None
+            guest = False
+            for caveat in macaroon.caveats:
+                if caveat.caveat_id.startswith(user_prefix):
+                    user = UserID.from_string(caveat.caveat_id[len(user_prefix):])
+                elif caveat.caveat_id == "guest = true":
+                    guest = True
+
+            if user is None:
+                raise AuthError(
+                    self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
+                    errcode=Codes.UNKNOWN_TOKEN
+                )
+
+            if guest:
+                ret = {
+                    "user": user,
+                    "is_guest": True,
+                    "token_id": None,
+                }
+            else:
+                # This codepath exists so that we can actually return a
+                # token ID, because we use token IDs in place of device
+                # identifiers throughout the codebase.
+                # TODO(daniel): Remove this fallback when device IDs are
+                # properly implemented.
+                ret = yield self._look_up_user_by_access_token(macaroon_str)
+                if ret["user"] != user:
+                    logger.error(
+                        "Macaroon user (%s) != DB user (%s)",
+                        user,
+                        ret["user"]
+                    )
+                    raise AuthError(
+                        self.TOKEN_NOT_FOUND_HTTP_STATUS,
+                        "User mismatch in macaroon",
+                        errcode=Codes.UNKNOWN_TOKEN
+                    )
+            defer.returnValue(ret)
+        except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
+            raise AuthError(
+                self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
+                errcode=Codes.UNKNOWN_TOKEN
+            )
+
+    def validate_macaroon(self, macaroon, type_string, additional_validation_functions):
+        v = pymacaroons.Verifier()
+        v.satisfy_exact("gen = 1")
+        v.satisfy_exact("type = " + type_string)
+        v.satisfy_general(lambda c: c.startswith("user_id = "))
+        v.satisfy_exact("guest = true")
+
+        for validation_function in additional_validation_functions:
+            v.satisfy_general(validation_function)
+        v.verify(macaroon, self.hs.config.macaroon_secret_key)
+
+        v = pymacaroons.Verifier()
+        v.satisfy_general(self._verify_recognizes_caveats)
+        v.verify(macaroon, self.hs.config.macaroon_secret_key)
+
+    def verify_expiry(self, caveat):
+        prefix = "time < "
+        if not caveat.startswith(prefix):
+            return False
+        expiry = int(caveat[len(prefix):])
+        now = self.hs.get_clock().time_msec()
+        return now < expiry
+
+    def _verify_recognizes_caveats(self, caveat):
+        first_space = caveat.find(" ")
+        if first_space < 0:
+            return False
+        second_space = caveat.find(" ", first_space + 1)
+        if second_space < 0:
+            return False
+        return caveat[:second_space + 1] in self._KNOWN_CAVEAT_PREFIXES
+
+    @defer.inlineCallbacks
+    def _look_up_user_by_access_token(self, token):
+        ret = yield self.store.get_user_by_access_token(token)
         if not ret:
             raise AuthError(
                 self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.",
                 errcode=Codes.UNKNOWN_TOKEN
             )
         user_info = {
-            "admin": bool(ret.get("admin", False)),
-            "device_id": ret.get("device_id"),
             "user": UserID.from_string(ret.get("name")),
             "token_id": ret.get("token_id", None),
+            "is_guest": False,
         }
-
         defer.returnValue(user_info)
 
     @defer.inlineCallbacks
@@ -488,6 +757,16 @@ class Auth(object):
             else:
                 if member_event:
                     auth_ids.append(member_event.event_id)
+
+            if e_type == Membership.INVITE:
+                if "third_party_invite" in event.content:
+                    key = (
+                        EventTypes.ThirdPartyInvite,
+                        event.content["third_party_invite"]["token"]
+                    )
+                    third_party_invite = current_state.get(key)
+                    if third_party_invite:
+                        auth_ids.append(third_party_invite.event_id)
         elif member_event:
             if member_event.content["membership"] == Membership.JOIN:
                 auth_ids.append(member_event.event_id)
@@ -548,16 +827,35 @@ class Auth(object):
 
         return True
 
-    def _check_redaction(self, event, auth_events):
+    def check_redaction(self, event, auth_events):
+        """Check whether the event sender is allowed to redact the target event.
+
+        Returns:
+            True if the sender is allowed to redact the target event if the
+            target event was created by them.
+            False if the sender is allowed to redact the target event with no
+            further checks.
+
+        Raises:
+            AuthError if the event sender is definitely not allowed to redact
+            the target event.
+        """
         user_level = self._get_user_power_level(event.user_id, auth_events)
 
         redact_level = self._get_named_level(auth_events, "redact", 50)
 
-        if user_level < redact_level:
-            raise AuthError(
-                403,
-                "You don't have permission to redact events"
-            )
+        if user_level > redact_level:
+            return False
+
+        redacter_domain = EventID.from_string(event.event_id).domain
+        redactee_domain = EventID.from_string(event.redacts).domain
+        if redacter_domain == redactee_domain:
+            return True
+
+        raise AuthError(
+            403,
+            "You don't have permission to redact events"
+        )
 
     def _check_power_levels(self, event, auth_events):
         user_list = event.content.get("users", {})
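
For context on the macaroon access tokens introduced above: verification accepts only caveats matched by the satisfiers ("gen = 1", "type = ...", "user_id = ...", "guest = true", or the "time < " prefix check passed in by _get_user_from_macaroon), and a second pass rejects any caveat whose prefix is not in _KNOWN_CAVEAT_PREFIXES. A minimal sketch of minting a token this verifier would accept; the key and server name are placeholders, and the real minting code is not part of this diff:

    # Illustrative only: mint a macaroon carrying the caveats that
    # Auth.validate_macaroon checks. Key and location are placeholders.
    import time
    import pymacaroons

    secret_key = "macaroon-secret-key-from-homeserver-config"  # placeholder

    macaroon = pymacaroons.Macaroon(
        location="example.com",  # placeholder server name
        identifier="key",
        key=secret_key,
    )
    macaroon.add_first_party_caveat("gen = 1")
    macaroon.add_first_party_caveat("type = access")
    macaroon.add_first_party_caveat("user_id = @alice:example.com")
    # Expiry caveat, in milliseconds since the epoch (see verify_expiry):
    expiry_ms = int(time.time() * 1000) + 60 * 60 * 1000
    macaroon.add_first_party_caveat("time < %d" % (expiry_ms,))

    token = macaroon.serialize()  # the string a client sends as access_token

Note that for non-guest tokens _get_user_from_macaroon still falls back to a database lookup (_look_up_user_by_access_token), so a freshly minted macaroon is only honoured once the store recognises it.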
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 1423986c1e..c2450b771a 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -27,16 +27,6 @@ class Membership(object):
     LIST = (INVITE, JOIN, KNOCK, LEAVE, BAN)
 
 
-class Feedback(object):
-
-    """Represents the types of feedback a user can send in response to a
-    message."""
-
-    DELIVERED = u"delivered"
-    READ = u"read"
-    LIST = (DELIVERED, READ)
-
-
 class PresenceState(object):
     """Represents the presence state of a user."""
     OFFLINE = u"offline"
@@ -73,11 +63,12 @@ class EventTypes(object):
     PowerLevels = "m.room.power_levels"
     Aliases = "m.room.aliases"
     Redaction = "m.room.redaction"
-    Feedback = "m.room.message.feedback"
+    ThirdPartyInvite = "m.room.third_party_invite"
 
     RoomHistoryVisibility = "m.room.history_visibility"
     CanonicalAlias = "m.room.canonical_alias"
     RoomAvatar = "m.room.avatar"
+    GuestAccess = "m.room.guest_access"
 
     # These are used for validation
     Message = "m.room.message"
@@ -94,3 +85,4 @@ class RejectedReason(object):
 class RoomCreationPreset(object):
     PRIVATE_CHAT = "private_chat"
     PUBLIC_CHAT = "public_chat"
+    TRUSTED_PRIVATE_CHAT = "trusted_private_chat"
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index c3b4d971a8..d4037b3d55 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -33,6 +33,7 @@ class Codes(object):
     NOT_FOUND = "M_NOT_FOUND"
     MISSING_TOKEN = "M_MISSING_TOKEN"
     UNKNOWN_TOKEN = "M_UNKNOWN_TOKEN"
+    GUEST_ACCESS_FORBIDDEN = "M_GUEST_ACCESS_FORBIDDEN"
     LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
     CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
     CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
@@ -47,7 +48,6 @@ class CodeMessageException(RuntimeError):
     """An exception with integer code and message string attributes."""
 
     def __init__(self, code, msg):
-        logger.info("%s: %s, %s", type(self).__name__, code, msg)
         super(CodeMessageException, self).__init__("%d: %s" % (code, msg))
         self.code = code
         self.msg = msg
@@ -77,11 +77,6 @@ class SynapseError(CodeMessageException):
         )
 
 
-class RoomError(SynapseError):
-    """An error raised when a room event fails."""
-    pass
-
-
 class RegistrationError(SynapseError):
     """An error raised when a registration event fails."""
     pass
@@ -125,6 +120,15 @@ class AuthError(SynapseError):
         super(AuthError, self).__init__(*args, **kwargs)
 
 
+class EventSizeError(SynapseError):
+    """An error raised when an event is too big."""
+
+    def __init__(self, *args, **kwargs):
+        if "errcode" not in kwargs:
+            kwargs["errcode"] = Codes.TOO_LARGE
+        super(EventSizeError, self).__init__(413, *args, **kwargs)
+
+
 class EventStreamError(SynapseError):
     """An error raised when there a problem with the event stream."""
     def __init__(self, *args, **kwargs):
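
EventSizeError pins the HTTP status to 413, and check_size_limits in synapse/api/auth.py (above) raises it whenever an event field exceeds its cap. A small sketch of how it surfaces, assuming Codes.TOO_LARGE maps to "M_TOO_LARGE" as elsewhere in errors.py:

    # Sketch: EventSizeError carries HTTP 413 and the M_TOO_LARGE errcode.
    from synapse.api.errors import EventSizeError

    try:
        raise EventSizeError("event too large")
    except EventSizeError as e:
        print(e.code)     # 413
        print(e.errcode)  # "M_TOO_LARGE" (Codes.TOO_LARGE)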
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
index 4d570b74f8..aaa2433cae 100644
--- a/synapse/api/filtering.py
+++ b/synapse/api/filtering.py
@@ -24,7 +24,7 @@ class Filtering(object):
 
     def get_user_filter(self, user_localpart, filter_id):
         result = self.store.get_user_filter(user_localpart, filter_id)
-        result.addCallback(Filter)
+        result.addCallback(FilterCollection)
         return result
 
     def add_user_filter(self, user_localpart, user_filter):
@@ -50,11 +50,11 @@ class Filtering(object):
         # many definitions.
 
         top_level_definitions = [
-            "public_user_data", "private_user_data", "server_data"
+            "presence"
         ]
 
         room_level_definitions = [
-            "state", "events", "ephemeral"
+            "state", "timeline", "ephemeral", "private_user_data"
         ]
 
         for key in top_level_definitions:
@@ -114,116 +114,134 @@ class Filtering(object):
                     if not isinstance(event_type, basestring):
                         raise SynapseError(400, "Event type should be a string")
 
-        if "format" in definition:
-            event_format = definition["format"]
-            if event_format not in ["federation", "events"]:
-                raise SynapseError(400, "Invalid format: %s" % (event_format,))
 
-        if "select" in definition:
-            event_select_list = definition["select"]
-            for select_key in event_select_list:
-                if select_key not in ["event_id", "origin_server_ts",
-                                      "thread_id", "content", "content.body"]:
-                    raise SynapseError(400, "Bad select: %s" % (select_key,))
+class FilterCollection(object):
+    def __init__(self, filter_json):
+        self.filter_json = filter_json
 
-        if ("bundle_updates" in definition and
-                type(definition["bundle_updates"]) != bool):
-            raise SynapseError(400, "Bad bundle_updates: expected bool.")
+        self.room_timeline_filter = Filter(
+            self.filter_json.get("room", {}).get("timeline", {})
+        )
 
+        self.room_state_filter = Filter(
+            self.filter_json.get("room", {}).get("state", {})
+        )
 
-class Filter(object):
-    def __init__(self, filter_json):
-        self.filter_json = filter_json
+        self.room_ephemeral_filter = Filter(
+            self.filter_json.get("room", {}).get("ephemeral", {})
+        )
+
+        self.room_private_user_data = Filter(
+            self.filter_json.get("room", {}).get("private_user_data", {})
+        )
+
+        self.presence_filter = Filter(
+            self.filter_json.get("presence", {})
+        )
+
+    def timeline_limit(self):
+        return self.room_timeline_filter.limit()
+
+    def presence_limit(self):
+        return self.presence_filter.limit()
 
-    def filter_public_user_data(self, events):
-        return self._filter_on_key(events, ["public_user_data"])
+    def ephemeral_limit(self):
+        return self.room_ephemeral_filter.limit()
 
-    def filter_private_user_data(self, events):
-        return self._filter_on_key(events, ["private_user_data"])
+    def filter_presence(self, events):
+        return self.presence_filter.filter(events)
 
     def filter_room_state(self, events):
-        return self._filter_on_key(events, ["room", "state"])
+        return self.room_state_filter.filter(events)
 
-    def filter_room_events(self, events):
-        return self._filter_on_key(events, ["room", "events"])
+    def filter_room_timeline(self, events):
+        return self.room_timeline_filter.filter(events)
 
     def filter_room_ephemeral(self, events):
-        return self._filter_on_key(events, ["room", "ephemeral"])
+        return self.room_ephemeral_filter.filter(events)
 
-    def _filter_on_key(self, events, keys):
-        filter_json = self.filter_json
-        if not filter_json:
-            return events
+    def filter_room_private_user_data(self, events):
+        return self.room_private_user_data.filter(events)
 
-        try:
-            # extract the right definition from the filter
-            definition = filter_json
-            for key in keys:
-                definition = definition[key]
-            return self._filter_with_definition(events, definition)
-        except KeyError:
-            # return all events if definition isn't specified.
-            return events
 
-    def _filter_with_definition(self, events, definition):
-        return [e for e in events if self._passes_definition(definition, e)]
+class Filter(object):
+    def __init__(self, filter_json):
+        self.filter_json = filter_json
 
-    def _passes_definition(self, definition, event):
-        """Check if the event passes through the given definition.
+    def check(self, event):
+        """Checks whether the filter matches the given event.
 
-        Args:
-            definition(dict): The definition to check against.
-            event(Event): The event to check.
         Returns:
-            True if the event passes through the filter.
+            bool: True if the event matches
         """
-        # Algorithm notes:
-        # For each key in the definition, check the event meets the criteria:
-        #   * For types: Literal match or prefix match (if ends with wildcard)
-        #   * For senders/rooms: Literal match only
-        #   * "not_" checks take presedence (e.g. if "m.*" is in both 'types'
-        #     and 'not_types' then it is treated as only being in 'not_types')
-
-        # room checks
-        if hasattr(event, "room_id"):
-            room_id = event.room_id
-            allow_rooms = definition.get("rooms", None)
-            reject_rooms = definition.get("not_rooms", None)
-            if reject_rooms and room_id in reject_rooms:
-                return False
-            if allow_rooms and room_id not in allow_rooms:
-                return False
+        if isinstance(event, dict):
+            return self.check_fields(
+                event.get("room_id", None),
+                event.get("sender", None),
+                event.get("type", None),
+            )
+        else:
+            return self.check_fields(
+                getattr(event, "room_id", None),
+                getattr(event, "sender", None),
+                event.type,
+            )
 
-        # sender checks
-        if hasattr(event, "sender"):
-            # Should we be including event.state_key for some event types?
-            sender = event.sender
-            allow_senders = definition.get("senders", None)
-            reject_senders = definition.get("not_senders", None)
-            if reject_senders and sender in reject_senders:
-                return False
-            if allow_senders and sender not in allow_senders:
+    def check_fields(self, room_id, sender, event_type):
+        """Checks whether the filter matches the given event fields.
+
+        Returns:
+            bool: True if the event fields match
+        """
+        literal_keys = {
+            "rooms": lambda v: room_id == v,
+            "senders": lambda v: sender == v,
+            "types": lambda v: _matches_wildcard(event_type, v)
+        }
+
+        for name, match_func in literal_keys.items():
+            not_name = "not_%s" % (name,)
+            disallowed_values = self.filter_json.get(not_name, [])
+            if any(map(match_func, disallowed_values)):
                 return False
 
-        # type checks
-        if "not_types" in definition:
-            for def_type in definition["not_types"]:
-                if self._event_matches_type(event, def_type):
+            allowed_values = self.filter_json.get(name, None)
+            if allowed_values is not None:
+                if not any(map(match_func, allowed_values)):
                     return False
-        if "types" in definition:
-            included = False
-            for def_type in definition["types"]:
-                if self._event_matches_type(event, def_type):
-                    included = True
-                    break
-            if not included:
-                return False
 
         return True
 
-    def _event_matches_type(self, event, def_type):
-        if def_type.endswith("*"):
-            type_prefix = def_type[:-1]
-            return event.type.startswith(type_prefix)
-        else:
-            return event.type == def_type
+    def filter_rooms(self, room_ids):
+        """Apply the 'rooms' filter to a given list of rooms.
+
+        Args:
+            room_ids (list): A list of room_ids.
+
+        Returns:
+            list: A list of room_ids that match the filter
+        """
+        room_ids = set(room_ids)
+
+        disallowed_rooms = set(self.filter_json.get("not_rooms", []))
+        room_ids -= disallowed_rooms
+
+        allowed_rooms = self.filter_json.get("rooms", None)
+        if allowed_rooms is not None:
+            room_ids &= set(allowed_rooms)
+
+        return room_ids
+
+    def filter(self, events):
+        return filter(self.check, events)
+
+    def limit(self):
+        return self.filter_json.get("limit", 10)
+
+
+def _matches_wildcard(actual_value, filter_value):
+    if filter_value.endswith("*"):
+        type_prefix = filter_value[:-1]
+        return actual_value.startswith(type_prefix)
+    else:
+        return actual_value == filter_value
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index fefefffb8f..cd7a52ec07 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -16,20 +16,31 @@
 
 import sys
 sys.dont_write_bytecode = True
-from synapse.python_dependencies import check_requirements, DEPENDENCY_LINKS
+from synapse.python_dependencies import (
+    check_requirements, DEPENDENCY_LINKS, MissingRequirementError
+)
 
 if __name__ == '__main__':
-    check_requirements()
+    try:
+        check_requirements()
+    except MissingRequirementError as e:
+        message = "\n".join([
+            "Missing Requirement: %s" % (e.message,),
+            "To install run:",
+            "    pip install --upgrade --force \"%s\"" % (e.dependency,),
+            "",
+        ])
+        sys.stderr.write(message)
+        sys.exit(1)
 
 from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
-from synapse.storage import (
-    are_all_users_on_domain, UpgradeDatabaseException,
-)
+from synapse.storage import are_all_users_on_domain
+from synapse.storage.prepare_database import UpgradeDatabaseException
 
 from synapse.server import HomeServer
 
 
-from twisted.internet import reactor
+from twisted.internet import reactor, task, defer
 from twisted.application import service
 from twisted.enterprise import adbapi
 from twisted.web.resource import Resource, EncodingResourceWrapper
@@ -72,12 +83,6 @@ import time
 logger = logging.getLogger("synapse.app.homeserver")
 
 
-class GzipFile(File):
-    def getChild(self, path, request):
-        child = File.getChild(self, path, request)
-        return EncodingResourceWrapper(child, [GzipEncoderFactory()])
-
-
 def gz_wrap(r):
     return EncodingResourceWrapper(r, [GzipEncoderFactory()])
 
@@ -121,12 +126,15 @@ class SynapseHomeServer(HomeServer):
         # (It can stay enabled for the API resources: they call
         # write() with the whole body and then finish() straight
         # after and so do not trigger the bug.
+        # GzipFile was removed in commit 184ba09
         # return GzipFile(webclient_path)  # TODO configurable?
         return File(webclient_path)  # TODO configurable?
 
     def build_resource_for_static_content(self):
         # This is old and should go away: not going to bother adding gzip
-        return File("static")
+        return File(
+            os.path.join(os.path.dirname(synapse.__file__), "static")
+        )
 
     def build_resource_for_content_repo(self):
         return ContentRepoResource(
@@ -221,7 +229,7 @@ class SynapseHomeServer(HomeServer):
                     listener_config,
                     root_resource,
                 ),
-                self.tls_context_factory,
+                self.tls_server_context_factory,
                 interface=bind_address
             )
         else:
@@ -365,7 +373,6 @@ def setup(config_options):
     Args:
         config_options: The options passed to Synapse. Usually
             `sys.argv[1:]`.
-        should_run (bool): Whether to start the reactor.
 
     Returns:
         HomeServer
@@ -388,7 +395,7 @@ def setup(config_options):
 
     events.USE_FROZEN_DICTS = config.use_frozen_dicts
 
-    tls_context_factory = context_factory.ServerContextFactory(config)
+    tls_server_context_factory = context_factory.ServerContextFactory(config)
 
     database_engine = create_engine(config.database_config["name"])
     config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
@@ -396,14 +403,14 @@ def setup(config_options):
     hs = SynapseHomeServer(
         config.server_name,
         db_config=config.database_config,
-        tls_context_factory=tls_context_factory,
+        tls_server_context_factory=tls_server_context_factory,
         config=config,
         content_addr=config.content_addr,
         version_string=version_string,
         database_engine=database_engine,
     )
 
-    logger.info("Preparing database: %r...", config.database_config)
+    logger.info("Preparing database: %s...", config.database_config['name'])
 
     try:
         db_conn = database_engine.module.connect(
@@ -425,13 +432,14 @@ def setup(config_options):
         )
         sys.exit(1)
 
-    logger.info("Database prepared in %r.", config.database_config)
+    logger.info("Database prepared in %s.", config.database_config['name'])
 
     hs.start_listening()
 
     hs.get_pusherpool().start()
     hs.get_state_handler().start_caching()
     hs.get_datastore().start_profiling()
+    hs.get_datastore().start_doing_background_updates()
     hs.get_replication_layer().start_get_pdu_cache()
 
     return hs
@@ -665,6 +673,42 @@ def run(hs):
         ThreadPool._worker = profile(ThreadPool._worker)
         reactor.run = profile(reactor.run)
 
+    start_time = hs.get_clock().time()
+
+    @defer.inlineCallbacks
+    def phone_stats_home():
+        now = int(hs.get_clock().time())
+        uptime = int(now - start_time)
+        if uptime < 0:
+            uptime = 0
+
+        stats = {}
+        stats["homeserver"] = hs.config.server_name
+        stats["timestamp"] = now
+        stats["uptime_seconds"] = uptime
+        stats["total_users"] = yield hs.get_datastore().count_all_users()
+
+        all_rooms = yield hs.get_datastore().get_rooms(False)
+        stats["total_room_count"] = len(all_rooms)
+
+        stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
+        daily_messages = yield hs.get_datastore().count_daily_messages()
+        if daily_messages is not None:
+            stats["daily_messages"] = daily_messages
+
+        logger.info("Reporting stats to matrix.org: %s" % (stats,))
+        try:
+            yield hs.get_simple_http_client().put_json(
+                "https://matrix.org/report-usage-stats/push",
+                stats
+            )
+        except Exception as e:
+            logger.warn("Error reporting stats: %s", e)
+
+    if hs.config.report_stats:
+        phone_home_task = task.LoopingCall(phone_stats_home)
+        phone_home_task.start(60 * 60 * 24, now=False)
+
     def in_thread():
         with LoggingContext("run"):
             change_resource_limit(hs.config.soft_file_limit)
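
The stats-reporting loop added to run() uses Twisted's LoopingCall with now=False, so the first report is sent a full day after startup rather than immediately. The scheduling pattern in isolation, independent of Synapse's stats payload:

    from twisted.internet import task, reactor

    def report():
        print("reporting...")  # stand-in for phone_stats_home

    loop = task.LoopingCall(report)
    loop.start(60 * 60 * 24, now=False)  # repeat every 24h; skip the immediate first call
    reactor.run()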
diff --git a/synapse/app/synctl.py b/synapse/app/synctl.py
index 1f7d543c31..5d82beed0e 100755
--- a/synapse/app/synctl.py
+++ b/synapse/app/synctl.py
@@ -16,57 +16,67 @@
 
 import sys
 import os
+import os.path
 import subprocess
 import signal
 import yaml
 
 SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]
 
-CONFIGFILE = "homeserver.yaml"
-
 GREEN = "\x1b[1;32m"
+RED = "\x1b[1;31m"
 NORMAL = "\x1b[m"
 
-if not os.path.exists(CONFIGFILE):
-    sys.stderr.write(
-        "No config file found\n"
-        "To generate a config file, run '%s -c %s --generate-config"
-        " --server-name=<server name>'\n" % (
-            " ".join(SYNAPSE), CONFIGFILE
-        )
-    )
-    sys.exit(1)
-
-CONFIG = yaml.load(open(CONFIGFILE))
-PIDFILE = CONFIG["pid_file"]
 
-
-def start():
+def start(configfile):
     print "Starting ...",
     args = SYNAPSE
-    args.extend(["--daemonize", "-c", CONFIGFILE])
-    subprocess.check_call(args)
-    print GREEN + "started" + NORMAL
+    args.extend(["--daemonize", "-c", configfile])
+
+    try:
+        subprocess.check_call(args)
+        print GREEN + "started" + NORMAL
+    except subprocess.CalledProcessError as e:
+        print (
+            RED +
+            "error starting (exit code: %d); see above for logs" % e.returncode +
+            NORMAL
+        )
 
 
-def stop():
-    if os.path.exists(PIDFILE):
-        pid = int(open(PIDFILE).read())
+def stop(pidfile):
+    if os.path.exists(pidfile):
+        pid = int(open(pidfile).read())
         os.kill(pid, signal.SIGTERM)
         print GREEN + "stopped" + NORMAL
 
 
 def main():
+    configfile = sys.argv[2] if len(sys.argv) == 3 else "homeserver.yaml"
+
+    if not os.path.exists(configfile):
+        sys.stderr.write(
+            "No config file found\n"
+            "To generate a config file, run '%s -c %s --generate-config"
+            " --server-name=<server name>'\n" % (
+                " ".join(SYNAPSE), configfile
+            )
+        )
+        sys.exit(1)
+
+    config = yaml.load(open(configfile))
+    pidfile = config["pid_file"]
+
     action = sys.argv[1] if sys.argv[1:] else "usage"
     if action == "start":
-        start()
+        start(configfile)
     elif action == "stop":
-        stop()
+        stop(pidfile)
     elif action == "restart":
-        stop()
-        start()
+        stop(pidfile)
+        start(configfile)
     else:
-        sys.stderr.write("Usage: %s [start|stop|restart]\n" % (sys.argv[0],))
+        sys.stderr.write("Usage: %s [start|stop|restart] [configfile]\n" % (sys.argv[0],))
         sys.exit(1)
 
 
diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py
index 59b0b1f4ac..44dc2c4744 100644
--- a/synapse/appservice/scheduler.py
+++ b/synapse/appservice/scheduler.py
@@ -224,8 +224,8 @@ class _Recoverer(object):
         self.clock.call_later((2 ** self.backoff_counter), self.retry)
 
     def _backoff(self):
-        # cap the backoff to be around 18h => (2^16) = 65536 secs
-        if self.backoff_counter < 16:
+        # cap the backoff to be around 8.5min => (2^9) = 512 secs
+        if self.backoff_counter < 9:
             self.backoff_counter += 1
         self.recover()
 
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 8a75c48733..c18e0bdbb8 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 import argparse
+import errno
 import os
 import yaml
 import sys
@@ -26,6 +27,16 @@ class ConfigError(Exception):
 
 class Config(object):
 
+    stats_reporting_begging_spiel = (
+        "We would really appreciate it if you could help our project out by"
+        " reporting anonymized usage statistics from your homeserver. Only very"
+        " basic aggregate data (e.g. number of users) will be reported, but it"
+        " helps us to track the growth of the Matrix community, and helps us to"
+        " make Matrix a success, as well as to convince other networks that they"
+        " should peer with us."
+        "\nThank you."
+    )
+
     @staticmethod
     def parse_size(value):
         if isinstance(value, int) or isinstance(value, long):
@@ -81,8 +92,11 @@ class Config(object):
     @classmethod
     def ensure_directory(cls, dir_path):
         dir_path = cls.abspath(dir_path)
-        if not os.path.exists(dir_path):
+        try:
             os.makedirs(dir_path)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
         if not os.path.isdir(dir_path):
             raise ConfigError(
                 "%s is not a directory" % (dir_path,)
@@ -111,11 +125,14 @@ class Config(object):
                 results.append(getattr(cls, name)(self, *args, **kargs))
         return results
 
-    def generate_config(self, config_dir_path, server_name):
+    def generate_config(self, config_dir_path, server_name, report_stats=None):
         default_config = "# vim:ft=yaml\n"
 
         default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
-            "default_config", config_dir_path, server_name
+            "default_config",
+            config_dir_path=config_dir_path,
+            server_name=server_name,
+            report_stats=report_stats,
         ))
 
         config = yaml.load(default_config)
@@ -140,6 +157,12 @@ class Config(object):
             help="Generate a config file for the server name"
         )
         config_parser.add_argument(
+            "--report-stats",
+            action="store",
+            help="Stuff",
+            choices=["yes", "no"]
+        )
+        config_parser.add_argument(
             "--generate-keys",
             action="store_true",
             help="Generate any missing key files then exit"
@@ -189,6 +212,11 @@ class Config(object):
                     config_files.append(config_path)
 
         if config_args.generate_config:
+            if config_args.report_stats is None:
+                config_parser.error(
+                    "Please specify either --report-stats=yes or --report-stats=no\n\n" +
+                    cls.stats_reporting_begging_spiel
+                )
             if not config_files:
                 config_parser.error(
                     "Must supply a config file.\nA config file can be automatically"
@@ -211,7 +239,9 @@ class Config(object):
                     os.makedirs(config_dir_path)
                 with open(config_path, "wb") as config_file:
                     config_bytes, config = obj.generate_config(
-                        config_dir_path, server_name
+                        config_dir_path=config_dir_path,
+                        server_name=server_name,
+                        report_stats=(config_args.report_stats == "yes"),
                     )
                     obj.invoke_all("generate_files", config)
                     config_file.write(config_bytes)
@@ -261,9 +291,20 @@ class Config(object):
             specified_config.update(yaml_config)
 
         server_name = specified_config["server_name"]
-        _, config = obj.generate_config(config_dir_path, server_name)
+        _, config = obj.generate_config(
+            config_dir_path=config_dir_path,
+            server_name=server_name
+        )
         config.pop("log_config")
         config.update(specified_config)
+        if "report_stats" not in config:
+            sys.stderr.write(
+                "Please opt in or out of reporting anonymized homeserver usage "
+                "statistics, by setting the report_stats key in your config file "
+                " ( " + config_path + " ) " +
+                "to either True or False.\n\n" +
+                Config.stats_reporting_begging_spiel + "\n")
+            sys.exit(1)
 
         if generate_keys:
             obj.invoke_all("generate_files", config)
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py
index 38f41933b7..b8d301995e 100644
--- a/synapse/config/appservice.py
+++ b/synapse/config/appservice.py
@@ -20,7 +20,7 @@ class AppServiceConfig(Config):
     def read_config(self, config):
         self.app_service_config_files = config.get("app_service_config_files", [])
 
-    def default_config(cls, config_dir_path, server_name):
+    def default_config(cls, **kwargs):
         return """\
         # A list of application service config file to use
         app_service_config_files: []
diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py
index 15a132b4e3..dd92fcd0dc 100644
--- a/synapse/config/captcha.py
+++ b/synapse/config/captcha.py
@@ -24,7 +24,7 @@ class CaptchaConfig(Config):
         self.captcha_bypass_secret = config.get("captcha_bypass_secret")
         self.recaptcha_siteverify_api = config["recaptcha_siteverify_api"]
 
-    def default_config(self, config_dir_path, server_name):
+    def default_config(self, **kwargs):
         return """\
         ## Captcha ##
 
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
new file mode 100644
index 0000000000..326e405841
--- /dev/null
+++ b/synapse/config/cas.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import Config
+
+
+class CasConfig(Config):
+    """Cas Configuration
+
+    cas_server_url: URL of CAS server
+    """
+
+    def read_config(self, config):
+        cas_config = config.get("cas_config", None)
+        if cas_config:
+            self.cas_enabled = cas_config.get("enabled", True)
+            self.cas_server_url = cas_config["server_url"]
+            self.cas_service_url = cas_config["service_url"]
+            self.cas_required_attributes = cas_config.get("required_attributes", {})
+        else:
+            self.cas_enabled = False
+            self.cas_server_url = None
+            self.cas_service_url = None
+            self.cas_required_attributes = {}
+
+    def default_config(self, config_dir_path, server_name, **kwargs):
+        return """
+        # Enable CAS for registration and login.
+        #cas_config:
+        #   enabled: true
+        #   server_url: "https://cas-server.com"
+        #   service_url: "https://homesever.domain.com:8448"
+        #   #required_attributes:
+        #   #    name: value
+        """
diff --git a/synapse/config/database.py b/synapse/config/database.py
index f0611e8884..baeda8f300 100644
--- a/synapse/config/database.py
+++ b/synapse/config/database.py
@@ -45,7 +45,7 @@ class DatabaseConfig(Config):
 
         self.set_databasepath(config.get("database_path"))
 
-    def default_config(self, config, config_dir_path):
+    def default_config(self, **kwargs):
         database_path = self.abspath("homeserver.db")
         return """\
         # Database configuration
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index d77f045406..4743e6abc5 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -26,12 +26,15 @@ from .metrics import MetricsConfig
 from .appservice import AppServiceConfig
 from .key import KeyConfig
 from .saml2 import SAML2Config
+from .cas import CasConfig
+from .password import PasswordConfig
 
 
 class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
                        RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
                        VoipConfig, RegistrationConfig, MetricsConfig,
-                       AppServiceConfig, KeyConfig, SAML2Config, ):
+                       AppServiceConfig, KeyConfig, SAML2Config, CasConfig,
+                       PasswordConfig,):
     pass
 
 
diff --git a/synapse/config/key.py b/synapse/config/key.py
index 0494c0cb77..2c187065e5 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -13,14 +13,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
 from ._base import Config, ConfigError
-import syutil.crypto.signing_key
-from syutil.crypto.signing_key import (
-    is_signing_algorithm_supported, decode_verify_key_bytes
-)
-from syutil.base64util import decode_base64
+
 from synapse.util.stringutils import random_string
+from signedjson.key import (
+    generate_signing_key, is_signing_algorithm_supported,
+    decode_signing_key_base64, decode_verify_key_bytes,
+    read_signing_keys, write_signing_keys, NACL_ED25519
+)
+from unpaddedbase64 import decode_base64
+
+import os
 
 
 class KeyConfig(Config):
@@ -37,7 +40,7 @@ class KeyConfig(Config):
             config["perspectives"]
         )
 
-    def default_config(self, config_dir_path, server_name):
+    def default_config(self, config_dir_path, server_name, **kwargs):
         base_key_name = os.path.join(config_dir_path, server_name)
         return """\
         ## Signing Keys ##
@@ -83,9 +86,7 @@ class KeyConfig(Config):
     def read_signing_key(self, signing_key_path):
         signing_keys = self.read_file(signing_key_path, "signing_key")
         try:
-            return syutil.crypto.signing_key.read_signing_keys(
-                signing_keys.splitlines(True)
-            )
+            return read_signing_keys(signing_keys.splitlines(True))
         except Exception:
             raise ConfigError(
                 "Error reading signing_key."
@@ -112,22 +113,18 @@ class KeyConfig(Config):
         if not os.path.exists(signing_key_path):
             with open(signing_key_path, "w") as signing_key_file:
                 key_id = "a_" + random_string(4)
-                syutil.crypto.signing_key.write_signing_keys(
-                    signing_key_file,
-                    (syutil.crypto.signing_key.generate_signing_key(key_id),),
+                write_signing_keys(
+                    signing_key_file, (generate_signing_key(key_id),),
                 )
         else:
             signing_keys = self.read_file(signing_key_path, "signing_key")
             if len(signing_keys.split("\n")[0].split()) == 1:
                 # handle keys in the old format.
                 key_id = "a_" + random_string(4)
-                key = syutil.crypto.signing_key.decode_signing_key_base64(
-                    syutil.crypto.signing_key.NACL_ED25519,
-                    key_id,
-                    signing_keys.split("\n")[0]
+                key = decode_signing_key_base64(
+                    NACL_ED25519, key_id, signing_keys.split("\n")[0]
                 )
                 with open(signing_key_path, "w") as signing_key_file:
-                    syutil.crypto.signing_key.write_signing_keys(
-                        signing_key_file,
-                        (key,),
+                    write_signing_keys(
+                        signing_key_file, (key,),
                     )
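
The signedjson helpers replace the syutil ones one-for-one. A sketch of the key round trip under that assumption (the "a_AbCd" version string is illustrative; the real code uses "a_" plus a random string):

    from signedjson.key import (
        generate_signing_key, read_signing_keys, write_signing_keys,
    )

    key = generate_signing_key("a_AbCd")
    with open("signing.key", "w") as f:
        write_signing_keys(f, (key,))

    with open("signing.key") as f:
        keys = read_signing_keys(f)  # -> list of signing keys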
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index fa542623b7..a13dc170c4 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -21,6 +21,8 @@ import logging.config
 import yaml
 from string import Template
 import os
+import signal
+from synapse.util.debug import debug_deferreds
 
 
 DEFAULT_LOG_CONFIG = Template("""
@@ -68,8 +70,10 @@ class LoggingConfig(Config):
         self.verbosity = config.get("verbose", 0)
         self.log_config = self.abspath(config.get("log_config"))
         self.log_file = self.abspath(config.get("log_file"))
+        if config.get("full_twisted_stacktraces"):
+            debug_deferreds()
 
-    def default_config(self, config_dir_path, server_name):
+    def default_config(self, config_dir_path, server_name, **kwargs):
         log_file = self.abspath("homeserver.log")
         log_config = self.abspath(
             os.path.join(config_dir_path, server_name + ".log.config")
@@ -83,6 +87,11 @@ class LoggingConfig(Config):
 
         # A yaml python logging config file
         log_config: "%(log_config)s"
+
+        # Stop twisted from discarding the stack traces of exceptions in
+        # deferreds by waiting a reactor tick before running a deferred's
+        # callbacks.
+        # full_twisted_stacktraces: true
         """ % locals()
 
     def read_arguments(self, args):
@@ -142,6 +151,19 @@ class LoggingConfig(Config):
                 handler = logging.handlers.RotatingFileHandler(
                     self.log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
                 )
+
+                def sighup(signum, stack):
+                    logger.info("Closing log file due to SIGHUP")
+                    handler.doRollover()
+                    logger.info("Opened new log file due to SIGHUP")
+
+                # TODO(paul): obviously this is a terrible mechanism for
+                #   stealing SIGHUP, because it means no other part of synapse
+                #   can use it instead. If we want to catch SIGHUP anywhere
+                #   else as well, I'd suggest we find a nicer way to broadcast
+                #   it around.
+                if getattr(signal, "SIGHUP"):
+                    signal.signal(signal.SIGHUP, sighup)
             else:
                 handler = logging.StreamHandler()
             handler.setFormatter(formatter)
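
Once the handler is installed, sending SIGHUP to the running process rolls the log file over without a restart. Triggering it from Python, assuming the pid_file path from the homeserver config:

    import os
    import signal

    with open("/path/to/homeserver.pid") as f:  # path from pid_file in the config
        pid = int(f.read())
    os.kill(pid, signal.SIGHUP)  # the handler calls doRollover()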
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index ae5a691527..825fec9a38 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -19,13 +19,15 @@ from ._base import Config
 class MetricsConfig(Config):
     def read_config(self, config):
         self.enable_metrics = config["enable_metrics"]
+        self.report_stats = config.get("report_stats", None)
         self.metrics_port = config.get("metrics_port")
         self.metrics_bind_host = config.get("metrics_bind_host", "127.0.0.1")
 
-    def default_config(self, config_dir_path, server_name):
-        return """\
+    def default_config(self, report_stats=None, **kwargs):
+        suffix = "" if report_stats is None else "report_stats: %(report_stats)s\n"
+        return ("""\
         ## Metrics ##
 
         # Enable collection and rendering of performance metrics
         enable_metrics: False
-        """
+        """ + suffix) % locals()
diff --git a/synapse/config/password.py b/synapse/config/password.py
new file mode 100644
index 0000000000..1a3e278472
--- /dev/null
+++ b/synapse/config/password.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import Config
+
+
+class PasswordConfig(Config):
+    """Password login configuration
+    """
+
+    def read_config(self, config):
+        password_config = config.get("password_config", {})
+        self.password_enabled = password_config.get("enabled", True)
+
+    def default_config(self, config_dir_path, server_name, **kwargs):
+        return """
+        # Enable password for login.
+        password_config:
+           enabled: true
+        """
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 76d9970e5b..611b598ec7 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -27,7 +27,7 @@ class RatelimitConfig(Config):
         self.federation_rc_reject_limit = config["federation_rc_reject_limit"]
         self.federation_rc_concurrent = config["federation_rc_concurrent"]
 
-    def default_config(self, config_dir_path, server_name):
+    def default_config(self, **kwargs):
         return """\
         ## Ratelimiting ##
 
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 67e780864e..dca391f7af 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -32,9 +32,13 @@ class RegistrationConfig(Config):
             )
 
         self.registration_shared_secret = config.get("registration_shared_secret")
+        self.macaroon_secret_key = config.get("macaroon_secret_key")
+        self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
+        self.allow_guest_access = config.get("allow_guest_access", False)
 
-    def default_config(self, config_dir, server_name):
+    def default_config(self, **kwargs):
         registration_shared_secret = random_string_with_symbols(50)
+        macaroon_secret_key = random_string_with_symbols(50)
         return """\
         ## Registration ##
 
@@ -44,6 +48,18 @@ class RegistrationConfig(Config):
         # If set, allows registration by anyone who also has the shared
         # secret, even if registration is otherwise disabled.
         registration_shared_secret: "%(registration_shared_secret)s"
+
+        macaroon_secret_key: "%(macaroon_secret_key)s"
+
+        # Set the number of bcrypt rounds used to generate password hash.
+        # Larger numbers increase the work factor needed to generate the hash.
+        # The default number of rounds is 12.
+        bcrypt_rounds: 12
+
+        # Allows users to register as guests without a password/email/etc, and
+        # participate in rooms hosted on this server which have been made
+        # accessible to anonymous users.
+        allow_guest_access: False
         """ % locals()
 
     def add_arguments(self, parser):
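
bcrypt's cost parameter is logarithmic: each extra round doubles the hashing work, so 12 rounds means 2^12 key-expansion iterations. A sketch with the standalone bcrypt package (assumed to match the library Synapse uses for password hashing):

    import bcrypt

    hashed = bcrypt.hashpw(b"s3cret", bcrypt.gensalt(rounds=12))
    # verify by re-hashing with the stored hash as the salt
    assert bcrypt.hashpw(b"s3cret", hashed) == hashed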
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 64644b9a7a..2fcf872449 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -60,7 +60,7 @@ class ContentRepositoryConfig(Config):
             config["thumbnail_sizes"]
         )
 
-    def default_config(self, config_dir_path, server_name):
+    def default_config(self, **kwargs):
         media_store = self.default_path("media_store")
         uploads_path = self.default_path("uploads")
         return """
diff --git a/synapse/config/saml2.py b/synapse/config/saml2.py
index 1532036876..8d7f443021 100644
--- a/synapse/config/saml2.py
+++ b/synapse/config/saml2.py
@@ -33,7 +33,7 @@ class SAML2Config(Config):
     def read_config(self, config):
         saml2_config = config.get("saml2_config", None)
         if saml2_config:
-            self.saml2_enabled = True
+            self.saml2_enabled = saml2_config.get("enabled", True)
             self.saml2_config_path = saml2_config["config_path"]
             self.saml2_idp_redirect_url = saml2_config["idp_redirect_url"]
         else:
@@ -41,7 +41,7 @@ class SAML2Config(Config):
             self.saml2_config_path = None
             self.saml2_idp_redirect_url = None
 
-    def default_config(self, config_dir_path, server_name):
+    def default_config(self, config_dir_path, server_name, **kwargs):
         return """
         # Enable SAML2 for registration and login. Uses pysaml2
         # config_path:      Path to the sp_conf.py configuration file
@@ -49,6 +49,7 @@ class SAML2Config(Config):
         #                   the user back to /login/saml2 with proper info.
         # See pysaml2 docs for format of config.
         #saml2_config:
+        #   enabled: true
         #   config_path: "%s/sp_conf.py"
         #   idp_redirect_url: "http://%s/idp"
         """ % (config_dir_path, server_name)
diff --git a/synapse/config/server.py b/synapse/config/server.py
index a03e55c223..5c2d6bfeab 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -26,6 +26,7 @@ class ServerConfig(Config):
         self.soft_file_limit = config["soft_file_limit"]
         self.daemonize = config.get("daemonize")
         self.print_pidfile = config.get("print_pidfile")
+        self.user_agent_suffix = config.get("user_agent_suffix")
         self.use_frozen_dicts = config.get("use_frozen_dicts", True)
 
         self.listeners = config.get("listeners", [])
@@ -117,7 +118,7 @@ class ServerConfig(Config):
 
         self.content_addr = content_addr
 
-    def default_config(self, config_dir_path, server_name):
+    def default_config(self, server_name, **kwargs):
         if ":" in server_name:
             bind_port = int(server_name.split(":")[1])
             unsecure_port = bind_port - 400
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index 4751d39bc9..0ac2698293 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -42,7 +42,15 @@ class TlsConfig(Config):
             config.get("tls_dh_params_path"), "tls_dh_params"
         )
 
-    def default_config(self, config_dir_path, server_name):
+        # This config option applies to non-federation HTTP clients
+        # (e.g. for talking to recaptcha, identity servers, and such)
+        # It should never be used in production, and is intended for
+        # use only when running tests.
+        self.use_insecure_ssl_client_just_for_testing_do_not_use = config.get(
+            "use_insecure_ssl_client_just_for_testing_do_not_use"
+        )
+
+    def default_config(self, config_dir_path, server_name, **kwargs):
         base_key_name = os.path.join(config_dir_path, server_name)
 
         tls_certificate_path = base_key_name + ".tls.crt"
diff --git a/synapse/config/voip.py b/synapse/config/voip.py
index a1707223d3..a093354ccd 100644
--- a/synapse/config/voip.py
+++ b/synapse/config/voip.py
@@ -22,7 +22,7 @@ class VoipConfig(Config):
         self.turn_shared_secret = config["turn_shared_secret"]
         self.turn_user_lifetime = self.parse_duration(config["turn_user_lifetime"])
 
-    def default_config(self, config_dir_path, server_name):
+    def default_config(self, **kwargs):
         return """\
         ## Turn ##
 
diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py
index 6633b19565..64e40864af 100644
--- a/synapse/crypto/event_signing.py
+++ b/synapse/crypto/event_signing.py
@@ -15,11 +15,12 @@
 # limitations under the License.
 
 
-from synapse.events.utils import prune_event
-from syutil.jsonutil import encode_canonical_json
-from syutil.base64util import encode_base64, decode_base64
-from syutil.crypto.jsonsign import sign_json
 from synapse.api.errors import SynapseError, Codes
+from synapse.events.utils import prune_event
+
+from canonicaljson import encode_canonical_json
+from unpaddedbase64 import encode_base64, decode_base64
+from signedjson.sign import sign_json
 
 import hashlib
 import logging
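
event_signing now signs via the standalone canonicaljson / unpaddedbase64 / signedjson packages instead of syutil. A sketch of signing a JSON object with them (server name and key version are illustrative):

    from canonicaljson import encode_canonical_json
    from signedjson.key import generate_signing_key
    from signedjson.sign import sign_json

    key = generate_signing_key("v1")
    signed = sign_json({"foo": "bar"}, "example.com", key)

    # The canonical encoding is what actually gets hashed and signed.
    wire_bytes = encode_canonical_json(signed)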
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 644c7b14a9..8b6a59866f 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -14,20 +14,20 @@
 # limitations under the License.
 
 from synapse.crypto.keyclient import fetch_server_key
+from synapse.api.errors import SynapseError, Codes
+from synapse.util.retryutils import get_retry_limiter
+from synapse.util import unwrapFirstError
+from synapse.util.async import ObservableDeferred
+
 from twisted.internet import defer
-from syutil.crypto.jsonsign import (
+
+from signedjson.sign import (
     verify_signed_json, signature_ids, sign_json, encode_canonical_json
 )
-from syutil.crypto.signing_key import (
+from signedjson.key import (
     is_signing_algorithm_supported, decode_verify_key_bytes
 )
-from syutil.base64util import decode_base64, encode_base64
-from synapse.api.errors import SynapseError, Codes
-
-from synapse.util.retryutils import get_retry_limiter
-from synapse.util import unwrapFirstError
-
-from synapse.util.async import ObservableDeferred
+from unpaddedbase64 import decode_base64, encode_base64
 
 from OpenSSL import crypto
 
@@ -228,10 +228,9 @@ class Keyring(object):
         def do_iterations():
             merged_results = {}
 
-            missing_keys = {
-                group.server_name: set(group.key_ids)
-                for group in group_id_to_group.values()
-            }
+            missing_keys = {}
+            for group in group_id_to_group.values():
+                # set.update mutates in place (set.union would discard its result)
+                missing_keys.setdefault(group.server_name, set()).update(group.key_ids)
 
             for fn in key_fetch_fns:
                 results = yield fn(missing_keys.items())
@@ -470,7 +469,7 @@ class Keyring(object):
                 continue
 
             (response, tls_certificate) = yield fetch_server_key(
-                server_name, self.hs.tls_context_factory,
+                server_name, self.hs.tls_server_context_factory,
                 path=(b"/_matrix/key/v2/server/%s" % (
                     urllib.quote(requested_key_id),
                 )).encode("ascii"),
@@ -604,7 +603,7 @@ class Keyring(object):
         # Try to fetch the key from the remote server.
 
         (response, tls_certificate) = yield fetch_server_key(
-            server_name, self.hs.tls_context_factory
+            server_name, self.hs.tls_server_context_factory
         )
 
         # Check the response.
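
The missing_keys fix above hinges on set.update mutating in place, where set.union builds and returns a new set. A two-line illustration:

    s = {1, 2}
    s.union({3})   # returns {1, 2, 3} but leaves s == {1, 2}
    s.update({3})  # mutates: s is now {1, 2, 3}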
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 7bd78343f0..9989b76591 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -66,7 +66,6 @@ def prune_event(event):
             "users_default",
             "events",
             "events_default",
-            "events_default",
             "state_default",
             "ban",
             "kick",
@@ -103,7 +102,10 @@ def format_event_raw(d):
 def format_event_for_client_v1(d):
     d["user_id"] = d.pop("sender", None)
 
-    move_keys = ("age", "redacted_because", "replaces_state", "prev_content")
+    move_keys = (
+        "age", "redacted_because", "replaces_state", "prev_content",
+        "invite_room_state",
+    )
     for key in move_keys:
         if key in d["unsigned"]:
             d[key] = d["unsigned"][key]
@@ -152,7 +154,8 @@ def serialize_event(e, time_now_ms, as_client_event=True,
 
     if "redacted_because" in e.unsigned:
         d["unsigned"]["redacted_because"] = serialize_event(
-            e.unsigned["redacted_because"], time_now_ms
+            e.unsigned["redacted_because"], time_now_ms,
+            event_format=event_format
         )
 
     if token_id is not None:
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index f5e346cdbc..c6a8c1249a 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -17,6 +17,7 @@
 from twisted.internet import defer
 
 from .federation_base import FederationBase
+from synapse.api.constants import Membership
 from .units import Edu
 
 from synapse.api.errors import (
@@ -356,19 +357,55 @@ class FederationClient(FederationBase):
         defer.returnValue(signed_auth)
 
     @defer.inlineCallbacks
-    def make_join(self, destinations, room_id, user_id):
+    def make_membership_event(self, destinations, room_id, user_id, membership,
+                              content={},):
+        """
+        Creates an m.room.member event, with context, without participating in the room.
+
+        Does so by asking one of the already participating servers to create an
+        event with proper context.
+
+        Note that this does not append any events to any graphs.
+
+        Args:
+            destinations (list): Candidate homeservers which are probably
+                participating in the room.
+            room_id (str): The room in which the event will happen.
+            user_id (str): The user whose membership is being evented.
+            membership (str): The "membership" property of the event. Must be
+                one of "join" or "leave".
+            content (object): Any additional data to put into the content field
+                of the event.
+        Returns:
+            A tuple of (origin (str), event (object)) where origin is the remote
+            homeserver which generated the event.
+        """
+        valid_memberships = {Membership.JOIN, Membership.LEAVE}
+        if membership not in valid_memberships:
+            raise RuntimeError(
+                "make_membership_event called with membership='%s', must be one of %s" %
+                (membership, ",".join(valid_memberships))
+            )
         for destination in destinations:
             if destination == self.server_name:
                 continue
 
             try:
-                ret = yield self.transport_layer.make_join(
-                    destination, room_id, user_id
+                ret = yield self.transport_layer.make_membership_event(
+                    destination, room_id, user_id, membership
                 )
 
                 pdu_dict = ret["event"]
 
-                logger.debug("Got response to make_join: %s", pdu_dict)
+                logger.debug("Got response to make_%s: %s", membership, pdu_dict)
+
+                pdu_dict["content"].update(content)
+
+                # The protoevent received over the JSON wire may not have all
+                # the required fields. Let's just gloss over that, because
+                # there are some we never care about.
+                if "prev_state" not in pdu_dict:
+                    pdu_dict["prev_state"] = []
 
                 defer.returnValue(
                     (destination, self.event_from_pdu_json(pdu_dict))
@@ -378,8 +415,8 @@ class FederationClient(FederationBase):
                 raise
             except Exception as e:
                 logger.warn(
-                    "Failed to make_join via %s: %s",
-                    destination, e.message
+                    "Failed to make_%s via %s: %s",
+                    membership, destination, e.message
                 )
 
         raise RuntimeError("Failed to send to any server.")
@@ -486,6 +523,33 @@ class FederationClient(FederationBase):
         defer.returnValue(pdu)
 
     @defer.inlineCallbacks
+    def send_leave(self, destinations, pdu):
+        for destination in destinations:
+            if destination == self.server_name:
+                continue
+
+            try:
+                time_now = self._clock.time_msec()
+                _, content = yield self.transport_layer.send_leave(
+                    destination=destination,
+                    room_id=pdu.room_id,
+                    event_id=pdu.event_id,
+                    content=pdu.get_pdu_json(time_now),
+                )
+
+                logger.debug("Got content: %s", content)
+                defer.returnValue(None)
+            except CodeMessageException:
+                raise
+            except Exception as e:
+                logger.exception(
+                    "Failed to send_leave via %s: %s",
+                    destination, e.message
+                )
+
+        raise RuntimeError("Failed to send to any server.")
+
+    @defer.inlineCallbacks
     def query_auth(self, destination, room_id, event_id, local_auth):
         """
         Params:
@@ -643,3 +707,26 @@ class FederationClient(FederationBase):
         event.internal_metadata.outlier = outlier
 
         return event
+
+    @defer.inlineCallbacks
+    def forward_third_party_invite(self, destinations, room_id, event_dict):
+        for destination in destinations:
+            if destination == self.server_name:
+                continue
+
+            try:
+                yield self.transport_layer.exchange_third_party_invite(
+                    destination=destination,
+                    room_id=room_id,
+                    event_dict=event_dict,
+                )
+                defer.returnValue(None)
+            except CodeMessageException:
+                raise
+            except Exception as e:
+                logger.exception(
+                    "Failed to send_third_party_invite via %s: %s",
+                    destination, e.message
+                )
+
+        raise RuntimeError("Failed to send to any server.")
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 725c6f3fa5..7a59436a91 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -255,6 +255,20 @@ class FederationServer(FederationBase):
         }))
 
     @defer.inlineCallbacks
+    def on_make_leave_request(self, room_id, user_id):
+        pdu = yield self.handler.on_make_leave_request(room_id, user_id)
+        time_now = self._clock.time_msec()
+        defer.returnValue({"event": pdu.get_pdu_json(time_now)})
+
+    @defer.inlineCallbacks
+    def on_send_leave_request(self, origin, content):
+        logger.debug("on_send_leave_request: content: %s", content)
+        pdu = self.event_from_pdu_json(content)
+        logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
+        yield self.handler.on_send_leave_request(origin, pdu)
+        defer.returnValue((200, {}))
+
+    @defer.inlineCallbacks
     def on_event_auth(self, origin, room_id, event_id):
         time_now = self._clock.time_msec()
         auth_pdus = yield self.handler.on_event_auth(event_id)
@@ -529,3 +543,15 @@ class FederationServer(FederationBase):
         event.internal_metadata.outlier = outlier
 
         return event
+
+    @defer.inlineCallbacks
+    def exchange_third_party_invite(self, invite):
+        ret = yield self.handler.exchange_third_party_invite(invite)
+        defer.returnValue(ret)
+
+    @defer.inlineCallbacks
+    def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
+        ret = yield self.handler.on_exchange_third_party_invite_request(
+            origin, room_id, event_dict
+        )
+        defer.returnValue(ret)
diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py
index 32fa5e8c15..aac6f1c167 100644
--- a/synapse/federation/transaction_queue.py
+++ b/synapse/federation/transaction_queue.py
@@ -202,6 +202,7 @@ class TransactionQueue(object):
     @defer.inlineCallbacks
     @log_function
     def _attempt_new_transaction(self, destination):
+        # list of (pending_pdu, deferred, order)
         if destination in self.pending_transactions:
             # XXX: pending_transactions can get stuck on by a never-ending
             # request at which point pending_pdus_by_dest just keeps growing.
@@ -213,9 +214,6 @@ class TransactionQueue(object):
             )
             return
 
-        logger.debug("TX [%s] _attempt_new_transaction", destination)
-
-        # list of (pending_pdu, deferred, order)
         pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
         pending_edus = self.pending_edus_by_dest.pop(destination, [])
         pending_failures = self.pending_failures_by_dest.pop(destination, [])
@@ -228,20 +226,22 @@ class TransactionQueue(object):
             logger.debug("TX [%s] Nothing to send", destination)
             return
 
-        # Sort based on the order field
-        pending_pdus.sort(key=lambda t: t[2])
-
-        pdus = [x[0] for x in pending_pdus]
-        edus = [x[0] for x in pending_edus]
-        failures = [x[0].get_dict() for x in pending_failures]
-        deferreds = [
-            x[1]
-            for x in pending_pdus + pending_edus + pending_failures
-        ]
-
         try:
             self.pending_transactions[destination] = 1
 
+            logger.debug("TX [%s] _attempt_new_transaction", destination)
+
+            # Sort based on the order field
+            pending_pdus.sort(key=lambda t: t[2])
+
+            pdus = [x[0] for x in pending_pdus]
+            edus = [x[0] for x in pending_edus]
+            failures = [x[0].get_dict() for x in pending_failures]
+            deferreds = [
+                x[1]
+                for x in pending_pdus + pending_edus + pending_failures
+            ]
+
             txn_id = str(self._next_txn_id)
 
             limiter = yield get_retry_limiter(
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index ced703364b..3d59e1c650 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 from twisted.internet import defer
+from synapse.api.constants import Membership
 
 from synapse.api.urls import FEDERATION_PREFIX as PREFIX
 from synapse.util.logutils import log_function
@@ -160,13 +161,19 @@ class TransportLayerClient(object):
 
     @defer.inlineCallbacks
     @log_function
-    def make_join(self, destination, room_id, user_id, retry_on_dns_fail=True):
-        path = PREFIX + "/make_join/%s/%s" % (room_id, user_id)
+    def make_membership_event(self, destination, room_id, user_id, membership):
+        valid_memberships = {Membership.JOIN, Membership.LEAVE}
+        if membership not in valid_memberships:
+            raise RuntimeError(
+                "make_membership_event called with membership='%s', must be one of %s" %
+                (membership, ",".join(valid_memberships))
+            )
+        path = PREFIX + "/make_%s/%s/%s" % (membership, room_id, user_id)
 
         content = yield self.client.get_json(
             destination=destination,
             path=path,
-            retry_on_dns_fail=retry_on_dns_fail,
+            retry_on_dns_fail=True,
         )
 
         defer.returnValue(content)
@@ -186,6 +193,19 @@ class TransportLayerClient(object):
 
     @defer.inlineCallbacks
     @log_function
+    def send_leave(self, destination, room_id, event_id, content):
+        path = PREFIX + "/send_leave/%s/%s" % (room_id, event_id)
+
+        response = yield self.client.put_json(
+            destination=destination,
+            path=path,
+            data=content,
+        )
+
+        defer.returnValue(response)
+
+    @defer.inlineCallbacks
+    @log_function
     def send_invite(self, destination, room_id, event_id, content):
         path = PREFIX + "/invite/%s/%s" % (room_id, event_id)
 
@@ -199,6 +219,19 @@ class TransportLayerClient(object):
 
     @defer.inlineCallbacks
     @log_function
+    def exchange_third_party_invite(self, destination, room_id, event_dict):
+        path = PREFIX + "/exchange_third_party_invite/%s" % (room_id,)
+
+        response = yield self.client.put_json(
+            destination=destination,
+            path=path,
+            data=event_dict,
+        )
+
+        defer.returnValue(response)
+
+    @defer.inlineCallbacks
+    @log_function
     def get_event_auth(self, destination, room_id, event_id):
         path = PREFIX + "/event_auth/%s/%s" % (room_id, event_id)
 
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 36f250e1a3..127b4da4f8 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -296,6 +296,24 @@ class FederationMakeJoinServlet(BaseFederationServlet):
         defer.returnValue((200, content))
 
 
+class FederationMakeLeaveServlet(BaseFederationServlet):
+    PATH = "/make_leave/([^/]*)/([^/]*)"
+
+    @defer.inlineCallbacks
+    def on_GET(self, origin, content, query, context, user_id):
+        content = yield self.handler.on_make_leave_request(context, user_id)
+        defer.returnValue((200, content))
+
+
+class FederationSendLeaveServlet(BaseFederationServlet):
+    PATH = "/send_leave/([^/]*)/([^/]*)"
+
+    @defer.inlineCallbacks
+    def on_PUT(self, origin, content, query, room_id, txid):
+        content = yield self.handler.on_send_leave_request(origin, content)
+        defer.returnValue((200, content))
+
+
 class FederationEventAuthServlet(BaseFederationServlet):
     PATH = "/event_auth/([^/]*)/([^/]*)"
 
@@ -325,6 +343,17 @@ class FederationInviteServlet(BaseFederationServlet):
         defer.returnValue((200, content))
 
 
+class FederationThirdPartyInviteExchangeServlet(BaseFederationServlet):
+    PATH = "/exchange_third_party_invite/([^/]*)"
+
+    @defer.inlineCallbacks
+    def on_PUT(self, origin, content, query, room_id):
+        content = yield self.handler.on_exchange_third_party_invite_request(
+            origin, room_id, content
+        )
+        defer.returnValue((200, content))
+
+
 class FederationClientKeysQueryServlet(BaseFederationServlet):
     PATH = "/user/keys/query"
 
@@ -378,6 +407,30 @@ class FederationGetMissingEventsServlet(BaseFederationServlet):
         defer.returnValue((200, content))
 
 
+class On3pidBindServlet(BaseFederationServlet):
+    PATH = "/3pid/onbind"
+
+    @defer.inlineCallbacks
+    def on_POST(self, request):
+        content_bytes = request.content.read()
+        content = json.loads(content_bytes)
+        if "invites" in content:
+            last_exception = None
+            for invite in content["invites"]:
+                try:
+                    yield self.handler.exchange_third_party_invite(invite)
+                except Exception as e:
+                    last_exception = e
+            if last_exception:
+                raise last_exception
+        defer.returnValue((200, {}))
+
+    # Avoid doing remote HS authorization checks which are done by default by
+    # BaseFederationServlet.
+    def _wrap(self, code):
+        return code
+
+
 SERVLET_CLASSES = (
     FederationPullServlet,
     FederationEventServlet,
@@ -385,12 +438,16 @@ SERVLET_CLASSES = (
     FederationBackfillServlet,
     FederationQueryServlet,
     FederationMakeJoinServlet,
+    FederationMakeLeaveServlet,
     FederationEventServlet,
     FederationSendJoinServlet,
+    FederationSendLeaveServlet,
     FederationInviteServlet,
     FederationQueryAuthServlet,
     FederationGetMissingEventsServlet,
     FederationEventAuthServlet,
     FederationClientKeysQueryServlet,
     FederationClientKeysClaimServlet,
+    FederationThirdPartyInviteExchangeServlet,
+    On3pidBindServlet,
 )
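
Each servlet's PATH is a regex whose capture groups become the trailing handler arguments, which is how FederationMakeLeaveServlet's two ([^/]*) groups arrive as context and user_id. In isolation (the real dispatcher also prepends the federation URL prefix):

    import re

    PATH = "/make_leave/([^/]*)/([^/]*)"
    m = re.match(PATH, "/make_leave/!room:example.com/@alice:example.com")
    room_id, user_id = m.groups()
    # room_id == "!room:example.com", user_id == "@alice:example.com"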
diff --git a/synapse/handlers/__init__.py b/synapse/handlers/__init__.py
index 8725c3c420..6a2339f2eb 100644
--- a/synapse/handlers/__init__.py
+++ b/synapse/handlers/__init__.py
@@ -17,7 +17,7 @@ from synapse.appservice.scheduler import AppServiceScheduler
 from synapse.appservice.api import ApplicationServiceApi
 from .register import RegistrationHandler
 from .room import (
-    RoomCreationHandler, RoomMemberHandler, RoomListHandler
+    RoomCreationHandler, RoomMemberHandler, RoomListHandler, RoomContextHandler,
 )
 from .message import MessageHandler
 from .events import EventStreamHandler, EventHandler
@@ -32,6 +32,7 @@ from .sync import SyncHandler
 from .auth import AuthHandler
 from .identity import IdentityHandler
 from .receipts import ReceiptsHandler
+from .search import SearchHandler
 
 
 class Handlers(object):
@@ -68,3 +69,5 @@ class Handlers(object):
         self.sync_handler = SyncHandler(hs)
         self.auth_handler = AuthHandler(hs)
         self.identity_handler = IdentityHandler(hs)
+        self.search_handler = SearchHandler(hs)
+        self.room_context_handler = RoomContextHandler(hs)
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index cb992143f5..6519f183df 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -15,7 +15,7 @@
 
 from twisted.internet import defer
 
-from synapse.api.errors import LimitExceededError, SynapseError
+from synapse.api.errors import LimitExceededError, SynapseError, AuthError
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.api.constants import Membership, EventTypes
 from synapse.types import UserID, RoomAlias
@@ -29,6 +29,12 @@ logger = logging.getLogger(__name__)
 
 
 class BaseHandler(object):
+    """
+    Common base class for the event handlers.
+
+    :type store: synapse.storage.events.StateStore
+    :type state_handler: synapse.state.StateHandler
+    """
 
     def __init__(self, hs):
         self.store = hs.get_datastore()
@@ -45,6 +51,74 @@ class BaseHandler(object):
 
         self.event_builder_factory = hs.get_event_builder_factory()
 
+    @defer.inlineCallbacks
+    def _filter_events_for_client(self, user_id, events, is_guest=False,
+                                  require_all_visible_for_guests=True):
+        # Assumes that the user has at some point joined the room, unless
+        # is_guest is set.
+
+        def allowed(event, membership, visibility):
+            if visibility == "world_readable":
+                return True
+
+            if is_guest:
+                return False
+
+            if membership == Membership.JOIN:
+                return True
+
+            if event.type == EventTypes.RoomHistoryVisibility:
+                return not is_guest
+
+            if visibility == "shared":
+                return True
+            elif visibility == "joined":
+                return membership == Membership.JOIN
+            elif visibility == "invited":
+                return membership == Membership.INVITE
+
+            return True
+
+        event_id_to_state = yield self.store.get_state_for_events(
+            frozenset(e.event_id for e in events),
+            types=(
+                (EventTypes.RoomHistoryVisibility, ""),
+                (EventTypes.Member, user_id),
+            )
+        )
+
+        events_to_return = []
+        for event in events:
+            state = event_id_to_state[event.event_id]
+
+            membership_event = state.get((EventTypes.Member, user_id), None)
+            if membership_event:
+                membership = membership_event.membership
+            else:
+                membership = None
+
+            visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None)
+            if visibility_event:
+                visibility = visibility_event.content.get("history_visibility", "shared")
+            else:
+                visibility = "shared"
+
+            should_include = allowed(event, membership, visibility)
+            if should_include:
+                events_to_return.append(event)
+
+        if (require_all_visible_for_guests
+                and is_guest
+                and len(events_to_return) < len(events)):
+            # This indicates that some events in the requested range were not
+            # visible to guest users. To be safe, we reject the entire request,
+            # so that we don't have to worry about interpreting visibility
+            # boundaries.
+            raise AuthError(403, "User %s does not have permission" % (
+                user_id
+            ))
+
+        defer.returnValue(events_to_return)
+
     def ratelimit(self, user_id):
         time_now = self.clock.time()
         allowed, time_allowed = self.ratelimiter.send_message(
@@ -107,6 +181,8 @@ class BaseHandler(object):
         if not suppress_auth:
             self.auth.check(event, auth_events=context.current_state)
 
+        yield self.maybe_kick_guest_users(event, context.current_state.values())
+
         if event.type == EventTypes.CanonicalAlias:
             # Check the alias is actually valid (at this time at least)
             room_alias_str = event.content.get("alias", None)
@@ -123,29 +199,63 @@ class BaseHandler(object):
                         )
                     )
 
-        (event_stream_id, max_stream_id) = yield self.store.persist_event(
-            event, context=context
-        )
-
         federation_handler = self.hs.get_handlers().federation_handler
 
         if event.type == EventTypes.Member:
             if event.content["membership"] == Membership.INVITE:
+                event.unsigned["invite_room_state"] = [
+                    {
+                        "type": e.type,
+                        "state_key": e.state_key,
+                        "content": e.content,
+                        "sender": e.sender,
+                    }
+                    for k, e in context.current_state.items()
+                    if e.type in (
+                        EventTypes.JoinRules,
+                        EventTypes.CanonicalAlias,
+                        EventTypes.RoomAvatar,
+                        EventTypes.Name,
+                    )
+                ]
+
                 invitee = UserID.from_string(event.state_key)
                 if not self.hs.is_mine(invitee):
                     # TODO: Can we add signature from remote server in a nicer
                     # way? If we have been invited by a remote server, we need
                     # to get them to sign the event.
+
                     returned_invite = yield federation_handler.send_invite(
                         invitee.domain,
                         event,
                     )
 
+                    event.unsigned.pop("invite_room_state", None)
+
                     # TODO: Make sure the signatures actually are correct.
                     event.signatures.update(
                         returned_invite.signatures
                     )
 
+        if event.type == EventTypes.Redaction:
+            if self.auth.check_redaction(event, auth_events=context.current_state):
+                original_event = yield self.store.get_event(
+                    event.redacts,
+                    check_redacted=False,
+                    get_prev_content=False,
+                    allow_rejected=False,
+                    allow_none=False
+                )
+                if event.user_id != original_event.user_id:
+                    raise AuthError(
+                        403,
+                        "You don't have permission to redact events"
+                    )
+
+        (event_stream_id, max_stream_id) = yield self.store.persist_event(
+            event, context=context
+        )
+
         destinations = set(extra_destinations)
         for k, s in context.current_state.items():
             try:
@@ -174,6 +284,64 @@ class BaseHandler(object):
 
         notify_d.addErrback(log_failure)
 
+        # If invite, remove room_state from unsigned before sending.
+        event.unsigned.pop("invite_room_state", None)
+
         federation_handler.handle_new_event(
             event, destinations=destinations,
         )
+
+    @defer.inlineCallbacks
+    def maybe_kick_guest_users(self, event, current_state):
+        # Technically this function invalidates current_state by changing it.
+        # Hopefully this isn't that important to the caller.
+        if event.type == EventTypes.GuestAccess:
+            guest_access = event.content.get("guest_access", "forbidden")
+            if guest_access != "can_join":
+                yield self.kick_guest_users(current_state)
+
+    @defer.inlineCallbacks
+    def kick_guest_users(self, current_state):
+        for member_event in current_state:
+            try:
+                if member_event.type != EventTypes.Member:
+                    continue
+
+                if not self.hs.is_mine(UserID.from_string(member_event.state_key)):
+                    continue
+
+                if member_event.content["membership"] not in {
+                    Membership.JOIN,
+                    Membership.INVITE
+                }:
+                    continue
+
+                if (
+                    "kind" not in member_event.content
+                    or member_event.content["kind"] != "guest"
+                ):
+                    continue
+
+                # We make the user choose to leave, rather than have the
+                # event-sender kick them. This is partially because we don't
+                # need to worry about power levels, and partially because guest
+                # users are a concept which doesn't hugely work over federation,
+                # and having homeservers have their own users leave keeps more
+                # of that decision-making and control local to the guest-having
+                # homeserver.
+                message_handler = self.hs.get_handlers().message_handler
+                yield message_handler.create_and_send_event(
+                    {
+                        "type": EventTypes.Member,
+                        "state_key": member_event.state_key,
+                        "content": {
+                            "membership": Membership.LEAVE,
+                            "kind": "guest"
+                        },
+                        "room_id": member_event.room_id,
+                        "sender": member_event.state_key
+                    },
+                    ratelimit=False,
+                )
+            except Exception as e:
+                logger.warn("Error kicking guest user: %s" % (e,))
diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py
index 1c9e7152c7..d852a18555 100644
--- a/synapse/handlers/admin.py
+++ b/synapse/handlers/admin.py
@@ -34,6 +34,7 @@ class AdminHandler(BaseHandler):
 
         d = {}
         for r in res:
+            # Note that device_id is always None
             device = d.setdefault(r["device_id"], {})
             session = device.setdefault(r["access_token"], [])
             session.append({
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 602c5bcd89..be157e2bb7 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -18,14 +18,14 @@ from twisted.internet import defer
 from ._base import BaseHandler
 from synapse.api.constants import LoginType
 from synapse.types import UserID
-from synapse.api.errors import LoginError, Codes
-from synapse.http.client import SimpleHttpClient
+from synapse.api.errors import AuthError, LoginError, Codes
 from synapse.util.async import run_on_reactor
 
 from twisted.web.client import PartialDownloadError
 
 import logging
 import bcrypt
+import pymacaroons
 import simplejson
 
 import synapse.util.stringutils as stringutils
@@ -44,7 +44,9 @@ class AuthHandler(BaseHandler):
             LoginType.EMAIL_IDENTITY: self._check_email_identity,
             LoginType.DUMMY: self._check_dummy_auth,
         }
+        self.bcrypt_rounds = hs.config.bcrypt_rounds
         self.sessions = {}
+        self.INVALID_TOKEN_HTTP_STATUS = 401
 
     @defer.inlineCallbacks
     def check_auth(self, flows, clientdict, clientip):
@@ -186,7 +188,7 @@ class AuthHandler(BaseHandler):
         # TODO: get this from the homeserver rather than creating a new one for
         # each request
         try:
-            client = SimpleHttpClient(self.hs)
+            client = self.hs.get_simple_http_client()
             resp_body = yield client.post_urlencoded_get_json(
                 self.hs.config.recaptcha_siteverify_api,
                 args={
@@ -279,7 +281,10 @@ class AuthHandler(BaseHandler):
             user_id (str): User ID
             password (str): Password
         Returns:
-            The access token for the user's session.
+            A tuple of:
+              The user's ID.
+              The access token for the user's session.
+              The refresh token for the user's session.
         Raises:
             StoreError if there was a problem storing the token.
             LoginError if there was an authentication problem.
@@ -287,11 +292,43 @@ class AuthHandler(BaseHandler):
         user_id, password_hash = yield self._find_user_id_and_pwd_hash(user_id)
         self._check_password(user_id, password, password_hash)
 
-        reg_handler = self.hs.get_handlers().registration_handler
-        access_token = reg_handler.generate_token(user_id)
         logger.info("Logging in user %s", user_id)
-        yield self.store.add_access_token_to_user(user_id, access_token)
-        defer.returnValue((user_id, access_token))
+        access_token = yield self.issue_access_token(user_id)
+        refresh_token = yield self.issue_refresh_token(user_id)
+        defer.returnValue((user_id, access_token, refresh_token))
+
+    @defer.inlineCallbacks
+    def get_login_tuple_for_user_id(self, user_id):
+        """
+        Gets a login tuple for the user with the given user ID.
+        The user is assumed to have been authenticated by some other
+        mechanism (e.g. CAS).
+
+        Args:
+            user_id (str): User ID
+        Returns:
+            A tuple of:
+              The user's ID.
+              The access token for the user's session.
+              The refresh token for the user's session.
+        Raises:
+            StoreError if there was a problem storing the token.
+            LoginError if there was an authentication problem.
+        """
+        user_id, ignored = yield self._find_user_id_and_pwd_hash(user_id)
+
+        logger.info("Logging in user %s", user_id)
+        access_token = yield self.issue_access_token(user_id)
+        refresh_token = yield self.issue_refresh_token(user_id)
+        defer.returnValue((user_id, access_token, refresh_token))
+
+    @defer.inlineCallbacks
+    def does_user_exist(self, user_id):
+        try:
+            yield self._find_user_id_and_pwd_hash(user_id)
+            defer.returnValue(True)
+        except LoginError:
+            defer.returnValue(False)
 
     @defer.inlineCallbacks
     def _find_user_id_and_pwd_hash(self, user_id):
@@ -321,13 +358,82 @@ class AuthHandler(BaseHandler):
 
     def _check_password(self, user_id, password, stored_hash):
         """Checks that user_id has passed password, raises LoginError if not."""
-        if not bcrypt.checkpw(password, stored_hash):
+        if not self.validate_hash(password, stored_hash):
             logger.warn("Failed password login for user %s", user_id)
             raise LoginError(403, "", errcode=Codes.FORBIDDEN)
 
     @defer.inlineCallbacks
+    def issue_access_token(self, user_id):
+        access_token = self.generate_access_token(user_id)
+        yield self.store.add_access_token_to_user(user_id, access_token)
+        defer.returnValue(access_token)
+
+    @defer.inlineCallbacks
+    def issue_refresh_token(self, user_id):
+        refresh_token = self.generate_refresh_token(user_id)
+        yield self.store.add_refresh_token_to_user(user_id, refresh_token)
+        defer.returnValue(refresh_token)
+
+    def generate_access_token(self, user_id, extra_caveats=None):
+        extra_caveats = extra_caveats or []
+        macaroon = self._generate_base_macaroon(user_id)
+        macaroon.add_first_party_caveat("type = access")
+        now = self.hs.get_clock().time_msec()
+        expiry = now + (60 * 60 * 1000)
+        macaroon.add_first_party_caveat("time < %d" % (expiry,))
+        for caveat in extra_caveats:
+            macaroon.add_first_party_caveat(caveat)
+        return macaroon.serialize()
+
+    def generate_refresh_token(self, user_id):
+        m = self._generate_base_macaroon(user_id)
+        m.add_first_party_caveat("type = refresh")
+        # Important to add a nonce, because otherwise every refresh token for a
+        # user will be the same.
+        m.add_first_party_caveat("nonce = %s" % (
+            stringutils.random_string_with_symbols(16),
+        ))
+        return m.serialize()
+
+    def generate_short_term_login_token(self, user_id):
+        macaroon = self._generate_base_macaroon(user_id)
+        macaroon.add_first_party_caveat("type = login")
+        now = self.hs.get_clock().time_msec()
+        expiry = now + (2 * 60 * 1000)
+        macaroon.add_first_party_caveat("time < %d" % (expiry,))
+        return macaroon.serialize()
+
+    def validate_short_term_login_token_and_get_user_id(self, login_token):
+        try:
+            macaroon = pymacaroons.Macaroon.deserialize(login_token)
+            auth_api = self.hs.get_auth()
+            auth_api.validate_macaroon(macaroon, "login", [auth_api.verify_expiry])
+            return self._get_user_from_macaroon(macaroon)
+        except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
+            raise AuthError(401, "Invalid token", errcode=Codes.UNKNOWN_TOKEN)
+
+    def _generate_base_macaroon(self, user_id):
+        macaroon = pymacaroons.Macaroon(
+            location=self.hs.config.server_name,
+            identifier="key",
+            key=self.hs.config.macaroon_secret_key)
+        macaroon.add_first_party_caveat("gen = 1")
+        macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
+        return macaroon
+
+    def _get_user_from_macaroon(self, macaroon):
+        user_prefix = "user_id = "
+        for caveat in macaroon.caveats:
+            if caveat.caveat_id.startswith(user_prefix):
+                return caveat.caveat_id[len(user_prefix):]
+        raise AuthError(
+            self.INVALID_TOKEN_HTTP_STATUS, "No user_id found in token",
+            errcode=Codes.UNKNOWN_TOKEN
+        )
+
+    @defer.inlineCallbacks
     def set_password(self, user_id, newpassword):
-        password_hash = bcrypt.hashpw(newpassword, bcrypt.gensalt())
+        password_hash = self.hash(newpassword)
 
         yield self.store.user_set_password_hash(user_id, password_hash)
         yield self.store.user_delete_access_tokens(user_id)
@@ -349,3 +455,26 @@ class AuthHandler(BaseHandler):
     def _remove_session(self, session):
         logger.debug("Removing session %s", session)
         del self.sessions[session["id"]]
+
+    def hash(self, password):
+        """Computes a secure hash of password.
+
+        Args:
+            password (str): Password to hash.
+
+        Returns:
+            Hashed password (str).
+        """
+        return bcrypt.hashpw(password, bcrypt.gensalt(self.bcrypt_rounds))
+
+    def validate_hash(self, password, stored_hash):
+        """Validates that self.hash(password) == stored_hash.
+
+        Args:
+            password (str): Password to hash.
+            stored_hash (str): Expected hash value.
+
+        Returns:
+            Whether self.hash(password) == stored_hash (bool).
+        """
+        return bcrypt.checkpw(password, stored_hash)
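
Access, refresh and short-term login tokens above are all macaroons minted
from the same base and distinguished only by their caveats. A sketch of that
pattern with pymacaroons (the location and secret are placeholder values):

    import time
    import pymacaroons

    SECRET = "placeholder-macaroon-secret"

    def make_access_token(user_id, lifetime_ms=60 * 60 * 1000):
        macaroon = pymacaroons.Macaroon(
            location="example.com", identifier="key", key=SECRET)
        macaroon.add_first_party_caveat("gen = 1")
        macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
        macaroon.add_first_party_caveat("type = access")
        expiry = int(time.time() * 1000) + lifetime_ms
        macaroon.add_first_party_caveat("time < %d" % (expiry,))
        return macaroon.serialize()

    def user_id_from_token(token):
        # Mirrors _get_user_from_macaroon: scan the caveats for user_id.
        macaroon = pymacaroons.Macaroon.deserialize(token)
        for caveat in macaroon.caveats:
            if caveat.caveat_id.startswith("user_id = "):
                return caveat.caveat_id[len("user_id = "):]
        raise ValueError("no user_id caveat in token")
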
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 891502c04f..0e4c0d4d06 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -47,10 +47,60 @@ class EventStreamHandler(BaseHandler):
         self.notifier = hs.get_notifier()
 
     @defer.inlineCallbacks
+    def started_stream(self, user):
+        """Tells the presence handler that we have started an eventstream for
+        the user:
+
+        Args:
+            user (User): The user who started a stream.
+        Returns:
+            A deferred that completes once their presence has been updated.
+        """
+        if user not in self._streams_per_user:
+            self._streams_per_user[user] = 0
+            if user in self._stop_timer_per_user:
+                try:
+                    self.clock.cancel_call_later(
+                        self._stop_timer_per_user.pop(user)
+                    )
+                except:
+                    logger.exception("Failed to cancel event timer")
+            else:
+                yield self.distributor.fire("started_user_eventstream", user)
+
+        self._streams_per_user[user] += 1
+
+    def stopped_stream(self, user):
+        """If there are no streams for a user this starts a timer that will
+        notify the presence handler that we haven't got an event stream for
+        the user unless the user starts a new stream in 30 seconds.
+
+        Args:
+            user (User): The user who stopped a stream.
+        """
+        self._streams_per_user[user] -= 1
+        if not self._streams_per_user[user]:
+            del self._streams_per_user[user]
+
+            # 30 seconds of grace to allow the client to reconnect again
+            #   before we think they're gone
+            def _later():
+                logger.debug("_later stopped_user_eventstream %s", user)
+
+                self._stop_timer_per_user.pop(user, None)
+
+                return self.distributor.fire("stopped_user_eventstream", user)
+
+            logger.debug("Scheduling _later: for %s", user)
+            self._stop_timer_per_user[user] = (
+                self.clock.call_later(30, _later)
+            )
+
+    @defer.inlineCallbacks
     @log_function
     def get_stream(self, auth_user_id, pagin_config, timeout=0,
                    as_client_event=True, affect_presence=True,
-                   only_room_events=False):
+                   only_room_events=False, room_id=None, is_guest=False):
         """Fetches the events stream for a given user.
 
         If `only_room_events` is `True` only room events will be returned.
@@ -59,31 +109,7 @@ class EventStreamHandler(BaseHandler):
 
         try:
             if affect_presence:
-                if auth_user not in self._streams_per_user:
-                    self._streams_per_user[auth_user] = 0
-                    if auth_user in self._stop_timer_per_user:
-                        try:
-                            self.clock.cancel_call_later(
-                                self._stop_timer_per_user.pop(auth_user)
-                            )
-                        except:
-                            logger.exception("Failed to cancel event timer")
-                    else:
-                        yield self.distributor.fire(
-                            "started_user_eventstream", auth_user
-                        )
-                self._streams_per_user[auth_user] += 1
-
-            rm_handler = self.hs.get_handlers().room_member_handler
-
-            app_service = yield self.store.get_app_service_by_user_id(
-                auth_user.to_string()
-            )
-            if app_service:
-                rooms = yield self.store.get_app_service_rooms(app_service)
-                room_ids = set(r.room_id for r in rooms)
-            else:
-                room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user)
+                yield self.started_stream(auth_user)
 
             if timeout:
                 # If they've set a timeout set a minimum limit.
@@ -93,9 +119,15 @@ class EventStreamHandler(BaseHandler):
                 # thundering herds on restart.
                 timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
 
+            if is_guest:
+                yield self.distributor.fire(
+                    "user_joined_room", user=auth_user, room_id=room_id
+                )
+
             events, tokens = yield self.notifier.get_events_for(
-                auth_user, room_ids, pagin_config, timeout,
-                only_room_events=only_room_events
+                auth_user, pagin_config, timeout,
+                only_room_events=only_room_events,
+                is_guest=is_guest, guest_room_id=room_id
             )
 
             time_now = self.clock.time_msec()
@@ -114,27 +146,7 @@ class EventStreamHandler(BaseHandler):
 
         finally:
             if affect_presence:
-                self._streams_per_user[auth_user] -= 1
-                if not self._streams_per_user[auth_user]:
-                    del self._streams_per_user[auth_user]
-
-                    # 10 seconds of grace to allow the client to reconnect again
-                    #   before we think they're gone
-                    def _later():
-                        logger.debug(
-                            "_later stopped_user_eventstream %s", auth_user
-                        )
-
-                        self._stop_timer_per_user.pop(auth_user, None)
-
-                        return self.distributor.fire(
-                            "stopped_user_eventstream", auth_user
-                        )
-
-                    logger.debug("Scheduling _later: for %s", auth_user)
-                    self._stop_timer_per_user[auth_user] = (
-                        self.clock.call_later(30, _later)
-                    )
+                self.stopped_stream(auth_user)
 
 
 class EventHandler(BaseHandler):
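
started_stream/stopped_stream above implement a reference count with a grace
timer: the last stream to close arms a 30-second timer, and a new stream
cancels it so brief reconnects do not flap presence. A standalone sketch of
the pattern, using threading.Timer in place of the homeserver clock (on_gone
is a hypothetical callback):

    import threading

    class StreamTracker(object):
        GRACE_SECONDS = 30

        def __init__(self, on_gone):
            self._counts = {}
            self._timers = {}
            self._on_gone = on_gone

        def started(self, user):
            timer = self._timers.pop(user, None)
            if timer is not None:
                timer.cancel()      # reconnected within the grace period
            self._counts[user] = self._counts.get(user, 0) + 1

        def stopped(self, user):
            self._counts[user] -= 1
            if not self._counts[user]:
                del self._counts[user]
                timer = threading.Timer(
                    self.GRACE_SECONDS, self._on_gone, args=(user,))
                self._timers[user] = timer
                timer.start()
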
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 4ff20599d6..c1bce07e31 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -21,6 +21,7 @@ from synapse.api.errors import (
     AuthError, FederationError, StoreError, CodeMessageException, SynapseError,
 )
 from synapse.api.constants import EventTypes, Membership, RejectedReason
+from synapse.events.validator import EventValidator
 from synapse.util import unwrapFirstError
 from synapse.util.logcontext import PreserveLoggingContext
 from synapse.util.logutils import log_function
@@ -40,7 +41,6 @@ from twisted.internet import defer
 import itertools
 import logging
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -58,6 +58,8 @@ class FederationHandler(BaseHandler):
     def __init__(self, hs):
         super(FederationHandler, self).__init__(hs)
 
+        self.hs = hs
+
         self.distributor.observe(
             "user_joined_room",
             self._on_user_joined
@@ -68,12 +70,9 @@ class FederationHandler(BaseHandler):
         self.store = hs.get_datastore()
         self.replication_layer = hs.get_replication_layer()
         self.state_handler = hs.get_state_handler()
-        # self.auth_handler = gs.get_auth_handler()
         self.server_name = hs.hostname
         self.keyring = hs.get_keyring()
 
-        self.lock_manager = hs.get_room_lock_manager()
-
         self.replication_layer.set_handler(self)
 
         # When joining a room we need to queue any events for that room up
@@ -125,60 +124,72 @@ class FederationHandler(BaseHandler):
         )
         if not is_in_room and not event.internal_metadata.is_outlier():
             logger.debug("Got event for room we're not in.")
-            current_state = state
 
-        event_ids = set()
-        if state:
-            event_ids |= {e.event_id for e in state}
-        if auth_chain:
-            event_ids |= {e.event_id for e in auth_chain}
+            try:
+                event_stream_id, max_stream_id = yield self._persist_auth_tree(
+                    auth_chain, state, event
+                )
+            except AuthError as e:
+                raise FederationError(
+                    "ERROR",
+                    e.code,
+                    e.msg,
+                    affected=event.event_id,
+                )
 
-        seen_ids = set(
-            (yield self.store.have_events(event_ids)).keys()
-        )
+        else:
+            event_ids = set()
+            if state:
+                event_ids |= {e.event_id for e in state}
+            if auth_chain:
+                event_ids |= {e.event_id for e in auth_chain}
+
+            seen_ids = set(
+                (yield self.store.have_events(event_ids)).keys()
+            )
 
-        if state and auth_chain is not None:
-            # If we have any state or auth_chain given to us by the replication
-            # layer, then we should handle them (if we haven't before.)
+            if state and auth_chain is not None:
+                # If we have any state or auth_chain given to us by the replication
+                # layer, then we should handle them (if we haven't before.)
 
-            event_infos = []
+                event_infos = []
 
-            for e in itertools.chain(auth_chain, state):
-                if e.event_id in seen_ids:
-                    continue
-                e.internal_metadata.outlier = True
-                auth_ids = [e_id for e_id, _ in e.auth_events]
-                auth = {
-                    (e.type, e.state_key): e for e in auth_chain
-                    if e.event_id in auth_ids
-                }
-                event_infos.append({
-                    "event": e,
-                    "auth_events": auth,
-                })
-                seen_ids.add(e.event_id)
+                for e in itertools.chain(auth_chain, state):
+                    if e.event_id in seen_ids:
+                        continue
+                    e.internal_metadata.outlier = True
+                    auth_ids = [e_id for e_id, _ in e.auth_events]
+                    auth = {
+                        (e.type, e.state_key): e for e in auth_chain
+                        if e.event_id in auth_ids or e.type == EventTypes.Create
+                    }
+                    event_infos.append({
+                        "event": e,
+                        "auth_events": auth,
+                    })
+                    seen_ids.add(e.event_id)
 
-            yield self._handle_new_events(
-                origin,
-                event_infos,
-                outliers=True
-            )
+                yield self._handle_new_events(
+                    origin,
+                    event_infos,
+                    outliers=True
+                )
 
-        try:
-            _, event_stream_id, max_stream_id = yield self._handle_new_event(
-                origin,
-                event,
-                state=state,
-                backfilled=backfilled,
-                current_state=current_state,
-            )
-        except AuthError as e:
-            raise FederationError(
-                "ERROR",
-                e.code,
-                e.msg,
-                affected=event.event_id,
-            )
+            try:
+                _, event_stream_id, max_stream_id = yield self._handle_new_event(
+                    origin,
+                    event,
+                    state=state,
+                    backfilled=backfilled,
+                    current_state=current_state,
+                )
+            except AuthError as e:
+                raise FederationError(
+                    "ERROR",
+                    e.code,
+                    e.msg,
+                    affected=event.event_id,
+                )
 
         # if we're receiving valid events from an origin,
         # it's probably a good idea to mark it as not in retry-state
@@ -230,7 +241,7 @@ class FederationHandler(BaseHandler):
     @defer.inlineCallbacks
     def _filter_events_for_server(self, server_name, room_id, events):
         event_to_state = yield self.store.get_state_for_events(
-            room_id, frozenset(e.event_id for e in events),
+            frozenset(e.event_id for e in events),
             types=(
                 (EventTypes.RoomHistoryVisibility, ""),
                 (EventTypes.Member, None),
@@ -553,7 +564,7 @@ class FederationHandler(BaseHandler):
 
     @log_function
     @defer.inlineCallbacks
-    def do_invite_join(self, target_hosts, room_id, joinee, content, snapshot):
+    def do_invite_join(self, target_hosts, room_id, joinee, content):
         """ Attempts to join the `joinee` to the room `room_id` via the
         server `target_host`.
 
@@ -569,49 +580,19 @@ class FederationHandler(BaseHandler):
 
         yield self.store.clean_room_for_join(room_id)
 
-        origin, pdu = yield self.replication_layer.make_join(
+        origin, event = yield self._make_and_verify_event(
             target_hosts,
             room_id,
-            joinee
+            joinee,
+            "join",
+            content,
         )
 
-        logger.debug("Got response to make_join: %s", pdu)
-
-        event = pdu
-
-        # We should assert some things.
-        # FIXME: Do this in a nicer way
-        assert(event.type == EventTypes.Member)
-        assert(event.user_id == joinee)
-        assert(event.state_key == joinee)
-        assert(event.room_id == room_id)
-
-        event.internal_metadata.outlier = False
-
         self.room_queues[room_id] = []
-
-        builder = self.event_builder_factory.new(
-            unfreeze(event.get_pdu_json())
-        )
-
         handled_events = set()
 
         try:
-            builder.event_id = self.event_builder_factory.create_event_id()
-            builder.origin = self.hs.hostname
-            builder.content = content
-
-            if not hasattr(event, "signatures"):
-                builder.signatures = {}
-
-            add_hashes_and_signatures(
-                builder,
-                self.hs.hostname,
-                self.hs.config.signing_key[0],
-            )
-
-            new_event = builder.build()
-
+            new_event = self._sign_event(event)
             # Try the host we successfully got a response to /make_join/
             # request first.
             try:
@@ -619,11 +600,7 @@ class FederationHandler(BaseHandler):
                 target_hosts.insert(0, origin)
             except ValueError:
                 pass
-
-            ret = yield self.replication_layer.send_join(
-                target_hosts,
-                new_event
-            )
+            ret = yield self.replication_layer.send_join(target_hosts, new_event)
 
             origin = ret["origin"]
             state = ret["state"]
@@ -649,35 +626,8 @@ class FederationHandler(BaseHandler):
                 # FIXME
                 pass
 
-            ev_infos = []
-            for e in itertools.chain(state, auth_chain):
-                if e.event_id == event.event_id:
-                    continue
-
-                e.internal_metadata.outlier = True
-                auth_ids = [e_id for e_id, _ in e.auth_events]
-                ev_infos.append({
-                    "event": e,
-                    "auth_events": {
-                        (e.type, e.state_key): e for e in auth_chain
-                        if e.event_id in auth_ids
-                    }
-                })
-
-            yield self._handle_new_events(origin, ev_infos, outliers=True)
-
-            auth_ids = [e_id for e_id, _ in event.auth_events]
-            auth_events = {
-                (e.type, e.state_key): e for e in auth_chain
-                if e.event_id in auth_ids
-            }
-
-            _, event_stream_id, max_stream_id = yield self._handle_new_event(
-                origin,
-                new_event,
-                state=state,
-                current_state=state,
-                auth_events=auth_events,
+            event_stream_id, max_stream_id = yield self._persist_auth_tree(
+                auth_chain, state, event
             )
 
             with PreserveLoggingContext():
@@ -714,12 +664,14 @@ class FederationHandler(BaseHandler):
     @log_function
     def on_make_join_request(self, room_id, user_id):
         """ We've received a /make_join/ request, so we create a partial
-        join event for the room and return that. We don *not* persist or
+        join event for the room and return that. We do *not* persist or
         process it until the other server has signed it and sent it back.
         """
+        event_content = {"membership": Membership.JOIN}
+
         builder = self.event_builder_factory.new({
             "type": EventTypes.Member,
-            "content": {"membership": Membership.JOIN},
+            "content": event_content,
             "room_id": room_id,
             "sender": user_id,
             "state_key": user_id,
@@ -865,6 +817,168 @@ class FederationHandler(BaseHandler):
         defer.returnValue(event)
 
     @defer.inlineCallbacks
+    def do_remotely_reject_invite(self, target_hosts, room_id, user_id):
+        origin, event = yield self._make_and_verify_event(
+            target_hosts,
+            room_id,
+            user_id,
+            "leave"
+        )
+        signed_event = self._sign_event(event)
+
+        # Try the host we successfully got a response to /make_leave/
+        # request first.
+        try:
+            target_hosts.remove(origin)
+            target_hosts.insert(0, origin)
+        except ValueError:
+            pass
+
+        yield self.replication_layer.send_leave(
+            target_hosts,
+            signed_event
+        )
+        defer.returnValue(None)
+
+    @defer.inlineCallbacks
+    def _make_and_verify_event(self, target_hosts, room_id, user_id, membership,
+                               content={},):
+        origin, pdu = yield self.replication_layer.make_membership_event(
+            target_hosts,
+            room_id,
+            user_id,
+            membership,
+            content,
+        )
+
+        logger.debug("Got response to make_%s: %s", membership, pdu)
+
+        event = pdu
+
+        # We should assert some things.
+        # FIXME: Do this in a nicer way
+        assert(event.type == EventTypes.Member)
+        assert(event.user_id == user_id)
+        assert(event.state_key == user_id)
+        assert(event.room_id == room_id)
+        defer.returnValue((origin, event))
+
+    def _sign_event(self, event):
+        event.internal_metadata.outlier = False
+
+        builder = self.event_builder_factory.new(
+            unfreeze(event.get_pdu_json())
+        )
+
+        builder.event_id = self.event_builder_factory.create_event_id()
+        builder.origin = self.hs.hostname
+
+        if not hasattr(event, "signatures"):
+            builder.signatures = {}
+
+        add_hashes_and_signatures(
+            builder,
+            self.hs.hostname,
+            self.hs.config.signing_key[0],
+        )
+
+        return builder.build()
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_make_leave_request(self, room_id, user_id):
+        """ We've received a /make_leave/ request, so we create a partial
+        leave event for the room and return that. We do *not* persist or
+        process it until the other server has signed it and sent it back.
+        """
+        builder = self.event_builder_factory.new({
+            "type": EventTypes.Member,
+            "content": {"membership": Membership.LEAVE},
+            "room_id": room_id,
+            "sender": user_id,
+            "state_key": user_id,
+        })
+
+        event, context = yield self._create_new_client_event(
+            builder=builder,
+        )
+
+        self.auth.check(event, auth_events=context.current_state)
+
+        defer.returnValue(event)
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_send_leave_request(self, origin, pdu):
+        """ We have received a leave event for a room. Fully process it."""
+        event = pdu
+
+        logger.debug(
+            "on_send_leave_request: Got event: %s, signatures: %s",
+            event.event_id,
+            event.signatures,
+        )
+
+        event.internal_metadata.outlier = False
+
+        context, event_stream_id, max_stream_id = yield self._handle_new_event(
+            origin, event
+        )
+
+        logger.debug(
+            "on_send_leave_request: After _handle_new_event: %s, sigs: %s",
+            event.event_id,
+            event.signatures,
+        )
+
+        extra_users = []
+        if event.type == EventTypes.Member:
+            target_user_id = event.state_key
+            target_user = UserID.from_string(target_user_id)
+            extra_users.append(target_user)
+
+        with PreserveLoggingContext():
+            d = self.notifier.on_new_room_event(
+                event, event_stream_id, max_stream_id, extra_users=extra_users
+            )
+
+        def log_failure(f):
+            logger.warn(
+                "Failed to notify about %s: %s",
+                event.event_id, f.value
+            )
+
+        d.addErrback(log_failure)
+
+        new_pdu = event
+
+        destinations = set()
+
+        for k, s in context.current_state.items():
+            try:
+                if k[0] == EventTypes.Member:
+                    if s.content["membership"] == Membership.LEAVE:
+                        destinations.add(
+                            UserID.from_string(s.state_key).domain
+                        )
+            except:
+                logger.warn(
+                    "Failed to get destination from event %s", s.event_id
+                )
+
+        destinations.discard(origin)
+
+        logger.debug(
+            "on_send_leave_request: Sending event: %s, signatures: %s",
+            event.event_id,
+            event.signatures,
+        )
+
+        self.replication_layer.send_pdu(new_pdu, destinations)
+
+        defer.returnValue(None)
+
+    @defer.inlineCallbacks
     def get_state_for_pdu(self, origin, room_id, event_id, do_auth=True):
         yield run_on_reactor()
 
@@ -986,8 +1100,6 @@ class FederationHandler(BaseHandler):
         context = yield self._prep_event(
             origin, event,
             state=state,
-            backfilled=backfilled,
-            current_state=current_state,
             auth_events=auth_events,
         )
 
@@ -1010,7 +1122,6 @@ class FederationHandler(BaseHandler):
                     origin,
                     ev_info["event"],
                     state=ev_info.get("state"),
-                    backfilled=backfilled,
                     auth_events=ev_info.get("auth_events"),
                 )
                 for ev_info in event_infos
@@ -1027,8 +1138,77 @@ class FederationHandler(BaseHandler):
         )
 
     @defer.inlineCallbacks
-    def _prep_event(self, origin, event, state=None, backfilled=False,
-                    current_state=None, auth_events=None):
+    def _persist_auth_tree(self, auth_events, state, event):
+        """Checks the auth chain is valid (and passes auth checks) for the
+        state and event. Then persists the auth chain and state atomically.
+        Persists the event separately.
+
+        Returns:
+            2-tuple of (event_stream_id, max_stream_id) from the persist_event
+            call for `event`
+        """
+        events_to_context = {}
+        for e in itertools.chain(auth_events, state):
+            ctx = yield self.state_handler.compute_event_context(
+                e, outlier=True,
+            )
+            events_to_context[e.event_id] = ctx
+            e.internal_metadata.outlier = True
+
+        event_map = {
+            e.event_id: e
+            for e in auth_events
+        }
+
+        create_event = None
+        for e in auth_events:
+            if (e.type, e.state_key) == (EventTypes.Create, ""):
+                create_event = e
+                break
+
+        for e in itertools.chain(auth_events, state, [event]):
+            auth_for_e = {
+                (event_map[e_id].type, event_map[e_id].state_key): event_map[e_id]
+                for e_id, _ in e.auth_events
+            }
+            if create_event:
+                auth_for_e[(EventTypes.Create, "")] = create_event
+
+            try:
+                self.auth.check(e, auth_events=auth_for_e)
+            except AuthError as err:
+                logger.warn(
+                    "Rejecting %s because %s",
+                    e.event_id, err.msg
+                )
+
+                if e == event:
+                    raise
+                events_to_context[e.event_id].rejected = RejectedReason.AUTH_ERROR
+
+        yield self.store.persist_events(
+            [
+                (e, events_to_context[e.event_id])
+                for e in itertools.chain(auth_events, state)
+            ],
+            is_new_state=False,
+        )
+
+        new_event_context = yield self.state_handler.compute_event_context(
+            event, old_state=state, outlier=False,
+        )
+
+        event_stream_id, max_stream_id = yield self.store.persist_event(
+            event, new_event_context,
+            backfilled=False,
+            is_new_state=True,
+            current_state=state,
+        )
+
+        defer.returnValue((event_stream_id, max_stream_id))
+
+    @defer.inlineCallbacks
+    def _prep_event(self, origin, event, state=None, auth_events=None):
         outlier = event.internal_metadata.is_outlier()
 
         context = yield self.state_handler.compute_event_context(
@@ -1061,6 +1241,10 @@ class FederationHandler(BaseHandler):
 
             context.rejected = RejectedReason.AUTH_ERROR
 
+        if event.type == EventTypes.GuestAccess:
+            full_context = yield self.store.get_current_state(room_id=event.room_id)
+            yield self.maybe_kick_guest_users(event, full_context)
+
         defer.returnValue(context)
 
     @defer.inlineCallbacks
@@ -1166,7 +1350,7 @@ class FederationHandler(BaseHandler):
                         auth_ids = [e_id for e_id, _ in e.auth_events]
                         auth = {
                             (e.type, e.state_key): e for e in remote_auth_chain
-                            if e.event_id in auth_ids
+                            if e.event_id in auth_ids or e.type == EventTypes.Create
                         }
                         e.internal_metadata.outlier = True
 
@@ -1284,6 +1468,7 @@ class FederationHandler(BaseHandler):
                                 (e.type, e.state_key): e
                                 for e in result["auth_chain"]
                                 if e.event_id in auth_ids
+                                or event.type == EventTypes.Create
                             }
                             ev.internal_metadata.outlier = True
 
@@ -1458,50 +1643,73 @@ class FederationHandler(BaseHandler):
         })
 
     @defer.inlineCallbacks
-    def _handle_auth_events(self, origin, auth_events):
-        auth_ids_to_deferred = {}
-
-        def process_auth_ev(ev):
-            auth_ids = [e_id for e_id, _ in ev.auth_events]
-
-            prev_ds = [
-                auth_ids_to_deferred[i]
-                for i in auth_ids
-                if i in auth_ids_to_deferred
-            ]
-
-            d = defer.Deferred()
+    @log_function
+    def exchange_third_party_invite(self, invite):
+        sender = invite["sender"]
+        room_id = invite["room_id"]
 
-            auth_ids_to_deferred[ev.event_id] = d
+        event_dict = {
+            "type": EventTypes.Member,
+            "content": {
+                "membership": Membership.INVITE,
+                "third_party_invite": invite,
+            },
+            "room_id": room_id,
+            "sender": sender,
+            "state_key": invite["mxid"],
+        }
+
+        if (yield self.auth.check_host_in_room(room_id, self.hs.hostname)):
+            builder = self.event_builder_factory.new(event_dict)
+            EventValidator().validate_new(builder)
+            event, context = yield self._create_new_client_event(builder=builder)
+            self.auth.check(event, context.current_state)
+            yield self._validate_keyserver(event, auth_events=context.current_state)
+            member_handler = self.hs.get_handlers().room_member_handler
+            yield member_handler.change_membership(event, context)
+        else:
+            destinations = set([x.split(":", 1)[-1] for x in (sender, room_id)])
+            yield self.replication_layer.forward_third_party_invite(
+                destinations,
+                room_id,
+                event_dict,
+            )
 
-            @defer.inlineCallbacks
-            def f(*_):
-                ev.internal_metadata.outlier = True
+    @defer.inlineCallbacks
+    @log_function
+    def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
+        builder = self.event_builder_factory.new(event_dict)
 
-                try:
-                    auth = {
-                        (e.type, e.state_key): e for e in auth_events
-                        if e.event_id in auth_ids
-                    }
+        event, context = yield self._create_new_client_event(
+            builder=builder,
+        )
 
-                    yield self._handle_new_event(
-                        origin, ev, auth_events=auth
-                    )
-                except:
-                    logger.exception(
-                        "Failed to handle auth event %s",
-                        ev.event_id,
-                    )
+        self.auth.check(event, auth_events=context.current_state)
+        yield self._validate_keyserver(event, auth_events=context.current_state)
 
-                d.callback(None)
+        returned_invite = yield self.send_invite(origin, event)
+        # TODO: Make sure the signatures actually are correct.
+        event.signatures.update(returned_invite.signatures)
+        member_handler = self.hs.get_handlers().room_member_handler
+        yield member_handler.change_membership(event, context)
 
-            if prev_ds:
-                dx = defer.DeferredList(prev_ds)
-                dx.addBoth(f)
-            else:
-                f()
+    @defer.inlineCallbacks
+    def _validate_keyserver(self, event, auth_events):
+        token = event.content["third_party_invite"]["signed"]["token"]
 
-        for e in auth_events:
-            process_auth_ev(e)
+        invite_event = auth_events.get(
+            (EventTypes.ThirdPartyInvite, token,)
+        )
 
-        yield defer.DeferredList(auth_ids_to_deferred.values())
+        try:
+            response = yield self.hs.get_simple_http_client().get_json(
+                invite_event.content["key_validity_url"],
+                {"public_key": invite_event.content["public_key"]}
+            )
+        except Exception:
+            raise SynapseError(
+                502,
+                "Third party certificate could not be checked"
+            )
+        if "valid" not in response or not response["valid"]:
+            raise AuthError(403, "Third party certificate was invalid")
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index f12465fa2c..14051aee99 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -16,13 +16,13 @@
 from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership
-from synapse.api.errors import RoomError, SynapseError
+from synapse.api.errors import SynapseError, AuthError, Codes
 from synapse.streams.config import PaginationConfig
 from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
 from synapse.util import unwrapFirstError
 from synapse.util.logcontext import PreserveLoggingContext
-from synapse.types import UserID, RoomStreamToken
+from synapse.types import UserID, RoomStreamToken, StreamToken
 
 from ._base import BaseHandler
 
@@ -71,34 +71,64 @@ class MessageHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def get_messages(self, user_id=None, room_id=None, pagin_config=None,
-                     feedback=False, as_client_event=True):
+                     as_client_event=True, is_guest=False):
         """Get messages in a room.
 
         Args:
             user_id (str): The user requesting messages.
             room_id (str): The room they want messages from.
             pagin_config (synapse.api.streams.PaginationConfig): The pagination
-            config rules to apply, if any.
-            feedback (bool): True to get compressed feedback with the messages
+                config rules to apply, if any.
             as_client_event (bool): True to get events in client-server format.
+            is_guest (bool): Whether the requesting user is a guest (as opposed
+                to a fully registered user).
         Returns:
             dict: Pagination API results
         """
-        yield self.auth.check_joined_room(room_id, user_id)
-
         data_source = self.hs.get_event_sources().sources["room"]
 
-        if not pagin_config.from_token:
+        if pagin_config.from_token:
+            room_token = pagin_config.from_token.room_key
+        else:
             pagin_config.from_token = (
                 yield self.hs.get_event_sources().get_current_token(
                     direction='b'
                 )
             )
+            room_token = pagin_config.from_token.room_key
 
-        room_token = RoomStreamToken.parse(pagin_config.from_token.room_key)
+        room_token = RoomStreamToken.parse(room_token)
         if room_token.topological is None:
             raise SynapseError(400, "Invalid token")
 
+        pagin_config.from_token = pagin_config.from_token.copy_and_replace(
+            "room_key", str(room_token)
+        )
+
+        source_config = pagin_config.get_source_config("room")
+
+        if not is_guest:
+            member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+            if member_event.membership == Membership.LEAVE:
+                # If they have left the room then clamp the token to be before
+                # they left the room.
+                # If they're a guest, we'll just 403 them if they're asking for
+                # events they can't see.
+                leave_token = yield self.store.get_topological_token_for_event(
+                    member_event.event_id
+                )
+                leave_token = RoomStreamToken.parse(leave_token)
+                if leave_token.topological < room_token.topological:
+                    source_config.from_key = str(leave_token)
+
+                if source_config.direction == "f":
+                    if source_config.to_key is None:
+                        source_config.to_key = str(leave_token)
+                    else:
+                        to_token = RoomStreamToken.parse(source_config.to_key)
+                        if leave_token.topological < to_token.topological:
+                            source_config.to_key = str(leave_token)
+
         yield self.hs.get_handlers().federation_handler.maybe_backfill(
             room_id, room_token.topological
         )
@@ -106,7 +136,7 @@ class MessageHandler(BaseHandler):
         user = UserID.from_string(user_id)
 
         events, next_key = yield data_source.get_pagination_rows(
-            user, pagin_config.get_source_config("room"), room_id
+            user, source_config, room_id
         )
 
         next_token = pagin_config.from_token.copy_and_replace(
@@ -120,7 +150,7 @@ class MessageHandler(BaseHandler):
                 "end": next_token.to_string(),
             })
 
-        events = yield self._filter_events_for_client(user_id, room_id, events)
+        events = yield self._filter_events_for_client(user_id, events, is_guest=is_guest)
 
         time_now = self.clock.time_msec()
 
@@ -136,54 +166,8 @@ class MessageHandler(BaseHandler):
         defer.returnValue(chunk)
 
     @defer.inlineCallbacks
-    def _filter_events_for_client(self, user_id, room_id, events):
-        event_id_to_state = yield self.store.get_state_for_events(
-            room_id, frozenset(e.event_id for e in events),
-            types=(
-                (EventTypes.RoomHistoryVisibility, ""),
-                (EventTypes.Member, user_id),
-            )
-        )
-
-        def allowed(event, state):
-            if event.type == EventTypes.RoomHistoryVisibility:
-                return True
-
-            membership_ev = state.get((EventTypes.Member, user_id), None)
-            if membership_ev:
-                membership = membership_ev.membership
-            else:
-                membership = Membership.LEAVE
-
-            if membership == Membership.JOIN:
-                return True
-
-            history = state.get((EventTypes.RoomHistoryVisibility, ''), None)
-            if history:
-                visibility = history.content.get("history_visibility", "shared")
-            else:
-                visibility = "shared"
-
-            if visibility == "public":
-                return True
-            elif visibility == "shared":
-                return True
-            elif visibility == "joined":
-                return membership == Membership.JOIN
-            elif visibility == "invited":
-                return membership == Membership.INVITE
-
-            return True
-
-        defer.returnValue([
-            event
-            for event in events
-            if allowed(event, event_id_to_state[event.event_id])
-        ])
-
-    @defer.inlineCallbacks
     def create_and_send_event(self, event_dict, ratelimit=True,
-                              client=None, txn_id=None):
+                              token_id=None, txn_id=None, is_guest=False):
         """ Given a dict from a client, create and handle a new event.
 
         Creates a FrozenEvent object, filling out auth_events, prev_events,
@@ -217,11 +201,8 @@ class MessageHandler(BaseHandler):
                     builder.content
                 )
 
-        if client is not None:
-            if client.token_id is not None:
-                builder.internal_metadata.token_id = client.token_id
-            if client.device_id is not None:
-                builder.internal_metadata.device_id = client.device_id
+        if token_id is not None:
+            builder.internal_metadata.token_id = token_id
 
         if txn_id is not None:
             builder.internal_metadata.txn_id = txn_id
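
For orientation, a hedged sketch of a call site for the new signature, where an access-token id and client transaction id are passed directly instead of the old client object (names here are illustrative):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def send_text(msg_handler, room_id, user_id, access_token_id, client_txn_id):
        # Hypothetical call site; msg_handler is a MessageHandler as above.
        event = yield msg_handler.create_and_send_event(
            {
                "type": "m.room.message",
                "room_id": room_id,
                "sender": user_id,
                "content": {"msgtype": "m.text", "body": "hello"},
            },
            token_id=access_token_id,  # recorded in internal_metadata, as above
            txn_id=client_txn_id,      # lets retries of the same txn be deduplicated
            is_guest=False,
        )
        defer.returnValue(event)
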
@@ -232,7 +213,7 @@ class MessageHandler(BaseHandler):
 
         if event.type == EventTypes.Member:
             member_handler = self.hs.get_handlers().room_member_handler
-            yield member_handler.change_membership(event, context)
+            yield member_handler.change_membership(event, context, is_guest=is_guest)
         else:
             yield self.handle_new_client_event(
                 event=event,
@@ -248,7 +229,7 @@ class MessageHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def get_room_data(self, user_id=None, room_id=None,
-                      event_type=None, state_key=""):
+                      event_type=None, state_key="", is_guest=False):
         """ Get data from a room.
 
         Args:
@@ -258,29 +239,55 @@ class MessageHandler(BaseHandler):
         Raises:
             SynapseError if something went wrong.
         """
-        have_joined = yield self.auth.check_joined_room(room_id, user_id)
-        if not have_joined:
-            raise RoomError(403, "User not in room.")
-
-        data = yield self.state_handler.get_current_state(
-            room_id, event_type, state_key
+        membership, membership_event_id = yield self._check_in_room_or_world_readable(
+            room_id, user_id, is_guest
         )
-        defer.returnValue(data)
 
-    @defer.inlineCallbacks
-    def get_feedback(self, event_id):
-        # yield self.auth.check_joined_room(room_id, user_id)
+        if membership == Membership.JOIN:
+            data = yield self.state_handler.get_current_state(
+                room_id, event_type, state_key
+            )
+        elif membership == Membership.LEAVE:
+            key = (event_type, state_key)
+            room_state = yield self.store.get_state_for_events(
+                [membership_event_id], [key]
+            )
+            data = room_state[membership_event_id].get(key)
 
-        # Pull out the feedback from the db
-        fb = yield self.store.get_feedback(event_id)
+        defer.returnValue(data)
 
-        if fb:
-            defer.returnValue(fb)
-        defer.returnValue(None)
+    @defer.inlineCallbacks
+    def _check_in_room_or_world_readable(self, room_id, user_id, is_guest):
+        try:
+            # check_user_was_in_room will return the most recent membership
+            # event for the user if:
+            #  * The user is a non-guest user, and was ever in the room
+            #  * The user is a guest user, and has joined the room
+            # else it will throw.
+            member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+            defer.returnValue((member_event.membership, member_event.event_id))
+            return
+        except AuthError, auth_error:
+            visibility = yield self.state_handler.get_current_state(
+                room_id, EventTypes.RoomHistoryVisibility, ""
+            )
+            if (
+                visibility and
+                visibility.content["history_visibility"] == "world_readable"
+            ):
+                defer.returnValue((Membership.JOIN, None))
+                return
+            if not is_guest:
+                raise auth_error
+            raise AuthError(
+                403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
+            )
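
A condensed restatement of what the helper above yields, for reference:

    # Outcome table for _check_in_room_or_world_readable(room_id, user_id,
    # is_guest), as implemented above:
    #
    #   user was ever in the room (guest: has joined)
    #       -> (that membership, membership event id)
    #   room's m.room.history_visibility is "world_readable"
    #       -> (Membership.JOIN, None)
    #   guest, room not world_readable
    #       -> AuthError(403, GUEST_ACCESS_FORBIDDEN)
    #   non-guest, room not world_readable
    #       -> the original AuthError from check_user_was_in_room
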
 
     @defer.inlineCallbacks
-    def get_state_events(self, user_id, room_id):
-        """Retrieve all state events for a given room.
+    def get_state_events(self, user_id, room_id, is_guest=False):
+        """Retrieve all state events for a given room. If the user is
+        joined to the room then return the current state. If the user has
+        left the room return the state events from when they left.
 
         Args:
             user_id(str): The user requesting state events.
@@ -288,18 +295,26 @@ class MessageHandler(BaseHandler):
         Returns:
             A list of dicts representing state events. [{}, {}, {}]
         """
-        yield self.auth.check_joined_room(room_id, user_id)
+        membership, membership_event_id = yield self._check_in_room_or_world_readable(
+            room_id, user_id, is_guest
+        )
+
+        if membership == Membership.JOIN:
+            room_state = yield self.state_handler.get_current_state(room_id)
+        elif membership == Membership.LEAVE:
+            room_state = yield self.store.get_state_for_events(
+                [membership_event_id], None
+            )
+            room_state = room_state[membership_event_id]
 
-        # TODO: This is duplicating logic from snapshot_all_rooms
-        current_state = yield self.state_handler.get_current_state(room_id)
         now = self.clock.time_msec()
         defer.returnValue(
-            [serialize_event(c, now) for c in current_state.values()]
+            [serialize_event(c, now) for c in room_state.values()]
         )
 
     @defer.inlineCallbacks
     def snapshot_all_rooms(self, user_id=None, pagin_config=None,
-                           feedback=False, as_client_event=True):
+                           as_client_event=True, include_archived=False):
         """Retrieve a snapshot of all rooms the user is invited or has joined.
 
         This snapshot may include messages for all rooms where the user is
@@ -309,17 +324,20 @@ class MessageHandler(BaseHandler):
             user_id (str): The ID of the user making the request.
             pagin_config (synapse.api.streams.PaginationConfig): The pagination
             config used to determine how many messages *PER ROOM* to return.
-            feedback (bool): True to get feedback along with these messages.
             as_client_event (bool): True to get events in client-server format.
+            include_archived (bool): True to get rooms that the user has left
         Returns:
             A list of dicts with "room_id" and "membership" keys for all rooms
             the user is currently invited to or joined in. Rooms the user is
             joined to may also return a "messages" key with messages, depending
             on the specified PaginationConfig.
         """
+        memberships = [Membership.INVITE, Membership.JOIN]
+        if include_archived:
+            memberships.append(Membership.LEAVE)
+
         room_list = yield self.store.get_rooms_for_user_where_membership_is(
-            user_id=user_id,
-            membership_list=[Membership.INVITE, Membership.JOIN]
+            user_id=user_id, membership_list=memberships
         )
 
         user = UserID.from_string(user_id)
@@ -339,6 +357,8 @@ class MessageHandler(BaseHandler):
             user, pagination_config.get_source_config("receipt"), None
         )
 
+        tags_by_room = yield self.store.get_tags_for_user(user_id)
+
         public_room_ids = yield self.store.get_public_room_ids()
 
         limit = pagin_config.limit
@@ -357,28 +377,45 @@ class MessageHandler(BaseHandler):
             }
 
             if event.membership == Membership.INVITE:
+                time_now = self.clock.time_msec()
                 d["inviter"] = event.sender
 
+                invite_event = yield self.store.get_event(event.event_id)
+                d["invite"] = serialize_event(invite_event, time_now, as_client_event)
+
             rooms_ret.append(d)
 
-            if event.membership != Membership.JOIN:
+            if event.membership not in (Membership.JOIN, Membership.LEAVE):
                 return
+
             try:
+                if event.membership == Membership.JOIN:
+                    room_end_token = now_token.room_key
+                    deferred_room_state = self.state_handler.get_current_state(
+                        event.room_id
+                    )
+                elif event.membership == Membership.LEAVE:
+                    room_end_token = "s%d" % (event.stream_ordering,)
+                    deferred_room_state = self.store.get_state_for_events(
+                        [event.event_id], None
+                    )
+                    deferred_room_state.addCallback(
+                        lambda states: states[event.event_id]
+                    )
+
                 (messages, token), current_state = yield defer.gatherResults(
                     [
                         self.store.get_recent_events_for_room(
                             event.room_id,
                             limit=limit,
-                            end_token=now_token.room_key,
-                        ),
-                        self.state_handler.get_current_state(
-                            event.room_id
+                            end_token=room_end_token,
                         ),
+                        deferred_room_state,
                     ]
                 ).addErrback(unwrapFirstError)
 
                 messages = yield self._filter_events_for_client(
-                    user_id, event.room_id, messages
+                    user_id, messages
                 )
 
                 start_token = now_token.copy_and_replace("room_key", token[0])
@@ -398,6 +435,15 @@ class MessageHandler(BaseHandler):
                     serialize_event(c, time_now, as_client_event)
                     for c in current_state.values()
                 ]
+
+                private_user_data = []
+                tags = tags_by_room.get(event.room_id)
+                if tags:
+                    private_user_data.append({
+                        "type": "m.tag",
+                        "content": {"tags": tags},
+                    })
+                d["private_user_data"] = private_user_data
             except:
                 logger.exception("Failed to get snapshot")
 
@@ -420,15 +466,99 @@ class MessageHandler(BaseHandler):
         defer.returnValue(ret)
 
     @defer.inlineCallbacks
-    def room_initial_sync(self, user_id, room_id, pagin_config=None,
-                          feedback=False):
-        current_state = yield self.state.get_current_state(
-            room_id=room_id,
+    def room_initial_sync(self, user_id, room_id, pagin_config=None, is_guest=False):
+        """Capture the a snapshot of a room. If user is currently a member of
+        the room this will be what is currently in the room. If the user left
+        the room this will be what was in the room when they left.
+
+        Args:
+            user_id(str): The user to get a snapshot for.
+            room_id(str): The room to get a snapshot of.
+            pagin_config(synapse.streams.config.PaginationConfig):
+                The pagination config used to determine how many messages to
+                return.
+        Raises:
+            AuthError if the user wasn't in the room.
+        Returns:
+            A JSON serialisable dict with the snapshot of the room.
+        """
+
+        membership, member_event_id = yield self._check_in_room_or_world_readable(
+            room_id,
+            user_id,
+            is_guest
         )
 
-        yield self.auth.check_joined_room(
-            room_id, user_id,
-            current_state=current_state
+        if membership == Membership.JOIN:
+            result = yield self._room_initial_sync_joined(
+                user_id, room_id, pagin_config, membership, is_guest
+            )
+        elif membership == Membership.LEAVE:
+            result = yield self._room_initial_sync_parted(
+                user_id, room_id, pagin_config, membership, member_event_id, is_guest
+            )
+
+        private_user_data = []
+        tags = yield self.store.get_tags_for_room(user_id, room_id)
+        if tags:
+            private_user_data.append({
+                "type": "m.tag",
+                "content": {"tags": tags},
+            })
+        result["private_user_data"] = private_user_data
+
+        defer.returnValue(result)
+
+    @defer.inlineCallbacks
+    def _room_initial_sync_parted(self, user_id, room_id, pagin_config,
+                                  membership, member_event_id, is_guest):
+        room_state = yield self.store.get_state_for_events(
+            [member_event_id], None
+        )
+
+        room_state = room_state[member_event_id]
+
+        limit = pagin_config.limit if pagin_config else None
+        if limit is None:
+            limit = 10
+
+        stream_token = yield self.store.get_stream_token_for_event(
+            member_event_id
+        )
+
+        messages, token = yield self.store.get_recent_events_for_room(
+            room_id,
+            limit=limit,
+            end_token=stream_token
+        )
+
+        messages = yield self._filter_events_for_client(
+            user_id, messages, is_guest=is_guest
+        )
+
+        start_token = StreamToken(token[0], 0, 0, 0, 0)
+        end_token = StreamToken(token[1], 0, 0, 0, 0)
+
+        time_now = self.clock.time_msec()
+
+        defer.returnValue({
+            "membership": membership,
+            "room_id": room_id,
+            "messages": {
+                "chunk": [serialize_event(m, time_now) for m in messages],
+                "start": start_token.to_string(),
+                "end": end_token.to_string(),
+            },
+            "state": [serialize_event(s, time_now) for s in room_state.values()],
+            "presence": [],
+            "receipts": [],
+        })
+
+    @defer.inlineCallbacks
+    def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
+                                  membership, is_guest):
+        current_state = yield self.state.get_current_state(
+            room_id=room_id,
         )
 
         # TODO(paul): I wish I was called with user objects not user_id
@@ -442,8 +572,6 @@ class MessageHandler(BaseHandler):
             for x in current_state.values()
         ]
 
-        member_event = current_state.get((EventTypes.Member, user_id,))
-
         now_token = yield self.hs.get_event_sources().get_current_token()
 
         limit = pagin_config.limit if pagin_config else None
@@ -460,12 +588,14 @@ class MessageHandler(BaseHandler):
 
         @defer.inlineCallbacks
         def get_presence():
-            states = yield presence_handler.get_states(
-                target_users=[UserID.from_string(m.user_id) for m in room_members],
-                auth_user=auth_user,
-                as_event=True,
-                check_auth=False,
-            )
+            states = {}
+            if not is_guest:
+                states = yield presence_handler.get_states(
+                    target_users=[UserID.from_string(m.user_id) for m in room_members],
+                    auth_user=auth_user,
+                    as_event=True,
+                    check_auth=False,
+                )
 
             defer.returnValue(states.values())
 
@@ -485,7 +615,7 @@ class MessageHandler(BaseHandler):
         ).addErrback(unwrapFirstError)
 
         messages = yield self._filter_events_for_client(
-            user_id, room_id, messages
+            user_id, messages, is_guest=is_guest, require_all_visible_for_guests=False
         )
 
         start_token = now_token.copy_and_replace("room_key", token[0])
@@ -493,8 +623,7 @@ class MessageHandler(BaseHandler):
 
         time_now = self.clock.time_msec()
 
-        defer.returnValue({
-            "membership": member_event.membership,
+        ret = {
             "room_id": room_id,
             "messages": {
                 "chunk": [serialize_event(m, time_now) for m in messages],
@@ -504,4 +633,8 @@ class MessageHandler(BaseHandler):
             "state": state,
             "presence": presence,
             "receipts": receipts,
-        })
+        }
+        if not is_guest:
+            ret["membership"] = membership
+
+        defer.returnValue(ret)
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index e91e81831e..aca65096fc 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -378,7 +378,7 @@ class PresenceHandler(BaseHandler):
 
         # TODO(paul): perform a presence push as part of start/stop poll so
         #   we don't have to do this all the time
-        self.changed_presencelike_data(target_user, state)
+        yield self.changed_presencelike_data(target_user, state)
 
     def bump_presence_active_time(self, user, now=None):
         if now is None:
@@ -422,12 +422,12 @@ class PresenceHandler(BaseHandler):
     @log_function
     def started_user_eventstream(self, user):
         # TODO(paul): Use "last online" state
-        self.set_state(user, user, {"presence": PresenceState.ONLINE})
+        return self.set_state(user, user, {"presence": PresenceState.ONLINE})
 
     @log_function
     def stopped_user_eventstream(self, user):
         # TODO(paul): Save current state as "last online" state
-        self.set_state(user, user, {"presence": PresenceState.OFFLINE})
+        return self.set_state(user, user, {"presence": PresenceState.OFFLINE})
 
     @defer.inlineCallbacks
     def user_joined_room(self, user, room_id):
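
All three changes above stop discarding the Deferred, so failures now propagate to the caller instead of vanishing. A generic Twisted sketch of the difference:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def might_fail():
        raise RuntimeError("boom")
        yield None  # unreachable; makes this a generator-based coroutine

    @defer.inlineCallbacks
    def caller():
        might_fail()        # fire-and-forget: the RuntimeError is swallowed
        yield might_fail()  # yielded: the RuntimeError raises here, in the caller
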
@@ -950,7 +950,8 @@ class PresenceHandler(BaseHandler):
                 )
                 while len(self._remote_offline_serials) > MAX_OFFLINE_SERIALS:
                     self._remote_offline_serials.pop()  # remove the oldest
-                del self._user_cachemap[user]
+                if user in self._user_cachemap:
+                    del self._user_cachemap[user]
             else:
                 # Remove the user from remote_offline_serials now that they're
                 # no longer offline
@@ -1142,8 +1143,9 @@ class PresenceEventSource(object):
 
     @defer.inlineCallbacks
     @log_function
-    def get_new_events_for_user(self, user, from_key, limit):
+    def get_new_events(self, user, from_key, room_ids=None, **kwargs):
         from_key = int(from_key)
+        room_ids = room_ids or []
 
         presence = self.hs.get_handlers().presence_handler
         cachemap = presence._user_cachemap
@@ -1161,7 +1163,6 @@ class PresenceEventSource(object):
             user_ids_to_check |= set(
                 UserID.from_string(p["observed_user_id"]) for p in presence_list
             )
-        room_ids = yield presence.get_joined_rooms_for_user(user)
         for room_id in set(room_ids) & set(presence._room_serials):
             if presence._room_serials[room_id] > from_key:
                 joined = yield presence.get_joined_users_for_room_id(room_id)
@@ -1263,6 +1264,11 @@ class UserPresenceCache(object):
         self.state = {"presence": PresenceState.OFFLINE}
         self.serial = None
 
+    def __repr__(self):
+        return "UserPresenceCache(state=%r, serial=%r)" % (
+            self.state, self.serial
+        )
+
     def update(self, state, serial):
         assert("mtime_age" not in state)
 
diff --git a/synapse/handlers/private_user_data.py b/synapse/handlers/private_user_data.py
new file mode 100644
index 0000000000..1abe45ed7b
--- /dev/null
+++ b/synapse/handlers/private_user_data.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+
+class PrivateUserDataEventSource(object):
+    def __init__(self, hs):
+        self.store = hs.get_datastore()
+
+    def get_current_key(self, direction='f'):
+        return self.store.get_max_private_user_data_stream_id()
+
+    @defer.inlineCallbacks
+    def get_new_events(self, user, from_key, **kwargs):
+        user_id = user.to_string()
+        last_stream_id = from_key
+
+        current_stream_id = yield self.store.get_max_private_user_data_stream_id()
+        tags = yield self.store.get_updated_tags(user_id, last_stream_id)
+
+        results = []
+        for room_id, room_tags in tags.items():
+            results.append({
+                "type": "m.tag",
+                "content": {"tags": room_tags},
+                "room_id": room_id,
+            })
+
+        defer.returnValue((results, current_stream_id))
+
+    @defer.inlineCallbacks
+    def get_pagination_rows(self, user, config, key):
+        defer.returnValue(([], config.to_id))
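
For a sense of what this source streams, a sketch of one element of `results` (room id, tag name and order value are invented):

    # One element of the `results` list built above.
    tag_update = {
        "type": "m.tag",
        "room_id": "!abc123:example.com",
        "content": {
            "tags": {
                "work": {"order": 0.5},
            },
        },
    }
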
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index 86c911c4bf..973f4d5cae 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -156,13 +156,7 @@ class ReceiptsHandler(BaseHandler):
         if not result:
             defer.returnValue([])
 
-        event = {
-            "type": "m.receipt",
-            "room_id": room_id,
-            "content": result,
-        }
-
-        defer.returnValue([event])
+        defer.returnValue(result)
 
 
 class ReceiptEventSource(object):
@@ -170,17 +164,15 @@ class ReceiptEventSource(object):
         self.store = hs.get_datastore()
 
     @defer.inlineCallbacks
-    def get_new_events_for_user(self, user, from_key, limit):
+    def get_new_events(self, from_key, room_ids, **kwargs):
         from_key = int(from_key)
         to_key = yield self.get_current_key()
 
         if from_key == to_key:
             defer.returnValue(([], to_key))
 
-        rooms = yield self.store.get_rooms_for_user(user.to_string())
-        rooms = [room.room_id for room in rooms]
         events = yield self.store.get_linearized_receipts_for_rooms(
-            rooms,
+            room_ids,
             from_key=from_key,
             to_key=to_key,
         )
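
Together with the presence source above and the room source below, this moves every event source onto one shared get_new_events shape; a summary interface inferred from the call sites in this diff (not a literal class from it):

    # Inferred common interface; not a literal class from this changeset.
    class EventSource(object):
        def get_new_events(self, user, from_key, limit=None, room_ids=None,
                           is_guest=False, **kwargs):
            """Return (events, new_key) for `user` since `from_key`, limited
            to the rooms in `room_ids` rather than re-fetching the join list.
            """

        def get_current_key(self):
            """Return the source's current stream position."""
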
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 86390a3671..493a087031 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -25,8 +25,6 @@ import synapse.util.stringutils as stringutils
 from synapse.util.async import run_on_reactor
 from synapse.http.client import CaptchaServerHttpClient
 
-import base64
-import bcrypt
 import logging
 import urllib
 
@@ -66,7 +64,7 @@ class RegistrationHandler(BaseHandler):
             )
 
     @defer.inlineCallbacks
-    def register(self, localpart=None, password=None):
+    def register(self, localpart=None, password=None, generate_token=True):
         """Registers a new client on the server.
 
         Args:
@@ -83,7 +81,7 @@ class RegistrationHandler(BaseHandler):
         yield run_on_reactor()
         password_hash = None
         if password:
-            password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
+            password_hash = self.auth_handler().hash(password)
 
         if localpart:
             yield self.check_username(localpart)
@@ -91,7 +89,9 @@ class RegistrationHandler(BaseHandler):
             user = UserID(localpart, self.hs.hostname)
             user_id = user.to_string()
 
-            token = self.generate_token(user_id)
+            token = None
+            if generate_token:
+                token = self.auth_handler().generate_access_token(user_id)
             yield self.store.register(
                 user_id=user_id,
                 token=token,
@@ -104,14 +104,14 @@ class RegistrationHandler(BaseHandler):
             attempts = 0
             user_id = None
             token = None
-            while not user_id and not token:
+            while not user_id:
                 try:
                     localpart = self._generate_user_id()
                     user = UserID(localpart, self.hs.hostname)
                     user_id = user.to_string()
                     yield self.check_user_id_is_valid(user_id)
-
-                    token = self.generate_token(user_id)
+                    if generate_token:
+                        token = self.auth_handler().generate_access_token(user_id)
                     yield self.store.register(
                         user_id=user_id,
                         token=token,
@@ -161,7 +161,7 @@ class RegistrationHandler(BaseHandler):
                 400, "Invalid user localpart for this application service.",
                 errcode=Codes.EXCLUSIVE
             )
-        token = self.generate_token(user_id)
+        token = self.auth_handler().generate_access_token(user_id)
         yield self.store.register(
             user_id=user_id,
             token=token,
@@ -208,7 +208,7 @@ class RegistrationHandler(BaseHandler):
         user_id = user.to_string()
 
         yield self.check_user_id_is_valid(user_id)
-        token = self.generate_token(user_id)
+        token = self.auth_handler().generate_access_token(user_id)
         try:
             yield self.store.register(
                 user_id=user_id,
@@ -273,13 +273,6 @@ class RegistrationHandler(BaseHandler):
                     errcode=Codes.EXCLUSIVE
                 )
 
-    def generate_token(self, user_id):
-        # urlsafe variant uses _ and - so use . as the separator and replace
-        # all =s with .s so http clients don't quote =s when it is used as
-        # query params.
-        return (base64.urlsafe_b64encode(user_id).replace('=', '.') + '.' +
-                stringutils.random_string(18))
-
     def _generate_user_id(self):
         return "-" + stringutils.random_string(18)
 
@@ -322,3 +315,6 @@ class RegistrationHandler(BaseHandler):
             }
         )
         defer.returnValue(data)
+
+    def auth_handler(self):
+        return self.hs.get_handlers().auth_handler
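
With generate_token=False the row is stored with token=None so a caller can mint the access token separately. A sketch of the two modes, assuming register() still returns a (user_id, token) pair:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def register_users(handler):
        # `handler` is a RegistrationHandler; localparts are illustrative.
        user_id, token = yield handler.register(
            localpart="alice", password="s3cret",   # token minted inline as before
        )

        user_id, _ = yield handler.register(
            localpart="bob", generate_token=False,  # stored with token=None; the
        )                                           # caller mints one later via
                                                    # auth_handler.generate_access_token
        defer.returnValue(user_id)
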
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index c5d1001b50..3f04752581 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -22,26 +22,38 @@ from synapse.types import UserID, RoomAlias, RoomID
 from synapse.api.constants import (
     EventTypes, Membership, JoinRules, RoomCreationPreset,
 )
-from synapse.api.errors import StoreError, SynapseError
+from synapse.api.errors import AuthError, StoreError, SynapseError
 from synapse.util import stringutils, unwrapFirstError
 from synapse.util.async import run_on_reactor
-from synapse.events.utils import serialize_event
+
+from signedjson.sign import verify_signed_json
+from signedjson.key import decode_verify_key_bytes
 
 from collections import OrderedDict
+from unpaddedbase64 import decode_base64
+
 import logging
+import math
 import string
 
 logger = logging.getLogger(__name__)
 
+id_server_scheme = "https://"
+
 
 class RoomCreationHandler(BaseHandler):
 
     PRESETS_DICT = {
         RoomCreationPreset.PRIVATE_CHAT: {
             "join_rules": JoinRules.INVITE,
-            "history_visibility": "invited",
+            "history_visibility": "shared",
             "original_invitees_have_ops": False,
         },
+        RoomCreationPreset.TRUSTED_PRIVATE_CHAT: {
+            "join_rules": JoinRules.INVITE,
+            "history_visibility": "shared",
+            "original_invitees_have_ops": True,
+        },
         RoomCreationPreset.PUBLIC_CHAT: {
             "join_rules": JoinRules.PUBLIC,
             "history_visibility": "shared",
@@ -150,12 +162,16 @@ class RoomCreationHandler(BaseHandler):
         for val in raw_initial_state:
             initial_state[(val["type"], val.get("state_key", ""))] = val["content"]
 
+        creation_content = config.get("creation_content", {})
+
         user = UserID.from_string(user_id)
         creation_events = self._create_events_for_new_room(
             user, room_id,
             preset_config=preset_config,
             invite_list=invite_list,
             initial_state=initial_state,
+            creation_content=creation_content,
+            room_alias=room_alias,
         )
 
         msg_handler = self.hs.get_handlers().message_handler
@@ -203,7 +219,8 @@ class RoomCreationHandler(BaseHandler):
         defer.returnValue(result)
 
     def _create_events_for_new_room(self, creator, room_id, preset_config,
-                                    invite_list, initial_state):
+                                    invite_list, initial_state, creation_content,
+                                    room_alias):
         config = RoomCreationHandler.PRESETS_DICT[preset_config]
 
         creator_id = creator.to_string()
@@ -225,9 +242,10 @@ class RoomCreationHandler(BaseHandler):
 
             return e
 
+        creation_content.update({"creator": creator.to_string()})
         creation_event = create(
             etype=EventTypes.Create,
-            content={"creator": creator.to_string()},
+            content=creation_content,
         )
 
         join_event = create(
@@ -272,6 +290,14 @@ class RoomCreationHandler(BaseHandler):
 
             returned_events.append(power_levels_event)
 
+        if room_alias and (EventTypes.CanonicalAlias, '') not in initial_state:
+            room_alias_event = create(
+                etype=EventTypes.CanonicalAlias,
+                content={"alias": room_alias.to_string()},
+            )
+
+            returned_events.append(room_alias_event)
+
         if (EventTypes.JoinRules, '') not in initial_state:
             join_rules_event = create(
                 etype=EventTypes.JoinRules,
@@ -343,42 +369,7 @@ class RoomMemberHandler(BaseHandler):
                     remotedomains.add(member.domain)
 
     @defer.inlineCallbacks
-    def get_room_members_as_pagination_chunk(self, room_id=None, user_id=None,
-                                             limit=0, start_tok=None,
-                                             end_tok=None):
-        """Retrieve a list of room members in the room.
-
-        Args:
-            room_id (str): The room to get the member list for.
-            user_id (str): The ID of the user making the request.
-            limit (int): The max number of members to return.
-            start_tok (str): Optional. The start token if known.
-            end_tok (str): Optional. The end token if known.
-        Returns:
-            dict: A Pagination streamable dict.
-        Raises:
-            SynapseError if something goes wrong.
-        """
-        yield self.auth.check_joined_room(room_id, user_id)
-
-        member_list = yield self.store.get_room_members(room_id=room_id)
-        time_now = self.clock.time_msec()
-        event_list = [
-            serialize_event(entry, time_now)
-            for entry in member_list
-        ]
-        chunk_data = {
-            "start": "START",  # FIXME (erikj): START is no longer valid
-            "end": "END",
-            "chunk": event_list
-        }
-        # TODO honor Pagination stream params
-        # TODO snapshot this list to return on subsequent requests when
-        # paginating
-        defer.returnValue(chunk_data)
-
-    @defer.inlineCallbacks
-    def change_membership(self, event, context, do_auth=True):
+    def change_membership(self, event, context, do_auth=True, is_guest=False):
         """ Change the membership status of a user in a room.
 
         Args:
@@ -399,9 +390,38 @@ class RoomMemberHandler(BaseHandler):
         # if this HS is not currently in the room, i.e. we have to do the
         # invite/join dance.
         if event.membership == Membership.JOIN:
+            if is_guest:
+                guest_access = context.current_state.get(
+                    (EventTypes.GuestAccess, ""),
+                    None
+                )
+                is_guest_access_allowed = (
+                    guest_access
+                    and guest_access.content
+                    and "guest_access" in guest_access.content
+                    and guest_access.content["guest_access"] == "can_join"
+                )
+                if not is_guest_access_allowed:
+                    raise AuthError(403, "Guest access not allowed")
+
             yield self._do_join(event, context, do_auth=do_auth)
         else:
-            # This is not a JOIN, so we can handle it normally.
+            if event.membership == Membership.LEAVE:
+                is_host_in_room = yield self.is_host_in_room(room_id, context)
+                if not is_host_in_room:
+                    # Rejecting an invite, rather than leaving a joined room
+                    handler = self.hs.get_handlers().federation_handler
+                    inviter = yield self.get_inviter(event)
+                    if not inviter:
+                        # return the same error as join_room_alias does
+                        raise SynapseError(404, "No known servers")
+                    yield handler.do_remotely_reject_invite(
+                        [inviter.domain],
+                        room_id,
+                        event.user_id
+                    )
+                    defer.returnValue({"room_id": room_id})
+                    return
 
             # FIXME: This isn't idempotent.
             if prev_state and prev_state.membership == event.membership:
@@ -425,7 +445,7 @@ class RoomMemberHandler(BaseHandler):
         defer.returnValue({"room_id": room_id})
 
     @defer.inlineCallbacks
-    def join_room_alias(self, joinee, room_alias, do_auth=True, content={}):
+    def join_room_alias(self, joinee, room_alias, content={}):
         directory_handler = self.hs.get_handlers().directory_handler
         mapping = yield directory_handler.get_association(room_alias)
 
@@ -459,8 +479,6 @@ class RoomMemberHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def _do_join(self, event, context, room_hosts=None, do_auth=True):
-        joinee = UserID.from_string(event.state_key)
-        # room_id = RoomID.from_string(event.room_id, self.hs)
         room_id = event.room_id
 
         # XXX: We don't do an auth check if we are doing an invite
@@ -468,41 +486,18 @@ class RoomMemberHandler(BaseHandler):
         # that we are allowed to join when we decide whether or not we
         # need to do the invite/join dance.
 
-        is_host_in_room = yield self.auth.check_host_in_room(
-            event.room_id,
-            self.hs.hostname
-        )
-        if not is_host_in_room:
-            # is *anyone* in the room?
-            room_member_keys = [
-                v for (k, v) in context.current_state.keys() if (
-                    k == "m.room.member"
-                )
-            ]
-            if len(room_member_keys) == 0:
-                # has the room been created so we can join it?
-                create_event = context.current_state.get(("m.room.create", ""))
-                if create_event:
-                    is_host_in_room = True
-
+        is_host_in_room = yield self.is_host_in_room(room_id, context)
         if is_host_in_room:
             should_do_dance = False
         elif room_hosts:  # TODO: Shouldn't this be remote_room_host?
             should_do_dance = True
         else:
-            # TODO(markjh): get prev_state from snapshot
-            prev_state = yield self.store.get_room_member(
-                joinee.to_string(), room_id
-            )
-
-            if prev_state and prev_state.membership == Membership.INVITE:
-                inviter = UserID.from_string(prev_state.user_id)
-
-                should_do_dance = not self.hs.is_mine(inviter)
-                room_hosts = [inviter.domain]
-            else:
+            inviter = yield self.get_inviter(event)
+            if not inviter:
                 # return the same error as join_room_alias does
                 raise SynapseError(404, "No known servers")
+            should_do_dance = not self.hs.is_mine(inviter)
+            room_hosts = [inviter.domain]
 
         if should_do_dance:
             handler = self.hs.get_handlers().federation_handler
@@ -510,8 +505,7 @@ class RoomMemberHandler(BaseHandler):
                 room_hosts,
                 room_id,
                 event.user_id,
-                event.content,  # FIXME To get a non-frozen dict
-                context
+                event.content,
             )
         else:
             logger.debug("Doing normal join")
@@ -529,30 +523,42 @@ class RoomMemberHandler(BaseHandler):
         )
 
     @defer.inlineCallbacks
-    def _should_invite_join(self, room_id, prev_state, do_auth):
-        logger.debug("_should_invite_join: room_id: %s", room_id)
-
-        # XXX: We don't do an auth check if we are doing an invite
-        # join dance for now, since we're kinda implicitly checking
-        # that we are allowed to join when we decide whether or not we
-        # need to do the invite/join dance.
+    def get_inviter(self, event):
+        # TODO(markjh): get prev_state from snapshot
+        prev_state = yield self.store.get_room_member(
+            event.user_id, event.room_id
+        )
 
-        # Only do an invite join dance if a) we were invited,
-        # b) the person inviting was from a differnt HS and c) we are
-        # not currently in the room
-        room_host = None
         if prev_state and prev_state.membership == Membership.INVITE:
-            room = yield self.store.get_room(room_id)
-            inviter = UserID.from_string(
-                prev_state.sender
-            )
-
-            is_remote_invite_join = not self.hs.is_mine(inviter) and not room
-            room_host = inviter.domain
-        else:
-            is_remote_invite_join = False
+            defer.returnValue(UserID.from_string(prev_state.user_id))
+            return
+        elif "third_party_invite" in event.content:
+            if "sender" in event.content["third_party_invite"]:
+                inviter = UserID.from_string(
+                    event.content["third_party_invite"]["sender"]
+                )
+                defer.returnValue(inviter)
+        defer.returnValue(None)
 
-        defer.returnValue((is_remote_invite_join, room_host))
+    @defer.inlineCallbacks
+    def is_host_in_room(self, room_id, context):
+        is_host_in_room = yield self.auth.check_host_in_room(
+            room_id,
+            self.hs.hostname
+        )
+        if not is_host_in_room:
+            # is *anyone* in the room?
+            room_member_keys = [
+                v for (k, v) in context.current_state.keys() if (
+                    k == "m.room.member"
+                )
+            ]
+            if len(room_member_keys) == 0:
+                # has the room been created so we can join it?
+                create_event = context.current_state.get(("m.room.create", ""))
+                if create_event:
+                    is_host_in_room = True
+        defer.returnValue(is_host_in_room)
 
     @defer.inlineCallbacks
     def get_joined_rooms_for_user(self, user):
@@ -583,6 +589,160 @@ class RoomMemberHandler(BaseHandler):
             suppress_auth=(not do_auth),
         )
 
+    @defer.inlineCallbacks
+    def do_3pid_invite(
+            self,
+            room_id,
+            inviter,
+            medium,
+            address,
+            id_server,
+            token_id,
+            txn_id
+    ):
+        invitee = yield self._lookup_3pid(
+            id_server, medium, address
+        )
+
+        if invitee:
+            # make sure it looks like a user ID; it'll throw if it's invalid.
+            UserID.from_string(invitee)
+            yield self.hs.get_handlers().message_handler.create_and_send_event(
+                {
+                    "type": EventTypes.Member,
+                    "content": {
+                        "membership": unicode("invite")
+                    },
+                    "room_id": room_id,
+                    "sender": inviter.to_string(),
+                    "state_key": invitee,
+                },
+                token_id=token_id,
+                txn_id=txn_id,
+            )
+        else:
+            yield self._make_and_store_3pid_invite(
+                id_server,
+                medium,
+                address,
+                room_id,
+                inviter,
+                token_id,
+                txn_id=txn_id
+            )
+
+    @defer.inlineCallbacks
+    def _lookup_3pid(self, id_server, medium, address):
+        """Looks up a 3pid in the passed identity server.
+
+        Args:
+            id_server (str): The server name (including port, if required)
+                of the identity server to use.
+            medium (str): The type of the third party identifier (e.g. "email").
+            address (str): The third party identifier (e.g. "foo@example.com").
+
+        Returns:
+            (str) the matrix ID of the 3pid, or None if it is not recognized.
+        """
+        try:
+            data = yield self.hs.get_simple_http_client().get_json(
+                "%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server,),
+                {
+                    "medium": medium,
+                    "address": address,
+                }
+            )
+
+            if "mxid" in data:
+                if "signatures" not in data:
+                    raise AuthError(401, "No signatures on 3pid binding")
+                self.verify_any_signature(data, id_server)
+                defer.returnValue(data["mxid"])
+
+        except IOError as e:
+            logger.warn("Error from identity server lookup: %s" % (e,))
+            defer.returnValue(None)
+
+    @defer.inlineCallbacks
+    def verify_any_signature(self, data, server_hostname):
+        if server_hostname not in data["signatures"]:
+            raise AuthError(401, "No signature from server %s" % (server_hostname,))
+        for key_name, signature in data["signatures"][server_hostname].items():
+            key_data = yield self.hs.get_simple_http_client().get_json(
+                "%s%s/_matrix/identity/api/v1/pubkey/%s" %
+                (id_server_scheme, server_hostname, key_name,),
+            )
+            if "public_key" not in key_data:
+                raise AuthError(401, "No public key named %s from %s" %
+                                (key_name, server_hostname,))
+            verify_signed_json(
+                data,
+                server_hostname,
+                decode_verify_key_bytes(key_name, decode_base64(key_data["public_key"]))
+            )
+            return
+
+    @defer.inlineCallbacks
+    def _make_and_store_3pid_invite(
+            self,
+            id_server,
+            medium,
+            address,
+            room_id,
+            user,
+            token_id,
+            txn_id
+    ):
+        token, public_key, key_validity_url, display_name = (
+            yield self._ask_id_server_for_third_party_invite(
+                id_server,
+                medium,
+                address,
+                room_id,
+                user.to_string()
+            )
+        )
+        msg_handler = self.hs.get_handlers().message_handler
+        yield msg_handler.create_and_send_event(
+            {
+                "type": EventTypes.ThirdPartyInvite,
+                "content": {
+                    "display_name": display_name,
+                    "key_validity_url": key_validity_url,
+                    "public_key": public_key,
+                },
+                "room_id": room_id,
+                "sender": user.to_string(),
+                "state_key": token,
+            },
+            token_id=token_id,
+            txn_id=txn_id,
+        )
+
+    @defer.inlineCallbacks
+    def _ask_id_server_for_third_party_invite(
+            self, id_server, medium, address, room_id, sender):
+        is_url = "%s%s/_matrix/identity/api/v1/store-invite" % (
+            id_server_scheme, id_server,
+        )
+        data = yield self.hs.get_simple_http_client().post_urlencoded_get_json(
+            is_url,
+            {
+                "medium": medium,
+                "address": address,
+                "room_id": room_id,
+                "sender": sender,
+            }
+        )
+        # TODO: Check for success
+        token = data["token"]
+        public_key = data["public_key"]
+        display_name = data["display_name"]
+        key_validity_url = "%s%s/_matrix/identity/api/v1/pubkey/isvalid" % (
+            id_server_scheme, id_server,
+        )
+        defer.returnValue((token, public_key, key_validity_url, display_name))
+
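
Summarising the flow above: the 3pid is looked up at the identity server; if it is bound to a Matrix ID a normal invite is sent, otherwise a store-invite is requested and recorded as an m.room.third_party_invite state event. A sketch of the two HTTP exchanges (the server name and address are illustrative):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def sketch_3pid_flow(http_client, room_id, sender):
        # http_client stands in for hs.get_simple_http_client().
        lookup = yield http_client.get_json(
            "https://example.com/_matrix/identity/api/v1/lookup",
            {"medium": "email", "address": "foo@example.com"},
        )
        # -> {"mxid": "@foo:example.com", "signatures": {...}} when bound,
        #    in which case a normal m.room.member invite is sent.

        invite = yield http_client.post_urlencoded_get_json(
            "https://example.com/_matrix/identity/api/v1/store-invite",
            {"medium": "email", "address": "foo@example.com",
             "room_id": room_id, "sender": sender},
        )
        # -> {"token": ..., "public_key": ..., "display_name": ...}, stored as
        #    an m.room.third_party_invite state event keyed by the token.
        defer.returnValue((lookup, invite))
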
 
 class RoomListHandler(BaseHandler):
 
@@ -604,12 +764,79 @@ class RoomListHandler(BaseHandler):
         defer.returnValue({"start": "START", "end": "END", "chunk": chunk})
 
 
+class RoomContextHandler(BaseHandler):
+    @defer.inlineCallbacks
+    def get_event_context(self, user, room_id, event_id, limit, is_guest):
+        """Retrieves events, pagination tokens and state around a given event
+        in a room.
+
+        Args:
+            user (UserID)
+            room_id (str)
+            event_id (str)
+            limit (int): The maximum number of events to return in total
+                (excluding state).
+
+        Returns:
+            dict
+        """
+        before_limit = math.floor(limit/2.)
+        after_limit = limit - before_limit
+
+        now_token = yield self.hs.get_event_sources().get_current_token()
+
+        results = yield self.store.get_events_around(
+            room_id, event_id, before_limit, after_limit
+        )
+
+        results["events_before"] = yield self._filter_events_for_client(
+            user.to_string(),
+            results["events_before"],
+            is_guest=is_guest,
+            require_all_visible_for_guests=False
+        )
+
+        results["events_after"] = yield self._filter_events_for_client(
+            user.to_string(),
+            results["events_after"],
+            is_guest=is_guest,
+            require_all_visible_for_guests=False
+        )
+
+        if results["events_after"]:
+            last_event_id = results["events_after"][-1].event_id
+        else:
+            last_event_id = event_id
+
+        state = yield self.store.get_state_for_events(
+            [last_event_id], None
+        )
+        results["state"] = state[last_event_id].values()
+
+        results["start"] = now_token.copy_and_replace(
+            "room_key", results["start"]
+        ).to_string()
+
+        results["end"] = now_token.copy_and_replace(
+            "room_key", results["end"]
+        ).to_string()
+
+        defer.returnValue(results)
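
The window is split so that an odd limit puts the extra event after the anchor; a worked example of the arithmetic above:

    # How the context window is split around the anchor event.
    import math
    limit = 5
    before_limit = int(math.floor(limit / 2.))  # 2 events before the anchor
    after_limit = limit - before_limit          # 3 events after it
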
+
+
 class RoomEventSource(object):
     def __init__(self, hs):
         self.store = hs.get_datastore()
 
     @defer.inlineCallbacks
-    def get_new_events_for_user(self, user, from_key, limit):
+    def get_new_events(
+            self,
+            user,
+            from_key,
+            limit,
+            room_ids,
+            is_guest,
+    ):
         # We just ignore the key for now.
 
         to_key = yield self.get_current_key()
@@ -629,8 +856,9 @@ class RoomEventSource(object):
                 user_id=user.to_string(),
                 from_key=from_key,
                 to_key=to_key,
-                room_id=None,
                 limit=limit,
+                room_ids=room_ids,
+                is_guest=is_guest,
             )
 
         defer.returnValue((events, end_key))
@@ -646,7 +874,6 @@ class RoomEventSource(object):
             to_key=config.to_key,
             direction=config.direction,
             limit=config.limit,
-            with_feedback=True
         )
 
         defer.returnValue((events, next_key))
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
new file mode 100644
index 0000000000..b7545c111f
--- /dev/null
+++ b/synapse/handlers/search.py
@@ -0,0 +1,319 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from ._base import BaseHandler
+
+from synapse.api.constants import Membership
+from synapse.api.filtering import Filter
+from synapse.api.errors import SynapseError
+from synapse.events.utils import serialize_event
+
+from unpaddedbase64 import decode_base64, encode_base64
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchHandler(BaseHandler):
+
+    def __init__(self, hs):
+        super(SearchHandler, self).__init__(hs)
+
+    @defer.inlineCallbacks
+    def search(self, user, content, batch=None):
+        """Performs a full text search for a user.
+
+        Args:
+            user (UserID)
+            content (dict): Search parameters
+            batch (str): The next_batch parameter. Used for pagination.
+
+        Returns:
+            dict to be returned to the client with results of search
+        """
+
+        batch_group = None
+        batch_group_key = None
+        batch_token = None
+        if batch:
+            try:
+                b = decode_base64(batch)
+                batch_group, batch_group_key, batch_token = b.split("\n")
+
+                assert batch_group is not None
+                assert batch_group_key is not None
+                assert batch_token is not None
+            except:
+                raise SynapseError(400, "Invalid batch")
+
+        try:
+            room_cat = content["search_categories"]["room_events"]
+
+            # The actual thing to query in FTS
+            search_term = room_cat["search_term"]
+
+            # Which "keys" to search over in FTS query
+            keys = room_cat.get("keys", [
+                "content.body", "content.name", "content.topic",
+            ])
+
+            # Filter to apply to results
+            filter_dict = room_cat.get("filter", {})
+
+            # What to order results by (impacts whether pagination can be done)
+            order_by = room_cat.get("order_by", "rank")
+
+            # Include context around each event?
+            event_context = room_cat.get(
+                "event_context", None
+            )
+
+            # Group results together? May allow clients to paginate within a
+            # group
+            group_by = room_cat.get("groupings", {}).get("group_by", {})
+            group_keys = [g["key"] for g in group_by]
+
+            if event_context is not None:
+                before_limit = int(event_context.get(
+                    "before_limit", 5
+                ))
+                after_limit = int(event_context.get(
+                    "after_limit", 5
+                ))
+        except KeyError:
+            raise SynapseError(400, "Invalid search query")
+
+        if order_by not in ("rank", "recent"):
+            raise SynapseError(400, "Invalid order by: %r" % (order_by,))
+
+        if set(group_keys) - {"room_id", "sender"}:
+            raise SynapseError(
+                400,
+                "Invalid group by keys: %r" % (set(group_keys) - {"room_id", "sender"},)
+            )
+
+        search_filter = Filter(filter_dict)
+
+        # TODO: Search through left rooms too
+        rooms = yield self.store.get_rooms_for_user_where_membership_is(
+            user.to_string(),
+            membership_list=[Membership.JOIN],
+            # membership_list=[Membership.JOIN, Membership.LEAVE, Membership.BAN],
+        )
+        room_ids = set(r.room_id for r in rooms)
+
+        room_ids = search_filter.filter_rooms(room_ids)
+
+        if batch_group == "room_id":
+            room_ids.intersection_update({batch_group_key})
+
+        rank_map = {}  # event_id -> rank of event
+        allowed_events = []
+        room_groups = {}  # Holds result of grouping by room, if applicable
+        sender_group = {}  # Holds result of grouping by sender, if applicable
+
+        # Holds the next_batch for the entire result set if one of those exists
+        global_next_batch = None
+
+        if order_by == "rank":
+            results = yield self.store.search_msgs(
+                room_ids, search_term, keys
+            )
+
+            results_map = {r["event"].event_id: r for r in results}
+
+            rank_map.update({r["event"].event_id: r["rank"] for r in results})
+
+            filtered_events = search_filter.filter([r["event"] for r in results])
+
+            events = yield self._filter_events_for_client(
+                user.to_string(), filtered_events
+            )
+
+            events.sort(key=lambda e: -rank_map[e.event_id])
+            allowed_events = events[:search_filter.limit()]
+
+            for e in allowed_events:
+                rm = room_groups.setdefault(e.room_id, {
+                    "results": [],
+                    "order": rank_map[e.event_id],
+                })
+                rm["results"].append(e.event_id)
+
+                s = sender_group.setdefault(e.sender, {
+                    "results": [],
+                    "order": rank_map[e.event_id],
+                })
+                s["results"].append(e.event_id)
+
+        elif order_by == "recent":
+            # In this case we specifically loop through each room as the given
+            # limit applies to each room, rather than a global list.
+            # This is not necessarily a good idea.
+            for room_id in room_ids:
+                room_events = []
+                if batch_group == "room_id" and batch_group_key == room_id:
+                    pagination_token = batch_token
+                else:
+                    pagination_token = None
+                i = 0
+
+                # We keep looping and we keep filtering until we reach the limit
+                # or we run out of things.
+                # But only go around 5 times since otherwise synapse will be sad.
+                while len(room_events) < search_filter.limit() and i < 5:
+                    i += 1
+                    results = yield self.store.search_room(
+                        room_id, search_term, keys, search_filter.limit() * 2,
+                        pagination_token=pagination_token,
+                    )
+
+                    results_map = {r["event"].event_id: r for r in results}
+
+                    rank_map.update({r["event"].event_id: r["rank"] for r in results})
+
+                    filtered_events = search_filter.filter([
+                        r["event"] for r in results
+                    ])
+
+                    events = yield self._filter_events_for_client(
+                        user.to_string(), filtered_events
+                    )
+
+                    room_events.extend(events)
+                    room_events = room_events[:search_filter.limit()]
+
+                    if len(results) < search_filter.limit() * 2:
+                        pagination_token = None
+                        break
+                    else:
+                        pagination_token = results[-1]["pagination_token"]
+
+                if room_events:
+                    res = results_map[room_events[-1].event_id]
+                    pagination_token = res["pagination_token"]
+
+                    group = room_groups.setdefault(room_id, {})
+                    if pagination_token:
+                        next_batch = encode_base64("%s\n%s\n%s" % (
+                            "room_id", room_id, pagination_token
+                        ))
+                        group["next_batch"] = next_batch
+
+                        if batch_token:
+                            global_next_batch = next_batch
+
+                    group["results"] = [e.event_id for e in room_events]
+                    group["order"] = max(
+                        e.origin_server_ts/1000 for e in room_events
+                        if hasattr(e, "origin_server_ts")
+                    )
+
+                allowed_events.extend(room_events)
+
+            # Normalize the group orders
+            if room_groups:
+                if len(room_groups) > 1:
+                    mx = max(g["order"] for g in room_groups.values())
+                    mn = min(g["order"] for g in room_groups.values())
+
+                    for g in room_groups.values():
+                        g["order"] = (g["order"] - mn) * 1.0 / (mx - mn)
+                else:
+                    room_groups.values()[0]["order"] = 1
+
+        else:
+            # We should never get here due to the guard earlier.
+            raise NotImplementedError()
+
+        # If client has asked for "context" for each event (i.e. some surrounding
+        # events and state), fetch that
+        if event_context is not None:
+            now_token = yield self.hs.get_event_sources().get_current_token()
+
+            contexts = {}
+            for event in allowed_events:
+                res = yield self.store.get_events_around(
+                    event.room_id, event.event_id, before_limit, after_limit
+                )
+
+                res["events_before"] = yield self._filter_events_for_client(
+                    user.to_string(), res["events_before"]
+                )
+
+                res["events_after"] = yield self._filter_events_for_client(
+                    user.to_string(), res["events_after"]
+                )
+
+                res["start"] = now_token.copy_and_replace(
+                    "room_key", res["start"]
+                ).to_string()
+
+                res["end"] = now_token.copy_and_replace(
+                    "room_key", res["end"]
+                ).to_string()
+
+                contexts[event.event_id] = res
+        else:
+            contexts = {}
+
+        # TODO: Add a limit
+
+        time_now = self.clock.time_msec()
+
+        for context in contexts.values():
+            context["events_before"] = [
+                serialize_event(e, time_now)
+                for e in context["events_before"]
+            ]
+            context["events_after"] = [
+                serialize_event(e, time_now)
+                for e in context["events_after"]
+            ]
+
+        results = {
+            e.event_id: {
+                "rank": rank_map[e.event_id],
+                "result": serialize_event(e, time_now),
+                "context": contexts.get(e.event_id, {}),
+            }
+            for e in allowed_events
+        }
+
+        logger.info("Found %d results", len(results))
+
+        rooms_cat_res = {
+            "results": results,
+            "count": len(results)
+        }
+
+        if room_groups and "room_id" in group_keys:
+            rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups
+
+        if sender_group and "sender" in group_keys:
+            rooms_cat_res.setdefault("groups", {})["sender"] = sender_group
+
+        if global_next_batch:
+            rooms_cat_res["next_batch"] = global_next_batch
+
+        defer.returnValue({
+            "search_categories": {
+                "room_events": rooms_cat_res
+            }
+        })
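
A note on the next_batch tokens built above: they are unpadded base64 of three
newline-separated fields (the group name, the group key, and the store's
pagination token). A minimal sketch of the round-trip, assuming the
encode_base64 used above is unpaddedbase64's; the decode side is not part of
this diff, so parse_batch_token is an illustrative inverse (Python 2 string
semantics, as in the surrounding code):

    from unpaddedbase64 import encode_base64, decode_base64

    def make_batch_token(batch_group, batch_group_key, pagination_token):
        # Pack the three fields with newline separators, as the handler does.
        return encode_base64("%s\n%s\n%s" % (
            batch_group, batch_group_key, pagination_token
        ))

    def parse_batch_token(token):
        # Assumed inverse: split the decoded string back into three fields.
        return decode_base64(token).split("\n", 2)

    token = make_batch_token("room_id", "!abc:example.org", "t123-456")
    assert parse_batch_token(token) == ["room_id", "!abc:example.org", "t123-456"]
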
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 353a416054..6dc9d0fb92 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -28,23 +28,30 @@ logger = logging.getLogger(__name__)
 
 SyncConfig = collections.namedtuple("SyncConfig", [
     "user",
-    "client_info",
-    "limit",
-    "gap",
-    "sort",
-    "backfill",
     "filter",
 ])
 
 
-class RoomSyncResult(collections.namedtuple("RoomSyncResult", [
-    "room_id",
-    "limited",
-    "published",
-    "events",
-    "state",
+class TimelineBatch(collections.namedtuple("TimelineBatch", [
     "prev_batch",
+    "events",
+    "limited",
+])):
+    __slots__ = []
+
+    def __nonzero__(self):
+        """Make the result appear empty if there are no updates. This is used
+        to tell if the room needs to be part of the sync result.
+        """
+        return bool(self.events)
+
+
+class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [
+    "room_id",           # str
+    "timeline",          # TimelineBatch
+    "state",             # dict[(str, str), FrozenEvent]
     "ephemeral",
+    "private_user_data",
 ])):
     __slots__ = []
 
@@ -52,14 +59,50 @@ class RoomSyncResult(collections.namedtuple("RoomSyncResult", [
         """Make the result appear empty if there are no updates. This is used
         to tell if the room needs to be part of the sync result.
         """
-        return bool(self.events or self.state or self.ephemeral)
+        return bool(
+            self.timeline
+            or self.state
+            or self.ephemeral
+            or self.private_user_data
+        )
+
+
+class ArchivedSyncResult(collections.namedtuple("JoinedSyncResult", [
+    "room_id",            # str
+    "timeline",           # TimelineBatch
+    "state",              # dict[(str, str), FrozenEvent]
+    "private_user_data",
+])):
+    __slots__ = []
+
+    def __nonzero__(self):
+        """Make the result appear empty if there are no updates. This is used
+        to tell if the room needs to be part of the sync result.
+        """
+        return bool(
+            self.timeline
+            or self.state
+            or self.private_user_data
+        )
+
+
+class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [
+    "room_id",   # str
+    "invite",    # FrozenEvent: the invite event
+])):
+    __slots__ = []
+
+    def __nonzero__(self):
+        """Invited rooms should always be reported to the client"""
+        return True
 
 
 class SyncResult(collections.namedtuple("SyncResult", [
     "next_batch",  # Token for the next sync
-    "private_user_data",  # List of private events for the user.
-    "public_user_data",  # List of public events for all users.
-    "rooms",  # RoomSyncResult for each room.
+    "presence",  # List of presence events for the user.
+    "joined",  # JoinedSyncResult for each joined room.
+    "invited",  # InvitedSyncResult for each invited room.
+    "archived",  # ArchivedSyncResult for each archived room.
 ])):
     __slots__ = []
 
@@ -69,7 +112,7 @@ class SyncResult(collections.namedtuple("SyncResult", [
         events.
         """
         return bool(
-            self.private_user_data or self.public_user_data or self.rooms
+            self.presence or self.joined or self.invited or self.archived
         )
 
 
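
The __nonzero__ overrides above let the sync handler drop rooms with nothing
to report simply by truth-testing the result objects (see `if room_sync:`
further down). A self-contained sketch of the pattern; __bool__ is added here
only as the Python 3 spelling of the same hook:

    import collections

    class TimelineBatch(collections.namedtuple("TimelineBatch",
                                               ["prev_batch", "events", "limited"])):
        __slots__ = []

        def __nonzero__(self):
            # An empty timeline makes the whole batch falsy, so callers can
            # simply skip it.
            return bool(self.events)
        __bool__ = __nonzero__

    empty = TimelineBatch(prev_batch="s0", events=[], limited=False)
    full = TimelineBatch(prev_batch="s0", events=["$ev1:hs"], limited=False)
    assert full and not empty
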
@@ -81,67 +124,58 @@ class SyncHandler(BaseHandler):
         self.clock = hs.get_clock()
 
     @defer.inlineCallbacks
-    def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0):
+    def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0,
+                               full_state=False):
         """Get the sync for a client if we have new data for it now. Otherwise
         wait for new data to arrive on the server. If the timeout expires, then
         return an empty sync result.
         Returns:
             A Deferred SyncResult.
         """
-        if timeout == 0 or since_token is None:
-            result = yield self.current_sync_for_user(sync_config, since_token)
+
+        if timeout == 0 or since_token is None or full_state:
+            # we are going to return immediately, so don't bother calling
+            # notifier.wait_for_events.
+            result = yield self.current_sync_for_user(sync_config, since_token,
+                                                      full_state=full_state)
             defer.returnValue(result)
         else:
             def current_sync_callback(before_token, after_token):
                 return self.current_sync_for_user(sync_config, since_token)
 
-            rm_handler = self.hs.get_handlers().room_member_handler
-
-            app_service = yield self.store.get_app_service_by_user_id(
-                sync_config.user.to_string()
-            )
-            if app_service:
-                rooms = yield self.store.get_app_service_rooms(app_service)
-                room_ids = set(r.room_id for r in rooms)
-            else:
-                room_ids = yield rm_handler.get_joined_rooms_for_user(
-                    sync_config.user
-                )
-
             result = yield self.notifier.wait_for_events(
-                sync_config.user, room_ids,
-                sync_config.filter, timeout, current_sync_callback
+                sync_config.user, timeout, current_sync_callback,
+                from_token=since_token
             )
             defer.returnValue(result)
 
-    def current_sync_for_user(self, sync_config, since_token=None):
+    def current_sync_for_user(self, sync_config, since_token=None,
+                              full_state=False):
         """Get the sync for client needed to match what the server has now.
         Returns:
             A Deferred SyncResult.
         """
-        if since_token is None:
-            return self.initial_sync(sync_config)
+        if since_token is None or full_state:
+            return self.full_state_sync(sync_config, since_token)
         else:
-            if sync_config.gap:
-                return self.incremental_sync_with_gap(sync_config, since_token)
-            else:
-                # TODO(mjark): Handle gapless sync
-                raise NotImplementedError()
+            return self.incremental_sync_with_gap(sync_config, since_token)
 
     @defer.inlineCallbacks
-    def initial_sync(self, sync_config):
-        """Get a sync for a client which is starting without any state
+    def full_state_sync(self, sync_config, timeline_since_token):
+        """Get a sync for a client which is starting without any state.
+
+        If a 'timeline_since_token' is given, only timeline events which have
+        happened since that token will be returned.
+
         Returns:
             A Deferred SyncResult.
         """
-        if sync_config.sort == "timeline,desc":
-            # TODO(mjark): Handle going through events in reverse order?.
-            # What does "most recent events" mean when applying the limits mean
-            # in this case?
-            raise NotImplementedError()
-
         now_token = yield self.event_sources.get_current_token()
 
+        now_token, ephemeral_by_room = yield self.ephemeral_by_room(
+            sync_config, now_token
+        )
+
         presence_stream = self.event_sources.sources["presence"]
         # TODO (mjark): This looks wrong, shouldn't we be getting the presence
         # UP to the present rather than after the present?
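
wait_for_sync_for_user above is a standard long-poll: answer immediately on a
first sync, a zero timeout or a full_state request, and otherwise park the
request on the notifier until it believes new data may exist. A toy,
Twisted-free sketch of just the branching (all names illustrative, not
synapse APIs):

    def sync_or_wait(since_token, timeout, full_state, compute_now, wait):
        # Immediate path: first syncs, explicit full-state requests and
        # zero timeouts never block.
        if timeout == 0 or since_token is None or full_state:
            return compute_now(since_token, full_state)
        # Long-poll path: the notifier invokes the callback whenever it
        # thinks new events may have arrived, or when the timeout fires.
        return wait(timeout, lambda: compute_now(since_token, False))
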
@@ -153,52 +187,179 @@ class SyncHandler(BaseHandler):
         )
         room_list = yield self.store.get_rooms_for_user_where_membership_is(
             user_id=sync_config.user.to_string(),
-            membership_list=[Membership.INVITE, Membership.JOIN]
+            membership_list=(
+                Membership.INVITE,
+                Membership.JOIN,
+                Membership.LEAVE,
+                Membership.BAN
+            )
         )
 
-        # TODO (mjark): Does public mean "published"?
-        published_rooms = yield self.store.get_rooms(is_public=True)
-        published_room_ids = set(r["room_id"] for r in published_rooms)
+        tags_by_room = yield self.store.get_tags_for_user(
+            sync_config.user.to_string()
+        )
 
-        rooms = []
+        joined = []
+        invited = []
+        archived = []
         for event in room_list:
-            room_sync = yield self.initial_sync_for_room(
-                event.room_id, sync_config, now_token, published_room_ids
-            )
-            rooms.append(room_sync)
+            if event.membership == Membership.JOIN:
+                room_sync = yield self.full_state_sync_for_joined_room(
+                    room_id=event.room_id,
+                    sync_config=sync_config,
+                    now_token=now_token,
+                    timeline_since_token=timeline_since_token,
+                    ephemeral_by_room=ephemeral_by_room,
+                    tags_by_room=tags_by_room,
+                )
+                joined.append(room_sync)
+            elif event.membership == Membership.INVITE:
+                invite = yield self.store.get_event(event.event_id)
+                invited.append(InvitedSyncResult(
+                    room_id=event.room_id,
+                    invite=invite,
+                ))
+            elif event.membership in (Membership.LEAVE, Membership.BAN):
+                leave_token = now_token.copy_and_replace(
+                    "room_key", "s%d" % (event.stream_ordering,)
+                )
+                room_sync = yield self.full_state_sync_for_archived_room(
+                    sync_config=sync_config,
+                    room_id=event.room_id,
+                    leave_event_id=event.event_id,
+                    leave_token=leave_token,
+                    timeline_since_token=timeline_since_token,
+                    tags_by_room=tags_by_room,
+                )
+                archived.append(room_sync)
 
         defer.returnValue(SyncResult(
-            public_user_data=presence,
-            private_user_data=[],
-            rooms=rooms,
+            presence=presence,
+            joined=joined,
+            invited=invited,
+            archived=archived,
             next_batch=now_token,
         ))
 
     @defer.inlineCallbacks
-    def initial_sync_for_room(self, room_id, sync_config, now_token,
-                              published_room_ids):
+    def full_state_sync_for_joined_room(self, room_id, sync_config,
+                                        now_token, timeline_since_token,
+                                        ephemeral_by_room, tags_by_room):
         """Sync a room for a client which is starting without any state
         Returns:
-            A Deferred RoomSyncResult.
+            A Deferred JoinedSyncResult.
         """
 
-        recents, prev_batch_token, limited = yield self.load_filtered_recents(
-            room_id, sync_config, now_token,
+        batch = yield self.load_filtered_recents(
+            room_id, sync_config, now_token, since_token=timeline_since_token
         )
 
-        current_state = yield self.state_handler.get_current_state(
-            room_id
+        current_state = yield self.get_state_at(room_id, now_token)
+
+        defer.returnValue(JoinedSyncResult(
+            room_id=room_id,
+            timeline=batch,
+            state=current_state,
+            ephemeral=ephemeral_by_room.get(room_id, []),
+            private_user_data=self.private_user_data_for_room(
+                room_id, tags_by_room
+            ),
+        ))
+
+    def private_user_data_for_room(self, room_id, tags_by_room):
+        private_user_data = []
+        tags = tags_by_room.get(room_id)
+        if tags is not None:
+            private_user_data.append({
+                "type": "m.tag",
+                "content": {"tags": tags},
+            })
+        return private_user_data
+
+    @defer.inlineCallbacks
+    def ephemeral_by_room(self, sync_config, now_token, since_token=None):
+        """Get the ephemeral events for each room the user is in
+        Args:
+            sync_config (SyncConfig): The flags, filters and user for the sync.
+            now_token (StreamToken): Where the server is currently up to.
+            since_token (StreamToken): Where the server was when the client
+                last synced.
+        Returns:
+            A tuple of the now StreamToken, updated to reflect which ephemeral
+            events (typing and receipts) are included, and a dict mapping from
+            room_id to a list of ephemeral events for that room.
+        """
+
+        typing_key = since_token.typing_key if since_token else "0"
+
+        rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
+        room_ids = [room.room_id for room in rooms]
+
+        typing_source = self.event_sources.sources["typing"]
+        typing, typing_key = yield typing_source.get_new_events(
+            user=sync_config.user,
+            from_key=typing_key,
+            limit=sync_config.filter.ephemeral_limit(),
+            room_ids=room_ids,
+            is_guest=False,
         )
-        current_state_events = current_state.values()
+        now_token = now_token.copy_and_replace("typing_key", typing_key)
+
+        ephemeral_by_room = {}
 
-        defer.returnValue(RoomSyncResult(
+        for event in typing:
+            # we want to exclude the room_id from the event, but modifying the
+            # result returned by the event source is poor form (it might cache
+            # the object)
+            room_id = event["room_id"]
+            event_copy = {k: v for (k, v) in event.iteritems()
+                          if k != "room_id"}
+            ephemeral_by_room.setdefault(room_id, []).append(event_copy)
+
+        receipt_key = since_token.receipt_key if since_token else "0"
+
+        receipt_source = self.event_sources.sources["receipt"]
+        receipts, receipt_key = yield receipt_source.get_new_events(
+            user=sync_config.user,
+            from_key=receipt_key,
+            limit=sync_config.filter.ephemeral_limit(),
+            room_ids=room_ids,
+            # /sync doesn't support guest access, so guests can't reach this point
+            is_guest=False,
+        )
+        now_token = now_token.copy_and_replace("receipt_key", receipt_key)
+
+        for event in receipts:
+            room_id = event["room_id"]
+            # exclude room id, as above
+            event_copy = {k: v for (k, v) in event.iteritems()
+                          if k != "room_id"}
+            ephemeral_by_room.setdefault(room_id, []).append(event_copy)
+
+        defer.returnValue((now_token, ephemeral_by_room))
+
+    @defer.inlineCallbacks
+    def full_state_sync_for_archived_room(self, room_id, sync_config,
+                                          leave_event_id, leave_token,
+                                          timeline_since_token, tags_by_room):
+        """Sync a room for a client which is starting without any state
+        Returns:
+            A Deferred JoinedSyncResult.
+        """
+
+        batch = yield self.load_filtered_recents(
+            room_id, sync_config, leave_token, since_token=timeline_since_token
+        )
+
+        leave_state = yield self.store.get_state_for_event(leave_event_id)
+
+        defer.returnValue(ArchivedSyncResult(
             room_id=room_id,
-            published=room_id in published_room_ids,
-            events=recents,
-            prev_batch=prev_batch_token,
-            state=current_state_events,
-            limited=limited,
-            ephemeral=[],
+            timeline=batch,
+            state=leave_state,
+            private_user_data=self.private_user_data_for_room(
+                room_id, tags_by_room
+            ),
         ))
 
     @defer.inlineCallbacks
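
ephemeral_by_room above applies the same two steps to typing and receipt
events: copy each event minus its room_id (the source object may be cached,
so it must not be mutated) and bucket the copies by room. The step in
isolation:

    def bucket_ephemeral(events):
        """Group ephemeral events by room, dropping room_id from each copy."""
        by_room = {}
        for event in events:
            room_id = event["room_id"]
            # Copy rather than mutate: the event source may cache the object.
            stripped = dict((k, v) for k, v in event.items() if k != "room_id")
            by_room.setdefault(room_id, []).append(stripped)
        return by_room

    typing = [{"room_id": "!a:hs", "type": "m.typing", "content": {"user_ids": []}}]
    assert bucket_ephemeral(typing) == {
        "!a:hs": [{"type": "m.typing", "content": {"user_ids": []}}]
    }
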
@@ -208,34 +369,25 @@ class SyncHandler(BaseHandler):
         Returns:
             A Deferred SyncResult.
         """
-        if sync_config.sort == "timeline,desc":
-            # TODO(mjark): Handle going through events in reverse order?.
-            # What does "most recent events" mean when applying the limits mean
-            # in this case?
-            raise NotImplementedError()
-
         now_token = yield self.event_sources.get_current_token()
 
+        rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
+        room_ids = [room.room_id for room in rooms]
+
         presence_source = self.event_sources.sources["presence"]
-        presence, presence_key = yield presence_source.get_new_events_for_user(
+        presence, presence_key = yield presence_source.get_new_events(
             user=sync_config.user,
             from_key=since_token.presence_key,
-            limit=sync_config.limit,
+            limit=sync_config.filter.presence_limit(),
+            room_ids=room_ids,
+            # /sync doesn't support guest access, so guests can't reach this point
+            is_guest=False,
         )
         now_token = now_token.copy_and_replace("presence_key", presence_key)
 
-        typing_source = self.event_sources.sources["typing"]
-        typing, typing_key = yield typing_source.get_new_events_for_user(
-            user=sync_config.user,
-            from_key=since_token.typing_key,
-            limit=sync_config.limit,
+        now_token, ephemeral_by_room = yield self.ephemeral_by_room(
+            sync_config, now_token, since_token
         )
-        now_token = now_token.copy_and_replace("typing_key", typing_key)
-
-        typing_by_room = {event["room_id"]: [event] for event in typing}
-        for event in typing:
-            event.pop("room_id")
-        logger.debug("Typing %r", typing_by_room)
 
         rm_handler = self.hs.get_handlers().room_member_handler
         app_service = yield self.store.get_app_service_by_user_id(
@@ -243,35 +395,55 @@ class SyncHandler(BaseHandler):
         )
         if app_service:
             rooms = yield self.store.get_app_service_rooms(app_service)
-            room_ids = set(r.room_id for r in rooms)
+            joined_room_ids = set(r.room_id for r in rooms)
         else:
-            room_ids = yield rm_handler.get_joined_rooms_for_user(
+            joined_room_ids = yield rm_handler.get_joined_rooms_for_user(
                 sync_config.user
             )
 
-        # TODO (mjark): Does public mean "published"?
-        published_rooms = yield self.store.get_rooms(is_public=True)
-        published_room_ids = set(r["room_id"] for r in published_rooms)
+        timeline_limit = sync_config.filter.timeline_limit()
 
         room_events, _ = yield self.store.get_room_events_stream(
             sync_config.user.to_string(),
             from_key=since_token.room_key,
             to_key=now_token.room_key,
-            room_id=None,
-            limit=sync_config.limit + 1,
+            limit=timeline_limit + 1,
         )
 
-        rooms = []
-        if len(room_events) <= sync_config.limit:
+        tags_by_room = yield self.store.get_updated_tags(
+            sync_config.user.to_string(),
+            since_token.private_user_data_key,
+        )
+
+        joined = []
+        archived = []
+        if len(room_events) <= timeline_limit:
             # There is no gap in any of the rooms. Therefore we can just
             # partition the new events by room and return them.
+            logger.debug("Got %i events for incremental sync - not limited",
+                         len(room_events))
+
+            invite_events = []
+            leave_events = []
             events_by_room_id = {}
             for event in room_events:
                 events_by_room_id.setdefault(event.room_id, []).append(event)
-
-            for room_id in room_ids:
+                if event.room_id not in joined_room_ids:
+                    if (event.type == EventTypes.Member
+                            and event.state_key == sync_config.user.to_string()):
+                        if event.membership == Membership.INVITE:
+                            invite_events.append(event)
+                        elif event.membership in (Membership.LEAVE, Membership.BAN):
+                            leave_events.append(event)
+
+            for room_id in joined_room_ids:
                 recents = events_by_room_id.get(room_id, [])
-                state = [event for event in recents if event.is_state()]
+                logger.debug("Events for room %s: %r", room_id, recents)
+                state = {
+                    (event.type, event.state_key): event
+                    for event in recents if event.is_state()}
+                limited = False
+
                 if recents:
                     prev_batch = now_token.copy_and_replace(
                         "room_key", recents[0].internal_metadata.before
@@ -279,95 +451,87 @@ class SyncHandler(BaseHandler):
                 else:
                     prev_batch = now_token
 
-                state = yield self.check_joined_room(
-                    sync_config, room_id, state
-                )
+                just_joined = yield self.check_joined_room(sync_config, state)
+                if just_joined:
+                    logger.debug("User has just joined %s: needs full state",
+                                 room_id)
+                    state = yield self.get_state_at(room_id, now_token)
+                    # the timeline is inherently limited if we've just joined
+                    limited = True
 
-                room_sync = RoomSyncResult(
+                room_sync = JoinedSyncResult(
                     room_id=room_id,
-                    published=room_id in published_room_ids,
-                    events=recents,
-                    prev_batch=prev_batch,
+                    timeline=TimelineBatch(
+                        events=recents,
+                        prev_batch=prev_batch,
+                        limited=limited,
+                    ),
                     state=state,
-                    limited=False,
-                    ephemeral=typing_by_room.get(room_id, [])
+                    ephemeral=ephemeral_by_room.get(room_id, []),
+                    private_user_data=self.private_user_data_for_room(
+                        room_id, tags_by_room
+                    ),
                 )
+                logger.debug("Result for room %s: %r", room_id, room_sync)
+
                 if room_sync:
-                    rooms.append(room_sync)
+                    joined.append(room_sync)
+
         else:
-            for room_id in room_ids:
+            logger.debug("Got %i events for incremental sync - hit limit",
+                         len(room_events))
+
+            invite_events = yield self.store.get_invites_for_user(
+                sync_config.user.to_string()
+            )
+
+            leave_events = yield self.store.get_leave_and_ban_events_for_user(
+                sync_config.user.to_string()
+            )
+
+            for room_id in joined_room_ids:
                 room_sync = yield self.incremental_sync_with_gap_for_room(
                     room_id, sync_config, since_token, now_token,
-                    published_room_ids, typing_by_room
+                    ephemeral_by_room, tags_by_room
                 )
                 if room_sync:
-                    rooms.append(room_sync)
+                    joined.append(room_sync)
 
-        defer.returnValue(SyncResult(
-            public_user_data=presence,
-            private_user_data=[],
-            rooms=rooms,
-            next_batch=now_token,
-        ))
-
-    @defer.inlineCallbacks
-    def _filter_events_for_client(self, user_id, room_id, events):
-        event_id_to_state = yield self.store.get_state_for_events(
-            room_id, frozenset(e.event_id for e in events),
-            types=(
-                (EventTypes.RoomHistoryVisibility, ""),
-                (EventTypes.Member, user_id),
+        for leave_event in leave_events:
+            room_sync = yield self.incremental_sync_for_archived_room(
+                sync_config, leave_event, since_token, tags_by_room
             )
-        )
-
-        def allowed(event, state):
-            if event.type == EventTypes.RoomHistoryVisibility:
-                return True
-
-            membership_ev = state.get((EventTypes.Member, user_id), None)
-            if membership_ev:
-                membership = membership_ev.membership
-            else:
-                membership = Membership.LEAVE
-
-            if membership == Membership.JOIN:
-                return True
+            archived.append(room_sync)
 
-            history = state.get((EventTypes.RoomHistoryVisibility, ''), None)
-            if history:
-                visibility = history.content.get("history_visibility", "shared")
-            else:
-                visibility = "shared"
+        invited = [
+            InvitedSyncResult(room_id=event.room_id, invite=event)
+            for event in invite_events
+        ]
 
-            if visibility == "public":
-                return True
-            elif visibility == "shared":
-                return True
-            elif visibility == "joined":
-                return membership == Membership.JOIN
-            elif visibility == "invited":
-                return membership == Membership.INVITE
-
-            return True
-
-        defer.returnValue([
-            event
-            for event in events
-            if allowed(event, event_id_to_state[event.event_id])
-        ])
+        defer.returnValue(SyncResult(
+            presence=presence,
+            joined=joined,
+            invited=invited,
+            archived=archived,
+            next_batch=now_token,
+        ))
 
     @defer.inlineCallbacks
     def load_filtered_recents(self, room_id, sync_config, now_token,
                               since_token=None):
+        """
+        :returns: a Deferred TimelineBatch
+        """
         limited = True
         recents = []
         filtering_factor = 2
-        load_limit = max(sync_config.limit * filtering_factor, 100)
+        timeline_limit = sync_config.filter.timeline_limit()
+        load_limit = max(timeline_limit * filtering_factor, 100)
         max_repeat = 3  # Only try a few times per room, otherwise give up
         room_key = now_token.room_key
         end_key = room_key
 
-        while limited and len(recents) < sync_config.limit and max_repeat:
+        while limited and len(recents) < timeline_limit and max_repeat:
             events, keys = yield self.store.get_recent_events_for_room(
                 room_id,
                 limit=load_limit + 1,
@@ -376,9 +540,9 @@ class SyncHandler(BaseHandler):
             )
             (room_key, _) = keys
             end_key = "s" + room_key.split('-')[-1]
-            loaded_recents = sync_config.filter.filter_room_events(events)
+            loaded_recents = sync_config.filter.filter_room_timeline(events)
             loaded_recents = yield self._filter_events_for_client(
-                sync_config.user.to_string(), room_id, loaded_recents,
+                sync_config.user.to_string(), loaded_recents,
             )
             loaded_recents.extend(recents)
             recents = loaded_recents
@@ -386,64 +550,64 @@ class SyncHandler(BaseHandler):
                 limited = False
             max_repeat -= 1
 
-        if len(recents) > sync_config.limit:
-            recents = recents[-sync_config.limit:]
+        if len(recents) > timeline_limit:
+            limited = True
+            recents = recents[-timeline_limit:]
             room_key = recents[0].internal_metadata.before
 
         prev_batch_token = now_token.copy_and_replace(
             "room_key", room_key
         )
 
-        defer.returnValue((recents, prev_batch_token, limited))
+        defer.returnValue(TimelineBatch(
+            events=recents, prev_batch=prev_batch_token, limited=limited
+        ))
 
     @defer.inlineCallbacks
     def incremental_sync_with_gap_for_room(self, room_id, sync_config,
                                            since_token, now_token,
-                                           published_room_ids, typing_by_room):
+                                           ephemeral_by_room, tags_by_room):
         """ Get the incremental delta needed to bring the client up to date for
         the room. Gives the client the most recent events and the changes to
         state.
         Returns:
-            A Deferred RoomSyncResult
+            A Deferred JoinedSyncResult
         """
+        logger.debug("Doing incremental sync for room %s between %s and %s",
+                     room_id, since_token, now_token)
 
         # TODO(mjark): Check for redactions we might have missed.
 
-        recents, prev_batch_token, limited = yield self.load_filtered_recents(
+        batch = yield self.load_filtered_recents(
             room_id, sync_config, now_token, since_token,
         )
 
-        logging.debug("Recents %r", recents)
+        logging.debug("Recents %r", batch)
 
-        # TODO(mjark): This seems racy since this isn't being passed a
-        # token to indicate what point in the stream this is
-        current_state = yield self.state_handler.get_current_state(
-            room_id
-        )
-        current_state_events = current_state.values()
+        current_state = yield self.get_state_at(room_id, now_token)
 
-        state_at_previous_sync = yield self.get_state_at_previous_sync(
-            room_id, since_token=since_token
+        state_at_previous_sync = yield self.get_state_at(
+            room_id, stream_position=since_token
         )
 
-        state_events_delta = yield self.compute_state_delta(
+        state = yield self.compute_state_delta(
             since_token=since_token,
             previous_state=state_at_previous_sync,
-            current_state=current_state_events,
+            current_state=current_state,
         )
 
-        state_events_delta = yield self.check_joined_room(
-            sync_config, room_id, state_events_delta
-        )
+        just_joined = yield self.check_joined_room(sync_config, state)
+        if just_joined:
+            state = yield self.get_state_at(room_id, now_token)
 
-        room_sync = RoomSyncResult(
+        room_sync = JoinedSyncResult(
             room_id=room_id,
-            published=room_id in published_room_ids,
-            events=recents,
-            prev_batch=prev_batch_token,
-            state=state_events_delta,
-            limited=limited,
-            ephemeral=typing_by_room.get(room_id, [])
+            timeline=batch,
+            state=state,
+            ephemeral=ephemeral_by_room.get(room_id, []),
+            private_user_data=self.private_user_data_for_room(
+                room_id, tags_by_room
+            ),
         )
 
         logging.debug("Room sync: %r", room_sync)
@@ -451,58 +615,125 @@ class SyncHandler(BaseHandler):
         defer.returnValue(room_sync)
 
     @defer.inlineCallbacks
-    def get_state_at_previous_sync(self, room_id, since_token):
-        """ Get the room state at the previous sync the client made.
+    def incremental_sync_for_archived_room(self, sync_config, leave_event,
+                                           since_token, tags_by_room):
+        """ Get the incremental delta needed to bring the client up to date for
+        the archived room.
         Returns:
-            A Deferred list of Events.
+            A Deferred ArchivedSyncResult
+        """
+
+        stream_token = yield self.store.get_stream_token_for_event(
+            leave_event.event_id
+        )
+
+        leave_token = since_token.copy_and_replace("room_key", stream_token)
+
+        batch = yield self.load_filtered_recents(
+            leave_event.room_id, sync_config, leave_token, since_token,
+        )
+
+        logging.debug("Recents %r", batch)
+
+        state_events_at_leave = yield self.store.get_state_for_event(
+            leave_event.event_id
+        )
+
+        state_at_previous_sync = yield self.get_state_at(
+            leave_event.room_id, stream_position=since_token
+        )
+
+        state_events_delta = yield self.compute_state_delta(
+            since_token=since_token,
+            previous_state=state_at_previous_sync,
+            current_state=state_events_at_leave,
+        )
+
+        room_sync = ArchivedSyncResult(
+            room_id=leave_event.room_id,
+            timeline=batch,
+            state=state_events_delta,
+            private_user_data=self.private_user_data_for_room(
+                leave_event.room_id, tags_by_room
+            ),
+        )
+
+        logging.debug("Room sync: %r", room_sync)
+
+        defer.returnValue(room_sync)
+
+    @defer.inlineCallbacks
+    def get_state_after_event(self, event):
+        """
+        Get the room state after the given event
+
+        :param synapse.events.EventBase event: event of interest
+        :return: A Deferred map from ((type, state_key)->Event)
+        """
+        state = yield self.store.get_state_for_event(event.event_id)
+        if event.is_state():
+            state = state.copy()
+            state[(event.type, event.state_key)] = event
+        defer.returnValue(state)
+
+    @defer.inlineCallbacks
+    def get_state_at(self, room_id, stream_position):
+        """ Get the room state at a particular stream position
+        :param str room_id: room for which to get state
+        :param StreamToken stream_position: point at which to get state
+        :returns: A Deferred map from ((type, state_key)->Event)
         """
         last_events, token = yield self.store.get_recent_events_for_room(
-            room_id, end_token=since_token.room_key, limit=1,
+            room_id, end_token=stream_position.room_key, limit=1,
         )
 
         if last_events:
-            last_event = last_events[0]
-            last_context = yield self.state_handler.compute_event_context(
-                last_event
-            )
-            if last_event.is_state():
-                state = [last_event] + last_context.current_state.values()
-            else:
-                state = last_context.current_state.values()
+            last_event = last_events[-1]
+            state = yield self.get_state_after_event(last_event)
+
         else:
-            state = ()
+            # no events in this room - so presumably no state
+            state = {}
         defer.returnValue(state)
 
     def compute_state_delta(self, since_token, previous_state, current_state):
         """ Works out the differnce in state between the current state and the
         state the client got when it last performed a sync.
-        Returns:
-            A list of events.
+
+        :param str since_token: the point we are comparing against
+        :param dict[(str,str), synapse.events.FrozenEvent] previous_state: the
+            state to compare to
+        :param dict[(str,str), synapse.events.FrozenEvent] current_state: the
+            new state
+
+        :returns: A dict of (type, state_key) -> event for the entries which
+            are new or have changed
         """
         # TODO(mjark) Check if the state events were received by the server
         # after the previous sync, since we need to include those state
         # updates even if they occured logically before the previous event.
         # TODO(mjark) Check for new redactions in the state events.
-        previous_dict = {event.event_id: event for event in previous_state}
-        state_delta = []
-        for event in current_state:
-            if event.event_id not in previous_dict:
-                state_delta.append(event)
+
+        state_delta = {}
+        for key, event in current_state.iteritems():
+            if (key not in previous_state or
+                    previous_state[key].event_id != event.event_id):
+                state_delta[key] = event
         return state_delta
 
-    @defer.inlineCallbacks
-    def check_joined_room(self, sync_config, room_id, state_delta):
-        joined = False
-        for event in state_delta:
-            if (
-                event.type == EventTypes.Member
-                and event.state_key == sync_config.user.to_string()
-            ):
-                if event.content["membership"] == Membership.JOIN:
-                    joined = True
-
-        if joined:
-            res = yield self.state_handler.get_current_state(room_id)
-            state_delta = res.values()
-
-        defer.returnValue(state_delta)
+    def check_joined_room(self, sync_config, state_delta):
+        """
+        Check if the user has just joined the given room (so should
+        be given the full state)
+
+        :param sync_config: the SyncConfig for this client
+        :param dict[(str,str), synapse.events.FrozenEvent] state_delta: the
+           difference in state since the last sync
+
+        :returns: A boolean: True if the user has just joined the room
+        """
+        join_event = state_delta.get((
+            EventTypes.Member, sync_config.user.to_string()), None)
+        if join_event is not None:
+            if join_event.content["membership"] == Membership.JOIN:
+                return True
+        return False
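
compute_state_delta above is a dictionary diff keyed on (type, state_key): an
entry is part of the delta if its key is new or the event under that key has
changed. The same logic with bare dicts standing in for FrozenEvent maps:

    def compute_state_delta(previous_state, current_state):
        # Keep entries whose key is new, or whose event has been replaced.
        delta = {}
        for key, event in current_state.items():
            prev = previous_state.get(key)
            if prev is None or prev["event_id"] != event["event_id"]:
                delta[key] = event
        return delta

    prev = {("m.room.name", ""): {"event_id": "$1"}}
    curr = {("m.room.name", ""): {"event_id": "$2"},
            ("m.room.topic", ""): {"event_id": "$3"}}
    assert sorted(compute_state_delta(prev, curr)) == [
        ("m.room.name", ""), ("m.room.topic", "")
    ]
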
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index d7096aab8c..2846f3e6e8 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -246,17 +246,12 @@ class TypingNotificationEventSource(object):
             },
         }
 
-    @defer.inlineCallbacks
-    def get_new_events_for_user(self, user, from_key, limit):
+    def get_new_events(self, from_key, room_ids, **kwargs):
         from_key = int(from_key)
         handler = self.handler()
 
-        joined_room_ids = (
-            yield self.room_member_handler().get_joined_rooms_for_user(user)
-        )
-
         events = []
-        for room_id in joined_room_ids:
+        for room_id in room_ids:
             if room_id not in handler._room_serials:
                 continue
             if handler._room_serials[room_id] <= from_key:
@@ -264,7 +259,7 @@ class TypingNotificationEventSource(object):
 
             events.append(self._make_event_for(room_id))
 
-        defer.returnValue((events, handler._latest_room_serial))
+        return events, handler._latest_room_serial
 
     def get_current_key(self):
         return self.handler()._latest_room_serial
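
The rewritten typing source works off monotonically increasing per-room
serials: a room has news for a client exactly when its serial has advanced
past the client's from_key. The comparison in isolation:

    def rooms_with_new_typing(room_serials, room_ids, from_key):
        # Only rooms we track, and only those whose serial has moved past
        # the point the client last saw.
        return [
            room_id for room_id in room_ids
            if room_id in room_serials and room_serials[room_id] > from_key
        ]

    serials = {"!a:hs": 3, "!b:hs": 7}
    assert rooms_with_new_typing(serials, ["!a:hs", "!b:hs", "!c:hs"], 3) == ["!b:hs"]
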
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 49737d55da..27e5190224 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -12,16 +12,18 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from OpenSSL import SSL
+from OpenSSL.SSL import VERIFY_NONE
 
 from synapse.api.errors import CodeMessageException
 from synapse.util.logcontext import preserve_context_over_fn
-from syutil.jsonutil import encode_canonical_json
 import synapse.metrics
 
-from twisted.internet import defer, reactor
+from canonicaljson import encode_canonical_json
+
+from twisted.internet import defer, reactor, ssl
 from twisted.web.client import (
     Agent, readBody, FileBodyProducer, PartialDownloadError,
-    HTTPConnectionPool,
 )
 from twisted.web.http_headers import Headers
 
@@ -56,10 +58,14 @@ class SimpleHttpClient(object):
         # The default context factory in Twisted 14.0.0 (which we require) is
         # BrowserLikePolicyForHTTPS which will do regular cert validation
         # 'like a browser'
-        pool = HTTPConnectionPool(reactor)
-        pool.maxPersistentPerHost = 10
-        self.agent = Agent(reactor, pool=pool)
-        self.version_string = hs.version_string
+        self.agent = Agent(
+            reactor,
+            connectTimeout=15,
+            contextFactory=hs.get_http_client_context_factory()
+        )
+        self.user_agent = hs.version_string
+        if hs.config.user_agent_suffix:
+            self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix,)
 
     def request(self, method, uri, *args, **kwargs):
         # A small wrapper around self.agent.request() so we can easily attach
@@ -104,7 +110,7 @@ class SimpleHttpClient(object):
             uri.encode("ascii"),
             headers=Headers({
                 b"Content-Type": [b"application/x-www-form-urlencoded"],
-                b"User-Agent": [self.version_string],
+                b"User-Agent": [self.user_agent],
             }),
             bodyProducer=FileBodyProducer(StringIO(query_bytes))
         )
@@ -123,7 +129,8 @@ class SimpleHttpClient(object):
             "POST",
             uri.encode("ascii"),
             headers=Headers({
-                "Content-Type": ["application/json"]
+                b"Content-Type": [b"application/json"],
+                b"User-Agent": [self.user_agent],
             }),
             bodyProducer=FileBodyProducer(StringIO(json_str))
         )
@@ -149,16 +156,40 @@ class SimpleHttpClient(object):
             On a non-2xx HTTP response. The response body will be used as the
             error message.
         """
+        body = yield self.get_raw(uri, args)
+        defer.returnValue(json.loads(body))
+
+    @defer.inlineCallbacks
+    def put_json(self, uri, json_body, args={}):
+        """ Puts some json to the given URI.
+
+        Args:
+            uri (str): The URI to request, not including query parameters
+            json_body (dict): The JSON to put in the HTTP body,
+            args (dict): A dictionary used to create query strings, defaults to
+                an empty dict.
+                **Note**: The value of each key is assumed to be an iterable
+                and *not* a string.
+        Returns:
+            Deferred: Succeeds when we get *any* 2xx HTTP response, with the
+            HTTP body as JSON.
+        Raises:
+            On a non-2xx HTTP response.
+        """
         if len(args):
             query_bytes = urllib.urlencode(args, True)
             uri = "%s?%s" % (uri, query_bytes)
 
+        json_str = encode_canonical_json(json_body)
+
         response = yield self.request(
-            "GET",
+            "PUT",
             uri.encode("ascii"),
             headers=Headers({
-                b"User-Agent": [self.version_string],
-            })
+                b"User-Agent": [self.user_agent],
+                "Content-Type": ["application/json"]
+            }),
+            bodyProducer=FileBodyProducer(StringIO(json_str))
         )
 
         body = yield preserve_context_over_fn(readBody, response)
@@ -172,46 +203,39 @@ class SimpleHttpClient(object):
             raise CodeMessageException(response.code, body)
 
     @defer.inlineCallbacks
-    def put_json(self, uri, json_body, args={}):
-        """ Puts some json to the given URI.
+    def get_raw(self, uri, args={}):
+        """ Gets raw text from the given URI.
 
         Args:
             uri (str): The URI to request, not including query parameters
-            json_body (dict): The JSON to put in the HTTP body,
             args (dict): A dictionary used to create query strings, defaults to
                 an empty dict.
                 **Note**: The value of each key is assumed to be an iterable
                 and *not* a string.
         Returns:
             Deferred: Succeeds when we get *any* 2xx HTTP response, with the
-            HTTP body as JSON.
+            HTTP body as text.
         Raises:
-            On a non-2xx HTTP response.
+            On a non-2xx HTTP response. The response body will be used as the
+            error message.
         """
         if len(args):
             query_bytes = urllib.urlencode(args, True)
             uri = "%s?%s" % (uri, query_bytes)
 
-        json_str = encode_canonical_json(json_body)
-
         response = yield self.request(
-            "PUT",
+            "GET",
             uri.encode("ascii"),
             headers=Headers({
-                b"User-Agent": [self.version_string],
-                "Content-Type": ["application/json"]
-            }),
-            bodyProducer=FileBodyProducer(StringIO(json_str))
+                b"User-Agent": [self.user_agent],
+            })
         )
 
         body = yield preserve_context_over_fn(readBody, response)
 
         if 200 <= response.code < 300:
-            defer.returnValue(json.loads(body))
+            defer.returnValue(body)
         else:
-            # NB: This is explicitly not json.loads(body)'d because the contract
-            # of CodeMessageException is a *string* message. Callers can always
-            # load it into JSON if they want.
             raise CodeMessageException(response.code, body)
 
 
@@ -233,7 +257,7 @@ class CaptchaServerHttpClient(SimpleHttpClient):
             bodyProducer=FileBodyProducer(StringIO(query_bytes)),
             headers=Headers({
                 b"Content-Type": [b"application/x-www-form-urlencoded"],
-                b"User-Agent": [self.version_string],
+                b"User-Agent": [self.user_agent],
             })
         )
 
@@ -251,3 +275,18 @@ def _print_ex(e):
             _print_ex(ex)
     else:
         logger.exception(e)
+
+
+class InsecureInterceptableContextFactory(ssl.ContextFactory):
+    """
+    Factory for PyOpenSSL SSL contexts which accepts any certificate for any domain.
+
+    Do not use this since it allows an attacker to intercept your communications.
+    """
+
+    def __init__(self):
+        self._context = SSL.Context(SSL.SSLv23_METHOD)
+        self._context.set_verify(VERIFY_NONE, lambda *_: None)
+
+    def getContext(self, hostname, port):
+        return self._context
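
For reference, a sketch of calling the SimpleHttpClient above from handler
code; the endpoint and query field names are illustrative only, and note
that query values are iterables, per the docstrings above:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def lookup_something(http_client, base_url):
        # get_json raises CodeMessageException on any non-2xx response,
        # so only the happy path is handled here.
        result = yield http_client.get_json(
            "%s/lookup" % (base_url,), args={"q": ["example"]}
        )
        defer.returnValue(result)
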
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 854e17a473..ca9591556d 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -25,16 +25,17 @@ from synapse.util.async import sleep
 from synapse.util.logcontext import preserve_context_over_fn
 import synapse.metrics
 
-from syutil.jsonutil import encode_canonical_json
+from canonicaljson import encode_canonical_json
 
 from synapse.api.errors import (
     SynapseError, Codes, HttpResponseException,
 )
 
-from syutil.crypto.jsonsign import sign_json
+from signedjson.sign import sign_json
 
 import simplejson as json
 import logging
+import random
 import sys
 import urllib
 import urlparse
@@ -55,16 +56,19 @@ incoming_responses_counter = metrics.register_counter(
 )
 
 
+MAX_RETRIES = 10
+
+
 class MatrixFederationEndpointFactory(object):
     def __init__(self, hs):
-        self.tls_context_factory = hs.tls_context_factory
+        self.tls_server_context_factory = hs.tls_server_context_factory
 
     def endpointForURI(self, uri):
         destination = uri.netloc
 
         return matrix_federation_endpoint(
             reactor, destination, timeout=10,
-            ssl_context_factory=self.tls_context_factory
+            ssl_context_factory=self.tls_server_context_factory
         )
 
 
@@ -119,7 +123,7 @@ class MatrixFederationHttpClient(object):
 
         # XXX: Would be much nicer to retry only at the transaction-layer
         # (once we have reliable transactions in place)
-        retries_left = 5
+        retries_left = MAX_RETRIES
 
         http_url_bytes = urlparse.urlunparse(
             ("", "", path_bytes, param_bytes, query_bytes, "")
@@ -180,7 +184,10 @@ class MatrixFederationHttpClient(object):
                     )
 
                     if retries_left and not timeout:
-                        yield sleep(2 ** (5 - retries_left))
+                        delay = 4 ** (MAX_RETRIES + 1 - retries_left)
+                        delay = min(delay, 60)  # cap (not floor) at 60s
+                        delay *= random.uniform(0.8, 1.4)
+                        yield sleep(delay)
                         retries_left -= 1
                     else:
                         raise
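
With MAX_RETRIES = 10, the delays above work out as 4 ** attempt seconds,
capped at one minute (hence min, not max), then jittered by a 0.8-1.4 factor
so peers don't all retry in lockstep. As a pure function:

    import random

    MAX_RETRIES = 10

    def retry_delay(retries_left):
        delay = 4 ** (MAX_RETRIES + 1 - retries_left)
        delay = min(delay, 60)                    # ceiling of one minute
        return delay * random.uniform(0.8, 1.4)  # de-synchronising jitter

    # Successive retries back off as roughly 4s, 16s, 60s, 60s, ...
    print([int(retry_delay(r)) for r in range(MAX_RETRIES, MAX_RETRIES - 4, -1)])
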
diff --git a/synapse/http/server.py b/synapse/http/server.py
index b60e905a62..50feea6f1c 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -21,8 +21,8 @@ from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
 import synapse.metrics
 import synapse.events
 
-from syutil.jsonutil import (
-    encode_canonical_json, encode_pretty_printed_json, encode_json
+from canonicaljson import (
+    encode_canonical_json, encode_pretty_printed_json
 )
 
 from twisted.internet import defer
@@ -33,6 +33,7 @@ from twisted.web.util import redirectTo
 import collections
 import logging
 import urllib
+import ujson
 
 logger = logging.getLogger(__name__)
 
@@ -270,12 +271,11 @@ def respond_with_json(request, code, json_object, send_cors=False,
     if pretty_print:
         json_bytes = encode_pretty_printed_json(json_object) + "\n"
     else:
-        if canonical_json:
+        if canonical_json or synapse.events.USE_FROZEN_DICTS:
             json_bytes = encode_canonical_json(json_object)
         else:
-            json_bytes = encode_json(
-                json_object, using_frozen_dicts=synapse.events.USE_FROZEN_DICTS
-            )
+            # ujson doesn't like frozen_dicts.
+            json_bytes = ujson.dumps(json_object, ensure_ascii=False)
 
     return respond_with_json_bytes(
         request, code, json_bytes,
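
The encoder selection above as a small pure function: canonical JSON when the
caller demands it or when events may be frozendicts (which ujson refuses to
serialise), otherwise the faster ujson path:

    import ujson
    from canonicaljson import encode_canonical_json

    def encode_response_body(json_object, canonical_json, use_frozen_dicts):
        if canonical_json or use_frozen_dicts:
            # Deterministic output, and copes with frozendict values.
            return encode_canonical_json(json_object)
        # Fast path; keep non-ASCII characters unescaped, as above.
        return ujson.dumps(json_object, ensure_ascii=False)
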
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index d7bcad8a8a..943d637459 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -17,7 +17,7 @@
 from __future__ import absolute_import
 
 import logging
-from resource import getrusage, getpagesize, RUSAGE_SELF
+from resource import getrusage, RUSAGE_SELF
 import functools
 import os
 import stat
@@ -100,7 +100,6 @@ def render_all():
 # process resource usage
 
 rusage = None
-PAGE_SIZE = getpagesize()
 
 
 def update_resource_metrics():
@@ -113,8 +112,8 @@ resource_metrics = get_metrics_for("process.resource")
 resource_metrics.register_callback("utime", lambda: rusage.ru_utime * 1000)
 resource_metrics.register_callback("stime", lambda: rusage.ru_stime * 1000)
 
-# pages
-resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * PAGE_SIZE)
+# kilobytes
+resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * 1024)
 
 TYPES = {
     stat.S_IFSOCK: "SOCK",
@@ -131,6 +130,10 @@ def _process_fds():
     counts = {(k,): 0 for k in TYPES.values()}
     counts[("other",)] = 0
 
+    # Not every OS will have a /proc/self/fd directory
+    if not os.path.exists("/proc/self/fd"):
+        return counts
+
     for fd in os.listdir("/proc/self/fd"):
         try:
             s = os.stat("/proc/self/fd/%s" % (fd))
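
The existence check added above keeps the fd-counting metric from crashing on
platforms without procfs. The counter, trimmed to a standalone function:

    import os

    def count_open_fds():
        fd_dir = "/proc/self/fd"
        # Not every OS exposes /proc; report nothing rather than crash.
        if not os.path.exists(fd_dir):
            return 0
        count = 0
        for fd in os.listdir(fd_dir):
            try:
                os.stat(os.path.join(fd_dir, fd))
                count += 1
            except OSError:
                # fds can vanish between listdir() and stat(); in particular
                # the fd opened to list the directory itself.
                pass
        return count
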
diff --git a/synapse/notifier.py b/synapse/notifier.py
index f998fc83bf..e3b42e2331 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -14,6 +14,8 @@
 # limitations under the License.
 
 from twisted.internet import defer
+from synapse.api.constants import EventTypes
+from synapse.api.errors import AuthError
 
 from synapse.util.logutils import log_function
 from synapse.util.async import run_on_reactor, ObservableDeferred
@@ -269,8 +271,8 @@ class Notifier(object):
                 logger.exception("Failed to notify listener")
 
     @defer.inlineCallbacks
-    def wait_for_events(self, user, rooms, timeout, callback,
-                        from_token=StreamToken("s0", "0", "0", "0")):
+    def wait_for_events(self, user, timeout, callback, room_ids=None,
+                        from_token=StreamToken("s0", "0", "0", "0", "0")):
         """Wait until the callback returns a non empty response or the
         timeout fires.
         """
@@ -279,11 +281,12 @@ class Notifier(object):
         if user_stream is None:
             appservice = yield self.store.get_app_service_by_user_id(user)
             current_token = yield self.event_sources.get_current_token()
-            rooms = yield self.store.get_rooms_for_user(user)
-            rooms = [room.room_id for room in rooms]
+            if room_ids is None:
+                rooms = yield self.store.get_rooms_for_user(user)
+                room_ids = [room.room_id for room in rooms]
             user_stream = _NotifierUserStream(
                 user=user,
-                rooms=rooms,
+                rooms=room_ids,
                 appservice=appservice,
                 current_token=current_token,
                 time_now_ms=self.clock.time_msec(),
@@ -328,8 +331,9 @@ class Notifier(object):
         defer.returnValue(result)
 
     @defer.inlineCallbacks
-    def get_events_for(self, user, rooms, pagination_config, timeout,
-                       only_room_events=False):
+    def get_events_for(self, user, pagination_config, timeout,
+                       only_room_events=False,
+                       is_guest=False, guest_room_id=None):
         """ For the given user and rooms, return any new events for them. If
         there are no new events wait for up to `timeout` milliseconds for any
         new events to happen before returning.
@@ -342,6 +346,16 @@ class Notifier(object):
 
         limit = pagination_config.limit
 
+        room_ids = []
+        if is_guest:
+            if guest_room_id:
+                if not (yield self._is_world_readable(guest_room_id)):
+                    raise AuthError(403, "Guest access not allowed")
+                room_ids = [guest_room_id]
+        else:
+            rooms = yield self.store.get_rooms_for_user(user.to_string())
+            room_ids = [room.room_id for room in rooms]
+
         @defer.inlineCallbacks
         def check_for_updates(before_token, after_token):
             if not after_token.is_after(before_token):
@@ -349,6 +363,7 @@ class Notifier(object):
 
             events = []
             end_token = from_token
+
             for name, source in self.event_sources.sources.items():
                 keyname = "%s_key" % name
                 before_id = getattr(before_token, keyname)
@@ -357,9 +372,23 @@ class Notifier(object):
                     continue
                 if only_room_events and name != "room":
                     continue
-                new_events, new_key = yield source.get_new_events_for_user(
-                    user, getattr(from_token, keyname), limit,
+                new_events, new_key = yield source.get_new_events(
+                    user=user,
+                    from_key=getattr(from_token, keyname),
+                    limit=limit,
+                    is_guest=is_guest,
+                    room_ids=room_ids,
                 )
+
+                if name == "room":
+                    room_member_handler = self.hs.get_handlers().room_member_handler
+                    new_events = yield room_member_handler._filter_events_for_client(
+                        user.to_string(),
+                        new_events,
+                        is_guest=is_guest,
+                        require_all_visible_for_guests=False
+                    )
+
                 events.extend(new_events)
                 end_token = end_token.copy_and_replace(keyname, new_key)
 
@@ -369,7 +398,7 @@ class Notifier(object):
                 defer.returnValue(None)
 
         result = yield self.wait_for_events(
-            user, rooms, timeout, check_for_updates, from_token=from_token
+            user, timeout, check_for_updates, room_ids=room_ids, from_token=from_token
         )
 
         if result is None:
@@ -377,6 +406,17 @@ class Notifier(object):
 
         defer.returnValue(result)
 
+    @defer.inlineCallbacks
+    def _is_world_readable(self, room_id):
+        state = yield self.hs.get_state_handler().get_current_state(
+            room_id,
+            EventTypes.RoomHistoryVisibility
+        )
+        if state and "history_visibility" in state.content:
+            defer.returnValue(state.content["history_visibility"] == "world_readable")
+        else:
+            defer.returnValue(False)
+
     @log_function
     def remove_expired_streams(self):
         time_now_ms = self.clock.time_msec()
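
For reference, the guest gate added above reduces to one state check: a guest
may stream a room only if its m.room.history_visibility state is
"world_readable". A minimal standalone sketch of that test, where state_event
stands in for the event returned by the state handler (or None when the room
has no such state):

    def is_world_readable(state_event):
        # Guests may only read rooms whose history is visible to non-members.
        if state_event is None:
            return False
        return state_event.content.get("history_visibility") == "world_readable"
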
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
index f1952b5a0f..0e0c61dec8 100644
--- a/synapse/push/__init__.py
+++ b/synapse/push/__init__.py
@@ -186,7 +186,7 @@ class Pusher(object):
             if not display_name:
                 return False
             return re.search(
-                "\b%s\b" % re.escape(display_name), ev['content']['body'],
+                r"\b%s\b" % re.escape(display_name), ev['content']['body'],
                 flags=re.IGNORECASE
             ) is not None
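
The one-character push fix above matters more than it looks: in a plain Python
string "\b" is the backspace character (0x08), not a word boundary, so the old
display-name pattern could never match an ordinary message body. A quick
demonstration:

    import re

    name = "Alice"
    body = "hey Alice, are you around?"

    # "\b" in a plain string is backspace (chr(8)); the old pattern required
    # literal backspace characters around the name and never matched:
    assert re.search("\b%s\b" % re.escape(name), body, flags=re.IGNORECASE) is None

    # r"\b" is the regex word-boundary assertion, as intended:
    assert re.search(r"\b%s\b" % re.escape(name), body, flags=re.IGNORECASE)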
 
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index d7e3a686fa..e95316720e 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -18,21 +18,24 @@ from distutils.version import LooseVersion
 logger = logging.getLogger(__name__)
 
 REQUIREMENTS = {
-    "syutil>=0.0.7": ["syutil>=0.0.7"],
-    "Twisted>=15.1.0": ["twisted>=15.1.0"],
+    "frozendict>=0.4": ["frozendict"],
+    "unpaddedbase64>=1.0.1": ["unpaddedbase64>=1.0.1"],
+    "canonicaljson>=1.0.0": ["canonicaljson>=1.0.0"],
+    "signedjson>=1.0.0": ["signedjson>=1.0.0"],
+    "pynacl>=0.3.0": ["nacl>=0.3.0", "nacl.bindings"],
     "service_identity>=1.0.0": ["service_identity>=1.0.0"],
+    "Twisted>=15.1.0": ["twisted>=15.1.0"],
     "pyopenssl>=0.14": ["OpenSSL>=0.14"],
     "pyyaml": ["yaml"],
     "pyasn1": ["pyasn1"],
-    "pynacl>=0.0.3": ["nacl>=0.0.3"],
     "daemonize": ["daemonize"],
     "py-bcrypt": ["bcrypt"],
-    "frozendict>=0.4": ["frozendict"],
     "pillow": ["PIL"],
     "pydenticon": ["pydenticon"],
     "ujson": ["ujson"],
     "blist": ["blist"],
     "pysaml2": ["saml2"],
+    "pymacaroons-pynacl": ["pymacaroons"],
 }
 CONDITIONAL_REQUIREMENTS = {
     "web_client": {
@@ -53,21 +56,14 @@ def github_link(project, version, egg):
     return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)
 
 DEPENDENCY_LINKS = {
-    "syutil": github_link(
-        project="matrix-org/syutil",
-        version="v0.0.7",
-        egg="syutil-0.0.7",
-    ),
-    "matrix-angular-sdk": github_link(
-        project="matrix-org/matrix-angular-sdk",
-        version="v0.6.6",
-        egg="matrix_angular_sdk-0.6.6",
-    ),
 }
 
 
 class MissingRequirementError(Exception):
-    pass
+    def __init__(self, message, module_name, dependency):
+        super(MissingRequirementError, self).__init__(message)
+        self.module_name = module_name
+        self.dependency = dependency
 
 
 def check_requirements(config=None):
@@ -95,7 +91,7 @@ def check_requirements(config=None):
                 )
                 raise MissingRequirementError(
                     "Can't import %r which is part of %r"
-                    % (module_name, dependency)
+                    % (module_name, dependency), module_name, dependency
                 )
             version = getattr(module, "__version__", None)
             file_path = getattr(module, "__file__", None)
@@ -108,23 +104,25 @@ def check_requirements(config=None):
                 if version is None:
                     raise MissingRequirementError(
                         "Version of %r isn't set as __version__ of module %r"
-                        % (dependency, module_name)
+                        % (dependency, module_name), module_name, dependency
                     )
                 if LooseVersion(version) < LooseVersion(required_version):
                     raise MissingRequirementError(
                         "Version of %r in %r is too old. %r < %r"
-                        % (dependency, file_path, version, required_version)
+                        % (dependency, file_path, version, required_version),
+                        module_name, dependency
                     )
             elif version_test == "==":
                 if version is None:
                     raise MissingRequirementError(
                         "Version of %r isn't set as __version__ of module %r"
-                        % (dependency, module_name)
+                        % (dependency, module_name), module_name, dependency
                     )
                 if LooseVersion(version) != LooseVersion(required_version):
                     raise MissingRequirementError(
                         "Unexpected version of %r in %r. %r != %r"
-                        % (dependency, file_path, version, required_version)
+                        % (dependency, file_path, version, required_version),
+                        module_name, dependency
                     )
 
 
diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py
index 2ce754b028..bdde43864c 100644
--- a/synapse/rest/client/v1/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -31,7 +31,7 @@ class WhoisRestServlet(ClientV1RestServlet):
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
         target_user = UserID.from_string(user_id)
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         is_admin = yield self.auth.is_server_admin(auth_user)
 
         if not is_admin and target_user != auth_user:
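
Most of the servlet churn from here on is mechanical: Auth.get_user_by_req()
now resolves to a (user, token_id, is_guest) triple instead of a (user, client)
pair, and servlets that tolerate guests opt in explicitly. The new calling
convention, as used throughout this changeset:

    # Inside any @defer.inlineCallbacks servlet method:
    user, token_id, is_guest = yield self.auth.get_user_by_req(
        request,
        allow_guest=True,  # omit to reject requests made with a guest token
    )
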
diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py
index 6758a888b3..240eedac75 100644
--- a/synapse/rest/client/v1/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -69,7 +69,7 @@ class ClientDirectoryServer(ClientV1RestServlet):
 
         try:
             # try to auth as a user
-            user, client = yield self.auth.get_user_by_req(request)
+            user, _, _ = yield self.auth.get_user_by_req(request)
             try:
                 user_id = user.to_string()
                 yield dir_handler.create_association(
@@ -116,7 +116,7 @@ class ClientDirectoryServer(ClientV1RestServlet):
             # fallback to default user behaviour if they aren't an AS
             pass
 
-        user, client = yield self.auth.get_user_by_req(request)
+        user, _, _ = yield self.auth.get_user_by_req(request)
 
         is_admin = yield self.auth.is_server_admin(user)
         if not is_admin:
diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py
index 77b7c25a03..3e1750d1a1 100644
--- a/synapse/rest/client/v1/events.py
+++ b/synapse/rest/client/v1/events.py
@@ -34,7 +34,15 @@ class EventStreamRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, is_guest = yield self.auth.get_user_by_req(
+            request,
+            allow_guest=True
+        )
+        room_id = None
+        if is_guest:
+            if "room_id" not in request.args:
+                raise SynapseError(400, "Guest users must specify room_id param")
+            room_id = request.args["room_id"][0]
         try:
             handler = self.handlers.event_stream_handler
             pagin_config = PaginationConfig.from_request(request)
@@ -49,7 +57,8 @@ class EventStreamRestServlet(ClientV1RestServlet):
 
             chunk = yield handler.get_stream(
                 auth_user.to_string(), pagin_config, timeout=timeout,
-                as_client_event=as_client_event
+                as_client_event=as_client_event, affect_presence=(not is_guest),
+                room_id=room_id, is_guest=is_guest
             )
         except:
             logger.exception("Event stream failed")
@@ -71,7 +80,7 @@ class EventRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, event_id):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         handler = self.handlers.event_handler
         event = yield handler.get_event(auth_user, event_id)
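
With this change a guest account can long-poll the v1 event stream, but only
for a single world-readable room named via the new room_id query parameter. A
hypothetical client call (server name, token and room id are placeholders;
requests is used for brevity):

    import requests

    resp = requests.get(
        "https://matrix.example.com/_matrix/client/api/v1/events",
        params={
            "access_token": "GUEST_TOKEN",      # placeholder guest token
            "room_id": "!abc123:example.com",   # required for guests, else 400
            "timeout": 30000,
        },
    )
    resp.raise_for_status()
    chunk = resp.json()["chunk"]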
 
diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py
index 4a259bba64..856a70f297 100644
--- a/synapse/rest/client/v1/initial_sync.py
+++ b/synapse/rest/client/v1/initial_sync.py
@@ -25,16 +25,16 @@ class InitialSyncRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        user, client = yield self.auth.get_user_by_req(request)
-        with_feedback = "feedback" in request.args
+        user, _, _ = yield self.auth.get_user_by_req(request)
         as_client_event = "raw" not in request.args
         pagination_config = PaginationConfig.from_request(request)
         handler = self.handlers.message_handler
+        include_archived = request.args.get("archived", None) == ["true"]
         content = yield handler.snapshot_all_rooms(
             user_id=user.to_string(),
             pagin_config=pagination_config,
-            feedback=with_feedback,
-            as_client_event=as_client_event
+            as_client_event=as_client_event,
+            include_archived=include_archived,
         )
 
         defer.returnValue((200, content))
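
The removed "feedback" flag gives way to an opt-in "archived" flag: initial
sync includes rooms the user has left only when the query string carries
archived=true. The comparison against ["true"] works because Twisted exposes
each query parameter as a list of values, as this sketch shows:

    # GET /initialSync?archived=true  =>  request.args == {"archived": ["true"]}
    args = {"archived": ["true"]}
    assert args.get("archived", None) == ["true"]

    # Absent or non-"true" values leave archived rooms out:
    assert {}.get("archived", None) != ["true"]
    assert {"archived": ["1"]}.get("archived", None) != ["true"]
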
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index 2444f27366..0171f6c018 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -15,18 +15,22 @@
 
 from twisted.internet import defer
 
-from synapse.api.errors import SynapseError
+from synapse.api.errors import SynapseError, LoginError, Codes
+from synapse.http.client import SimpleHttpClient
 from synapse.types import UserID
 from base import ClientV1RestServlet, client_path_pattern
 
 import simplejson as json
 import urllib
+import urlparse
 
 import logging
 from saml2 import BINDING_HTTP_POST
 from saml2 import config
 from saml2.client import Saml2Client
 
+import xml.etree.ElementTree as ET
+
 
 logger = logging.getLogger(__name__)
 
@@ -35,16 +39,28 @@ class LoginRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/login$")
     PASS_TYPE = "m.login.password"
     SAML2_TYPE = "m.login.saml2"
+    CAS_TYPE = "m.login.cas"
+    TOKEN_TYPE = "m.login.token"
 
     def __init__(self, hs):
         super(LoginRestServlet, self).__init__(hs)
         self.idp_redirect_url = hs.config.saml2_idp_redirect_url
+        self.password_enabled = hs.config.password_enabled
         self.saml2_enabled = hs.config.saml2_enabled
+        self.cas_enabled = hs.config.cas_enabled
+        self.cas_server_url = hs.config.cas_server_url
+        self.cas_required_attributes = hs.config.cas_required_attributes
+        self.servername = hs.config.server_name
 
     def on_GET(self, request):
-        flows = [{"type": LoginRestServlet.PASS_TYPE}]
+        flows = []
         if self.saml2_enabled:
             flows.append({"type": LoginRestServlet.SAML2_TYPE})
+        if self.cas_enabled:
+            flows.append({"type": LoginRestServlet.CAS_TYPE})
+        if self.password_enabled:
+            flows.append({"type": LoginRestServlet.PASS_TYPE})
+        flows.append({"type": LoginRestServlet.TOKEN_TYPE})
         return (200, {"flows": flows})
 
     def on_OPTIONS(self, request):
@@ -55,6 +71,9 @@ class LoginRestServlet(ClientV1RestServlet):
         login_submission = _parse_json(request)
         try:
             if login_submission["type"] == LoginRestServlet.PASS_TYPE:
+                if not self.password_enabled:
+                    raise SynapseError(400, "Password login has been disabled.")
+
                 result = yield self.do_password_login(login_submission)
                 defer.returnValue(result)
             elif self.saml2_enabled and (login_submission["type"] ==
@@ -67,6 +86,23 @@ class LoginRestServlet(ClientV1RestServlet):
                     "uri": "%s%s" % (self.idp_redirect_url, relay_state)
                 }
                 defer.returnValue((200, result))
+            # TODO Delete this after all CAS clients switch to token login instead
+            elif self.cas_enabled and (login_submission["type"] ==
+                                       LoginRestServlet.CAS_TYPE):
+                # TODO: get this from the homeserver rather than creating a new one for
+                # each request
+                http_client = SimpleHttpClient(self.hs)
+                uri = "%s/proxyValidate" % (self.cas_server_url,)
+                args = {
+                    "ticket": login_submission["ticket"],
+                    "service": login_submission["service"]
+                }
+                body = yield http_client.get_raw(uri, args)
+                result = yield self.do_cas_login(body)
+                defer.returnValue(result)
+            elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE:
+                result = yield self.do_token_login(login_submission)
+                defer.returnValue(result)
             else:
                 raise SynapseError(400, "Bad login type.")
         except KeyError:
@@ -78,6 +114,8 @@ class LoginRestServlet(ClientV1RestServlet):
             user_id = yield self.hs.get_datastore().get_user_id_by_threepid(
                 login_submission['medium'], login_submission['address']
             )
+            if not user_id:
+                raise LoginError(403, "", errcode=Codes.FORBIDDEN)
         else:
             user_id = login_submission['user']
 
@@ -86,47 +124,108 @@ class LoginRestServlet(ClientV1RestServlet):
                 user_id, self.hs.hostname
             ).to_string()
 
-        user_id, token = yield self.handlers.auth_handler.login_with_password(
+        auth_handler = self.handlers.auth_handler
+        user_id, access_token, refresh_token = yield auth_handler.login_with_password(
             user_id=user_id,
             password=login_submission["password"])
 
         result = {
             "user_id": user_id,  # may have changed
-            "access_token": token,
+            "access_token": access_token,
+            "refresh_token": refresh_token,
             "home_server": self.hs.hostname,
         }
 
         defer.returnValue((200, result))
 
+    @defer.inlineCallbacks
+    def do_token_login(self, login_submission):
+        token = login_submission['token']
+        auth_handler = self.handlers.auth_handler
+        user_id = (
+            yield auth_handler.validate_short_term_login_token_and_get_user_id(token)
+        )
+        user_id, access_token, refresh_token = (
+            yield auth_handler.get_login_tuple_for_user_id(user_id)
+        )
+        result = {
+            "user_id": user_id,  # may have changed
+            "access_token": access_token,
+            "refresh_token": refresh_token,
+            "home_server": self.hs.hostname,
+        }
 
-class LoginFallbackRestServlet(ClientV1RestServlet):
-    PATTERN = client_path_pattern("/login/fallback$")
+        defer.returnValue((200, result))
 
-    def on_GET(self, request):
-        # TODO(kegan): This should be returning some HTML which is capable of
-        # hitting LoginRestServlet
-        return (200, {})
+    # TODO Delete this after all CAS clients switch to token login instead
+    @defer.inlineCallbacks
+    def do_cas_login(self, cas_response_body):
+        user, attributes = self.parse_cas_response(cas_response_body)
 
+        for required_attribute, required_value in self.cas_required_attributes.items():
+            # If required attribute was not in CAS Response - Forbidden
+            if required_attribute not in attributes:
+                raise LoginError(401, "Unauthorized", errcode=Codes.UNAUTHORIZED)
 
-class PasswordResetRestServlet(ClientV1RestServlet):
-    PATTERN = client_path_pattern("/login/reset")
+            # Also need to check value
+            if required_value is not None:
+                actual_value = attributes[required_attribute]
+                # If required attribute value does not match expected - Forbidden
+                if required_value != actual_value:
+                    raise LoginError(401, "Unauthorized", errcode=Codes.UNAUTHORIZED)
 
-    @defer.inlineCallbacks
-    def on_POST(self, request):
-        reset_info = _parse_json(request)
-        try:
-            email = reset_info["email"]
-            user_id = reset_info["user_id"]
-            handler = self.handlers.login_handler
-            yield handler.reset_password(user_id, email)
-            # purposefully give no feedback to avoid people hammering different
-            # combinations.
-            defer.returnValue((200, {}))
-        except KeyError:
-            raise SynapseError(
-                400,
-                "Missing keys. Requires 'email' and 'user_id'."
+        user_id = UserID.create(user, self.hs.hostname).to_string()
+        auth_handler = self.handlers.auth_handler
+        user_exists = yield auth_handler.does_user_exist(user_id)
+        if user_exists:
+            user_id, access_token, refresh_token = (
+                yield auth_handler.get_login_tuple_for_user_id(user_id)
+            )
+            result = {
+                "user_id": user_id,  # may have changed
+                "access_token": access_token,
+                "refresh_token": refresh_token,
+                "home_server": self.hs.hostname,
+            }
+
+        else:
+            user_id, access_token = (
+                yield self.handlers.registration_handler.register(localpart=user)
             )
+            result = {
+                "user_id": user_id,  # may have changed
+                "access_token": access_token,
+                "home_server": self.hs.hostname,
+            }
+
+        defer.returnValue((200, result))
+
+    # TODO Delete this after all CAS clients switch to token login instead
+    def parse_cas_response(self, cas_response_body):
+        root = ET.fromstring(cas_response_body)
+        if not root.tag.endswith("serviceResponse"):
+            raise LoginError(401, "Invalid CAS response", errcode=Codes.UNAUTHORIZED)
+        if not root[0].tag.endswith("authenticationSuccess"):
+            raise LoginError(401, "Unsuccessful CAS response", errcode=Codes.UNAUTHORIZED)
+        user = None
+        attributes = None
+        for child in root[0]:
+            if child.tag.endswith("user"):
+                user = child.text
+            if child.tag.endswith("attributes"):
+                attributes = {}
+                for attribute in child:
+                    # ElementTree library expands the namespace in attribute tags
+                    # to the full URL of the namespace.
+                    # See (https://docs.python.org/2/library/xml.etree.elementtree.html)
+                    # We don't care about namespace here and it will always be encased in
+                    # curly braces, so we remove them.
+                    if "}" in attribute.tag:
+                        attributes[attribute.tag.split("}")[1]] = attribute.text
+                    else:
+                        attributes[attribute.tag] = attribute.text
+        if user is None or attributes is None:
+            raise LoginError(401, "Invalid CAS response", errcode=Codes.UNAUTHORIZED)
+
+        return (user, attributes)
 
 
 class SAML2RestServlet(ClientV1RestServlet):
@@ -172,6 +271,127 @@ class SAML2RestServlet(ClientV1RestServlet):
         defer.returnValue((200, {"status": "not_authenticated"}))
 
 
+# TODO Delete this after all CAS clients switch to token login instead
+class CasRestServlet(ClientV1RestServlet):
+    PATTERN = client_path_pattern("/login/cas")
+
+    def __init__(self, hs):
+        super(CasRestServlet, self).__init__(hs)
+        self.cas_server_url = hs.config.cas_server_url
+
+    def on_GET(self, request):
+        return (200, {"serverUrl": self.cas_server_url})
+
+
+class CasRedirectServlet(ClientV1RestServlet):
+    PATTERN = client_path_pattern("/login/cas/redirect")
+
+    def __init__(self, hs):
+        super(CasRedirectServlet, self).__init__(hs)
+        self.cas_server_url = hs.config.cas_server_url
+        self.cas_service_url = hs.config.cas_service_url
+
+    def on_GET(self, request):
+        args = request.args
+        if "redirectUrl" not in args:
+            return (400, "Redirect URL not specified for CAS auth")
+        client_redirect_url_param = urllib.urlencode({
+            "redirectUrl": args["redirectUrl"][0]
+        })
+        hs_redirect_url = self.cas_service_url + "/_matrix/client/api/v1/login/cas/ticket"
+        service_param = urllib.urlencode({
+            "service": "%s?%s" % (hs_redirect_url, client_redirect_url_param)
+        })
+        request.redirect("%s?%s" % (self.cas_server_url, service_param))
+        request.finish()
+
+
+class CasTicketServlet(ClientV1RestServlet):
+    PATTERN = client_path_pattern("/login/cas/ticket")
+
+    def __init__(self, hs):
+        super(CasTicketServlet, self).__init__(hs)
+        self.cas_server_url = hs.config.cas_server_url
+        self.cas_service_url = hs.config.cas_service_url
+        self.cas_required_attributes = hs.config.cas_required_attributes
+
+    @defer.inlineCallbacks
+    def on_GET(self, request):
+        client_redirect_url = request.args["redirectUrl"][0]
+        http_client = self.hs.get_simple_http_client()
+        uri = self.cas_server_url + "/proxyValidate"
+        args = {
+            "ticket": request.args["ticket"],
+            "service": self.cas_service_url
+        }
+        body = yield http_client.get_raw(uri, args)
+        result = yield self.handle_cas_response(request, body, client_redirect_url)
+        defer.returnValue(result)
+
+    @defer.inlineCallbacks
+    def handle_cas_response(self, request, cas_response_body, client_redirect_url):
+        user, attributes = self.parse_cas_response(cas_response_body)
+
+        for required_attribute, required_value in self.cas_required_attributes.items():
+            # If required attribute was not in CAS Response - Forbidden
+            if required_attribute not in attributes:
+                raise LoginError(401, "Unauthorized", errcode=Codes.UNAUTHORIZED)
+
+            # Also need to check value
+            if required_value is not None:
+                actual_value = attributes[required_attribute]
+                # If required attribute value does not match expected - Forbidden
+                if required_value != actual_value:
+                    raise LoginError(401, "Unauthorized", errcode=Codes.UNAUTHORIZED)
+
+        user_id = UserID.create(user, self.hs.hostname).to_string()
+        auth_handler = self.handlers.auth_handler
+        user_exists = yield auth_handler.does_user_exist(user_id)
+        if not user_exists:
+            user_id, _ = (
+                yield self.handlers.registration_handler.register(localpart=user)
+            )
+
+        login_token = auth_handler.generate_short_term_login_token(user_id)
+        redirect_url = self.add_login_token_to_redirect_url(client_redirect_url,
+                                                            login_token)
+        request.redirect(redirect_url)
+        request.finish()
+
+    def add_login_token_to_redirect_url(self, url, token):
+        url_parts = list(urlparse.urlparse(url))
+        query = dict(urlparse.parse_qsl(url_parts[4]))
+        query.update({"loginToken": token})
+        url_parts[4] = urllib.urlencode(query)
+        return urlparse.urlunparse(url_parts)
+
+    def parse_cas_response(self, cas_response_body):
+        root = ET.fromstring(cas_response_body)
+        if not root.tag.endswith("serviceResponse"):
+            raise LoginError(401, "Invalid CAS response", errcode=Codes.UNAUTHORIZED)
+        if not root[0].tag.endswith("authenticationSuccess"):
+            raise LoginError(401, "Unsuccessful CAS response", errcode=Codes.UNAUTHORIZED)
+        user = None
+        attributes = None
+        for child in root[0]:
+            if child.tag.endswith("user"):
+                user = child.text
+            if child.tag.endswith("attributes"):
+                attributes = {}
+                for attribute in child:
+                    # ElementTree library expands the namespace in attribute tags
+                    # to the full URL of the namespace.
+                    # See (https://docs.python.org/2/library/xml.etree.elementtree.html)
+                    # We don't care about namespace here and it will always be encased in
+                    # curly braces, so we remove them.
+                    if "}" in attribute.tag:
+                        attributes[attribute.tag.split("}")[1]] = attribute.text
+                    else:
+                        attributes[attribute.tag] = attribute.text
+        if user is None or attributes is None:
+            raise LoginError(401, "Invalid CAS response", errcode=Codes.UNAUTHORIZED)
+
+        return (user, attributes)
+
+
 def _parse_json(request):
     try:
         content = json.loads(request.content.read())
@@ -186,4 +406,8 @@ def register_servlets(hs, http_server):
     LoginRestServlet(hs).register(http_server)
     if hs.config.saml2_enabled:
         SAML2RestServlet(hs).register(http_server)
+    if hs.config.cas_enabled:
+        CasRedirectServlet(hs).register(http_server)
+        CasTicketServlet(hs).register(http_server)
+        CasRestServlet(hs).register(http_server)
     # TODO PasswordResetRestServlet(hs).register(http_server)
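
parse_cas_response() walks the XML body of a CAS proxyValidate response. A
standalone re-run of the same parsing logic against a sample success document,
for illustration only:

    import xml.etree.ElementTree as ET

    CAS_BODY = """\
    <cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
      <cas:authenticationSuccess>
        <cas:user>alice</cas:user>
        <cas:attributes>
          <cas:department>engineering</cas:department>
        </cas:attributes>
      </cas:authenticationSuccess>
    </cas:serviceResponse>
    """

    root = ET.fromstring(CAS_BODY)
    assert root.tag.endswith("serviceResponse")
    success = root[0]
    assert success.tag.endswith("authenticationSuccess")

    user, attributes = None, {}
    for child in success:
        if child.tag.endswith("user"):
            user = child.text
        if child.tag.endswith("attributes"):
            for attribute in child:
                # Strip the "{namespace}" prefix ElementTree adds to tags.
                attributes[attribute.tag.split("}")[-1]] = attribute.text

    assert user == "alice"
    assert attributes == {"department": "engineering"}
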
diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py
index 78d4f2b128..6fe5d19a22 100644
--- a/synapse/rest/client/v1/presence.py
+++ b/synapse/rest/client/v1/presence.py
@@ -32,7 +32,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         user = UserID.from_string(user_id)
 
         state = yield self.handlers.presence_handler.get_state(
@@ -42,7 +42,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, user_id):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         user = UserID.from_string(user_id)
 
         state = {}
@@ -77,7 +77,7 @@ class PresenceListRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         user = UserID.from_string(user_id)
 
         if not self.hs.is_mine(user):
@@ -97,7 +97,7 @@ class PresenceListRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request, user_id):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         user = UserID.from_string(user_id)
 
         if not self.hs.is_mine(user):
diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py
index 1e77eb49cf..3218e47025 100644
--- a/synapse/rest/client/v1/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -37,7 +37,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, user_id):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request, allow_guest=True)
         user = UserID.from_string(user_id)
 
         try:
@@ -70,7 +70,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, user_id):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         user = UserID.from_string(user_id)
 
         try:
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index bd759a2589..b0870db1ac 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -43,7 +43,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
         except InvalidRuleException as e:
             raise SynapseError(400, e.message)
 
-        user, _ = yield self.auth.get_user_by_req(request)
+        user, _, _ = yield self.auth.get_user_by_req(request)
 
         if '/' in spec['rule_id'] or '\\' in spec['rule_id']:
             raise SynapseError(400, "rule_id may not contain slashes")
@@ -92,7 +92,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
     def on_DELETE(self, request):
         spec = _rule_spec_from_path(request.postpath)
 
-        user, _ = yield self.auth.get_user_by_req(request)
+        user, _, _ = yield self.auth.get_user_by_req(request)
 
         namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
 
@@ -109,7 +109,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        user, _ = yield self.auth.get_user_by_req(request)
+        user, _, _ = yield self.auth.get_user_by_req(request)
 
         # we build up the full structure and then decide which bits of it
         # to send which means doing unnecessary work sometimes but is
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index c83287c028..a110c0a4f0 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -27,7 +27,7 @@ class PusherRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, token_id, _ = yield self.auth.get_user_by_req(request)
 
         content = _parse_json(request)
 
@@ -65,7 +65,7 @@ class PusherRestServlet(ClientV1RestServlet):
         try:
             yield pusher_pool.add_pusher(
                 user_name=user.to_string(),
-                access_token=client.token_id,
+                access_token=token_id,
                 profile_tag=content['profile_tag'],
                 kind=content['kind'],
                 app_id=content['app_id'],
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index b4a70cba99..139dac1cc3 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -17,7 +17,7 @@
 from twisted.internet import defer
 
 from base import ClientV1RestServlet, client_path_pattern
-from synapse.api.errors import SynapseError, Codes
+from synapse.api.errors import SynapseError, Codes, AuthError
 from synapse.streams.config import PaginationConfig
 from synapse.api.constants import EventTypes, Membership
 from synapse.types import UserID, RoomID, RoomAlias
@@ -27,7 +27,6 @@ import simplejson as json
 import logging
 import urllib
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -62,7 +61,7 @@ class RoomCreateRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
 
         room_config = self.get_room_config(request)
         info = yield self.make_room(room_config, auth_user, None)
@@ -125,7 +124,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id, event_type, state_key):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
 
         msg_handler = self.handlers.message_handler
         data = yield msg_handler.get_room_data(
@@ -133,6 +132,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
             room_id=room_id,
             event_type=event_type,
             state_key=state_key,
+            is_guest=is_guest,
         )
 
         if not data:
@@ -143,7 +143,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, room_id, event_type, state_key, txn_id=None):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, token_id, _ = yield self.auth.get_user_by_req(request)
 
         content = _parse_json(request)
 
@@ -159,7 +159,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
 
         msg_handler = self.handlers.message_handler
         yield msg_handler.create_and_send_event(
-            event_dict, client=client, txn_id=txn_id,
+            event_dict, token_id=token_id, txn_id=txn_id,
         )
 
         defer.returnValue((200, {}))
@@ -175,7 +175,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request, room_id, event_type, txn_id=None):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, token_id, _ = yield self.auth.get_user_by_req(request, allow_guest=True)
         content = _parse_json(request)
 
         msg_handler = self.handlers.message_handler
@@ -186,7 +186,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
                 "room_id": room_id,
                 "sender": user.to_string(),
             },
-            client=client,
+            token_id=token_id,
             txn_id=txn_id,
         )
 
@@ -220,7 +220,10 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request, room_identifier, txn_id=None):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, token_id, is_guest = yield self.auth.get_user_by_req(
+            request,
+            allow_guest=True
+        )
 
         # the identifier could be a room alias or a room id. Try one then the
         # other if it fails to parse, without swallowing other valid
@@ -242,16 +245,20 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
             defer.returnValue((200, ret_dict))
         else:  # room id
             msg_handler = self.handlers.message_handler
+            content = {"membership": Membership.JOIN}
+            if is_guest:
+                content["kind"] = "guest"
             yield msg_handler.create_and_send_event(
                 {
                     "type": EventTypes.Member,
-                    "content": {"membership": Membership.JOIN},
+                    "content": content,
                     "room_id": identifier.to_string(),
                     "sender": user.to_string(),
                     "state_key": user.to_string(),
                 },
-                client=client,
+                token_id=token_id,
                 txn_id=txn_id,
+                is_guest=is_guest,
             )
 
             defer.returnValue((200, {"room_id": identifier.to_string()}))
@@ -289,13 +296,19 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
         # TODO support Pagination stream API (limit/tokens)
-        user, client = yield self.auth.get_user_by_req(request)
-        handler = self.handlers.room_member_handler
-        members = yield handler.get_room_members_as_pagination_chunk(
+        user, _, _ = yield self.auth.get_user_by_req(request)
+        handler = self.handlers.message_handler
+        events = yield handler.get_state_events(
             room_id=room_id,
-            user_id=user.to_string())
+            user_id=user.to_string(),
+        )
 
-        for event in members["chunk"]:
+        chunk = []
+
+        for event in events:
+            if event["type"] != EventTypes.Member:
+                continue
+            chunk.append(event)
             # FIXME: should probably be state_key here, not user_id
             target_user = UserID.from_string(event["user_id"])
             # Presence is an optional cache; don't fail if we can't fetch it
@@ -308,27 +321,28 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
             except:
                 pass
 
-        defer.returnValue((200, members))
+        defer.returnValue((200, {
+            "chunk": chunk
+        }))
 
 
-# TODO: Needs unit testing
+# TODO: Needs better unit testing
 class RoomMessageListRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/messages$")
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
         pagination_config = PaginationConfig.from_request(
             request, default_limit=10,
         )
-        with_feedback = "feedback" in request.args
         as_client_event = "raw" not in request.args
         handler = self.handlers.message_handler
         msgs = yield handler.get_messages(
             room_id=room_id,
             user_id=user.to_string(),
+            is_guest=is_guest,
             pagin_config=pagination_config,
-            feedback=with_feedback,
             as_client_event=as_client_event
         )
 
@@ -341,12 +355,13 @@ class RoomStateRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
         handler = self.handlers.message_handler
         # Get all the current state for this room
         events = yield handler.get_state_events(
             room_id=room_id,
             user_id=user.to_string(),
+            is_guest=is_guest,
         )
         defer.returnValue((200, events))
 
@@ -357,12 +372,13 @@ class RoomInitialSyncRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
         pagination_config = PaginationConfig.from_request(request)
         content = yield self.handlers.message_handler.room_initial_sync(
             room_id=room_id,
             user_id=user.to_string(),
             pagin_config=pagination_config,
+            is_guest=is_guest,
         )
         defer.returnValue((200, content))
 
@@ -391,6 +407,41 @@ class RoomTriggerBackfill(ClientV1RestServlet):
         defer.returnValue((200, res))
 
 
+class RoomEventContext(ClientV1RestServlet):
+    PATTERN = client_path_pattern(
+        "/rooms/(?P<room_id>[^/]*)/context/(?P<event_id>[^/]*)$"
+    )
+
+    def __init__(self, hs):
+        super(RoomEventContext, self).__init__(hs)
+        self.clock = hs.get_clock()
+
+    @defer.inlineCallbacks
+    def on_GET(self, request, room_id, event_id):
+        user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
+
+        limit = int(request.args.get("limit", [10])[0])
+
+        results = yield self.handlers.room_context_handler.get_event_context(
+            user, room_id, event_id, limit, is_guest
+        )
+
+        time_now = self.clock.time_msec()
+        results["events_before"] = [
+            serialize_event(event, time_now) for event in results["events_before"]
+        ]
+        results["events_after"] = [
+            serialize_event(event, time_now) for event in results["events_after"]
+        ]
+        results["state"] = [
+            serialize_event(event, time_now) for event in results["state"]
+        ]
+
+        logger.info("Responding with %r", results)
+
+        defer.returnValue((200, results))
+
+
 # TODO: Needs unit testing
 class RoomMembershipRestServlet(ClientV1RestServlet):
 
@@ -402,16 +453,37 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request, room_id, membership_action, txn_id=None):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, token_id, is_guest = yield self.auth.get_user_by_req(
+            request,
+            allow_guest=True
+        )
+
+        if is_guest and membership_action not in {Membership.JOIN, Membership.LEAVE}:
+            raise AuthError(403, "Guest access not allowed")
 
         content = _parse_json(request)
 
         # target user is you unless it is an invite
         state_key = user.to_string()
-        if membership_action in ["invite", "ban", "kick"]:
-            if "user_id" not in content:
+
+        if membership_action == "invite" and self._has_3pid_invite_keys(content):
+            yield self.handlers.room_member_handler.do_3pid_invite(
+                room_id,
+                user,
+                content["medium"],
+                content["address"],
+                content["id_server"],
+                token_id,
+                txn_id
+            )
+            defer.returnValue((200, {}))
+            return
+        elif membership_action in ["invite", "ban", "kick"]:
+            if "user_id" in content:
+                state_key = content["user_id"]
+            else:
                 raise SynapseError(400, "Missing user_id key.")
-            state_key = content["user_id"]
+
             # make sure it looks like a user ID; it'll throw if it's invalid.
             UserID.from_string(state_key)
 
@@ -419,20 +491,32 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
                 membership_action = "leave"
 
         msg_handler = self.handlers.message_handler
+
+        content = {"membership": unicode(membership_action)}
+        if is_guest:
+            content["kind"] = "guest"
+
         yield msg_handler.create_and_send_event(
             {
                 "type": EventTypes.Member,
-                "content": {"membership": unicode(membership_action)},
+                "content": content,
                 "room_id": room_id,
                 "sender": user.to_string(),
                 "state_key": state_key,
             },
-            client=client,
+            token_id=token_id,
             txn_id=txn_id,
+            is_guest=is_guest,
         )
 
         defer.returnValue((200, {}))
 
+    def _has_3pid_invite_keys(self, content):
+        for key in {"id_server", "medium", "address"}:
+            if key not in content:
+                return False
+        return True
+
     @defer.inlineCallbacks
     def on_PUT(self, request, room_id, membership_action, txn_id):
         try:
@@ -457,7 +541,7 @@ class RoomRedactEventRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request, room_id, event_id, txn_id=None):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, token_id, _ = yield self.auth.get_user_by_req(request)
         content = _parse_json(request)
 
         msg_handler = self.handlers.message_handler
@@ -469,7 +553,7 @@ class RoomRedactEventRestServlet(ClientV1RestServlet):
                 "sender": user.to_string(),
                 "redacts": event_id,
             },
-            client=client,
+            token_id=token_id,
             txn_id=txn_id,
         )
 
@@ -497,7 +581,7 @@ class RoomTypingRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, room_id, user_id):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
 
         room_id = urllib.unquote(room_id)
         target_user = UserID.from_string(urllib.unquote(user_id))
@@ -523,6 +607,23 @@ class RoomTypingRestServlet(ClientV1RestServlet):
         defer.returnValue((200, {}))
 
 
+class SearchRestServlet(ClientV1RestServlet):
+    PATTERN = client_path_pattern(
+        "/search$"
+    )
+
+    @defer.inlineCallbacks
+    def on_POST(self, request):
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
+
+        content = _parse_json(request)
+
+        batch = request.args.get("next_batch", [None])[0]
+        results = yield self.handlers.search_handler.search(auth_user, content, batch)
+
+        defer.returnValue((200, results))
+
+
 def _parse_json(request):
     try:
         content = json.loads(request.content.read())
@@ -579,3 +680,5 @@ def register_servlets(hs, http_server):
     RoomInitialSyncRestServlet(hs).register(http_server)
     RoomRedactEventRestServlet(hs).register(http_server)
     RoomTypingRestServlet(hs).register(http_server)
+    SearchRestServlet(hs).register(http_server)
+    RoomEventContext(hs).register(http_server)
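
The membership servlet now forks on the shape of the request body: a classic
invite names a Matrix user_id, while a third-party invite carries id_server,
medium and address and is routed to do_3pid_invite. A sketch of the key test
(request bodies are illustrative):

    def has_3pid_invite_keys(content):
        # Mirrors RoomMembershipRestServlet._has_3pid_invite_keys above.
        return all(key in content for key in ("id_server", "medium", "address"))

    classic_invite = {"user_id": "@bob:example.com"}
    third_party_invite = {
        "id_server": "matrix.example.com",  # identity server holding the mapping
        "medium": "email",
        "address": "bob@example.com",
    }

    assert not has_3pid_invite_keys(classic_invite)
    assert has_3pid_invite_keys(third_party_invite)
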
diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py
index 11d08fbced..eb7c57cade 100644
--- a/synapse/rest/client/v1/voip.py
+++ b/synapse/rest/client/v1/voip.py
@@ -28,7 +28,7 @@ class VoipRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
 
         turnUris = self.hs.config.turn_uris
         turnSecret = self.hs.config.turn_shared_secret
@@ -40,7 +40,7 @@ class VoipRestServlet(ClientV1RestServlet):
         username = "%d:%s" % (expiry, auth_user.to_string())
 
         mac = hmac.new(turnSecret, msg=username, digestmod=hashlib.sha1)
-        # We need to use standard base64 encoding here, *not* syutil's
+        # We need to use standard padded base64 encoding here, *not* unpaddedbase64's
         # encode_base64 because we need to add the standard padding to get the
         # same result as the TURN server.
         password = base64.b64encode(mac.digest())
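
The comment change reflects the syutil removal; the TURN REST credential
scheme itself is unchanged. A standalone sketch of the computation the servlet
performs (secret, user id and lifetime are placeholders):

    import base64
    import hashlib
    import hmac
    import time

    turn_secret = "shared-secret-from-config"     # placeholder
    user_id = "@alice:example.com"                # placeholder
    expiry = int(time.time() * 1000) + 86400000   # now + lifetime, in ms

    username = "%d:%s" % (expiry, user_id)
    mac = hmac.new(turn_secret, msg=username, digestmod=hashlib.sha1)
    # Standard *padded* base64, to match what the TURN server computes:
    password = base64.b64encode(mac.digest())
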
diff --git a/synapse/rest/client/v2_alpha/__init__.py b/synapse/rest/client/v2_alpha/__init__.py
index 33f961e898..a108132346 100644
--- a/synapse/rest/client/v2_alpha/__init__.py
+++ b/synapse/rest/client/v2_alpha/__init__.py
@@ -21,6 +21,8 @@ from . import (
     auth,
     receipts,
     keys,
+    tokenrefresh,
+    tags,
 )
 
 from synapse.http.server import JsonResource
@@ -42,3 +44,5 @@ class ClientV2AlphaRestResource(JsonResource):
         auth.register_servlets(hs, client_resource)
         receipts.register_servlets(hs, client_resource)
         keys.register_servlets(hs, client_resource)
+        tokenrefresh.register_servlets(hs, client_resource)
+        tags.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 6281e2d029..1970ad3458 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -55,7 +55,7 @@ class PasswordRestServlet(RestServlet):
 
         if LoginType.PASSWORD in result:
             # if using password, they should also be logged in
-            auth_user, client = yield self.auth.get_user_by_req(request)
+            auth_user, _, _ = yield self.auth.get_user_by_req(request)
             if auth_user.to_string() != result[LoginType.PASSWORD]:
                 raise LoginError(400, "", Codes.UNKNOWN)
             user_id = auth_user.to_string()
@@ -102,7 +102,7 @@ class ThreepidRestServlet(RestServlet):
     def on_GET(self, request):
         yield run_on_reactor()
 
-        auth_user, _ = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
 
         threepids = yield self.hs.get_datastore().user_get_threepids(
             auth_user.to_string()
@@ -120,7 +120,7 @@ class ThreepidRestServlet(RestServlet):
             raise SynapseError(400, "Missing param", Codes.MISSING_PARAM)
         threePidCreds = body['threePidCreds']
 
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
 
         threepid = yield self.identity_handler.threepid_from_creds(threePidCreds)
 
diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py
index 703250cea8..97956a4b91 100644
--- a/synapse/rest/client/v2_alpha/filter.py
+++ b/synapse/rest/client/v2_alpha/filter.py
@@ -40,7 +40,7 @@ class GetFilterRestServlet(RestServlet):
     @defer.inlineCallbacks
     def on_GET(self, request, user_id, filter_id):
         target_user = UserID.from_string(user_id)
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
 
         if target_user != auth_user:
             raise AuthError(403, "Cannot get filters for other users")
@@ -76,7 +76,7 @@ class CreateFilterRestServlet(RestServlet):
     @defer.inlineCallbacks
     def on_POST(self, request, user_id):
         target_user = UserID.from_string(user_id)
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
 
         if target_user != auth_user:
             raise AuthError(403, "Cannot create filters for other users")
diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py
index 718928eedd..820d33336f 100644
--- a/synapse/rest/client/v2_alpha/keys.py
+++ b/synapse/rest/client/v2_alpha/keys.py
@@ -18,7 +18,8 @@ from twisted.internet import defer
 from synapse.api.errors import SynapseError
 from synapse.http.servlet import RestServlet
 from synapse.types import UserID
-from syutil.jsonutil import encode_canonical_json
+
+from canonicaljson import encode_canonical_json
 
 from ._base import client_v2_pattern
 
@@ -63,7 +64,7 @@ class KeyUploadServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request, device_id):
-        auth_user, client_info = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         user_id = auth_user.to_string()
         # TODO: Check that the device_id matches that in the authentication
         # or derive the device_id from the authentication instead.
@@ -108,7 +109,7 @@ class KeyUploadServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, device_id):
-        auth_user, client_info = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         user_id = auth_user.to_string()
 
         result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
@@ -180,7 +181,7 @@ class KeyQueryServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id, device_id):
-        auth_user, client_info = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         auth_user_id = auth_user.to_string()
         user_id = user_id if user_id else auth_user_id
         device_ids = [device_id] if device_id else []
diff --git a/synapse/rest/client/v2_alpha/receipts.py b/synapse/rest/client/v2_alpha/receipts.py
index 40406e2ede..788acd4adb 100644
--- a/synapse/rest/client/v2_alpha/receipts.py
+++ b/synapse/rest/client/v2_alpha/receipts.py
@@ -15,6 +15,7 @@
 
 from twisted.internet import defer
 
+from synapse.api.errors import SynapseError
 from synapse.http.servlet import RestServlet
 from ._base import client_v2_pattern
 
@@ -39,7 +40,10 @@ class ReceiptRestServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request, room_id, receipt_type, event_id):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, _, _ = yield self.auth.get_user_by_req(request)
+
+        if receipt_type != "m.read":
+            raise SynapseError(400, "Receipt type must be 'm.read'")
 
         yield self.receipts_handler.received_client_receipt(
             room_id,
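
Receipts are now rejected unless they use the one type the server implements,
m.read. A hypothetical client call that satisfies the check (ids and token are
placeholders):

    import requests

    requests.post(
        "https://matrix.example.com/_matrix/client/v2_alpha/rooms/"
        "!abc123:example.com/receipt/m.read/$some-event:example.com",
        params={"access_token": "TOKEN"},
        json={},
    ).raise_for_status()
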
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index 1ba2f29711..f899376311 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -16,7 +16,7 @@
 from twisted.internet import defer
 
 from synapse.api.constants import LoginType
-from synapse.api.errors import SynapseError, Codes
+from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError
 from synapse.http.servlet import RestServlet
 
 from ._base import client_v2_pattern, parse_json_dict_from_request
@@ -55,6 +55,19 @@ class RegisterRestServlet(RestServlet):
     def on_POST(self, request):
         yield run_on_reactor()
 
+        kind = "user"
+        if "kind" in request.args:
+            kind = request.args["kind"][0]
+
+        if kind == "guest":
+            ret = yield self._do_guest_registration()
+            defer.returnValue(ret)
+            return
+        elif kind != "user":
+            raise UnrecognizedRequestError(
+                "Do not understand membership kind: %s" % (kind,)
+            )
+
         if '/register/email/requestToken' in request.path:
             ret = yield self.onEmailTokenRequest(request)
             defer.returnValue(ret)
@@ -236,6 +249,18 @@ class RegisterRestServlet(RestServlet):
         ret = yield self.identity_handler.requestEmailToken(**body)
         defer.returnValue((200, ret))
 
+    @defer.inlineCallbacks
+    def _do_guest_registration(self):
+        if not self.hs.config.allow_guest_access:
+            defer.returnValue((403, "Guest access is disabled"))
+        user_id, _ = yield self.registration_handler.register(generate_token=False)
+        access_token = self.auth_handler.generate_access_token(user_id, ["guest = true"])
+        defer.returnValue((200, {
+            "user_id": user_id,
+            "access_token": access_token,
+            "home_server": self.hs.hostname,
+        }))
+
 
 def register_servlets(hs, http_server):
     RegisterRestServlet(hs).register(http_server)
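
Guest registration needs no credentials, only the kind=guest query parameter;
the response carries the new guest's user_id and access_token but, unlike the
registration flows above, no refresh_token. A hypothetical exchange (server
name is a placeholder):

    import requests

    resp = requests.post(
        "https://matrix.example.com/_matrix/client/v2_alpha/register",
        params={"kind": "guest"},
        json={},
    )
    resp.raise_for_status()
    creds = resp.json()  # {"user_id": ..., "access_token": ..., "home_server": ...}
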
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index f2fd0b9f32..efd8281558 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -20,12 +20,14 @@ from synapse.http.servlet import (
 )
 from synapse.handlers.sync import SyncConfig
 from synapse.types import StreamToken
+from synapse.events import FrozenEvent
 from synapse.events.utils import (
     serialize_event, format_event_for_client_v2_without_event_id,
 )
-from synapse.api.filtering import Filter
+from synapse.api.filtering import FilterCollection
 from ._base import client_v2_pattern
 
+import copy
 import logging
 
 logger = logging.getLogger(__name__)
@@ -36,100 +38,77 @@ class SyncRestServlet(RestServlet):
 
     GET parameters::
         timeout(int): How long to wait for new events in milliseconds.
-        limit(int): Maxiumum number of events per room to return.
-        gap(bool): Create gaps the message history if limit is exceeded to
-            ensure that the client has the most recent messages. Defaults to
-            "true".
-        sort(str,str): tuple of sort key (e.g. "timeline") and direction
-            (e.g. "asc", "desc"). Defaults to "timeline,asc".
         since(batch_token): Batch token when asking for incremental deltas.
         set_presence(str): What state the device presence should be set to.
             default is "online".
-        backfill(bool): Should the HS request message history from other
-            servers. This may take a long time making it unsuitable for clients
-            expecting a prompt response. Defaults to "true".
         filter(filter_id): A filter to apply to the events returned.
-        filter_*: Filter override parameters.
 
     Response JSON::
         {
-            "next_batch": // batch token for the next /sync
-            "private_user_data": // private events for this user.
-            "public_user_data": // public events for all users including the
-                                // public events for this user.
-            "rooms": [{ // List of rooms with updates.
-                "room_id": // Id of the room being updated
-                "limited": // Was the per-room event limit exceeded?
-                "published": // Is the room published by our HS?
+          "next_batch": // batch token for the next /sync
+          "presence": // presence data for the user.
+          "rooms": {
+            "joined": { // Joined rooms being updated.
+              "${room_id}": { // Id of the room being updated
                 "event_map": // Map of EventID -> event JSON.
-                "events": { // The recent events in the room if gap is "true"
-                            // otherwise the next events in the room.
-                    "batch": [] // list of EventIDs in the "event_map".
-                    "prev_batch": // back token for getting previous events.
+                "timeline": { // The most recent events in the room.
+                  "limited": // Was the per-room event limit exceeded?
+                             // If so, the timeline has been truncated.
+                  "events": [] // list of EventIDs in the "event_map".
+                  "prev_batch": // back token for getting previous events.
                 }
-                "state": [] // list of EventIDs updating the current state to
-                            // be what it should be at the end of the batch.
-                "ephemeral": []
-            }]
+                "state": {"events": []} // list of EventIDs updating the
+                                        // current state to be what it should
+                                        // be at the end of the batch.
+                "ephemeral": {"events": []} // list of event objects
+              }
+            },
+            "invited": {}, // Invited rooms being updated.
+            "archived": {} // Archived rooms being updated.
+          }
         }
     """
 
     PATTERN = client_v2_pattern("/sync$")
-    ALLOWED_SORT = set(["timeline,asc", "timeline,desc"])
-    ALLOWED_PRESENCE = set(["online", "offline", "idle"])
+    ALLOWED_PRESENCE = set(["online", "offline"])
 
     def __init__(self, hs):
         super(SyncRestServlet, self).__init__()
         self.auth = hs.get_auth()
+        self.event_stream_handler = hs.get_handlers().event_stream_handler
         self.sync_handler = hs.get_handlers().sync_handler
         self.clock = hs.get_clock()
         self.filtering = hs.get_filtering()
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        user, client = yield self.auth.get_user_by_req(request)
+        user, token_id, _ = yield self.auth.get_user_by_req(request)
 
         timeout = parse_integer(request, "timeout", default=0)
-        limit = parse_integer(request, "limit", required=True)
-        gap = parse_boolean(request, "gap", default=True)
-        sort = parse_string(
-            request, "sort", default="timeline,asc",
-            allowed_values=self.ALLOWED_SORT
-        )
         since = parse_string(request, "since")
         set_presence = parse_string(
             request, "set_presence", default="online",
             allowed_values=self.ALLOWED_PRESENCE
         )
-        backfill = parse_boolean(request, "backfill", default=False)
         filter_id = parse_string(request, "filter", default=None)
+        full_state = parse_boolean(request, "full_state", default=False)
 
         logger.info(
-            "/sync: user=%r, timeout=%r, limit=%r, gap=%r, sort=%r, since=%r,"
-            " set_presence=%r, backfill=%r, filter_id=%r" % (
-                user, timeout, limit, gap, sort, since, set_presence,
-                backfill, filter_id
+            "/sync: user=%r, timeout=%r, since=%r,"
+            " set_presence=%r, filter_id=%r" % (
+                user, timeout, since, set_presence, filter_id
             )
         )
 
-        # TODO(mjark): Load filter and apply overrides.
         try:
             filter = yield self.filtering.get_user_filter(
                 user.localpart, filter_id
             )
         except:
-            filter = Filter({})
-        # filter = filter.apply_overrides(http_request)
-        # if filter.matches(event):
-        #   # stuff
+            filter = FilterCollection({})
 
         sync_config = SyncConfig(
             user=user,
-            client_info=client,
-            gap=gap,
-            limit=limit,
-            sort=sort,
-            backfill=backfill,
             filter=filter,
         )
 
@@ -138,43 +117,154 @@ class SyncRestServlet(RestServlet):
         else:
             since_token = None
 
-        sync_result = yield self.sync_handler.wait_for_sync_for_user(
-            sync_config, since_token=since_token, timeout=timeout
-        )
+        if set_presence == "online":
+            yield self.event_stream_handler.started_stream(user)
+
+        try:
+            sync_result = yield self.sync_handler.wait_for_sync_for_user(
+                sync_config, since_token=since_token, timeout=timeout,
+                full_state=full_state
+            )
+        finally:
+            if set_presence == "online":
+                self.event_stream_handler.stopped_stream(user)
 
         time_now = self.clock.time_msec()
 
+        joined = self.encode_joined(
+            sync_result.joined, filter, time_now, token_id
+        )
+
+        invited = self.encode_invited(
+            sync_result.invited, filter, time_now, token_id
+        )
+
+        archived = self.encode_archived(
+            sync_result.archived, filter, time_now, token_id
+        )
+
         response_content = {
-            "public_user_data": self.encode_user_data(
-                sync_result.public_user_data, filter, time_now
-            ),
-            "private_user_data": self.encode_user_data(
-                sync_result.private_user_data, filter, time_now
-            ),
-            "rooms": self.encode_rooms(
-                sync_result.rooms, filter, time_now, client.token_id
+            "presence": self.encode_presence(
+                sync_result.presence, filter, time_now
             ),
+            "rooms": {
+                "joined": joined,
+                "invited": invited,
+                "archived": archived,
+            },
             "next_batch": sync_result.next_batch.to_string(),
         }
 
         defer.returnValue((200, response_content))
 
-    def encode_user_data(self, events, filter, time_now):
-        return events
+    def encode_presence(self, events, filter, time_now):
+        formatted = []
+        for event in events:
+            event = copy.deepcopy(event)
+            event['sender'] = event['content'].pop('user_id')
+            formatted.append(event)
+        return {"events": filter.filter_presence(formatted)}
+
+    def encode_joined(self, rooms, filter, time_now, token_id):
+        """
+        Encode the joined rooms in a sync result
+
+        :param list[synapse.handlers.sync.JoinedSyncResult] rooms: list of sync
+            results for rooms this user is joined to
+        :param FilterCollection filter: filters to apply to the results
+        :param int time_now: current time - used as a baseline for age
+            calculations
+        :param int token_id: ID of the user's auth token - used for namespacing
+            of transaction IDs
+
+        :return: the joined rooms list, in our response format
+        :rtype: dict[str, dict[str, object]]
+        """
+        joined = {}
+        for room in rooms:
+            joined[room.room_id] = self.encode_room(
+                room, filter, time_now, token_id
+            )
+
+        return joined
+
+    def encode_invited(self, rooms, filter, time_now, token_id):
+        """
+        Encode the invited rooms in a sync result
+
+        :param list[synapse.handlers.sync.InvitedSyncResult] rooms: list of
+             sync results for rooms this user has been invited to
+        :param FilterCollection filter: filters to apply to the results
+        :param int time_now: current time - used as a baseline for age
+            calculations
+        :param int token_id: ID of the user's auth token - used for namespacing
+            of transaction IDs
+
+        :return: the invited rooms list, in our response format
+        :rtype: dict[str, dict[str, object]]
+        """
+        invited = {}
+        for room in rooms:
+            invite = serialize_event(
+                room.invite, time_now, token_id=token_id,
+                event_format=format_event_for_client_v2_without_event_id,
+            )
+            invited_state = invite.get("unsigned", {}).pop("invite_room_state", [])
+            invited_state.append(invite)
+            invited[room.room_id] = {
+                "invite_state": {"events": invited_state}
+            }
+
+        return invited
+
+    def encode_archived(self, rooms, filter, time_now, token_id):
+        """
+        Encode the archived rooms in a sync result
 
-    def encode_rooms(self, rooms, filter, time_now, token_id):
-        return [
-            self.encode_room(room, filter, time_now, token_id)
-            for room in rooms
-        ]
+        :param list[synapse.handlers.sync.ArchivedSyncResult] rooms: list of
+             sync results for rooms this user has left
+        :param FilterCollection filter: filters to apply to the results
+        :param int time_now: current time - used as a baseline for age
+            calculations
+        :param int token_id: ID of the user's auth token - used for namespacing
+            of transaction IDs
+
+        :return: the archived rooms list, in our response format
+        :rtype: dict[str, dict[str, object]]
+        """
+        archived = {}
+        for room in rooms:
+            archived[room.room_id] = self.encode_room(
+                room, filter, time_now, token_id, joined=False
+            )
+
+        return archived
 
     @staticmethod
-    def encode_room(room, filter, time_now, token_id):
+    def encode_room(room, filter, time_now, token_id, joined=True):
+        """
+        :param JoinedSyncResult|ArchivedSyncResult room: sync result for a
+            single room
+        :param FilterCollection filter: filters to apply to the results
+        :param int time_now: current time - used as a baseline for age
+            calculations
+        :param int token_id: ID of the user's auth token - used for namespacing
+            of transaction IDs
+        :param bool joined: True if the user is joined to this room - if so,
+            ephemeral events will be included
+
+        :return: the room, encoded in our response format
+        :rtype: dict[str, object]
+        """
         event_map = {}
-        state_events = filter.filter_room_state(room.state)
-        recent_events = filter.filter_room_events(room.events)
+        state_dict = room.state
+        timeline_events = filter.filter_room_timeline(room.timeline.events)
+
+        state_dict = SyncRestServlet._rollback_state_for_timeline(
+            state_dict, timeline_events)
+
+        state_events = filter.filter_room_state(state_dict.values())
         state_event_ids = []
-        recent_event_ids = []
         for event in state_events:
             # TODO(mjark): Respect formatting requirements in the filter.
             event_map[event.event_id] = serialize_event(
@@ -183,25 +273,91 @@ class SyncRestServlet(RestServlet):
             )
             state_event_ids.append(event.event_id)
 
-        for event in recent_events:
+        timeline_event_ids = []
+        for event in timeline_events:
             # TODO(mjark): Respect formatting requirements in the filter.
             event_map[event.event_id] = serialize_event(
                 event, time_now, token_id=token_id,
                 event_format=format_event_for_client_v2_without_event_id,
             )
-            recent_event_ids.append(event.event_id)
+            timeline_event_ids.append(event.event_id)
+
+        private_user_data = filter.filter_room_private_user_data(
+            room.private_user_data
+        )
+
         result = {
-            "room_id": room.room_id,
             "event_map": event_map,
-            "events": {
-                "batch": recent_event_ids,
-                "prev_batch": room.prev_batch.to_string(),
+            "timeline": {
+                "events": timeline_event_ids,
+                "prev_batch": room.timeline.prev_batch.to_string(),
+                "limited": room.timeline.limited,
             },
-            "state": state_event_ids,
-            "limited": room.limited,
-            "published": room.published,
-            "ephemeral": room.ephemeral,
+            "state": {"events": state_event_ids},
+            "private_user_data": {"events": private_user_data},
         }
+
+        if joined:
+            ephemeral_events = filter.filter_room_ephemeral(room.ephemeral)
+            result["ephemeral"] = {"events": ephemeral_events}
+
+        return result
+
+    @staticmethod
+    def _rollback_state_for_timeline(state, timeline):
+        """
+        Wind the state dictionary backwards, so that it represents the
+        state at the start of the timeline, rather than at the end.
+
+        :param dict[(str, str), synapse.events.EventBase] state: the
+            state dictionary. Will be updated to the state before the timeline.
+        :param list[synapse.events.EventBase] timeline: the event timeline
+        :return: updated state dictionary
+        """
+        logger.debug("Processing state dict %r; timeline %r", state,
+                     [e.get_dict() for e in timeline])
+
+        result = state.copy()
+
+        for timeline_event in reversed(timeline):
+            if not timeline_event.is_state():
+                continue
+
+            event_key = (timeline_event.type, timeline_event.state_key)
+
+            logger.debug("Considering %s for removal", event_key)
+
+            state_event = result.get(event_key)
+            if (state_event is None or
+                    state_event.event_id != timeline_event.event_id):
+                # the event in the timeline isn't present in the state
+                # dictionary.
+                #
+                # the most likely cause for this is that there was a fork in
+                # the event graph, and the state is no longer valid. Really,
+                # the event shouldn't be in the timeline. We're going to ignore
+                # it for now, however.
+                logger.warn("Found state event %r in timeline which doesn't "
+                            "match state dictionary", timeline_event)
+                continue
+
+            prev_event_id = timeline_event.unsigned.get("replaces_state", None)
+            logger.debug("Replacing %s with %s in state dict",
+                         timeline_event.event_id, prev_event_id)
+
+            if prev_event_id is None:
+                del result[event_key]
+            else:
+                result[event_key] = FrozenEvent({
+                    "type": timeline_event.type,
+                    "state_key": timeline_event.state_key,
+                    "content": timeline_event.unsigned['prev_content'],
+                    "sender": timeline_event.unsigned['prev_sender'],
+                    "event_id": prev_event_id,
+                    "room_id": timeline_event.room_id,
+                })
+            logger.debug("New value: %r", result.get(event_key))
+
         return result
 
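Taken together, the servlet now emits the nested rooms/{joined,invited,archived}
shape documented in the class docstring above. A sketch of a client walking
that shape, assuming a local homeserver and a valid access token (both
hypothetical):

    # Sketch: consume the new /sync response format.
    import requests

    resp = requests.get(
        "http://localhost:8008/_matrix/client/v2_alpha/sync",  # assumed URL
        params={"access_token": "MDAx...", "timeout": 30000},  # hypothetical
    )
    resp.raise_for_status()
    sync = resp.json()

    for room_id, room in sync["rooms"]["joined"].items():
        timeline = room["timeline"]
        # Timeline entries are event IDs; the bodies live in "event_map".
        for event_id in timeline["events"]:
            print(room_id, room["event_map"][event_id]["type"])
        if timeline["limited"]:
            # More history is available behind "prev_batch".
            print(room_id, "truncated; paginate back from", timeline["prev_batch"])

    next_batch = sync["next_batch"]  # pass as ?since= on the next call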
 
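_rollback_state_for_timeline is the subtle part of the new format: the "state"
block must describe the room as it was *before* the timeline, so each state
event found in the timeline is wound back to its predecessor via the
server-populated unsigned.replaces_state and unsigned.prev_content fields. A
self-contained sketch of the same wind-back over plain dicts (hypothetical
event shapes, not Synapse objects):

    # Sketch of the state wind-back using plain dicts instead of FrozenEvent.
    # `state` maps (type, state_key) -> event; `timeline` is oldest-first.
    def rollback_state(state, timeline):
        result = dict(state)
        for ev in reversed(timeline):
            if "state_key" not in ev:
                continue  # not a state event
            key = (ev["type"], ev["state_key"])
            current = result.get(key)
            if current is None or current["event_id"] != ev["event_id"]:
                continue  # timeline diverged from the state dict; skip
            prev_id = ev.get("unsigned", {}).get("replaces_state")
            if prev_id is None:
                del result[key]  # the timeline event created this state key
            else:
                result[key] = {  # reconstruct the predecessor event
                    "type": ev["type"],
                    "state_key": ev["state_key"],
                    "event_id": prev_id,
                    "content": ev["unsigned"]["prev_content"],
                }
        return result

    name_v2 = {"type": "m.room.name", "state_key": "", "event_id": "$2",
               "unsigned": {"replaces_state": "$1",
                            "prev_content": {"name": "old"}}}
    state = {("m.room.name", ""): name_v2}
    assert rollback_state(state, [name_v2])[("m.room.name", "")]["event_id"] == "$1"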
diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py
new file mode 100644
index 0000000000..35482ae6a6
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/tags.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import client_v2_pattern
+
+from synapse.http.servlet import RestServlet
+from synapse.api.errors import AuthError, SynapseError
+
+from twisted.internet import defer
+
+import logging
+
+import simplejson as json
+
+logger = logging.getLogger(__name__)
+
+
+class TagListServlet(RestServlet):
+    """
+    GET /user/{user_id}/rooms/{room_id}/tags HTTP/1.1
+    """
+    PATTERN = client_v2_pattern(
+        "/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags"
+    )
+
+    def __init__(self, hs):
+        super(TagListServlet, self).__init__()
+        self.auth = hs.get_auth()
+        self.store = hs.get_datastore()
+
+    @defer.inlineCallbacks
+    def on_GET(self, request, user_id, room_id):
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
+        if user_id != auth_user.to_string():
+            raise AuthError(403, "Cannot get tags for other users.")
+
+        tags = yield self.store.get_tags_for_room(user_id, room_id)
+
+        defer.returnValue((200, {"tags": tags}))
+
+
+class TagServlet(RestServlet):
+    """
+    PUT /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
+    DELETE /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
+    """
+    PATTERN = client_v2_pattern(
+        "/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags/(?P<tag>[^/]*)"
+    )
+
+    def __init__(self, hs):
+        super(TagServlet, self).__init__()
+        self.auth = hs.get_auth()
+        self.store = hs.get_datastore()
+        self.notifier = hs.get_notifier()
+
+    @defer.inlineCallbacks
+    def on_PUT(self, request, user_id, room_id, tag):
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
+        if user_id != auth_user.to_string():
+            raise AuthError(403, "Cannot add tags for other users.")
+
+        try:
+            content_bytes = request.content.read()
+            body = json.loads(content_bytes)
+        except:
+            raise SynapseError(400, "Invalid tag JSON")
+
+        max_id = yield self.store.add_tag_to_room(user_id, room_id, tag, body)
+
+        yield self.notifier.on_new_event(
+            "private_user_data_key", max_id, users=[user_id]
+        )
+
+        defer.returnValue((200, {}))
+
+    @defer.inlineCallbacks
+    def on_DELETE(self, request, user_id, room_id, tag):
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
+        if user_id != auth_user.to_string():
+            raise AuthError(403, "Cannot remove tags for other users.")
+
+        max_id = yield self.store.remove_tag_from_room(user_id, room_id, tag)
+
+        yield self.notifier.on_new_event(
+            "private_user_data_key", max_id, users=[user_id]
+        )
+
+        defer.returnValue((200, {}))
+
+
+def register_servlets(hs, http_server):
+    TagListServlet(hs).register(http_server)
+    TagServlet(hs).register(http_server)
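
A sketch of driving the tag endpoints above from a client, assuming a local
homeserver, a valid access token and illustrative user/room IDs (IDs containing
"!" and ":" should be URL-escaped in real code):

    # Sketch: set, list and remove a room tag.
    import requests

    BASE = "http://localhost:8008/_matrix/client/v2_alpha"  # assumed URL
    USER, ROOM = "@alice:localhost", "!room:localhost"      # hypothetical IDs
    auth = {"access_token": "MDAx..."}                      # hypothetical token

    url = "%s/user/%s/rooms/%s/tags/u.work" % (BASE, USER, ROOM)
    # The PUT body is stored verbatim as the tag content (e.g. an order hint).
    requests.put(url, params=auth, json={"order": 0.5}).raise_for_status()

    tags = requests.get("%s/user/%s/rooms/%s/tags" % (BASE, USER, ROOM),
                        params=auth).json()
    print(tags)  # {"tags": {"u.work": {"order": 0.5}}}

    requests.delete(url, params=auth).raise_for_status()

Both mutations bump the private_user_data stream via the notifier, so clients
watching /events or /sync are told about tag changes for that user.
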
diff --git a/synapse/rest/client/v2_alpha/tokenrefresh.py b/synapse/rest/client/v2_alpha/tokenrefresh.py
new file mode 100644
index 0000000000..901e777983
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/tokenrefresh.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.errors import AuthError, StoreError, SynapseError
+from synapse.http.servlet import RestServlet
+
+from ._base import client_v2_pattern, parse_json_dict_from_request
+
+
+class TokenRefreshRestServlet(RestServlet):
+    """
+    Exchanges a refresh token for a new access token and a new refresh
+    token.
+    """
+    PATTERN = client_v2_pattern("/tokenrefresh")
+
+    def __init__(self, hs):
+        super(TokenRefreshRestServlet, self).__init__()
+        self.hs = hs
+        self.store = hs.get_datastore()
+
+    @defer.inlineCallbacks
+    def on_POST(self, request):
+        body = parse_json_dict_from_request(request)
+        try:
+            old_refresh_token = body["refresh_token"]
+            auth_handler = self.hs.get_handlers().auth_handler
+            (user_id, new_refresh_token) = yield self.store.exchange_refresh_token(
+                old_refresh_token, auth_handler.generate_refresh_token)
+            new_access_token = yield auth_handler.issue_access_token(user_id)
+            defer.returnValue((200, {
+                "access_token": new_access_token,
+                "refresh_token": new_refresh_token,
+            }))
+        except KeyError:
+            raise SynapseError(400, "Missing required key 'refresh_token'.")
+        except StoreError:
+            raise AuthError(403, "Did not recognize refresh token")
+
+
+def register_servlets(hs, http_server):
+    TokenRefreshRestServlet(hs).register(http_server)
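
This pairs with the refresh tokens that login/registration can now hand out: a
sketch of a client renewing its credentials, under the same local-homeserver
and hypothetical-token assumptions as above:

    # Sketch: exchange a refresh token for a new access/refresh token pair.
    import requests

    resp = requests.post(
        "http://localhost:8008/_matrix/client/v2_alpha/tokenrefresh",  # assumed
        json={"refresh_token": "old_refresh_token"},  # hypothetical token
    )
    if resp.status_code == 403:
        # The server did not recognise the refresh token; re-authenticate.
        raise RuntimeError("fall back to /login")
    resp.raise_for_status()
    creds = resp.json()
    # The old refresh token is spent once exchanged; persist both new values.
    print(creds["access_token"], creds["refresh_token"])
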
diff --git a/synapse/rest/key/v1/server_key_resource.py b/synapse/rest/key/v1/server_key_resource.py
index 71e9a51f5c..6df46969c4 100644
--- a/synapse/rest/key/v1/server_key_resource.py
+++ b/synapse/rest/key/v1/server_key_resource.py
@@ -16,9 +16,9 @@
 
 from twisted.web.resource import Resource
 from synapse.http.server import respond_with_json_bytes
-from syutil.crypto.jsonsign import sign_json
-from syutil.base64util import encode_base64
-from syutil.jsonutil import encode_canonical_json
+from signedjson.sign import sign_json
+from unpaddedbase64 import encode_base64
+from canonicaljson import encode_canonical_json
 from OpenSSL import crypto
 import logging
 
diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py
index 33cbd7cf8e..ef7699d590 100644
--- a/synapse/rest/key/v2/local_key_resource.py
+++ b/synapse/rest/key/v2/local_key_resource.py
@@ -16,9 +16,9 @@
 
 from twisted.web.resource import Resource
 from synapse.http.server import respond_with_json_bytes
-from syutil.crypto.jsonsign import sign_json
-from syutil.base64util import encode_base64
-from syutil.jsonutil import encode_canonical_json
+from signedjson.sign import sign_json
+from unpaddedbase64 import encode_base64
+from canonicaljson import encode_canonical_json
 from hashlib import sha256
 from OpenSSL import crypto
 import logging
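
The syutil helpers are replaced here and above by their standalone successors
on PyPI (signedjson, unpaddedbase64, canonicaljson). A sketch of the
equivalent signing flow with the new imports; the key version "a" and server
name "example.com" are illustrative:

    # Sketch: sign and canonicalise a JSON object with the split-out libraries.
    from signedjson.key import generate_signing_key, get_verify_key
    from signedjson.sign import sign_json, verify_signed_json
    from unpaddedbase64 import encode_base64
    from canonicaljson import encode_canonical_json

    signing_key = generate_signing_key("a")  # ed25519 key, version "a"
    signed = sign_json({"old_verify_keys": {}}, "example.com", signing_key)

    # The signature lands under signatures["example.com"]["ed25519:a"].
    print(encode_canonical_json(signed))

    verify_key = get_verify_key(signing_key)
    print(encode_base64(verify_key.encode()))  # unpadded base64, as served
    verify_signed_json(signed, "example.com", verify_key)  # raises if invalid
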
diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py
index e77a20fb2e..e4fa8c4647 100644
--- a/synapse/rest/media/v0/content_repository.py
+++ b/synapse/rest/media/v0/content_repository.py
@@ -66,7 +66,7 @@ class ContentRepoResource(resource.Resource):
     @defer.inlineCallbacks
     def map_request_to_name(self, request):
         # auth the user
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
 
         # namespace all file uploads on the user
         prefix = base64.urlsafe_b64encode(
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 031bfa80f8..7d61596082 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -70,7 +70,7 @@ class UploadResource(BaseMediaResource):
     @request_handler
     @defer.inlineCallbacks
     def _async_render_POST(self, request):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _, _ = yield self.auth.get_user_by_req(request)
         # TODO: The checks here are a bit late. The content will have
         # already been uploaded to a tmp file at this point
         content_length = request.getHeader("Content-Length")
diff --git a/synapse/server.py b/synapse/server.py
index 4d1fb1cbf6..f75d5358b2 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -19,7 +19,9 @@
 # partial one for unit test mocking.
 
 # Imports required for the default HomeServer() implementation
+from twisted.web.client import BrowserLikePolicyForHTTPS
 from synapse.federation import initialize_http_replication
+from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory
 from synapse.notifier import Notifier
 from synapse.api.auth import Auth
 from synapse.handlers import Handlers
@@ -27,7 +29,6 @@ from synapse.state import StateHandler
 from synapse.storage import DataStore
 from synapse.util import Clock
 from synapse.util.distributor import Distributor
-from synapse.util.lockutils import LockManager
 from synapse.streams.events import EventSources
 from synapse.api.ratelimiting import Ratelimiter
 from synapse.crypto.keyring import Keyring
@@ -68,7 +69,6 @@ class BaseHomeServer(object):
         'auth',
         'rest_servlet_factory',
         'state_handler',
-        'room_lock_manager',
         'notifier',
         'distributor',
         'resource_for_client',
@@ -87,6 +87,8 @@ class BaseHomeServer(object):
         'pusherpool',
         'event_builder_factory',
         'filtering',
+        'http_client_context_factory',
+        'simple_http_client',
     ]
 
     def __init__(self, hostname, **kwargs):
@@ -174,6 +176,17 @@ class HomeServer(BaseHomeServer):
     def build_auth(self):
         return Auth(self)
 
+    def build_http_client_context_factory(self):
+        config = self.get_config()
+        return (
+            InsecureInterceptableContextFactory()
+            if config.use_insecure_ssl_client_just_for_testing_do_not_use
+            else BrowserLikePolicyForHTTPS()
+        )
+
+    def build_simple_http_client(self):
+        return SimpleHttpClient(self)
+
     def build_v1auth(self):
         orf = Auth(self)
         # Matrix spec makes no reference to what HTTP status code is returned,
@@ -186,9 +199,6 @@ class HomeServer(BaseHomeServer):
     def build_state_handler(self):
         return StateHandler(self)
 
-    def build_room_lock_manager(self):
-        return LockManager()
-
     def build_distributor(self):
         return Distributor()
 
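The two new entries in DEPENDENCIES follow BaseHomeServer's lazy
dependency-injection convention: get_foo() returns an injected instance if one
was passed to the constructor, otherwise it calls build_foo() once and caches
the result. A minimal sketch of that pattern in isolation (TinyServer is
illustrative, not the Synapse implementation, which generates the getters):

    # Sketch of the get_x/build_x caching pattern used by BaseHomeServer.
    class TinyServer(object):
        DEPENDENCIES = ["simple_http_client"]

        def __init__(self, **kwargs):
            # Tests can inject pre-built dependencies via kwargs.
            for dep in self.DEPENDENCIES:
                setattr(self, "_" + dep, kwargs.get(dep))

        def get_simple_http_client(self):
            if self._simple_http_client is None:
                # Built on first use, cached thereafter.
                self._simple_http_client = self.build_simple_http_client()
            return self._simple_http_client

        def build_simple_http_client(self):
            return object()  # stand-in for SimpleHttpClient(self)

    hs = TinyServer()
    assert hs.get_simple_http_client() is hs.get_simple_http_client()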
diff --git a/synapse/state.py b/synapse/state.py
index 1fe4d066bd..8ea2cac5d6 100644
--- a/synapse/state.py
+++ b/synapse/state.py
@@ -17,7 +17,6 @@
 from twisted.internet import defer
 
 from synapse.util.logutils import log_function
-from synapse.util.async import run_on_reactor
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.api.constants import EventTypes
 from synapse.api.errors import AuthError
@@ -32,10 +31,6 @@ import hashlib
 logger = logging.getLogger(__name__)
 
 
-def _get_state_key_from_event(event):
-    return event.state_key
-
-
 KeyStateTuple = namedtuple("KeyStateTuple", ("context", "type", "state_key"))
 
 
@@ -76,7 +71,7 @@ class StateHandler(object):
 
     @defer.inlineCallbacks
     def get_current_state(self, room_id, event_type=None, state_key=""):
-        """ Returns the current state for the room as a list. This is done by
+        """ Retrieves the current state for the room. This is done by
         calling `get_latest_events_in_room` to get the leading edges of the
         event graph and then resolving any of the state conflicts.
 
@@ -85,6 +80,8 @@ class StateHandler(object):
 
         If `event_type` is specified, then the method returns only the one
         event (or None) with that `event_type` and `state_key`.
+
+        :returns: a map from (type, state_key) to event
         """
         event_ids = yield self.store.get_latest_event_ids_in_room(room_id)
 
@@ -119,8 +116,6 @@ class StateHandler(object):
         Returns:
             an EventContext
         """
-        yield run_on_reactor()
-
         context = EventContext()
 
         if outlier:
@@ -184,9 +179,10 @@ class StateHandler(object):
         """ Given a list of event_ids this method fetches the state at each
         event, resolves conflicts between them and returns them.
 
-        Return format is a tuple: (`state_group`, `state_events`), where the
-        first is the name of a state group if one and only one is involved,
-        otherwise `None`.
+        :returns: a Deferred tuple of (`state_group`, `state`, `prev_state`).
+        `state_group` is the name of a state group if one and only one is
+        involved. `state` is a map from (type, state_key) to event, and
+        `prev_state` is a list of event ids.
         """
         logger.debug("resolve_state_groups event_ids %s", event_ids)
 
@@ -262,6 +258,11 @@ class StateHandler(object):
             return self._resolve_events(state_sets)
 
     def _resolve_events(self, state_sets, event_type=None, state_key=""):
+        """
+        :returns: a tuple (new_state, prev_states). new_state is a map
+        from (type, state_key) to event. prev_states is a list of event_ids.
+        :rtype: (dict[(str, str), synapse.events.FrozenEvent], list[str])
+        """
         state = {}
         for st in state_sets:
             for e in st:
@@ -314,19 +315,23 @@ class StateHandler(object):
 
         We resolve conflicts in the following order:
             1. power levels
-            2. memberships
-            3. other events.
+            2. join rules
+            3. memberships
+            4. other events.
         """
         resolved_state = {}
         power_key = (EventTypes.PowerLevels, "")
-        if power_key in conflicted_state.items():
-            power_levels = conflicted_state[power_key]
-            resolved_state[power_key] = self._resolve_auth_events(power_levels)
+        if power_key in conflicted_state:
+            events = conflicted_state[power_key]
+            logger.debug("Resolving conflicted power levels %r", events)
+            resolved_state[power_key] = self._resolve_auth_events(
+                events, auth_events)
 
         auth_events.update(resolved_state)
 
         for key, events in conflicted_state.items():
             if key[0] == EventTypes.JoinRules:
+                logger.debug("Resolving conflicted join rules %r", events)
                 resolved_state[key] = self._resolve_auth_events(
                     events,
                     auth_events
@@ -336,6 +341,7 @@ class StateHandler(object):
 
         for key, events in conflicted_state.items():
             if key[0] == EventTypes.Member:
+                logger.debug("Resolving conflicted member lists %r", events)
                 resolved_state[key] = self._resolve_auth_events(
                     events,
                     auth_events
@@ -345,6 +351,7 @@ class StateHandler(object):
 
         for key, events in conflicted_state.items():
             if key not in resolved_state:
+                logger.debug("Resolving conflicted state %r:%r", key, events)
                 resolved_state[key] = self._resolve_normal_events(
                     events, auth_events
                 )
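
The ordering matters because auth_events is updated after each pass: power
levels are settled first, join rules and memberships are then resolved against
the winning power levels, and all remaining keys go last. A compressed sketch
of that four-pass ordering (the `pick` callable stands in for
_resolve_auth_events / _resolve_normal_events):

    # Sketch of the conflict-resolution ordering used above.
    # conflicted: map (type, state_key) -> list of candidate events.
    def resolve_in_order(conflicted, auth_events, pick):
        resolved = {}
        passes = [
            lambda k: k == ("m.room.power_levels", ""),  # 1. power levels
            lambda k: k[0] == "m.room.join_rules",       # 2. join rules
            lambda k: k[0] == "m.room.member",           # 3. memberships
            lambda k: True,                              # 4. everything else
        ]
        for matches in passes:
            for key, events in conflicted.items():
                if key not in resolved and matches(key):
                    resolved[key] = pick(events, auth_events)
            # Later passes are authorised against earlier winners.
            auth_events.update(resolved)
        return resolved
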
diff --git a/synapse/static/client/login/index.html b/synapse/static/client/login/index.html
new file mode 100644
index 0000000000..96c8723cab
--- /dev/null
+++ b/synapse/static/client/login/index.html
@@ -0,0 +1,50 @@
+<html>
+<head>
+<title> Login </title>
+<meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0'>
+<link rel="stylesheet" href="style.css">
+<script src="js/jquery-2.1.3.min.js"></script>
+<script src="js/login.js"></script>
+</head>
+<body onload="matrixLogin.onLoad()">
+    <center>
+        <br/>
+        <h1>Log in with one of the following methods</h1>
+
+        <span id="feedback" style="color: #f00"></span>
+        <br/>
+        <br/>
+
+        <div id="loading">
+            <img src="spinner.gif" />
+        </div>
+
+        <div id="cas_flow" class="login_flow" style="display:none"
+                onclick="gotoCas(); return false;">
+            CAS Authentication: <button id="cas_button" style="margin: 10px">Log in</button>
+        </div>
+
+        <br/>
+
+        <form id="password_form" class="login_flow" style="display:none"
+                onsubmit="matrixLogin.password_login(); return false;">
+            <div>
+                Password Authentication:<br/>
+
+                <div style="text-align: center">
+                    <input id="user_id" size="32" type="text" placeholder="Matrix ID (e.g. bob)" autocapitalize="off" autocorrect="off" />
+                    <br/>
+                    <input id="password" size="32" type="password" placeholder="Password"/>
+                    <br/>
+
+                    <button type="submit" style="margin: 10px">Log in</button>
+                </div>
+            </div>
+        </form>
+
+        <div id="no_login_types" class="login_flow" style="display:none">
+            Log in currently unavailable.
+        </div>
+    </center>
+</body>
+</html>
diff --git a/static/client/register/js/jquery-2.1.3.min.js b/synapse/static/client/login/js/jquery-2.1.3.min.js
index 25714ed29a..25714ed29a 100644
--- a/static/client/register/js/jquery-2.1.3.min.js
+++ b/synapse/static/client/login/js/jquery-2.1.3.min.js
diff --git a/synapse/static/client/login/js/login.js b/synapse/static/client/login/js/login.js
new file mode 100644
index 0000000000..bfb7386035
--- /dev/null
+++ b/synapse/static/client/login/js/login.js
@@ -0,0 +1,153 @@
+window.matrixLogin = {
+    endpoint: location.origin + "/_matrix/client/api/v1/login",
+    serverAcceptsPassword: false,
+    serverAcceptsCas: false
+};
+
+var submitPassword = function(user, pwd) {
+    console.log("Logging in with password...");
+    var data = {
+        type: "m.login.password",
+        user: user,
+        password: pwd,
+    };
+    $.post(matrixLogin.endpoint, JSON.stringify(data), function(response) {
+        show_login();
+        matrixLogin.onLogin(response);
+    }).error(errorFunc);
+};
+
+var submitToken = function(loginToken) {
+    console.log("Logging in with login token...");
+    var data = {
+        type: "m.login.token",
+        token: loginToken
+    };
+    $.post(matrixLogin.endpoint, JSON.stringify(data), function(response) {
+        show_login();
+        matrixLogin.onLogin(response);
+    }).error(errorFunc);
+};
+
+var errorFunc = function(err) {
+    show_login();
+
+    if (err.responseJSON && err.responseJSON.error) {
+        setFeedbackString(err.responseJSON.error + " (" + err.responseJSON.errcode + ")");
+    }
+    else {
+        setFeedbackString("Request failed: " + err.status);
+    }
+};
+
+var gotoCas = function() {
+    var this_page = window.location.origin + window.location.pathname;
+    var redirect_url = matrixLogin.endpoint + "/cas/redirect?redirectUrl=" + encodeURIComponent(this_page);
+    window.location.replace(redirect_url);
+};
+
+var setFeedbackString = function(text) {
+    $("#feedback").text(text);
+};
+
+var show_login = function() {
+    $("#loading").hide();
+
+    if (matrixLogin.serverAcceptsPassword) {
+        $("#password_form").show();
+    }
+
+    if (matrixLogin.serverAcceptsCas) {
+        $("#cas_flow").show();
+    }
+
+    if (!matrixLogin.serverAcceptsPassword && !matrixLogin.serverAcceptsCas) {
+        $("#no_login_types").show();
+    }
+};
+
+var show_spinner = function() {
+    $("#password_form").hide();
+    $("#cas_flow").hide();
+    $("#no_login_types").hide();
+    $("#loading").show();
+};
+
+
+var fetch_info = function(cb) {
+    $.get(matrixLogin.endpoint, function(response) {
+        var serverAcceptsPassword = false;
+        var serverAcceptsCas = false;
+        for (var i=0; i<response.flows.length; i++) {
+            var flow = response.flows[i];
+            if ("m.login.cas" === flow.type) {
+                matrixLogin.serverAcceptsCas = true;
+                console.log("Server accepts CAS");
+            }
+
+            if ("m.login.password" === flow.type) {
+                matrixLogin.serverAcceptsPassword = true;
+                console.log("Server accepts password");
+            }
+        }
+
+        cb();
+    }).error(errorFunc);
+};
+
+matrixLogin.onLoad = function() {
+    fetch_info(function() {
+        if (!try_token()) {
+            show_login();
+        }
+    });
+};
+
+matrixLogin.password_login = function() {
+    var user = $("#user_id").val();
+    var pwd = $("#password").val();
+
+    setFeedbackString("");
+
+    show_spinner();
+    submitPassword(user, pwd);
+};
+
+matrixLogin.onLogin = function(response) {
+    // clobber this function
+    console.log("onLogin - This function should be replaced to proceed.");
+    console.log(response);
+};
+
+var parseQsFromUrl = function(query) {
+    var result = {};
+    query.split("&").forEach(function(part) {
+        var item = part.split("=");
+        var key = item[0];
+        var val = item[1];
+
+        if (val) {
+            val = decodeURIComponent(val);
+        }
+        result[key] = val;
+    });
+    return result;
+};
+
+var try_token = function() {
+    var pos = window.location.href.indexOf("?");
+    if (pos == -1) {
+        return false;
+    }
+    var qs = parseQsFromUrl(window.location.href.substr(pos+1));
+
+    var loginToken = qs.loginToken;
+
+    if (!loginToken) {
+        return false;
+    }
+
+    submitToken(loginToken);
+
+    return true;
+};
diff --git a/synapse/static/client/login/spinner.gif b/synapse/static/client/login/spinner.gif
new file mode 100644
index 0000000000..12c24df798
--- /dev/null
+++ b/synapse/static/client/login/spinner.gif
Binary files differ
diff --git a/synapse/static/client/login/style.css b/synapse/static/client/login/style.css
new file mode 100644
index 0000000000..73da0b5117
--- /dev/null
+++ b/synapse/static/client/login/style.css
@@ -0,0 +1,57 @@
+html {
+    height: 100%;
+}
+
+body {
+    height: 100%;
+    font-family: "Myriad Pro", "Myriad", Helvetica, Arial, sans-serif;
+    font-size: 12pt;
+    margin: 0px;
+}
+
+h1 {
+    font-size: 20pt;
+}
+
+a:link    { color: #666; }
+a:visited { color: #666; }
+a:hover   { color: #000; }
+a:active  { color: #000; }
+
+input {
+   width: 90%
+}
+
+textarea, input {
+   font-family: inherit;
+   font-size: inherit;
+   margin: 5px;
+}
+
+.smallPrint {
+    color: #888;
+    font-size: 9pt ! important;
+    font-style: italic ! important;
+}
+
+.g-recaptcha div {
+    margin: auto;
+}
+
+.login_flow {
+    text-align: left;
+    padding: 10px;
+    margin-bottom: 40px;
+    display: inline-block;
+
+    -webkit-border-radius: 10px;
+    -moz-border-radius: 10px;
+    border-radius: 10px;
+
+    -webkit-box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
+    -moz-box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
+    box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
+
+    background-color: #f8f8f8;
+    border: 1px #ccc solid;
+}
diff --git a/static/client/register/index.html b/synapse/static/client/register/index.html
index 600b3ee41e..600b3ee41e 100644
--- a/static/client/register/index.html
+++ b/synapse/static/client/register/index.html
diff --git a/synapse/static/client/register/js/jquery-2.1.3.min.js b/synapse/static/client/register/js/jquery-2.1.3.min.js
new file mode 100644
index 0000000000..25714ed29a
--- /dev/null
+++ b/synapse/static/client/register/js/jquery-2.1.3.min.js
@@ -0,0 +1,4 @@
+/*! jQuery v2.1.3 | (c) 2005, 2014 jQuery Foundation, Inc. | jquery.org/license */
+!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l=a.document,m="2.1.3",n=function(a,b){return new n.fn.init(a,b)},o=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,p=/^-ms-/,q=/-([\da-z])/gi,r=function(a,b){return b.toUpperCase()};n.fn=n.prototype={jquery:m,constructor:n,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=n.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return n.each(this,a,b)},map:function(a){return this.pushStack(n.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},n.extend=n.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||n.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(a=arguments[h]))for(b in a)c=g[b],d=a[b],g!==d&&(j&&d&&(n.isPlainObject(d)||(e=n.isArray(d)))?(e?(e=!1,f=c&&n.isArray(c)?c:[]):f=c&&n.isPlainObject(c)?c:{},g[b]=n.extend(j,f,d)):void 0!==d&&(g[b]=d));return g},n.extend({expando:"jQuery"+(m+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===n.type(a)},isArray:Array.isArray,isWindow:function(a){return null!=a&&a===a.window},isNumeric:function(a){return!n.isArray(a)&&a-parseFloat(a)+1>=0},isPlainObject:function(a){return"object"!==n.type(a)||a.nodeType||n.isWindow(a)?!1:a.constructor&&!j.call(a.constructor.prototype,"isPrototypeOf")?!1:!0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(a){var b,c=eval;a=n.trim(a),a&&(1===a.indexOf("use strict")?(b=l.createElement("script"),b.text=a,l.head.appendChild(b).parentNode.removeChild(b)):c(a))},camelCase:function(a){return a.replace(p,"ms-").replace(q,r)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=s(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(o,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(s(Object(a))?n.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){return null==b?-1:g.call(b,a,c)},merge:function(a,b){for(var c=+b.length,d=0,e=a.length;c>d;d++)a[e++]=b[d];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=s(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in 
a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(c=a[b],b=a,a=c),n.isFunction(a)?(e=d.call(arguments,2),f=function(){return a.apply(b||this,e.concat(d.call(arguments)))},f.guid=a.guid=a.guid||n.guid++,f):void 0},now:Date.now,support:k}),n.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function s(a){var b=a.length,c=n.type(a);return"function"===c||n.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var t=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=hb(),z=hb(),A=hb(),B=function(a,b){return a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N=M.replace("w","w#"),O="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+N+"))|)"+L+"*\\]",P=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+O+")*)|.*)\\)|)",Q=new RegExp(L+"+","g"),R=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),S=new RegExp("^"+L+"*,"+L+"*"),T=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),U=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),V=new RegExp(P),W=new RegExp("^"+N+"$"),X={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+O),PSEUDO:new RegExp("^"+P),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ab=/[+~]/,bb=/'|\\/g,cb=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),db=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},eb=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(fb){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function gb(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],k=b.nodeType,"string"!=typeof a||!a||1!==k&&9!==k&&11!==k)return d;if(!e&&p){if(11!==k&&(f=_.exec(a)))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return H.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName)return H.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=1!==k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(bb,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+rb(o[l]);w=ab.test(a)&&pb(b.parentNode)||b,x=o.join(",")}if(x)try{return 
H.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function hb(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ib(a){return a[u]=!0,a}function jb(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function kb(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function lb(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function mb(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function nb(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function ob(a){return ib(function(b){return b=+b,ib(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function pb(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=gb.support={},f=gb.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=gb.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=g.documentElement,e=g.defaultView,e&&e!==e.top&&(e.addEventListener?e.addEventListener("unload",eb,!1):e.attachEvent&&e.attachEvent("onunload",eb)),p=!f(g),c.attributes=jb(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=jb(function(a){return a.appendChild(g.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(g.getElementsByClassName),c.getById=jb(function(a){return o.appendChild(a).id=u,!g.getElementsByName||!g.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(g.querySelectorAll))&&(jb(function(a){o.appendChild(a).innerHTML="<a id='"+u+"'></a><select id='"+u+"-\f]' msallowcapture=''><option selected=''></option></select>",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),jb(function(a){var 
b=g.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&jb(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",P)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===g||a.ownerDocument===v&&t(v,a)?-1:b===g||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,h=[a],i=[b];if(!e||!f)return a===g?-1:b===g?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return lb(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?lb(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},g):n},gb.matches=function(a,b){return gb(a,null,null,b)},gb.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return gb(b,n,null,[a]).length>0},gb.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},gb.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},gb.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},gb.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=gb.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=gb.selectors={cacheLength:50,createPseudo:ib,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(cb,db),a[3]=(a[3]||a[4]||a[5]||"").replace(cb,db),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||gb.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&gb.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return 
X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(cb,db).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=gb.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(Q," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||gb.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ib(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ib(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?ib(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ib(function(a){return function(b){return gb(a,b).length>0}}),contains:ib(function(a){return a=a.replace(cb,db),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ib(function(a){return W.test(a||"")||gb.error("unsupported lang: "+a),a=a.replace(cb,db).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var 
b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:ob(function(){return[0]}),last:ob(function(a,b){return[b-1]}),eq:ob(function(a,b,c){return[0>c?c+b:c]}),even:ob(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:ob(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:ob(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:ob(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},d.pseudos.nth=d.pseudos.eq;for(b in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})d.pseudos[b]=mb(b);for(b in{submit:!0,reset:!0})d.pseudos[b]=nb(b);function qb(){}qb.prototype=d.filters=d.pseudos,d.setFilters=new qb,g=gb.tokenize=function(a,b){var c,e,f,g,h,i,j,k=z[a+" "];if(k)return b?0:k.slice(0);h=a,i=[],j=d.preFilter;while(h){(!c||(e=S.exec(h)))&&(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),c=!1,(e=T.exec(h))&&(c=e.shift(),f.push({value:c,type:e[0].replace(R," ")}),h=h.slice(c.length));for(g in d.filter)!(e=X[g].exec(h))||j[g]&&!(e=j[g](e))||(c=e.shift(),f.push({value:c,type:g,matches:e}),h=h.slice(c.length));if(!c)break}return b?h.length:h?gb.error(a):z(a,i).slice(0)};function rb(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function sb(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function tb(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ub(a,b,c){for(var d=0,e=b.length;e>d;d++)gb(a,b[d],c);return c}function vb(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function wb(a,b,c,d,e,f){return d&&!d[u]&&(d=wb(d)),e&&!e[u]&&(e=wb(e,f)),ib(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ub(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:vb(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=vb(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=vb(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function xb(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=sb(function(a){return a===b},h,!0),l=sb(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[sb(tb(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return wb(i>1&&tb(m),i>1&&rb(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&xb(a.slice(i,e)),f>e&&xb(a=a.slice(e)),f>e&&rb(a))}m.push(c)}return tb(m)}function yb(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var 
l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=F.call(i));s=vb(s)}H.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&gb.uniqueSort(i)}return k&&(w=v,j=t),r};return c?ib(f):f}return h=gb.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=xb(b[c]),f[u]?d.push(f):e.push(f);f=A(a,yb(e,d)),f.selector=a}return f},i=gb.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(cb,db),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=X.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(cb,db),ab.test(j[0].type)&&pb(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&rb(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,ab.test(a)&&pb(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=jb(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),jb(function(a){return a.innerHTML="<a href='#'></a>","#"===a.firstChild.getAttribute("href")})||kb("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&jb(function(a){return a.innerHTML="<input/>",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||kb("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),jb(function(a){return null==a.getAttribute("disabled")})||kb(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),gb}(a);n.find=t,n.expr=t.selectors,n.expr[":"]=n.expr.pseudos,n.unique=t.uniqueSort,n.text=t.getText,n.isXMLDoc=t.isXML,n.contains=t.contains;var u=n.expr.match.needsContext,v=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,w=/^.[^:#\[\.,]*$/;function x(a,b,c){if(n.isFunction(b))return n.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return n.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(w.test(b))return n.filter(b,a,c);b=n.filter(b,a)}return n.grep(a,function(a){return g.call(b,a)>=0!==c})}n.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?n.find.matchesSelector(d,a)?[d]:[]:n.find.matches(a,n.grep(b,function(a){return 1===a.nodeType}))},n.fn.extend({find:function(a){var b,c=this.length,d=[],e=this;if("string"!=typeof a)return this.pushStack(n(a).filter(function(){for(b=0;c>b;b++)if(n.contains(e[b],this))return!0}));for(b=0;c>b;b++)n.find(a,e[b],d);return d=this.pushStack(c>1?n.unique(d):d),d.selector=this.selector?this.selector+" "+a:a,d},filter:function(a){return this.pushStack(x(this,a||[],!1))},not:function(a){return this.pushStack(x(this,a||[],!0))},is:function(a){return!!x(this,"string"==typeof a&&u.test(a)?n(a):a||[],!1).length}});var y,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=n.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof 
a){if(c="<"===a[0]&&">"===a[a.length-1]&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||y).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof n?b[0]:b,n.merge(this,n.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:l,!0)),v.test(c[1])&&n.isPlainObject(b))for(c in b)n.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}return d=l.getElementById(c[2]),d&&d.parentNode&&(this.length=1,this[0]=d),this.context=l,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):n.isFunction(a)?"undefined"!=typeof y.ready?y.ready(a):a(n):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),n.makeArray(a,this))};A.prototype=n.fn,y=n(l);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};n.extend({dir:function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&n(a).is(c))break;d.push(a)}return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),n.fn.extend({has:function(a){var b=n(a,this),c=b.length;return this.filter(function(){for(var a=0;c>a;a++)if(n.contains(this,b[a]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=u.test(a)||"string"!=typeof a?n(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&n.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?n.unique(f):f)},index:function(a){return a?"string"==typeof a?g.call(n(a),this[0]):g.call(this,a.jquery?a[0]:a):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(n.unique(n.merge(this.get(),n(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){while((a=a[b])&&1!==a.nodeType);return a}n.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return n.dir(a,"parentNode")},parentsUntil:function(a,b,c){return n.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return n.dir(a,"nextSibling")},prevAll:function(a){return n.dir(a,"previousSibling")},nextUntil:function(a,b,c){return n.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return n.dir(a,"previousSibling",c)},siblings:function(a){return n.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return n.sibling(a.firstChild)},contents:function(a){return a.contentDocument||n.merge([],a.childNodes)}},function(a,b){n.fn[a]=function(c,d){var e=n.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=n.filter(d,e)),this.length>1&&(C[a]||n.unique(e),B.test(a)&&e.reverse()),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return n.each(a.match(E)||[],function(a,c){b[c]=!0}),b}n.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):n.extend({},a);var b,c,d,e,f,g,h=[],i=!a.once&&[],j=function(l){for(b=a.memory&&l,c=!0,g=e||0,e=0,f=h.length,d=!0;h&&f>g;g++)if(h[g].apply(l[0],l[1])===!1&&a.stopOnFalse){b=!1;break}d=!1,h&&(i?i.length&&j(i.shift()):b?h=[]:k.disable())},k={add:function(){if(h){var c=h.length;!function g(b){n.each(b,function(b,c){var d=n.type(c);"function"===d?a.unique&&k.has(c)||h.push(c):c&&c.length&&"string"!==d&&g(c)})}(arguments),d?f=h.length:b&&(e=c,j(b))}return this},remove:function(){return h&&n.each(arguments,function(a,b){var 
c;while((c=n.inArray(b,h,c))>-1)h.splice(c,1),d&&(f>=c&&f--,g>=c&&g--)}),this},has:function(a){return a?n.inArray(a,h)>-1:!(!h||!h.length)},empty:function(){return h=[],f=0,this},disable:function(){return h=i=b=void 0,this},disabled:function(){return!h},lock:function(){return i=void 0,b||k.disable(),this},locked:function(){return!i},fireWith:function(a,b){return!h||c&&!i||(b=b||[],b=[a,b.slice?b.slice():b],d?i.push(b):j(b)),this},fire:function(){return k.fireWith(this,arguments),this},fired:function(){return!!c}};return k},n.extend({Deferred:function(a){var b=[["resolve","done",n.Callbacks("once memory"),"resolved"],["reject","fail",n.Callbacks("once memory"),"rejected"],["notify","progress",n.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return n.Deferred(function(c){n.each(b,function(b,f){var g=n.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&n.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?n.extend(a,d):d}},e={};return d.pipe=d.then,n.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=d.call(arguments),e=c.length,f=1!==e||a&&n.isFunction(a.promise)?e:0,g=1===f?a:n.Deferred(),h=function(a,b,c){return function(e){b[a]=this,c[a]=arguments.length>1?d.call(arguments):e,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(e>1)for(i=new Array(e),j=new Array(e),k=new Array(e);e>b;b++)c[b]&&n.isFunction(c[b].promise)?c[b].promise().done(h(b,k,c)).fail(g.reject).progress(h(b,j,i)):--f;return f||g.resolveWith(k,c),g.promise()}});var H;n.fn.ready=function(a){return n.ready.promise().done(a),this},n.extend({isReady:!1,readyWait:1,holdReady:function(a){a?n.readyWait++:n.ready(!0)},ready:function(a){(a===!0?--n.readyWait:n.isReady)||(n.isReady=!0,a!==!0&&--n.readyWait>0||(H.resolveWith(l,[n]),n.fn.triggerHandler&&(n(l).triggerHandler("ready"),n(l).off("ready"))))}});function I(){l.removeEventListener("DOMContentLoaded",I,!1),a.removeEventListener("load",I,!1),n.ready()}n.ready.promise=function(b){return H||(H=n.Deferred(),"complete"===l.readyState?setTimeout(n.ready):(l.addEventListener("DOMContentLoaded",I,!1),a.addEventListener("load",I,!1))),H.promise(b)},n.ready.promise();var J=n.access=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===n.type(c)){e=!0;for(h in c)n.access(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,n.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(n(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f};n.acceptData=function(a){return 1===a.nodeType||9===a.nodeType||!+a.nodeType};function K(){Object.defineProperty(this.cache={},0,{get:function(){return{}}}),this.expando=n.expando+K.uid++}K.uid=1,K.accepts=n.acceptData,K.prototype={key:function(a){if(!K.accepts(a))return 0;var b={},c=a[this.expando];if(!c){c=K.uid++;try{b[this.expando]={value:c},Object.defineProperties(a,b)}catch(d){b[this.expando]=c,n.extend(a,b)}}return this.cache[c]||(this.cache[c]={}),c},set:function(a,b,c){var d,e=this.key(a),f=this.cache[e];if("string"==typeof b)f[b]=c;else 
if(n.isEmptyObject(f))n.extend(this.cache[e],b);else for(d in b)f[d]=b[d];return f},get:function(a,b){var c=this.cache[this.key(a)];return void 0===b?c:c[b]},access:function(a,b,c){var d;return void 0===b||b&&"string"==typeof b&&void 0===c?(d=this.get(a,b),void 0!==d?d:this.get(a,n.camelCase(b))):(this.set(a,b,c),void 0!==c?c:b)},remove:function(a,b){var c,d,e,f=this.key(a),g=this.cache[f];if(void 0===b)this.cache[f]={};else{n.isArray(b)?d=b.concat(b.map(n.camelCase)):(e=n.camelCase(b),b in g?d=[b,e]:(d=e,d=d in g?[d]:d.match(E)||[])),c=d.length;while(c--)delete g[d[c]]}},hasData:function(a){return!n.isEmptyObject(this.cache[a[this.expando]]||{})},discard:function(a){a[this.expando]&&delete this.cache[a[this.expando]]}};var L=new K,M=new K,N=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,O=/([A-Z])/g;function P(a,b,c){var d;if(void 0===c&&1===a.nodeType)if(d="data-"+b.replace(O,"-$1").toLowerCase(),c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:N.test(c)?n.parseJSON(c):c}catch(e){}M.set(a,b,c)}else c=void 0;return c}n.extend({hasData:function(a){return M.hasData(a)||L.hasData(a)},data:function(a,b,c){return M.access(a,b,c)
},removeData:function(a,b){M.remove(a,b)},_data:function(a,b,c){return L.access(a,b,c)},_removeData:function(a,b){L.remove(a,b)}}),n.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=M.get(f),1===f.nodeType&&!L.get(f,"hasDataAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=n.camelCase(d.slice(5)),P(f,d,e[d])));L.set(f,"hasDataAttrs",!0)}return e}return"object"==typeof a?this.each(function(){M.set(this,a)}):J(this,function(b){var c,d=n.camelCase(a);if(f&&void 0===b){if(c=M.get(f,a),void 0!==c)return c;if(c=M.get(f,d),void 0!==c)return c;if(c=P(f,d,void 0),void 0!==c)return c}else this.each(function(){var c=M.get(this,d);M.set(this,d,b),-1!==a.indexOf("-")&&void 0!==c&&M.set(this,a,b)})},null,b,arguments.length>1,null,!0)},removeData:function(a){return this.each(function(){M.remove(this,a)})}}),n.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=L.get(a,b),c&&(!d||n.isArray(c)?d=L.access(a,b,n.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=n.queue(a,b),d=c.length,e=c.shift(),f=n._queueHooks(a,b),g=function(){n.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return L.get(a,c)||L.access(a,c,{empty:n.Callbacks("once memory").add(function(){L.remove(a,[b+"queue",c])})})}}),n.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length<c?n.queue(this[0],a):void 0===b?this:this.each(function(){var c=n.queue(this,a,b);n._queueHooks(this,a),"fx"===a&&"inprogress"!==c[0]&&n.dequeue(this,a)})},dequeue:function(a){return this.each(function(){n.dequeue(this,a)})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,b){var c,d=1,e=n.Deferred(),f=this,g=this.length,h=function(){--d||e.resolveWith(f,[f])};"string"!=typeof a&&(b=a,a=void 0),a=a||"fx";while(g--)c=L.get(f[g],a+"queueHooks"),c&&c.empty&&(d++,c.empty.add(h));return h(),e.promise(b)}});var Q=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,R=["Top","Right","Bottom","Left"],S=function(a,b){return a=b||a,"none"===n.css(a,"display")||!n.contains(a.ownerDocument,a)},T=/^(?:checkbox|radio)$/i;!function(){var a=l.createDocumentFragment(),b=a.appendChild(l.createElement("div")),c=l.createElement("input");c.setAttribute("type","radio"),c.setAttribute("checked","checked"),c.setAttribute("name","t"),b.appendChild(c),k.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,b.innerHTML="<textarea>x</textarea>",k.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue}();var U="undefined";k.focusinBubbles="onfocusin"in a;var V=/^key/,W=/^(?:mouse|pointer|contextmenu)|click/,X=/^(?:focusinfocus|focusoutblur)$/,Y=/^([^.]*)(?:\.(.+)|)$/;function Z(){return!0}function $(){return!1}function _(){try{return l.activeElement}catch(a){}}n.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=L.get(a);if(r){c.handler&&(f=c,c=f.handler,e=f.selector),c.guid||(c.guid=n.guid++),(i=r.events)||(i=r.events={}),(g=r.handle)||(g=r.handle=function(b){return typeof n!==U&&n.event.triggered!==b.type?n.event.dispatch.apply(a,arguments):void 
0}),b=(b||"").match(E)||[""],j=b.length;while(j--)h=Y.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o&&(l=n.event.special[o]||{},o=(e?l.delegateType:l.bindType)||o,l=n.event.special[o]||{},k=n.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&n.expr.match.needsContext.test(e),namespace:p.join(".")},f),(m=i[o])||(m=i[o]=[],m.delegateCount=0,l.setup&&l.setup.call(a,d,p,g)!==!1||a.addEventListener&&a.addEventListener(o,g,!1)),l.add&&(l.add.call(a,k),k.handler.guid||(k.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,k):m.push(k),n.event.global[o]=!0)}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=L.hasData(a)&&L.get(a);if(r&&(i=r.events)){b=(b||"").match(E)||[""],j=b.length;while(j--)if(h=Y.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=n.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,m=i[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),g=f=m.length;while(f--)k=m[f],!e&&q!==k.origType||c&&c.guid!==k.guid||h&&!h.test(k.namespace)||d&&d!==k.selector&&("**"!==d||!k.selector)||(m.splice(f,1),k.selector&&m.delegateCount--,l.remove&&l.remove.call(a,k));g&&!m.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||n.removeEvent(a,o,r.handle),delete i[o])}else for(o in i)n.event.remove(a,o+b[j],c,d,!0);n.isEmptyObject(i)&&(delete r.handle,L.remove(a,"events"))}},trigger:function(b,c,d,e){var f,g,h,i,k,m,o,p=[d||l],q=j.call(b,"type")?b.type:b,r=j.call(b,"namespace")?b.namespace.split("."):[];if(g=h=d=d||l,3!==d.nodeType&&8!==d.nodeType&&!X.test(q+n.event.triggered)&&(q.indexOf(".")>=0&&(r=q.split("."),q=r.shift(),r.sort()),k=q.indexOf(":")<0&&"on"+q,b=b[n.expando]?b:new n.Event(q,"object"==typeof b&&b),b.isTrigger=e?2:3,b.namespace=r.join("."),b.namespace_re=b.namespace?new RegExp("(^|\\.)"+r.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 0,b.target||(b.target=d),c=null==c?[b]:n.makeArray(c,[b]),o=n.event.special[q]||{},e||!o.trigger||o.trigger.apply(d,c)!==!1)){if(!e&&!o.noBubble&&!n.isWindow(d)){for(i=o.delegateType||q,X.test(i+q)||(g=g.parentNode);g;g=g.parentNode)p.push(g),h=g;h===(d.ownerDocument||l)&&p.push(h.defaultView||h.parentWindow||a)}f=0;while((g=p[f++])&&!b.isPropagationStopped())b.type=f>1?i:o.bindType||q,m=(L.get(g,"events")||{})[b.type]&&L.get(g,"handle"),m&&m.apply(g,c),m=k&&g[k],m&&m.apply&&n.acceptData(g)&&(b.result=m.apply(g,c),b.result===!1&&b.preventDefault());return b.type=q,e||b.isDefaultPrevented()||o._default&&o._default.apply(p.pop(),c)!==!1||!n.acceptData(d)||k&&n.isFunction(d[q])&&!n.isWindow(d)&&(h=d[k],h&&(d[k]=null),n.event.triggered=q,d[q](),n.event.triggered=void 0,h&&(d[k]=h)),b.result}},dispatch:function(a){a=n.event.fix(a);var b,c,e,f,g,h=[],i=d.call(arguments),j=(L.get(this,"events")||{})[a.type]||[],k=n.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=n.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,c=0;while((g=f.handlers[c++])&&!a.isImmediatePropagationStopped())(!a.namespace_re||a.namespace_re.test(g.namespace))&&(a.handleObj=g,a.data=g.data,e=((n.event.special[g.origType]||{}).handle||g.handler).apply(f.elem,i),void 0!==e&&(a.result=e)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var 
c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&(!a.button||"click"!==a.type))for(;i!==this;i=i.parentNode||this)if(i.disabled!==!0||"click"!==a.type){for(d=[],c=0;h>c;c++)f=b[c],e=f.selector+" ",void 0===d[e]&&(d[e]=f.needsContext?n(e,this).index(i)>=0:n.find(e,this,null,[i]).length),d[e]&&d.push(f);d.length&&g.push({elem:i,handlers:d})}return h<b.length&&g.push({elem:this,handlers:b.slice(h)}),g},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return null==a.which&&(a.which=null!=b.charCode?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,b){var c,d,e,f=b.button;return null==a.pageX&&null!=b.clientX&&(c=a.target.ownerDocument||l,d=c.documentElement,e=c.body,a.pageX=b.clientX+(d&&d.scrollLeft||e&&e.scrollLeft||0)-(d&&d.clientLeft||e&&e.clientLeft||0),a.pageY=b.clientY+(d&&d.scrollTop||e&&e.scrollTop||0)-(d&&d.clientTop||e&&e.clientTop||0)),a.which||void 0===f||(a.which=1&f?1:2&f?3:4&f?2:0),a}},fix:function(a){if(a[n.expando])return a;var b,c,d,e=a.type,f=a,g=this.fixHooks[e];g||(this.fixHooks[e]=g=W.test(e)?this.mouseHooks:V.test(e)?this.keyHooks:{}),d=g.props?this.props.concat(g.props):this.props,a=new n.Event(f),b=d.length;while(b--)c=d[b],a[c]=f[c];return a.target||(a.target=l),3===a.target.nodeType&&(a.target=a.target.parentNode),g.filter?g.filter(a,f):a},special:{load:{noBubble:!0},focus:{trigger:function(){return this!==_()&&this.focus?(this.focus(),!1):void 0},delegateType:"focusin"},blur:{trigger:function(){return this===_()&&this.blur?(this.blur(),!1):void 0},delegateType:"focusout"},click:{trigger:function(){return"checkbox"===this.type&&this.click&&n.nodeName(this,"input")?(this.click(),!1):void 0},_default:function(a){return n.nodeName(a.target,"a")}},beforeunload:{postDispatch:function(a){void 0!==a.result&&a.originalEvent&&(a.originalEvent.returnValue=a.result)}}},simulate:function(a,b,c,d){var e=n.extend(new n.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?n.event.trigger(e,null,b):n.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},n.removeEvent=function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)},n.Event=function(a,b){return this instanceof n.Event?(a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||void 0===a.defaultPrevented&&a.returnValue===!1?Z:$):this.type=a,b&&n.extend(this,b),this.timeStamp=a&&a.timeStamp||n.now(),void(this[n.expando]=!0)):new n.Event(a,b)},n.Event.prototype={isDefaultPrevented:$,isPropagationStopped:$,isImmediatePropagationStopped:$,preventDefault:function(){var a=this.originalEvent;this.isDefaultPrevented=Z,a&&a.preventDefault&&a.preventDefault()},stopPropagation:function(){var a=this.originalEvent;this.isPropagationStopped=Z,a&&a.stopPropagation&&a.stopPropagation()},stopImmediatePropagation:function(){var a=this.originalEvent;this.isImmediatePropagationStopped=Z,a&&a.stopImmediatePropagation&&a.stopImmediatePropagation(),this.stopPropagation()}},n.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(a,b){n.event.special[a]={delegateType:b,bindType:b,handle:function(a){var 
c,d=this,e=a.relatedTarget,f=a.handleObj;return(!e||e!==d&&!n.contains(d,e))&&(a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b),c}}}),k.focusinBubbles||n.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){n.event.simulate(b,a.target,n.event.fix(a),!0)};n.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=L.access(d,b);e||d.addEventListener(a,c,!0),L.access(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=L.access(d,b)-1;e?L.access(d,b,e):(d.removeEventListener(a,c,!0),L.remove(d,b))}}}),n.fn.extend({on:function(a,b,c,d,e){var f,g;if("object"==typeof a){"string"!=typeof b&&(c=c||b,b=void 0);for(g in a)this.on(g,b,c,a[g],e);return this}if(null==c&&null==d?(d=b,c=b=void 0):null==d&&("string"==typeof b?(d=c,c=void 0):(d=c,c=b,b=void 0)),d===!1)d=$;else if(!d)return this;return 1===e&&(f=d,d=function(a){return n().off(a),f.apply(this,arguments)},d.guid=f.guid||(f.guid=n.guid++)),this.each(function(){n.event.add(this,a,d,c,b)})},one:function(a,b,c,d){return this.on(a,b,c,d,1)},off:function(a,b,c){var d,e;if(a&&a.preventDefault&&a.handleObj)return d=a.handleObj,n(a.delegateTarget).off(d.namespace?d.origType+"."+d.namespace:d.origType,d.selector,d.handler),this;if("object"==typeof a){for(e in a)this.off(e,b,a[e]);return this}return(b===!1||"function"==typeof b)&&(c=b,b=void 0),c===!1&&(c=$),this.each(function(){n.event.remove(this,a,c,b)})},trigger:function(a,b){return this.each(function(){n.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];return c?n.event.trigger(a,b,c,!0):void 0}});var ab=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bb=/<([\w:]+)/,cb=/<|&#?\w+;/,db=/<(?:script|style|link)/i,eb=/checked\s*(?:[^=]|=\s*.checked.)/i,fb=/^$|\/(?:java|ecma)script/i,gb=/^true\/(.*)/,hb=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,ib={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};ib.optgroup=ib.option,ib.tbody=ib.tfoot=ib.colgroup=ib.caption=ib.thead,ib.th=ib.td;function jb(a,b){return n.nodeName(a,"table")&&n.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function kb(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}function lb(a){var b=gb.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function mb(a,b){for(var c=0,d=a.length;d>c;c++)L.set(a[c],"globalEval",!b||L.get(b[c],"globalEval"))}function nb(a,b){var c,d,e,f,g,h,i,j;if(1===b.nodeType){if(L.hasData(a)&&(f=L.access(a),g=L.set(b,f),j=f.events)){delete g.handle,g.events={};for(e in j)for(c=0,d=j[e].length;d>c;c++)n.event.add(b,e,j[e][c])}M.hasData(a)&&(h=M.access(a),i=n.extend({},h),M.set(b,i))}}function ob(a,b){var c=a.getElementsByTagName?a.getElementsByTagName(b||"*"):a.querySelectorAll?a.querySelectorAll(b||"*"):[];return void 0===b||b&&n.nodeName(a,b)?n.merge([a],c):c}function pb(a,b){var c=b.nodeName.toLowerCase();"input"===c&&T.test(a.type)?b.checked=a.checked:("input"===c||"textarea"===c)&&(b.defaultValue=a.defaultValue)}n.extend({clone:function(a,b,c){var 
d,e,f,g,h=a.cloneNode(!0),i=n.contains(a.ownerDocument,a);if(!(k.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||n.isXMLDoc(a)))for(g=ob(h),f=ob(a),d=0,e=f.length;e>d;d++)pb(f[d],g[d]);if(b)if(c)for(f=f||ob(a),g=g||ob(h),d=0,e=f.length;e>d;d++)nb(f[d],g[d]);else nb(a,h);return g=ob(h,"script"),g.length>0&&mb(g,!i&&ob(a,"script")),h},buildFragment:function(a,b,c,d){for(var e,f,g,h,i,j,k=b.createDocumentFragment(),l=[],m=0,o=a.length;o>m;m++)if(e=a[m],e||0===e)if("object"===n.type(e))n.merge(l,e.nodeType?[e]:e);else if(cb.test(e)){f=f||k.appendChild(b.createElement("div")),g=(bb.exec(e)||["",""])[1].toLowerCase(),h=ib[g]||ib._default,f.innerHTML=h[1]+e.replace(ab,"<$1></$2>")+h[2],j=h[0];while(j--)f=f.lastChild;n.merge(l,f.childNodes),f=k.firstChild,f.textContent=""}else l.push(b.createTextNode(e));k.textContent="",m=0;while(e=l[m++])if((!d||-1===n.inArray(e,d))&&(i=n.contains(e.ownerDocument,e),f=ob(k.appendChild(e),"script"),i&&mb(f),c)){j=0;while(e=f[j++])fb.test(e.type||"")&&c.push(e)}return k},cleanData:function(a){for(var b,c,d,e,f=n.event.special,g=0;void 0!==(c=a[g]);g++){if(n.acceptData(c)&&(e=c[L.expando],e&&(b=L.cache[e]))){if(b.events)for(d in b.events)f[d]?n.event.remove(c,d):n.removeEvent(c,d,b.handle);L.cache[e]&&delete L.cache[e]}delete M.cache[c[M.expando]]}}}),n.fn.extend({text:function(a){return J(this,function(a){return void 0===a?n.text(this):this.empty().each(function(){(1===this.nodeType||11===this.nodeType||9===this.nodeType)&&(this.textContent=a)})},null,a,arguments.length)},append:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=jb(this,a);b.appendChild(a)}})},prepend:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=jb(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},remove:function(a,b){for(var c,d=a?n.filter(a,this):this,e=0;null!=(c=d[e]);e++)b||1!==c.nodeType||n.cleanData(ob(c)),c.parentNode&&(b&&n.contains(c.ownerDocument,c)&&mb(ob(c,"script")),c.parentNode.removeChild(c));return this},empty:function(){for(var a,b=0;null!=(a=this[b]);b++)1===a.nodeType&&(n.cleanData(ob(a,!1)),a.textContent="");return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return n.clone(this,a,b)})},html:function(a){return J(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a&&1===b.nodeType)return b.innerHTML;if("string"==typeof a&&!db.test(a)&&!ib[(bb.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(ab,"<$1></$2>");try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(n.cleanData(ob(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=arguments[0];return this.domManip(arguments,function(b){a=this.parentNode,n.cleanData(ob(this)),a&&a.replaceChild(b,this)}),a&&(a.length||a.nodeType)?this:this.remove()},detach:function(a){return this.remove(a,!0)},domManip:function(a,b){a=e.apply([],a);var c,d,f,g,h,i,j=0,l=this.length,m=this,o=l-1,p=a[0],q=n.isFunction(p);if(q||l>1&&"string"==typeof p&&!k.checkClone&&eb.test(p))return this.each(function(c){var 
d=m.eq(c);q&&(a[0]=p.call(this,c,d.html())),d.domManip(a,b)});if(l&&(c=n.buildFragment(a,this[0].ownerDocument,!1,this),d=c.firstChild,1===c.childNodes.length&&(c=d),d)){for(f=n.map(ob(c,"script"),kb),g=f.length;l>j;j++)h=c,j!==o&&(h=n.clone(h,!0,!0),g&&n.merge(f,ob(h,"script"))),b.call(this[j],h,j);if(g)for(i=f[f.length-1].ownerDocument,n.map(f,lb),j=0;g>j;j++)h=f[j],fb.test(h.type||"")&&!L.access(h,"globalEval")&&n.contains(i,h)&&(h.src?n._evalUrl&&n._evalUrl(h.src):n.globalEval(h.textContent.replace(hb,"")))}return this}}),n.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){n.fn[a]=function(a){for(var c,d=[],e=n(a),g=e.length-1,h=0;g>=h;h++)c=h===g?this:this.clone(!0),n(e[h])[b](c),f.apply(d,c.get());return this.pushStack(d)}});var qb,rb={};function sb(b,c){var d,e=n(c.createElement(b)).appendTo(c.body),f=a.getDefaultComputedStyle&&(d=a.getDefaultComputedStyle(e[0]))?d.display:n.css(e[0],"display");return e.detach(),f}function tb(a){var b=l,c=rb[a];return c||(c=sb(a,b),"none"!==c&&c||(qb=(qb||n("<iframe frameborder='0' width='0' height='0'/>")).appendTo(b.documentElement),b=qb[0].contentDocument,b.write(),b.close(),c=sb(a,b),qb.detach()),rb[a]=c),c}var ub=/^margin/,vb=new RegExp("^("+Q+")(?!px)[a-z%]+$","i"),wb=function(b){return b.ownerDocument.defaultView.opener?b.ownerDocument.defaultView.getComputedStyle(b,null):a.getComputedStyle(b,null)};function xb(a,b,c){var d,e,f,g,h=a.style;return c=c||wb(a),c&&(g=c.getPropertyValue(b)||c[b]),c&&(""!==g||n.contains(a.ownerDocument,a)||(g=n.style(a,b)),vb.test(g)&&ub.test(b)&&(d=h.width,e=h.minWidth,f=h.maxWidth,h.minWidth=h.maxWidth=h.width=g,g=c.width,h.width=d,h.minWidth=e,h.maxWidth=f)),void 0!==g?g+"":g}function yb(a,b){return{get:function(){return a()?void delete this.get:(this.get=b).apply(this,arguments)}}}!function(){var b,c,d=l.documentElement,e=l.createElement("div"),f=l.createElement("div");if(f.style){f.style.backgroundClip="content-box",f.cloneNode(!0).style.backgroundClip="",k.clearCloneStyle="content-box"===f.style.backgroundClip,e.style.cssText="border:0;width:0;height:0;top:0;left:-9999px;margin-top:1px;position:absolute",e.appendChild(f);function g(){f.style.cssText="-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;display:block;margin-top:1%;top:1%;border:1px;padding:1px;width:4px;position:absolute",f.innerHTML="",d.appendChild(e);var g=a.getComputedStyle(f,null);b="1%"!==g.top,c="4px"===g.width,d.removeChild(e)}a.getComputedStyle&&n.extend(k,{pixelPosition:function(){return g(),b},boxSizingReliable:function(){return null==c&&g(),c},reliableMarginRight:function(){var b,c=f.appendChild(l.createElement("div"));return c.style.cssText=f.style.cssText="-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:0",c.style.marginRight=c.style.width="0",f.style.width="1px",d.appendChild(e),b=!parseFloat(a.getComputedStyle(c,null).marginRight),d.removeChild(e),f.removeChild(c),b}})}}(),n.swap=function(a,b,c,d){var e,f,g={};for(f in b)g[f]=a.style[f],a.style[f]=b[f];e=c.apply(a,d||[]);for(f in b)a.style[f]=g[f];return e};var zb=/^(none|table(?!-c[ea]).+)/,Ab=new RegExp("^("+Q+")(.*)$","i"),Bb=new RegExp("^([+-])=("+Q+")","i"),Cb={position:"absolute",visibility:"hidden",display:"block"},Db={letterSpacing:"0",fontWeight:"400"},Eb=["Webkit","O","Moz","ms"];function Fb(a,b){if(b in a)return b;var c=b[0].toUpperCase()+b.slice(1),d=b,e=Eb.length;while(e--)if(b=Eb[e]+c,b in 
a)return b;return d}function Gb(a,b,c){var d=Ab.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function Hb(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===b?1:0,g=0;4>f;f+=2)"margin"===c&&(g+=n.css(a,c+R[f],!0,e)),d?("content"===c&&(g-=n.css(a,"padding"+R[f],!0,e)),"margin"!==c&&(g-=n.css(a,"border"+R[f]+"Width",!0,e))):(g+=n.css(a,"padding"+R[f],!0,e),"padding"!==c&&(g+=n.css(a,"border"+R[f]+"Width",!0,e)));return g}function Ib(a,b,c){var d=!0,e="width"===b?a.offsetWidth:a.offsetHeight,f=wb(a),g="border-box"===n.css(a,"boxSizing",!1,f);if(0>=e||null==e){if(e=xb(a,b,f),(0>e||null==e)&&(e=a.style[b]),vb.test(e))return e;d=g&&(k.boxSizingReliable()||e===a.style[b]),e=parseFloat(e)||0}return e+Hb(a,b,c||(g?"border":"content"),d,f)+"px"}function Jb(a,b){for(var c,d,e,f=[],g=0,h=a.length;h>g;g++)d=a[g],d.style&&(f[g]=L.get(d,"olddisplay"),c=d.style.display,b?(f[g]||"none"!==c||(d.style.display=""),""===d.style.display&&S(d)&&(f[g]=L.access(d,"olddisplay",tb(d.nodeName)))):(e=S(d),"none"===c&&e||L.set(d,"olddisplay",e?c:n.css(d,"display"))));for(g=0;h>g;g++)d=a[g],d.style&&(b&&"none"!==d.style.display&&""!==d.style.display||(d.style.display=b?f[g]||"":"none"));return a}n.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=xb(a,"opacity");return""===c?"1":c}}}},cssNumber:{columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":"cssFloat"},style:function(a,b,c,d){if(a&&3!==a.nodeType&&8!==a.nodeType&&a.style){var e,f,g,h=n.camelCase(b),i=a.style;return b=n.cssProps[h]||(n.cssProps[h]=Fb(i,h)),g=n.cssHooks[b]||n.cssHooks[h],void 0===c?g&&"get"in g&&void 0!==(e=g.get(a,!1,d))?e:i[b]:(f=typeof c,"string"===f&&(e=Bb.exec(c))&&(c=(e[1]+1)*e[2]+parseFloat(n.css(a,b)),f="number"),null!=c&&c===c&&("number"!==f||n.cssNumber[h]||(c+="px"),k.clearCloneStyle||""!==c||0!==b.indexOf("background")||(i[b]="inherit"),g&&"set"in g&&void 0===(c=g.set(a,c,d))||(i[b]=c)),void 0)}},css:function(a,b,c,d){var e,f,g,h=n.camelCase(b);return b=n.cssProps[h]||(n.cssProps[h]=Fb(a.style,h)),g=n.cssHooks[b]||n.cssHooks[h],g&&"get"in g&&(e=g.get(a,!0,c)),void 0===e&&(e=xb(a,b,d)),"normal"===e&&b in Db&&(e=Db[b]),""===c||c?(f=parseFloat(e),c===!0||n.isNumeric(f)?f||0:e):e}}),n.each(["height","width"],function(a,b){n.cssHooks[b]={get:function(a,c,d){return c?zb.test(n.css(a,"display"))&&0===a.offsetWidth?n.swap(a,Cb,function(){return Ib(a,b,d)}):Ib(a,b,d):void 0},set:function(a,c,d){var e=d&&wb(a);return Gb(a,c,d?Hb(a,b,d,"border-box"===n.css(a,"boxSizing",!1,e),e):0)}}}),n.cssHooks.marginRight=yb(k.reliableMarginRight,function(a,b){return b?n.swap(a,{display:"inline-block"},xb,[a,"marginRight"]):void 0}),n.each({margin:"",padding:"",border:"Width"},function(a,b){n.cssHooks[a+b]={expand:function(c){for(var d=0,e={},f="string"==typeof c?c.split(" "):[c];4>d;d++)e[a+R[d]+b]=f[d]||f[d-2]||f[0];return e}},ub.test(a)||(n.cssHooks[a+b].set=Gb)}),n.fn.extend({css:function(a,b){return J(this,function(a,b,c){var d,e,f={},g=0;if(n.isArray(b)){for(d=wb(a),e=b.length;e>g;g++)f[b[g]]=n.css(a,b[g],!1,d);return f}return void 0!==c?n.style(a,b,c):n.css(a,b)},a,b,arguments.length>1)},show:function(){return Jb(this,!0)},hide:function(){return Jb(this)},toggle:function(a){return"boolean"==typeof a?a?this.show():this.hide():this.each(function(){S(this)?n(this).show():n(this).hide()})}});function Kb(a,b,c,d,e){return new 
Kb.prototype.init(a,b,c,d,e)}n.Tween=Kb,Kb.prototype={constructor:Kb,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||"swing",this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(n.cssNumber[c]?"":"px")},cur:function(){var a=Kb.propHooks[this.prop];return a&&a.get?a.get(this):Kb.propHooks._default.get(this)},run:function(a){var b,c=Kb.propHooks[this.prop];return this.pos=b=this.options.duration?n.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):Kb.propHooks._default.set(this),this}},Kb.prototype.init.prototype=Kb.prototype,Kb.propHooks={_default:{get:function(a){var b;return null==a.elem[a.prop]||a.elem.style&&null!=a.elem.style[a.prop]?(b=n.css(a.elem,a.prop,""),b&&"auto"!==b?b:0):a.elem[a.prop]},set:function(a){n.fx.step[a.prop]?n.fx.step[a.prop](a):a.elem.style&&(null!=a.elem.style[n.cssProps[a.prop]]||n.cssHooks[a.prop])?n.style(a.elem,a.prop,a.now+a.unit):a.elem[a.prop]=a.now}}},Kb.propHooks.scrollTop=Kb.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},n.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2}},n.fx=Kb.prototype.init,n.fx.step={};var Lb,Mb,Nb=/^(?:toggle|show|hide)$/,Ob=new RegExp("^(?:([+-])=|)("+Q+")([a-z%]*)$","i"),Pb=/queueHooks$/,Qb=[Vb],Rb={"*":[function(a,b){var c=this.createTween(a,b),d=c.cur(),e=Ob.exec(b),f=e&&e[3]||(n.cssNumber[a]?"":"px"),g=(n.cssNumber[a]||"px"!==f&&+d)&&Ob.exec(n.css(c.elem,a)),h=1,i=20;if(g&&g[3]!==f){f=f||g[3],e=e||[],g=+d||1;do h=h||".5",g/=h,n.style(c.elem,a,g+f);while(h!==(h=c.cur()/d)&&1!==h&&--i)}return e&&(g=c.start=+g||+d||0,c.unit=f,c.end=e[1]?g+(e[1]+1)*e[2]:+e[2]),c}]};function Sb(){return setTimeout(function(){Lb=void 0}),Lb=n.now()}function Tb(a,b){var c,d=0,e={height:a};for(b=b?1:0;4>d;d+=2-b)c=R[d],e["margin"+c]=e["padding"+c]=a;return b&&(e.opacity=e.width=a),e}function Ub(a,b,c){for(var d,e=(Rb[b]||[]).concat(Rb["*"]),f=0,g=e.length;g>f;f++)if(d=e[f].call(c,b,a))return d}function Vb(a,b,c){var d,e,f,g,h,i,j,k,l=this,m={},o=a.style,p=a.nodeType&&S(a),q=L.get(a,"fxshow");c.queue||(h=n._queueHooks(a,"fx"),null==h.unqueued&&(h.unqueued=0,i=h.empty.fire,h.empty.fire=function(){h.unqueued||i()}),h.unqueued++,l.always(function(){l.always(function(){h.unqueued--,n.queue(a,"fx").length||h.empty.fire()})})),1===a.nodeType&&("height"in b||"width"in b)&&(c.overflow=[o.overflow,o.overflowX,o.overflowY],j=n.css(a,"display"),k="none"===j?L.get(a,"olddisplay")||tb(a.nodeName):j,"inline"===k&&"none"===n.css(a,"float")&&(o.display="inline-block")),c.overflow&&(o.overflow="hidden",l.always(function(){o.overflow=c.overflow[0],o.overflowX=c.overflow[1],o.overflowY=c.overflow[2]}));for(d in b)if(e=b[d],Nb.exec(e)){if(delete b[d],f=f||"toggle"===e,e===(p?"hide":"show")){if("show"!==e||!q||void 0===q[d])continue;p=!0}m[d]=q&&q[d]||n.style(a,d)}else j=void 0;if(n.isEmptyObject(m))"inline"===("none"===j?tb(a.nodeName):j)&&(o.display=j);else{q?"hidden"in q&&(p=q.hidden):q=L.access(a,"fxshow",{}),f&&(q.hidden=!p),p?n(a).show():l.done(function(){n(a).hide()}),l.done(function(){var b;L.remove(a,"fxshow");for(b in m)n.style(a,b,m[b])});for(d in m)g=Ub(p?q[d]:0,d,l),d in q||(q[d]=g.start,p&&(g.end=g.start,g.start="width"===d||"height"===d?1:0))}}function Wb(a,b){var c,d,e,f,g;for(c in a)if(d=n.camelCase(c),e=b[d],f=a[c],n.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete 
a[c]),g=n.cssHooks[d],g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}function Xb(a,b,c){var d,e,f=0,g=Qb.length,h=n.Deferred().always(function(){delete i.elem}),i=function(){if(e)return!1;for(var b=Lb||Sb(),c=Math.max(0,j.startTime+j.duration-b),d=c/j.duration||0,f=1-d,g=0,i=j.tweens.length;i>g;g++)j.tweens[g].run(f);return h.notifyWith(a,[j,f,c]),1>f&&i?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:n.extend({},b),opts:n.extend(!0,{specialEasing:{}},c),originalProperties:b,originalOptions:c,startTime:Lb||Sb(),duration:c.duration,tweens:[],createTween:function(b,c){var d=n.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(d),d},stop:function(b){var c=0,d=b?j.tweens.length:0;if(e)return this;for(e=!0;d>c;c++)j.tweens[c].run(1);return b?h.resolveWith(a,[j,b]):h.rejectWith(a,[j,b]),this}}),k=j.props;for(Wb(k,j.opts.specialEasing);g>f;f++)if(d=Qb[f].call(j,a,k,j.opts))return d;return n.map(k,Ub,j),n.isFunction(j.opts.start)&&j.opts.start.call(a,j),n.fx.timer(n.extend(i,{elem:a,anim:j,queue:j.opts.queue})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}n.Animation=n.extend(Xb,{tweener:function(a,b){n.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");for(var c,d=0,e=a.length;e>d;d++)c=a[d],Rb[c]=Rb[c]||[],Rb[c].unshift(b)},prefilter:function(a,b){b?Qb.unshift(a):Qb.push(a)}}),n.speed=function(a,b,c){var d=a&&"object"==typeof a?n.extend({},a):{complete:c||!c&&b||n.isFunction(a)&&a,duration:a,easing:c&&b||b&&!n.isFunction(b)&&b};return d.duration=n.fx.off?0:"number"==typeof d.duration?d.duration:d.duration in n.fx.speeds?n.fx.speeds[d.duration]:n.fx.speeds._default,(null==d.queue||d.queue===!0)&&(d.queue="fx"),d.old=d.complete,d.complete=function(){n.isFunction(d.old)&&d.old.call(this),d.queue&&n.dequeue(this,d.queue)},d},n.fn.extend({fadeTo:function(a,b,c,d){return this.filter(S).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=n.isEmptyObject(a),f=n.speed(b,c,d),g=function(){var b=Xb(this,n.extend({},a),f);(e||L.get(this,"finish"))&&b.stop(!0)};return g.finish=g,e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,b,c){var d=function(a){var b=a.stop;delete a.stop,b(c)};return"string"!=typeof a&&(c=b,b=a,a=void 0),b&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,e=null!=a&&a+"queueHooks",f=n.timers,g=L.get(this);if(e)g[e]&&g[e].stop&&d(g[e]);else for(e in g)g[e]&&g[e].stop&&Pb.test(e)&&d(g[e]);for(e=f.length;e--;)f[e].elem!==this||null!=a&&f[e].queue!==a||(f[e].anim.stop(c),b=!1,f.splice(e,1));(b||!c)&&n.dequeue(this,a)})},finish:function(a){return a!==!1&&(a=a||"fx"),this.each(function(){var b,c=L.get(this),d=c[a+"queue"],e=c[a+"queueHooks"],f=n.timers,g=d?d.length:0;for(c.finish=!0,n.queue(this,a,[]),e&&e.stop&&e.stop.call(this,!0),b=f.length;b--;)f[b].elem===this&&f[b].queue===a&&(f[b].anim.stop(!0),f.splice(b,1));for(b=0;g>b;b++)d[b]&&d[b].finish&&d[b].finish.call(this);delete c.finish})}}),n.each(["toggle","show","hide"],function(a,b){var c=n.fn[b];n.fn[b]=function(a,d,e){return null==a||"boolean"==typeof a?c.apply(this,arguments):this.animate(Tb(b,!0),a,d,e)}}),n.each({slideDown:Tb("show"),slideUp:Tb("hide"),slideToggle:Tb("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){n.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),n.timers=[],n.fx.tick=function(){var 
a,b=0,c=n.timers;for(Lb=n.now();b<c.length;b++)a=c[b],a()||c[b]!==a||c.splice(b--,1);c.length||n.fx.stop(),Lb=void 0},n.fx.timer=function(a){n.timers.push(a),a()?n.fx.start():n.timers.pop()},n.fx.interval=13,n.fx.start=function(){Mb||(Mb=setInterval(n.fx.tick,n.fx.interval))},n.fx.stop=function(){clearInterval(Mb),Mb=null},n.fx.speeds={slow:600,fast:200,_default:400},n.fn.delay=function(a,b){return a=n.fx?n.fx.speeds[a]||a:a,b=b||"fx",this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},function(){var a=l.createElement("input"),b=l.createElement("select"),c=b.appendChild(l.createElement("option"));a.type="checkbox",k.checkOn=""!==a.value,k.optSelected=c.selected,b.disabled=!0,k.optDisabled=!c.disabled,a=l.createElement("input"),a.value="t",a.type="radio",k.radioValue="t"===a.value}();var Yb,Zb,$b=n.expr.attrHandle;n.fn.extend({attr:function(a,b){return J(this,n.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){n.removeAttr(this,a)})}}),n.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(a&&3!==f&&8!==f&&2!==f)return typeof a.getAttribute===U?n.prop(a,b,c):(1===f&&n.isXMLDoc(a)||(b=b.toLowerCase(),d=n.attrHooks[b]||(n.expr.match.bool.test(b)?Zb:Yb)),void 0===c?d&&"get"in d&&null!==(e=d.get(a,b))?e:(e=n.find.attr(a,b),null==e?void 0:e):null!==c?d&&"set"in d&&void 0!==(e=d.set(a,c,b))?e:(a.setAttribute(b,c+""),c):void n.removeAttr(a,b))
},removeAttr:function(a,b){var c,d,e=0,f=b&&b.match(E);if(f&&1===a.nodeType)while(c=f[e++])d=n.propFix[c]||c,n.expr.match.bool.test(c)&&(a[d]=!1),a.removeAttribute(c)},attrHooks:{type:{set:function(a,b){if(!k.radioValue&&"radio"===b&&n.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}}}),Zb={set:function(a,b,c){return b===!1?n.removeAttr(a,c):a.setAttribute(c,c),c}},n.each(n.expr.match.bool.source.match(/\w+/g),function(a,b){var c=$b[b]||n.find.attr;$b[b]=function(a,b,d){var e,f;return d||(f=$b[b],$b[b]=e,e=null!=c(a,b,d)?b.toLowerCase():null,$b[b]=f),e}});var _b=/^(?:input|select|textarea|button)$/i;n.fn.extend({prop:function(a,b){return J(this,n.prop,a,b,arguments.length>1)},removeProp:function(a){return this.each(function(){delete this[n.propFix[a]||a]})}}),n.extend({propFix:{"for":"htmlFor","class":"className"},prop:function(a,b,c){var d,e,f,g=a.nodeType;if(a&&3!==g&&8!==g&&2!==g)return f=1!==g||!n.isXMLDoc(a),f&&(b=n.propFix[b]||b,e=n.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){return a.hasAttribute("tabindex")||_b.test(a.nodeName)||a.href?a.tabIndex:-1}}}}),k.optSelected||(n.propHooks.selected={get:function(a){var b=a.parentNode;return b&&b.parentNode&&b.parentNode.selectedIndex,null}}),n.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){n.propFix[this.toLowerCase()]=this});var ac=/[\t\r\n\f]/g;n.fn.extend({addClass:function(a){var b,c,d,e,f,g,h="string"==typeof a&&a,i=0,j=this.length;if(n.isFunction(a))return this.each(function(b){n(this).addClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(E)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(ac," "):" ")){f=0;while(e=b[f++])d.indexOf(" "+e+" ")<0&&(d+=e+" ");g=n.trim(d),c.className!==g&&(c.className=g)}return this},removeClass:function(a){var b,c,d,e,f,g,h=0===arguments.length||"string"==typeof a&&a,i=0,j=this.length;if(n.isFunction(a))return this.each(function(b){n(this).removeClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(E)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(ac," "):"")){f=0;while(e=b[f++])while(d.indexOf(" "+e+" ")>=0)d=d.replace(" "+e+" "," ");g=a?n.trim(d):"",c.className!==g&&(c.className=g)}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof b&&"string"===c?b?this.addClass(a):this.removeClass(a):this.each(n.isFunction(a)?function(c){n(this).toggleClass(a.call(this,c,this.className,b),b)}:function(){if("string"===c){var b,d=0,e=n(this),f=a.match(E)||[];while(b=f[d++])e.hasClass(b)?e.removeClass(b):e.addClass(b)}else(c===U||"boolean"===c)&&(this.className&&L.set(this,"__className__",this.className),this.className=this.className||a===!1?"":L.get(this,"__className__")||"")})},hasClass:function(a){for(var b=" "+a+" ",c=0,d=this.length;d>c;c++)if(1===this[c].nodeType&&(" "+this[c].className+" ").replace(ac," ").indexOf(b)>=0)return!0;return!1}});var bc=/\r/g;n.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=n.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,n(this).val()):a,null==e?e="":"number"==typeof e?e+="":n.isArray(e)&&(e=n.map(e,function(a){return null==a?"":a+""})),b=n.valHooks[this.type]||n.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 
0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=n.valHooks[e.type]||n.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(bc,""):null==c?"":c)}}}),n.extend({valHooks:{option:{get:function(a){var b=n.find.attr(a,"value");return null!=b?b:n.trim(n.text(a))}},select:{get:function(a){for(var b,c,d=a.options,e=a.selectedIndex,f="select-one"===a.type||0>e,g=f?null:[],h=f?e+1:d.length,i=0>e?h:f?e:0;h>i;i++)if(c=d[i],!(!c.selected&&i!==e||(k.optDisabled?c.disabled:null!==c.getAttribute("disabled"))||c.parentNode.disabled&&n.nodeName(c.parentNode,"optgroup"))){if(b=n(c).val(),f)return b;g.push(b)}return g},set:function(a,b){var c,d,e=a.options,f=n.makeArray(b),g=e.length;while(g--)d=e[g],(d.selected=n.inArray(d.value,f)>=0)&&(c=!0);return c||(a.selectedIndex=-1),f}}}}),n.each(["radio","checkbox"],function(){n.valHooks[this]={set:function(a,b){return n.isArray(b)?a.checked=n.inArray(n(a).val(),b)>=0:void 0}},k.checkOn||(n.valHooks[this].get=function(a){return null===a.getAttribute("value")?"on":a.value})}),n.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){n.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),n.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return 1===arguments.length?this.off(a,"**"):this.off(b,a||"**",c)}});var cc=n.now(),dc=/\?/;n.parseJSON=function(a){return JSON.parse(a+"")},n.parseXML=function(a){var b,c;if(!a||"string"!=typeof a)return null;try{c=new DOMParser,b=c.parseFromString(a,"text/xml")}catch(d){b=void 0}return(!b||b.getElementsByTagName("parsererror").length)&&n.error("Invalid XML: "+a),b};var ec=/#.*$/,fc=/([?&])_=[^&]*/,gc=/^(.*?):[ \t]*([^\r\n]*)$/gm,hc=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,ic=/^(?:GET|HEAD)$/,jc=/^\/\//,kc=/^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/,lc={},mc={},nc="*/".concat("*"),oc=a.location.href,pc=kc.exec(oc.toLowerCase())||[];function qc(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var d,e=0,f=b.toLowerCase().match(E)||[];if(n.isFunction(c))while(d=f[e++])"+"===d[0]?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function rc(a,b,c,d){var e={},f=a===mc;function g(h){var i;return e[h]=!0,n.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||f||e[j]?f?!(i=j):void 0:(b.dataTypes.unshift(j),g(j),!1)}),i}return g(b.dataTypes[0])||!e["*"]&&g("*")}function sc(a,b){var c,d,e=n.ajaxSettings.flatOptions||{};for(c in b)void 0!==b[c]&&((e[c]?a:d||(d={}))[c]=b[c]);return d&&n.extend(!0,a,d),a}function tc(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[0])i.shift(),void 0===d&&(d=a.mimeType||b.getResponseHeader("Content-Type"));if(d)for(e in h)if(h[e]&&h[e].test(d)){i.unshift(e);break}if(i[0]in c)f=i[0];else{for(e in c){if(!i[0]||a.converters[e+" "+i[0]]){f=e;break}g||(g=e)}f=f||g}return f?(f!==i[0]&&i.unshift(f),c[f]):void 0}function uc(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in 
a.converters)j[g.toLowerCase()]=a.converters[g];f=k.shift();while(f)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}n.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:oc,type:"GET",isLocal:hc.test(pc[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":nc,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":n.parseJSON,"text xml":n.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return b?sc(sc(a,n.ajaxSettings),b):sc(n.ajaxSettings,a)},ajaxPrefilter:qc(lc),ajaxTransport:qc(mc),ajax:function(a,b){"object"==typeof a&&(b=a,a=void 0),b=b||{};var c,d,e,f,g,h,i,j,k=n.ajaxSetup({},b),l=k.context||k,m=k.context&&(l.nodeType||l.jquery)?n(l):n.event,o=n.Deferred(),p=n.Callbacks("once memory"),q=k.statusCode||{},r={},s={},t=0,u="canceled",v={readyState:0,getResponseHeader:function(a){var b;if(2===t){if(!f){f={};while(b=gc.exec(e))f[b[1].toLowerCase()]=b[2]}b=f[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return 2===t?e:null},setRequestHeader:function(a,b){var c=a.toLowerCase();return t||(a=s[c]=s[c]||a,r[a]=b),this},overrideMimeType:function(a){return t||(k.mimeType=a),this},statusCode:function(a){var b;if(a)if(2>t)for(b in a)q[b]=[q[b],a[b]];else v.always(a[v.status]);return this},abort:function(a){var b=a||u;return c&&c.abort(b),x(0,b),this}};if(o.promise(v).complete=p.add,v.success=v.done,v.error=v.fail,k.url=((a||k.url||oc)+"").replace(ec,"").replace(jc,pc[1]+"//"),k.type=b.method||b.type||k.method||k.type,k.dataTypes=n.trim(k.dataType||"*").toLowerCase().match(E)||[""],null==k.crossDomain&&(h=kc.exec(k.url.toLowerCase()),k.crossDomain=!(!h||h[1]===pc[1]&&h[2]===pc[2]&&(h[3]||("http:"===h[1]?"80":"443"))===(pc[3]||("http:"===pc[1]?"80":"443")))),k.data&&k.processData&&"string"!=typeof k.data&&(k.data=n.param(k.data,k.traditional)),rc(lc,k,b,v),2===t)return v;i=n.event&&k.global,i&&0===n.active++&&n.event.trigger("ajaxStart"),k.type=k.type.toUpperCase(),k.hasContent=!ic.test(k.type),d=k.url,k.hasContent||(k.data&&(d=k.url+=(dc.test(d)?"&":"?")+k.data,delete k.data),k.cache===!1&&(k.url=fc.test(d)?d.replace(fc,"$1_="+cc++):d+(dc.test(d)?"&":"?")+"_="+cc++)),k.ifModified&&(n.lastModified[d]&&v.setRequestHeader("If-Modified-Since",n.lastModified[d]),n.etag[d]&&v.setRequestHeader("If-None-Match",n.etag[d])),(k.data&&k.hasContent&&k.contentType!==!1||b.contentType)&&v.setRequestHeader("Content-Type",k.contentType),v.setRequestHeader("Accept",k.dataTypes[0]&&k.accepts[k.dataTypes[0]]?k.accepts[k.dataTypes[0]]+("*"!==k.dataTypes[0]?", "+nc+"; q=0.01":""):k.accepts["*"]);for(j in k.headers)v.setRequestHeader(j,k.headers[j]);if(k.beforeSend&&(k.beforeSend.call(l,v,k)===!1||2===t))return v.abort();u="abort";for(j 
in{success:1,error:1,complete:1})v[j](k[j]);if(c=rc(mc,k,b,v)){v.readyState=1,i&&m.trigger("ajaxSend",[v,k]),k.async&&k.timeout>0&&(g=setTimeout(function(){v.abort("timeout")},k.timeout));try{t=1,c.send(r,x)}catch(w){if(!(2>t))throw w;x(-1,w)}}else x(-1,"No Transport");function x(a,b,f,h){var j,r,s,u,w,x=b;2!==t&&(t=2,g&&clearTimeout(g),c=void 0,e=h||"",v.readyState=a>0?4:0,j=a>=200&&300>a||304===a,f&&(u=tc(k,v,f)),u=uc(k,u,v,j),j?(k.ifModified&&(w=v.getResponseHeader("Last-Modified"),w&&(n.lastModified[d]=w),w=v.getResponseHeader("etag"),w&&(n.etag[d]=w)),204===a||"HEAD"===k.type?x="nocontent":304===a?x="notmodified":(x=u.state,r=u.data,s=u.error,j=!s)):(s=x,(a||!x)&&(x="error",0>a&&(a=0))),v.status=a,v.statusText=(b||x)+"",j?o.resolveWith(l,[r,x,v]):o.rejectWith(l,[v,x,s]),v.statusCode(q),q=void 0,i&&m.trigger(j?"ajaxSuccess":"ajaxError",[v,k,j?r:s]),p.fireWith(l,[v,x]),i&&(m.trigger("ajaxComplete",[v,k]),--n.active||n.event.trigger("ajaxStop")))}return v},getJSON:function(a,b,c){return n.get(a,b,c,"json")},getScript:function(a,b){return n.get(a,void 0,b,"script")}}),n.each(["get","post"],function(a,b){n[b]=function(a,c,d,e){return n.isFunction(c)&&(e=e||d,d=c,c=void 0),n.ajax({url:a,type:b,dataType:e,data:c,success:d})}}),n._evalUrl=function(a){return n.ajax({url:a,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})},n.fn.extend({wrapAll:function(a){var b;return n.isFunction(a)?this.each(function(b){n(this).wrapAll(a.call(this,b))}):(this[0]&&(b=n(a,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstElementChild)a=a.firstElementChild;return a}).append(this)),this)},wrapInner:function(a){return this.each(n.isFunction(a)?function(b){n(this).wrapInner(a.call(this,b))}:function(){var b=n(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=n.isFunction(a);return this.each(function(c){n(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){n.nodeName(this,"body")||n(this).replaceWith(this.childNodes)}).end()}}),n.expr.filters.hidden=function(a){return a.offsetWidth<=0&&a.offsetHeight<=0},n.expr.filters.visible=function(a){return!n.expr.filters.hidden(a)};var vc=/%20/g,wc=/\[\]$/,xc=/\r?\n/g,yc=/^(?:submit|button|image|reset|file)$/i,zc=/^(?:input|select|textarea|keygen)/i;function Ac(a,b,c,d){var e;if(n.isArray(b))n.each(b,function(b,e){c||wc.test(a)?d(a,e):Ac(a+"["+("object"==typeof e?b:"")+"]",e,c,d)});else if(c||"object"!==n.type(b))d(a,b);else for(e in b)Ac(a+"["+e+"]",b[e],c,d)}n.param=function(a,b){var c,d=[],e=function(a,b){b=n.isFunction(b)?b():null==b?"":b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};if(void 0===b&&(b=n.ajaxSettings&&n.ajaxSettings.traditional),n.isArray(a)||a.jquery&&!n.isPlainObject(a))n.each(a,function(){e(this.name,this.value)});else for(c in a)Ac(c,a[c],b,e);return d.join("&").replace(vc,"+")},n.fn.extend({serialize:function(){return n.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=n.prop(this,"elements");return a?n.makeArray(a):this}).filter(function(){var a=this.type;return this.name&&!n(this).is(":disabled")&&zc.test(this.nodeName)&&!yc.test(a)&&(this.checked||!T.test(a))}).map(function(a,b){var c=n(this).val();return null==c?null:n.isArray(c)?n.map(c,function(a){return{name:b.name,value:a.replace(xc,"\r\n")}}):{name:b.name,value:c.replace(xc,"\r\n")}}).get()}}),n.ajaxSettings.xhr=function(){try{return new XMLHttpRequest}catch(a){}};var 
Bc=0,Cc={},Dc={0:200,1223:204},Ec=n.ajaxSettings.xhr();a.attachEvent&&a.attachEvent("onunload",function(){for(var a in Cc)Cc[a]()}),k.cors=!!Ec&&"withCredentials"in Ec,k.ajax=Ec=!!Ec,n.ajaxTransport(function(a){var b;return k.cors||Ec&&!a.crossDomain?{send:function(c,d){var e,f=a.xhr(),g=++Bc;if(f.open(a.type,a.url,a.async,a.username,a.password),a.xhrFields)for(e in a.xhrFields)f[e]=a.xhrFields[e];a.mimeType&&f.overrideMimeType&&f.overrideMimeType(a.mimeType),a.crossDomain||c["X-Requested-With"]||(c["X-Requested-With"]="XMLHttpRequest");for(e in c)f.setRequestHeader(e,c[e]);b=function(a){return function(){b&&(delete Cc[g],b=f.onload=f.onerror=null,"abort"===a?f.abort():"error"===a?d(f.status,f.statusText):d(Dc[f.status]||f.status,f.statusText,"string"==typeof f.responseText?{text:f.responseText}:void 0,f.getAllResponseHeaders()))}},f.onload=b(),f.onerror=b("error"),b=Cc[g]=b("abort");try{f.send(a.hasContent&&a.data||null)}catch(h){if(b)throw h}},abort:function(){b&&b()}}:void 0}),n.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/(?:java|ecma)script/},converters:{"text script":function(a){return n.globalEval(a),a}}}),n.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET")}),n.ajaxTransport("script",function(a){if(a.crossDomain){var b,c;return{send:function(d,e){b=n("<script>").prop({async:!0,charset:a.scriptCharset,src:a.url}).on("load error",c=function(a){b.remove(),c=null,a&&e("error"===a.type?404:200,a.type)}),l.head.appendChild(b[0])},abort:function(){c&&c()}}}});var Fc=[],Gc=/(=)\?(?=&|$)|\?\?/;n.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=Fc.pop()||n.expando+"_"+cc++;return this[a]=!0,a}}),n.ajaxPrefilter("json jsonp",function(b,c,d){var e,f,g,h=b.jsonp!==!1&&(Gc.test(b.url)?"url":"string"==typeof b.data&&!(b.contentType||"").indexOf("application/x-www-form-urlencoded")&&Gc.test(b.data)&&"data");return h||"jsonp"===b.dataTypes[0]?(e=b.jsonpCallback=n.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,h?b[h]=b[h].replace(Gc,"$1"+e):b.jsonp!==!1&&(b.url+=(dc.test(b.url)?"&":"?")+b.jsonp+"="+e),b.converters["script json"]=function(){return g||n.error(e+" was not called"),g[0]},b.dataTypes[0]="json",f=a[e],a[e]=function(){g=arguments},d.always(function(){a[e]=f,b[e]&&(b.jsonpCallback=c.jsonpCallback,Fc.push(e)),g&&n.isFunction(f)&&f(g[0]),g=f=void 0}),"script"):void 0}),n.parseHTML=function(a,b,c){if(!a||"string"!=typeof a)return null;"boolean"==typeof b&&(c=b,b=!1),b=b||l;var d=v.exec(a),e=!c&&[];return d?[b.createElement(d[1])]:(d=n.buildFragment([a],b,e),e&&e.length&&n(e).remove(),n.merge([],d.childNodes))};var Hc=n.fn.load;n.fn.load=function(a,b,c){if("string"!=typeof a&&Hc)return Hc.apply(this,arguments);var d,e,f,g=this,h=a.indexOf(" ");return h>=0&&(d=n.trim(a.slice(h)),a=a.slice(0,h)),n.isFunction(b)?(c=b,b=void 0):b&&"object"==typeof b&&(e="POST"),g.length>0&&n.ajax({url:a,type:e,dataType:"html",data:b}).done(function(a){f=arguments,g.html(d?n("<div>").append(n.parseHTML(a)).find(d):a)}).complete(c&&function(a,b){g.each(c,f||[a.responseText,b,a])}),this},n.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(a,b){n.fn[b]=function(a){return this.on(b,a)}}),n.expr.filters.animated=function(a){return n.grep(n.timers,function(b){return a===b.elem}).length};var Ic=a.document.documentElement;function Jc(a){return 
n.isWindow(a)?a:9===a.nodeType&&a.defaultView}n.offset={setOffset:function(a,b,c){var d,e,f,g,h,i,j,k=n.css(a,"position"),l=n(a),m={};"static"===k&&(a.style.position="relative"),h=l.offset(),f=n.css(a,"top"),i=n.css(a,"left"),j=("absolute"===k||"fixed"===k)&&(f+i).indexOf("auto")>-1,j?(d=l.position(),g=d.top,e=d.left):(g=parseFloat(f)||0,e=parseFloat(i)||0),n.isFunction(b)&&(b=b.call(a,c,h)),null!=b.top&&(m.top=b.top-h.top+g),null!=b.left&&(m.left=b.left-h.left+e),"using"in b?b.using.call(a,m):l.css(m)}},n.fn.extend({offset:function(a){if(arguments.length)return void 0===a?this:this.each(function(b){n.offset.setOffset(this,a,b)});var b,c,d=this[0],e={top:0,left:0},f=d&&d.ownerDocument;if(f)return b=f.documentElement,n.contains(b,d)?(typeof d.getBoundingClientRect!==U&&(e=d.getBoundingClientRect()),c=Jc(f),{top:e.top+c.pageYOffset-b.clientTop,left:e.left+c.pageXOffset-b.clientLeft}):e},position:function(){if(this[0]){var a,b,c=this[0],d={top:0,left:0};return"fixed"===n.css(c,"position")?b=c.getBoundingClientRect():(a=this.offsetParent(),b=this.offset(),n.nodeName(a[0],"html")||(d=a.offset()),d.top+=n.css(a[0],"borderTopWidth",!0),d.left+=n.css(a[0],"borderLeftWidth",!0)),{top:b.top-d.top-n.css(c,"marginTop",!0),left:b.left-d.left-n.css(c,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||Ic;while(a&&!n.nodeName(a,"html")&&"static"===n.css(a,"position"))a=a.offsetParent;return a||Ic})}}),n.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(b,c){var d="pageYOffset"===c;n.fn[b]=function(e){return J(this,function(b,e,f){var g=Jc(b);return void 0===f?g?g[c]:b[e]:void(g?g.scrollTo(d?a.pageXOffset:f,d?f:a.pageYOffset):b[e]=f)},b,e,arguments.length,null)}}),n.each(["top","left"],function(a,b){n.cssHooks[b]=yb(k.pixelPosition,function(a,c){return c?(c=xb(a,b),vb.test(c)?n(a).position()[b]+"px":c):void 0})}),n.each({Height:"height",Width:"width"},function(a,b){n.each({padding:"inner"+a,content:b,"":"outer"+a},function(c,d){n.fn[d]=function(d,e){var f=arguments.length&&(c||"boolean"!=typeof d),g=c||(d===!0||e===!0?"margin":"border");return J(this,function(b,c,d){var e;return n.isWindow(b)?b.document.documentElement["client"+a]:9===b.nodeType?(e=b.documentElement,Math.max(b.body["scroll"+a],e["scroll"+a],b.body["offset"+a],e["offset"+a],e["client"+a])):void 0===d?n.css(b,c,g):n.style(b,c,d,g)},b,f?d:void 0,f,null)}})}),n.fn.size=function(){return this.length},n.fn.andSelf=n.fn.addBack,"function"==typeof define&&define.amd&&define("jquery",[],function(){return n});var Kc=a.jQuery,Lc=a.$;return n.noConflict=function(b){return a.$===n&&(a.$=Lc),b&&a.jQuery===n&&(a.jQuery=Kc),n},typeof b===U&&(a.jQuery=a.$=n),n});
diff --git a/static/client/register/js/recaptcha_ajax.js b/synapse/static/client/register/js/recaptcha_ajax.js
index d0e71e5b88..d0e71e5b88 100644
--- a/static/client/register/js/recaptcha_ajax.js
+++ b/synapse/static/client/register/js/recaptcha_ajax.js
diff --git a/static/client/register/js/register.js b/synapse/static/client/register/js/register.js
index b62763a293..b62763a293 100644
--- a/static/client/register/js/register.js
+++ b/synapse/static/client/register/js/register.js
diff --git a/static/client/register/register_config.sample.js b/synapse/static/client/register/register_config.sample.js
index c7ea180dee..c7ea180dee 100644
--- a/static/client/register/register_config.sample.js
+++ b/synapse/static/client/register/register_config.sample.js
diff --git a/static/client/register/style.css b/synapse/static/client/register/style.css
index 5a7b6eebf2..5a7b6eebf2 100644
--- a/static/client/register/style.css
+++ b/synapse/static/client/register/style.css
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index f154b1c8ae..e7443f2838 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -40,24 +40,16 @@ from .filtering import FilteringStore
 from .end_to_end_keys import EndToEndKeyStore
 
 from .receipts import ReceiptsStore
+from .search import SearchStore
+from .tags import TagsStore
 
 
-import fnmatch
-import imp
 import logging
-import os
-import re
 
 
 logger = logging.getLogger(__name__)
 
 
-# Remember to update this number every time a change is made to database
-# schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 22
-
-dir_path = os.path.abspath(os.path.dirname(__file__))
-
 # Number of msec of granularity to store the user IP 'last seen' time. Smaller
 # times give more inserts into the database even for readonly API hits
 # 120 seconds == 2 minutes
@@ -79,6 +71,8 @@ class DataStore(RoomMemberStore, RoomStore,
                 EventsStore,
                 ReceiptsStore,
                 EndToEndKeyStore,
+                SearchStore,
+                TagsStore,
                 ):
 
     def __init__(self, hs):
@@ -94,9 +88,9 @@ class DataStore(RoomMemberStore, RoomStore,
         )
 
     @defer.inlineCallbacks
-    def insert_client_ip(self, user, access_token, device_id, ip, user_agent):
+    def insert_client_ip(self, user, access_token, ip, user_agent):
         now = int(self._clock.time_msec())
-        key = (user.to_string(), access_token, device_id, ip)
+        key = (user.to_string(), access_token, ip)
 
         try:
             last_seen = self.client_ip_last_seen.get(key)
@@ -120,389 +114,44 @@ class DataStore(RoomMemberStore, RoomStore,
                 "user_agent": user_agent,
             },
             values={
-                "device_id": device_id,
                 "last_seen": now,
             },
             desc="insert_client_ip",
             lock=False,
         )
 
+    @defer.inlineCallbacks
+    def count_daily_users(self):
+        """
+        Counts the number of users who used this homeserver in the last 24 hours.
+        """
+        def _count_users(txn):
+            txn.execute(
+                "SELECT COUNT(DISTINCT user_id) AS users"
+                " FROM user_ips"
+                " WHERE last_seen > ?",
+                # This is close enough to a day for our purposes.
+                (int(self._clock.time_msec()) - (1000 * 60 * 60 * 24),)
+            )
+            rows = self.cursor_to_dict(txn)
+            if rows:
+                return rows[0]["users"]
+            return 0
+
+        ret = yield self.runInteraction("count_users", _count_users)
+        defer.returnValue(ret)
+
     def get_user_ip_and_agents(self, user):
         return self._simple_select_list(
             table="user_ips",
             keyvalues={"user_id": user.to_string()},
             retcols=[
-                "device_id", "access_token", "ip", "user_agent", "last_seen"
+                "access_token", "ip", "user_agent", "last_seen"
             ],
             desc="get_user_ip_and_agents",
         )
 
 
-def read_schema(path):
-    """ Read the named database schema.
-
-    Args:
-        path: Path of the database schema.
-    Returns:
-        A string containing the database schema.
-    """
-    with open(path) as schema_file:
-        return schema_file.read()
-
-
-class PrepareDatabaseException(Exception):
-    pass
-
-
-class UpgradeDatabaseException(PrepareDatabaseException):
-    pass
-
-
-def prepare_database(db_conn, database_engine):
-    """Prepares a database for usage. Will either create all necessary tables
-    or upgrade from an older schema version.
-    """
-    try:
-        cur = db_conn.cursor()
-        version_info = _get_or_create_schema_state(cur, database_engine)
-
-        if version_info:
-            user_version, delta_files, upgraded = version_info
-            _upgrade_existing_database(
-                cur, user_version, delta_files, upgraded, database_engine
-            )
-        else:
-            _setup_new_database(cur, database_engine)
-
-        # cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,))
-
-        cur.close()
-        db_conn.commit()
-    except:
-        db_conn.rollback()
-        raise
-
-
-def _setup_new_database(cur, database_engine):
-    """Sets up the database by finding a base set of "full schemas" and then
-    applying any necessary deltas.
-
-    The "full_schemas" directory has subdirectories named after versions. This
-    function searches for the highest version less than or equal to
-    `SCHEMA_VERSION` and executes all .sql files in that directory.
-
-    The function will then apply all deltas for all versions after the base
-    version.
-
-    Example directory structure:
-
-        schema/
-            delta/
-                ...
-            full_schemas/
-                3/
-                    test.sql
-                    ...
-                11/
-                    foo.sql
-                    bar.sql
-                ...
-
-    In the example foo.sql and bar.sql would be run, and then any delta files
-    for versions strictly greater than 11.
-    """
-    current_dir = os.path.join(dir_path, "schema", "full_schemas")
-    directory_entries = os.listdir(current_dir)
-
-    valid_dirs = []
-    pattern = re.compile(r"^\d+(\.sql)?$")
-    for filename in directory_entries:
-        match = pattern.match(filename)
-        abs_path = os.path.join(current_dir, filename)
-        if match and os.path.isdir(abs_path):
-            ver = int(match.group(0))
-            if ver <= SCHEMA_VERSION:
-                valid_dirs.append((ver, abs_path))
-        else:
-            logger.warn("Unexpected entry in 'full_schemas': %s", filename)
-
-    if not valid_dirs:
-        raise PrepareDatabaseException(
-            "Could not find a suitable base set of full schemas"
-        )
-
-    max_current_ver, sql_dir = max(valid_dirs, key=lambda x: x[0])
-
-    logger.debug("Initialising schema v%d", max_current_ver)
-
-    directory_entries = os.listdir(sql_dir)
-
-    for filename in fnmatch.filter(directory_entries, "*.sql"):
-        sql_loc = os.path.join(sql_dir, filename)
-        logger.debug("Applying schema %s", sql_loc)
-        executescript(cur, sql_loc)
-
-    cur.execute(
-        database_engine.convert_param_style(
-            "INSERT INTO schema_version (version, upgraded)"
-            " VALUES (?,?)"
-        ),
-        (max_current_ver, False,)
-    )
-
-    _upgrade_existing_database(
-        cur,
-        current_version=max_current_ver,
-        applied_delta_files=[],
-        upgraded=False,
-        database_engine=database_engine,
-    )
-
-
-def _upgrade_existing_database(cur, current_version, applied_delta_files,
-                               upgraded, database_engine):
-    """Upgrades an existing database.
-
-    Delta files can either be SQL stored in *.sql files, or python modules
-    in *.py.
-
-    There can be multiple delta files per version. Synapse will keep track of
-    which delta files have been applied, and will apply any that haven't been
-    even if there has been no version bump. This is useful for development
-    where orthogonal schema changes may happen on separate branches.
-
-    Different delta files for the same version *must* be orthogonal and give
-    the same result when applied in any order. No guarantees are made on the
-    order of execution of these scripts.
-
-    This is a no-op if current_version == SCHEMA_VERSION.
-
-    Example directory structure:
-
-        schema/
-            delta/
-                11/
-                    foo.sql
-                    ...
-                12/
-                    foo.sql
-                    bar.py
-                ...
-            full_schemas/
-                ...
-
-    In the example, if current_version is 11, then foo.sql will be run if and
-    only if `upgraded` is True. Then `foo.sql` and `bar.py` would be run in
-    some arbitrary order.
-
-    Args:
-        cur (Cursor)
-        current_version (int): The current version of the schema.
-        applied_delta_files (list): A list of deltas that have already been
-            applied.
-        upgraded (bool): Whether the current version was generated by having
-            applied deltas or from full schema file. If `True` the function
-            will never apply delta files for the given `current_version`, since
-            the current_version wasn't generated by applying those delta files.
-    """
-
-    if current_version > SCHEMA_VERSION:
-        raise ValueError(
-            "Cannot use this database as it is too " +
-            "new for the server to understand"
-        )
-
-    start_ver = current_version
-    if not upgraded:
-        start_ver += 1
-
-    logger.debug("applied_delta_files: %s", applied_delta_files)
-
-    for v in range(start_ver, SCHEMA_VERSION + 1):
-        logger.debug("Upgrading schema to v%d", v)
-
-        delta_dir = os.path.join(dir_path, "schema", "delta", str(v))
-
-        try:
-            directory_entries = os.listdir(delta_dir)
-        except OSError:
-            logger.exception("Could not open delta dir for version %d", v)
-            raise UpgradeDatabaseException(
-                "Could not open delta dir for version %d" % (v,)
-            )
-
-        directory_entries.sort()
-        for file_name in directory_entries:
-            relative_path = os.path.join(str(v), file_name)
-            logger.debug("Found file: %s", relative_path)
-            if relative_path in applied_delta_files:
-                continue
-
-            absolute_path = os.path.join(
-                dir_path, "schema", "delta", relative_path,
-            )
-            root_name, ext = os.path.splitext(file_name)
-            if ext == ".py":
-                # This is a python upgrade module. We need to import into some
-                # package and then execute its `run_upgrade` function.
-                module_name = "synapse.storage.v%d_%s" % (
-                    v, root_name
-                )
-                with open(absolute_path) as python_file:
-                    module = imp.load_source(
-                        module_name, absolute_path, python_file
-                    )
-                logger.debug("Running script %s", relative_path)
-                module.run_upgrade(cur, database_engine)
-            elif ext == ".pyc":
-                # Sometimes .pyc files turn up anyway even though we've
-                # disabled their generation; e.g. from distribution package
-                # installers. Silently skip it
-                pass
-            elif ext == ".sql":
-                # A plain old .sql file, just read and execute it
-                logger.debug("Applying schema %s", relative_path)
-                executescript(cur, absolute_path)
-            else:
-                # Not a valid delta file.
-                logger.warn(
-                    "Found directory entry that did not end in .py or"
-                    " .sql: %s",
-                    relative_path,
-                )
-                continue
-
-            # Mark as done.
-            cur.execute(
-                database_engine.convert_param_style(
-                    "INSERT INTO applied_schema_deltas (version, file)"
-                    " VALUES (?,?)",
-                ),
-                (v, relative_path)
-            )
-
-            cur.execute("DELETE FROM schema_version")
-            cur.execute(
-                database_engine.convert_param_style(
-                    "INSERT INTO schema_version (version, upgraded)"
-                    " VALUES (?,?)",
-                ),
-                (v, True)
-            )
-
-
-def get_statements(f):
-    statement_buffer = ""
-    in_comment = False  # If we're in a /* ... */ style comment
-
-    for line in f:
-        line = line.strip()
-
-        if in_comment:
-            # Check if this line contains an end to the comment
-            comments = line.split("*/", 1)
-            if len(comments) == 1:
-                continue
-            line = comments[1]
-            in_comment = False
-
-        # Remove inline block comments
-        line = re.sub(r"/\*.*\*/", " ", line)
-
-        # Does this line start a comment?
-        comments = line.split("/*", 1)
-        if len(comments) > 1:
-            line = comments[0]
-            in_comment = True
-
-        # Deal with line comments
-        line = line.split("--", 1)[0]
-        line = line.split("//", 1)[0]
-
-        # Find *all* semicolons. We need to treat first and last entry
-        # specially.
-        statements = line.split(";")
-
-        # We must prepend statement_buffer to the first statement
-        first_statement = "%s %s" % (
-            statement_buffer.strip(),
-            statements[0].strip()
-        )
-        statements[0] = first_statement
-
-        # Every entry, except the last, is a full statement
-        for statement in statements[:-1]:
-            yield statement.strip()
-
-        # The last entry did *not* end in a semicolon, so we store it for the
-        # next semicolon we find
-        statement_buffer = statements[-1].strip()
-
-
-def executescript(txn, schema_path):
-    with open(schema_path, 'r') as f:
-        for statement in get_statements(f):
-            txn.execute(statement)
-
-
-def _get_or_create_schema_state(txn, database_engine):
-    # Bluntly try creating the schema_version tables.
-    schema_path = os.path.join(
-        dir_path, "schema", "schema_version.sql",
-    )
-    executescript(txn, schema_path)
-
-    txn.execute("SELECT version, upgraded FROM schema_version")
-    row = txn.fetchone()
-    current_version = int(row[0]) if row else None
-    upgraded = bool(row[1]) if row else None
-
-    if current_version:
-        txn.execute(
-            database_engine.convert_param_style(
-                "SELECT file FROM applied_schema_deltas WHERE version >= ?"
-            ),
-            (current_version,)
-        )
-        applied_deltas = [d for d, in txn.fetchall()]
-        return current_version, applied_deltas, upgraded
-
-    return None
-
-
-def prepare_sqlite3_database(db_conn):
-    """This function should be called before `prepare_database` on sqlite3
-    databases.
-
-    Since we changed the way we store the current schema version and handle
-    updates to schemas, we need a way to upgrade from the old method to the
-    new. This only affects sqlite databases since they were the only ones
-    supported at the time.
-    """
-    with db_conn:
-        schema_path = os.path.join(
-            dir_path, "schema", "schema_version.sql",
-        )
-        create_schema = read_schema(schema_path)
-        db_conn.executescript(create_schema)
-
-        c = db_conn.execute("SELECT * FROM schema_version")
-        rows = c.fetchall()
-        c.close()
-
-        if not rows:
-            c = db_conn.execute("PRAGMA user_version")
-            row = c.fetchone()
-            c.close()
-
-            if row and row[0]:
-                db_conn.execute(
-                    "REPLACE INTO schema_version (version, upgraded)"
-                    " VALUES (?,?)",
-                    (row[0], False)
-                )
-
-
 def are_all_users_on_domain(txn, database_engine, domain):
     sql = database_engine.convert_param_style(
         "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?"
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index d976e17786..218e708054 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -25,8 +25,6 @@ from util.id_generators import IdGenerator, StreamIdGenerator
 
 from twisted.internet import defer
 
-from collections import namedtuple
-
 import sys
 import time
 import threading
@@ -181,6 +179,7 @@ class SQLBaseStore(object):
         self._transaction_id_gen = IdGenerator("sent_transactions", "id", self)
         self._state_groups_id_gen = IdGenerator("state_groups", "id", self)
         self._access_tokens_id_gen = IdGenerator("access_tokens", "id", self)
+        self._refresh_tokens_id_gen = IdGenerator("refresh_tokens", "id", self)
         self._pushers_id_gen = IdGenerator("pushers", "id", self)
         self._push_rule_id_gen = IdGenerator("push_rules", "id", self)
         self._push_rules_enable_id_gen = IdGenerator("push_rules_enable", "id", self)
@@ -375,9 +374,6 @@ class SQLBaseStore(object):
 
         return self.runInteraction(desc, interaction)
 
-    def _execute_and_decode(self, desc, query, *args):
-        return self._execute(desc, self.cursor_to_dict, query, *args)
-
     # "Simple" SQL API methods that operate on a single table with no JOINs,
     # no complex WHERE clauses, just a dict of values for columns.
 
@@ -523,7 +519,7 @@ class SQLBaseStore(object):
                                   allow_none=False,
                                   desc="_simple_select_one_onecol"):
         """Executes a SELECT query on the named table, which is expected to
-        return a single row, returning a single column from it."
+        return a single row, returning a single column from it.
 
         Args:
             table : string giving the table name
@@ -690,37 +686,6 @@ class SQLBaseStore(object):
 
         return dict(zip(retcols, row))
 
-    def _simple_selectupdate_one(self, table, keyvalues, updatevalues=None,
-                                 retcols=None, allow_none=False,
-                                 desc="_simple_selectupdate_one"):
-        """ Combined SELECT then UPDATE."""
-        def func(txn):
-            ret = None
-            if retcols:
-                ret = self._simple_select_one_txn(
-                    txn,
-                    table=table,
-                    keyvalues=keyvalues,
-                    retcols=retcols,
-                    allow_none=allow_none,
-                )
-
-            if updatevalues:
-                self._simple_update_one_txn(
-                    txn,
-                    table=table,
-                    keyvalues=keyvalues,
-                    updatevalues=updatevalues,
-                )
-
-                # if txn.rowcount == 0:
-                #     raise StoreError(404, "No row found")
-                if txn.rowcount > 1:
-                    raise StoreError(500, "More than one row matched")
-
-            return ret
-        return self.runInteraction(desc, func)
-
     def _simple_delete_one(self, table, keyvalues, desc="_simple_delete_one"):
         """Executes a DELETE query on the named table, expecting to delete a
         single row.
@@ -742,16 +707,6 @@ class SQLBaseStore(object):
                 raise StoreError(500, "more than one row matched")
         return self.runInteraction(desc, func)
 
-    def _simple_delete(self, table, keyvalues, desc="_simple_delete"):
-        """Executes a DELETE query on the named table.
-
-        Args:
-            table : string giving the table name
-            keyvalues : dict of column names and values to select the row with
-        """
-
-        return self.runInteraction(desc, self._simple_delete_txn)
-
     def _simple_delete_txn(self, txn, table, keyvalues):
         sql = "DELETE FROM %s WHERE %s" % (
             table,
@@ -760,24 +715,6 @@ class SQLBaseStore(object):
 
         return txn.execute(sql, keyvalues.values())
 
-    def _simple_max_id(self, table):
-        """Executes a SELECT query on the named table, expecting to return the
-        max value for the column "id".
-
-        Args:
-            table : string giving the table name
-        """
-        sql = "SELECT MAX(id) AS id FROM %s" % table
-
-        def func(txn):
-            txn.execute(sql)
-            max_id = self.cursor_to_dict(txn)[0]["id"]
-            if max_id is None:
-                return 0
-            return max_id
-
-        return self.runInteraction("_simple_max_id", func)
-
     def get_next_stream_id(self):
         with self._next_stream_id_lock:
             i = self._next_stream_id
@@ -790,129 +727,3 @@ class _RollbackButIsFineException(Exception):
     something went wrong.
     """
     pass
-
-
-class Table(object):
-    """ A base class used to store information about a particular table.
-    """
-
-    table_name = None
-    """ str: The name of the table """
-
-    fields = None
-    """ list: The field names """
-
-    EntryType = None
-    """ Type: A tuple type used to decode the results """
-
-    _select_where_clause = "SELECT %s FROM %s WHERE %s"
-    _select_clause = "SELECT %s FROM %s"
-    _insert_clause = "REPLACE INTO %s (%s) VALUES (%s)"
-
-    @classmethod
-    def select_statement(cls, where_clause=None):
-        """
-        Args:
-            where_clause (str): The WHERE clause to use.
-
-        Returns:
-            str: An SQL statement to select rows from the table with the given
-            WHERE clause.
-        """
-        if where_clause:
-            return cls._select_where_clause % (
-                ", ".join(cls.fields),
-                cls.table_name,
-                where_clause
-            )
-        else:
-            return cls._select_clause % (
-                ", ".join(cls.fields),
-                cls.table_name,
-            )
-
-    @classmethod
-    def insert_statement(cls):
-        return cls._insert_clause % (
-            cls.table_name,
-            ", ".join(cls.fields),
-            ", ".join(["?"] * len(cls.fields)),
-        )
-
-    @classmethod
-    def decode_single_result(cls, results):
-        """ Given an iterable of tuples, return a single instance of
-            `EntryType` or None if the iterable is empty
-        Args:
-            results (list): The results list to convert to `EntryType`
-        Returns:
-            EntryType: An instance of `EntryType`
-        """
-        results = list(results)
-        if results:
-            return cls.EntryType(*results[0])
-        else:
-            return None
-
-    @classmethod
-    def decode_results(cls, results):
-        """ Given an iterable of tuples, return a list of `EntryType`
-        Args:
-            results (list): The results list to convert to `EntryType`
-
-        Returns:
-            list: A list of `EntryType`
-        """
-        return [cls.EntryType(*row) for row in results]
-
-    @classmethod
-    def get_fields_string(cls, prefix=None):
-        if prefix:
-            to_join = ("%s.%s" % (prefix, f) for f in cls.fields)
-        else:
-            to_join = cls.fields
-
-        return ", ".join(to_join)
-
-
-class JoinHelper(object):
-    """ Used to help do joins on tables by looking at the tables' fields and
-    creating a list of unique fields to use with SELECTs and a namedtuple
-    to dump the results into.
-
-    Attributes:
-        tables (list): List of `Table` classes
-        EntryType (type)
-    """
-
-    def __init__(self, *tables):
-        self.tables = tables
-
-        res = []
-        for table in self.tables:
-            res += [f for f in table.fields if f not in res]
-
-        self.EntryType = namedtuple("JoinHelperEntry", res)
-
-    def get_fields(self, **prefixes):
-        """Get a string representing a list of fields for use in SELECT
-        statements with the given prefixes applied to each.
-
-        For example::
-
-            JoinHelper(PdusTable, StateTable).get_fields(
-                PdusTable="pdus",
-                StateTable="state"
-            )
-        """
-        res = []
-        for field in self.EntryType._fields:
-            for table in self.tables:
-                if field in table.fields:
-                    res.append("%s.%s" % (prefixes[table.__name__], field))
-                    break
-
-        return ", ".join(res)
-
-    def decode_results(self, rows):
-        return [self.EntryType(*row) for row in rows]
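The Table and JoinHelper classes deleted above generated SQL from a class-level field list and decoded raw rows into namedtuples. For context on the pattern being retired, a minimal sketch with a hypothetical PdusTable (the field names are invented for illustration; only the table_name/fields/EntryType shape comes from the deleted code):

    from collections import namedtuple

    class PdusTable(object):
        table_name = "pdus"
        fields = ["pdu_id", "origin", "ts"]
        EntryType = namedtuple("PdusEntry", fields)

    # Table.select_statement(None) produced:
    #   SELECT pdu_id, origin, ts FROM pdus
    sql = "SELECT %s FROM %s" % (", ".join(PdusTable.fields), PdusTable.table_name)

    # Table.decode_results wrapped each raw row in EntryType:
    rows = [(1, "example.org", 0), (2, "example.com", 1)]
    entries = [PdusTable.EntryType(*row) for row in rows]
    assert entries[0].origin == "example.org"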
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
new file mode 100644
index 0000000000..45fccc2e5e
--- /dev/null
+++ b/synapse/storage/background_updates.py
@@ -0,0 +1,256 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import SQLBaseStore
+
+from twisted.internet import defer
+
+import ujson as json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class BackgroundUpdatePerformance(object):
+    """Tracks the how long a background update is taking to update its items"""
+
+    def __init__(self, name):
+        self.name = name
+        self.total_item_count = 0
+        self.total_duration_ms = 0
+        self.avg_item_count = 0
+        self.avg_duration_ms = 0
+
+    def update(self, item_count, duration_ms):
+        """Update the stats after doing an update"""
+        self.total_item_count += item_count
+        self.total_duration_ms += duration_ms
+
+        # Exponential moving averages for the number of items updated and
+        # the duration.
+        self.avg_item_count += 0.1 * (item_count - self.avg_item_count)
+        self.avg_duration_ms += 0.1 * (duration_ms - self.avg_duration_ms)
+
+    def average_items_per_ms(self):
+        """An estimate of how long it takes to do a single update.
+        Returns:
+            A duration in ms as a float
+        """
+        if self.total_item_count == 0:
+            return None
+        else:
+            # Use the exponential moving average so that we can adapt to
+            # changes in how long the update process takes.
+            return float(self.avg_item_count) / float(self.avg_duration_ms)
+
+    def total_items_per_ms(self):
+        """An estimate of how long it takes to do a single update.
+        Returns:
+            A duration in ms as a float
+        """
+        if self.total_item_count == 0:
+            return None
+        else:
+            return float(self.total_item_count) / float(self.total_duration_ms)
+
+
+class BackgroundUpdateStore(SQLBaseStore):
+    """ Background updates are updates to the database that run in the
+    background. Each update processes a batch of data at once. We attempt to
+    limit the impact of each update by monitoring how long each batch takes to
+    process and autotuning the batch size.
+    """
+
+    MINIMUM_BACKGROUND_BATCH_SIZE = 100
+    DEFAULT_BACKGROUND_BATCH_SIZE = 100
+    BACKGROUND_UPDATE_INTERVAL_MS = 1000
+    BACKGROUND_UPDATE_DURATION_MS = 100
+
+    def __init__(self, hs):
+        super(BackgroundUpdateStore, self).__init__(hs)
+        self._background_update_performance = {}
+        self._background_update_queue = []
+        self._background_update_handlers = {}
+        self._background_update_timer = None
+
+    @defer.inlineCallbacks
+    def start_doing_background_updates(self):
+        while True:
+            if self._background_update_timer is not None:
+                return
+
+            sleep = defer.Deferred()
+            self._background_update_timer = self._clock.call_later(
+                self.BACKGROUND_UPDATE_INTERVAL_MS / 1000., sleep.callback, None
+            )
+            try:
+                yield sleep
+            finally:
+                self._background_update_timer = None
+
+            try:
+                result = yield self.do_background_update(
+                    self.BACKGROUND_UPDATE_DURATION_MS
+                )
+            except:
+                logger.exception("Error doing update")
+                # `result` would be unbound below if the update raised;
+                # sleep again and retry on the next pass through the loop.
+                continue
+
+            if result is None:
+                logger.info(
+                    "No more background updates to do."
+                    " Unscheduling background update task."
+                )
+                return
+
+    @defer.inlineCallbacks
+    def do_background_update(self, desired_duration_ms):
+        """Does some amount of work on a background update
+        Args:
+            desired_duration_ms(float): How long we want to spend
+                updating.
+        Returns:
+            A deferred that completes once some amount of work is done.
+            The deferred will have a value of None if there is currently
+            no more work to do.
+        """
+        if not self._background_update_queue:
+            updates = yield self._simple_select_list(
+                "background_updates",
+                keyvalues=None,
+                retcols=("update_name",),
+            )
+            for update in updates:
+                self._background_update_queue.append(update['update_name'])
+
+        if not self._background_update_queue:
+            defer.returnValue(None)
+
+        update_name = self._background_update_queue.pop(0)
+        self._background_update_queue.append(update_name)
+
+        update_handler = self._background_update_handlers[update_name]
+
+        performance = self._background_update_performance.get(update_name)
+
+        if performance is None:
+            performance = BackgroundUpdatePerformance(update_name)
+            self._background_update_performance[update_name] = performance
+
+        items_per_ms = performance.average_items_per_ms()
+
+        if items_per_ms is not None:
+            batch_size = int(desired_duration_ms * items_per_ms)
+            # Clamp the batch size so that we always make progress
+            batch_size = max(batch_size, self.MINIMUM_BACKGROUND_BATCH_SIZE)
+        else:
+            batch_size = self.DEFAULT_BACKGROUND_BATCH_SIZE
+
+        progress_json = yield self._simple_select_one_onecol(
+            "background_updates",
+            keyvalues={"update_name": update_name},
+            retcol="progress_json"
+        )
+
+        progress = json.loads(progress_json)
+
+        time_start = self._clock.time_msec()
+        items_updated = yield update_handler(progress, batch_size)
+        time_stop = self._clock.time_msec()
+
+        duration_ms = time_stop - time_start
+
+        logger.info(
+            "Updating %r. Updated %r items in %rms."
+            " (total_rate=%r/ms, current_rate=%r/ms, total_updated=%r)",
+            update_name, items_updated, duration_ms,
+            performance.total_items_per_ms(),
+            performance.average_items_per_ms(),
+            performance.total_item_count,
+        )
+
+        performance.update(items_updated, duration_ms)
+
+        defer.returnValue(len(self._background_update_performance))
+
+    def register_background_update_handler(self, update_name, update_handler):
+        """Register a handler for doing a background update.
+
+        The handler should take two arguments:
+
+        * A dict of the current progress
+        * An integer count of the number of items to update in this batch.
+
+        The handler should return a deferred integer count of items updated.
+        The handler is responsible for updating the progress of the update.
+
+        Args:
+            update_name(str): The name of the update that this code handles.
+            update_handler(function): The function that does the update.
+        """
+        self._background_update_handlers[update_name] = update_handler
+
+    def start_background_update(self, update_name, progress):
+        """Starts a background update running.
+
+        Args:
+            update_name: The update to set running.
+            progress: The initial state of the progress of the update.
+
+        Returns:
+            A deferred that completes once the task has been added to the
+            queue.
+        """
+        # Clear the background update queue so that we will pick up the new
+        # task on the next iteration of do_background_update.
+        self._background_update_queue = []
+        progress_json = json.dumps(progress)
+
+        return self._simple_insert(
+            "background_updates",
+            {"update_name": update_name, "progress_json": progress_json}
+        )
+
+    def _end_background_update(self, update_name):
+        """Removes a completed background update task from the queue.
+
+        Args:
+            update_name(str): The name of the completed task to remove
+        Returns:
+            A deferred that completes once the task is removed.
+        """
+        self._background_update_queue = [
+            name for name in self._background_update_queue if name != update_name
+        ]
+        return self._simple_delete_one(
+            "background_updates", keyvalues={"update_name": update_name}
+        )
+
+    def _background_update_progress_txn(self, txn, update_name, progress):
+        """Update the progress of a background update
+
+        Args:
+            txn(cursor): The transaction.
+            update_name(str): The name of the background update task
+            progress(dict): The progress of the update.
+        """
+
+        progress_json = json.dumps(progress)
+
+        self._simple_update_one_txn(
+            txn,
+            "background_updates",
+            keyvalues={"update_name": update_name},
+            updatevalues={"progress_json": progress_json},
+        )
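A sketch of how a store is expected to drive this new machinery: register a handler, seed a row in background_updates (for example via start_background_update from a schema delta), and let do_background_update call the handler in batches. The store class, update name, table and progress keys below are hypothetical; only the register_background_update_handler / _background_update_progress_txn / _end_background_update calls come from the file above:

    from twisted.internet import defer

    class WidgetStore(BackgroundUpdateStore):
        def __init__(self, hs):
            super(WidgetStore, self).__init__(hs)
            self.register_background_update_handler(
                "reindex_widgets", self._reindex_widgets
            )

        @defer.inlineCallbacks
        def _reindex_widgets(self, progress, batch_size):
            # Resume from wherever the previous batch left off.
            last_id = progress.get("last_id", 0)

            def reindex_txn(txn):
                txn.execute(
                    "SELECT id FROM widgets WHERE id > ? ORDER BY id LIMIT ?",
                    (last_id, batch_size),
                )
                rows = txn.fetchall()
                # ... per-row work would go here ...
                if rows:
                    self._background_update_progress_txn(
                        txn, "reindex_widgets", {"last_id": rows[-1][0]}
                    )
                return len(rows)

            num_done = yield self.runInteraction("reindex_widgets", reindex_txn)
            if num_done == 0:
                # Nothing left to do: drop the task from the queue.
                yield self._end_background_update("reindex_widgets")
            defer.returnValue(num_done)

The batch autotuning then follows from the moving averages: if a handler has been processing roughly 500 items per 250ms, average_items_per_ms() is 2.0, so the 100ms target duration gives int(100 * 2.0) = 200 items for the next batch, clamped below by MINIMUM_BACKGROUND_BATCH_SIZE so progress is always made.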
diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py
index 4a855ffd56..98d66e0a86 100644
--- a/synapse/storage/engines/postgres.py
+++ b/synapse/storage/engines/postgres.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse.storage import prepare_database
+from synapse.storage.prepare_database import prepare_database
 
 from ._base import IncorrectDatabaseSetup
 
diff --git a/synapse/storage/engines/sqlite3.py b/synapse/storage/engines/sqlite3.py
index d18e2808d1..a5a54ec011 100644
--- a/synapse/storage/engines/sqlite3.py
+++ b/synapse/storage/engines/sqlite3.py
@@ -13,7 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse.storage import prepare_database, prepare_sqlite3_database
+from synapse.storage.prepare_database import (
+    prepare_database, prepare_sqlite3_database
+)
+
+import struct
 
 
 class Sqlite3Engine(object):
@@ -30,6 +34,7 @@ class Sqlite3Engine(object):
 
     def on_new_connection(self, db_conn):
         self.prepare_database(db_conn)
+        db_conn.create_function("rank", 1, _rank)
 
     def prepare_database(self, db_conn):
         prepare_sqlite3_database(db_conn)
@@ -43,3 +48,27 @@ class Sqlite3Engine(object):
 
     def lock_table(self, txn, table):
         return
+
+
+# Following functions taken from: https://github.com/coleifer/peewee
+
+def _parse_match_info(buf):
+    bufsize = len(buf)
+    return [struct.unpack('@I', buf[i:i+4])[0] for i in range(0, bufsize, 4)]
+
+
+def _rank(raw_match_info):
+    """Handle match_info called w/default args 'pcx' - based on the example rank
+    function http://sqlite.org/fts3.html#appendix_a
+    """
+    match_info = _parse_match_info(raw_match_info)
+    score = 0.0
+    p, c = match_info[:2]
+    for phrase_num in range(p):
+        phrase_info_idx = 2 + (phrase_num * c * 3)
+        for col_num in range(c):
+            col_idx = phrase_info_idx + (col_num * 3)
+            x1, x2 = match_info[col_idx:col_idx + 2]
+            if x1 > 0:
+                score += float(x1) / x2
+    return score
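The matchinfo blob that sqlite hands to _rank (with the default 'pcx' arguments) is a flat array of 32-bit ints: the phrase count p, the column count c, then three ints per phrase/column pair, of which the first two are hits-in-this-row and hits-in-all-rows. A hand-packed buffer makes the scoring easy to sanity-check; illustrative only:

    import struct

    # One phrase, one column; matched twice in this row (x1=2) out of four
    # occurrences corpus-wide (x2=4), across one document (x3=1).
    buf = struct.pack("@5I", 1, 1, 2, 4, 1)

    assert _parse_match_info(buf) == [1, 1, 2, 4, 1]
    assert _rank(buf) == 0.5  # 2 / 4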
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index 7cb314dee8..6d4421dd8f 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -17,7 +17,7 @@ from twisted.internet import defer
 
 from ._base import SQLBaseStore
 from synapse.util.caches.descriptors import cached
-from syutil.base64util import encode_base64
+from unpaddedbase64 import encode_base64
 
 import logging
 from Queue import PriorityQueue, Empty
@@ -154,98 +154,6 @@ class EventFederationStore(SQLBaseStore):
 
         return results
 
-    def _get_latest_state_in_room(self, txn, room_id, type, state_key):
-        event_ids = self._simple_select_onecol_txn(
-            txn,
-            table="state_forward_extremities",
-            keyvalues={
-                "room_id": room_id,
-                "type": type,
-                "state_key": state_key,
-            },
-            retcol="event_id",
-        )
-
-        results = []
-        for event_id in event_ids:
-            hashes = self._get_event_reference_hashes_txn(txn, event_id)
-            prev_hashes = {
-                k: encode_base64(v) for k, v in hashes.items()
-                if k == "sha256"
-            }
-            results.append((event_id, prev_hashes))
-
-        return results
-
-    def _get_prev_events(self, txn, event_id):
-        results = self._get_prev_events_and_state(
-            txn,
-            event_id,
-            is_state=0,
-        )
-
-        return [(e_id, h, ) for e_id, h, _ in results]
-
-    def _get_prev_state(self, txn, event_id):
-        results = self._get_prev_events_and_state(
-            txn,
-            event_id,
-            is_state=True,
-        )
-
-        return [(e_id, h, ) for e_id, h, _ in results]
-
-    def _get_prev_events_and_state(self, txn, event_id, is_state=None):
-        keyvalues = {
-            "event_id": event_id,
-        }
-
-        if is_state is not None:
-            keyvalues["is_state"] = bool(is_state)
-
-        res = self._simple_select_list_txn(
-            txn,
-            table="event_edges",
-            keyvalues=keyvalues,
-            retcols=["prev_event_id", "is_state"],
-        )
-
-        hashes = self._get_prev_event_hashes_txn(txn, event_id)
-
-        results = []
-        for d in res:
-            edge_hash = self._get_event_reference_hashes_txn(txn, d["prev_event_id"])
-            edge_hash.update(hashes.get(d["prev_event_id"], {}))
-            prev_hashes = {
-                k: encode_base64(v)
-                for k, v in edge_hash.items()
-                if k == "sha256"
-            }
-            results.append((d["prev_event_id"], prev_hashes, d["is_state"]))
-
-        return results
-
-    def _get_auth_events(self, txn, event_id):
-        auth_ids = self._simple_select_onecol_txn(
-            txn,
-            table="event_auth",
-            keyvalues={
-                "event_id": event_id,
-            },
-            retcol="auth_id",
-        )
-
-        results = []
-        for auth_id in auth_ids:
-            hashes = self._get_event_reference_hashes_txn(txn, auth_id)
-            prev_hashes = {
-                k: encode_base64(v) for k, v in hashes.items()
-                if k == "sha256"
-            }
-            results.append((auth_id, prev_hashes))
-
-        return results
-
     def get_min_depth(self, room_id):
         """ For hte given room, get the minimum depth we have seen for it.
         """
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 8774b3b388..5d35ca90b9 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -12,7 +12,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 from _base import SQLBaseStore, _RollbackButIsFineException
 
 from twisted.internet import defer, reactor
@@ -24,15 +23,23 @@ from synapse.util.logcontext import preserve_context_over_deferred
 from synapse.util.logutils import log_function
 from synapse.api.constants import EventTypes
 
-from syutil.jsonutil import encode_json
+from canonicaljson import encode_canonical_json
 from contextlib import contextmanager
 
 import logging
+import math
 import ujson as json
 
 logger = logging.getLogger(__name__)
 
 
+def encode_json(json_object):
+    if USE_FROZEN_DICTS:
+        # ujson doesn't like frozen_dicts
+        return encode_canonical_json(json_object)
+    else:
+        return json.dumps(json_object, ensure_ascii=False)
+
 # These values are used in the `enqueus_event` and `_do_fetch` methods to
 # control how we batch/bulk fetch events from the database.
 # The values are plucked out of thin air to make initial sync run faster
@@ -253,8 +260,7 @@ class EventsStore(SQLBaseStore):
                 )
 
                 metadata_json = encode_json(
-                    event.internal_metadata.get_dict(),
-                    using_frozen_dicts=USE_FROZEN_DICTS
+                    event.internal_metadata.get_dict()
                 ).decode("UTF-8")
 
                 sql = (
@@ -301,8 +307,14 @@ class EventsStore(SQLBaseStore):
                 self._store_room_name_txn(txn, event)
             elif event.type == EventTypes.Topic:
                 self._store_room_topic_txn(txn, event)
+            elif event.type == EventTypes.Message:
+                self._store_room_message_txn(txn, event)
             elif event.type == EventTypes.Redaction:
                 self._store_redaction(txn, event)
+            elif event.type == EventTypes.RoomHistoryVisibility:
+                self._store_history_visibility_txn(txn, event)
+            elif event.type == EventTypes.GuestAccess:
+                self._store_guest_access_txn(txn, event)
 
         self._store_room_members_txn(
             txn,
@@ -331,12 +343,9 @@ class EventsStore(SQLBaseStore):
                     "event_id": event.event_id,
                     "room_id": event.room_id,
                     "internal_metadata": encode_json(
-                        event.internal_metadata.get_dict(),
-                        using_frozen_dicts=USE_FROZEN_DICTS
-                    ).decode("UTF-8"),
-                    "json": encode_json(
-                        event_dict(event), using_frozen_dicts=USE_FROZEN_DICTS
+                        event.internal_metadata.get_dict()
                     ).decode("UTF-8"),
+                    "json": encode_json(event_dict(event)).decode("UTF-8"),
                 }
                 for event, _ in events_and_contexts
             ],
@@ -355,9 +364,7 @@ class EventsStore(SQLBaseStore):
                     "type": event.type,
                     "processed": True,
                     "outlier": event.internal_metadata.is_outlier(),
-                    "content": encode_json(
-                        event.content, using_frozen_dicts=USE_FROZEN_DICTS
-                    ).decode("UTF-8"),
+                    "content": encode_json(event.content).decode("UTF-8"),
                 }
                 for event, _ in events_and_contexts
             ],
@@ -824,7 +831,8 @@ class EventsStore(SQLBaseStore):
                 allow_none=True,
             )
             if prev:
-                ev.unsigned["prev_content"] = prev.get_dict()["content"]
+                ev.unsigned["prev_content"] = prev.content
+                ev.unsigned["prev_sender"] = prev.sender
 
         self._get_event_cache.prefill(
             (ev.event_id, check_redacted, get_prev_content), ev
@@ -881,7 +889,8 @@ class EventsStore(SQLBaseStore):
                 get_prev_content=False,
             )
             if prev:
-                ev.unsigned["prev_content"] = prev.get_dict()["content"]
+                ev.unsigned["prev_content"] = prev.content
+                ev.unsigned["prev_sender"] = prev.sender
 
         self._get_event_cache.prefill(
             (ev.event_id, check_redacted, get_prev_content), ev
@@ -889,18 +898,69 @@ class EventsStore(SQLBaseStore):
 
         return ev
 
-    def _parse_events(self, rows):
-        return self.runInteraction(
-            "_parse_events", self._parse_events_txn, rows
-        )
-
     def _parse_events_txn(self, txn, rows):
         event_ids = [r["event_id"] for r in rows]
 
         return self._get_events_txn(txn, event_ids)
 
-    def _has_been_redacted_txn(self, txn, event):
-        sql = "SELECT event_id FROM redactions WHERE redacts = ?"
-        txn.execute(sql, (event.event_id,))
-        result = txn.fetchone()
-        return result[0] if result else None
+    @defer.inlineCallbacks
+    def count_daily_messages(self):
+        """
+        Returns an estimate of the number of messages sent in the last day.
+
+        If it has been significantly less or more than one day since the last
+        call to this function, it will return None.
+        """
+        def _count_messages(txn):
+            now = self.hs.get_clock().time()
+
+            txn.execute(
+                "SELECT reported_stream_token, reported_time FROM stats_reporting"
+            )
+            last_reported = self.cursor_to_dict(txn)
+
+            txn.execute(
+                "SELECT stream_ordering"
+                " FROM events"
+                " ORDER BY stream_ordering DESC"
+                " LIMIT 1"
+            )
+            now_reporting = self.cursor_to_dict(txn)
+            if not now_reporting:
+                return None
+            now_reporting = now_reporting[0]["stream_ordering"]
+
+            txn.execute("DELETE FROM stats_reporting")
+            txn.execute(
+                "INSERT INTO stats_reporting"
+                " (reported_stream_token, reported_time)"
+                " VALUES (?, ?)",
+                (now_reporting, now,)
+            )
+
+            if not last_reported:
+                return None
+
+            # Close enough to correct for our purposes.
+            yesterday = (now - 24 * 60 * 60)
+            if math.fabs(yesterday - last_reported[0]["reported_time"]) > 60 * 60:
+                return None
+
+            txn.execute(
+                "SELECT COUNT(*) as messages"
+                " FROM events NATURAL JOIN event_json"
+                " WHERE json like '%m.room.message%'"
+                " AND stream_ordering > ?"
+                " AND stream_ordering <= ?",
+                (
+                    last_reported[0]["reported_stream_token"],
+                    now_reporting,
+                )
+            )
+            rows = self.cursor_to_dict(txn)
+            if not rows:
+                return None
+            return rows[0]["messages"]
+
+        ret = yield self.runInteraction("count_messages", _count_messages)
+        defer.returnValue(ret)
diff --git a/synapse/storage/filtering.py b/synapse/storage/filtering.py
index 8800116570..fcd43c7fdd 100644
--- a/synapse/storage/filtering.py
+++ b/synapse/storage/filtering.py
@@ -34,10 +34,10 @@ class FilteringStore(SQLBaseStore):
             desc="get_user_filter",
         )
 
-        defer.returnValue(json.loads(def_json))
+        defer.returnValue(json.loads(str(def_json).decode("utf-8")))
 
     def add_user_filter(self, user_localpart, user_filter):
-        def_json = json.dumps(user_filter)
+        def_json = json.dumps(user_filter).encode("utf-8")
 
         # Need an atomic transaction to SELECT the maximal ID so far then
         # INSERT a new one
diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py
index ffd6daa880..344cacdc75 100644
--- a/synapse/storage/keys.py
+++ b/synapse/storage/keys.py
@@ -19,7 +19,7 @@ from synapse.util.caches.descriptors import cachedInlineCallbacks
 from twisted.internet import defer
 
 import OpenSSL
-from syutil.crypto.signing_key import decode_verify_key_bytes
+from signedjson.key import decode_verify_key_bytes
 import hashlib
 
 
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
new file mode 100644
index 0000000000..1a74d6e360
--- /dev/null
+++ b/synapse/storage/prepare_database.py
@@ -0,0 +1,395 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import fnmatch
+import imp
+import logging
+import os
+import re
+
+
+logger = logging.getLogger(__name__)
+
+
+# Remember to update this number every time a change is made to database
+# schema files, so the users will be informed on server restarts.
+SCHEMA_VERSION = 25
+
+dir_path = os.path.abspath(os.path.dirname(__file__))
+
+
+def read_schema(path):
+    """ Read the named database schema.
+
+    Args:
+        path: Path of the database schema.
+    Returns:
+        A string containing the database schema.
+    """
+    with open(path) as schema_file:
+        return schema_file.read()
+
+
+class PrepareDatabaseException(Exception):
+    pass
+
+
+class UpgradeDatabaseException(PrepareDatabaseException):
+    pass
+
+
+def prepare_database(db_conn, database_engine):
+    """Prepares a database for usage. Will either create all necessary tables
+    or upgrade from an older schema version.
+    """
+    try:
+        cur = db_conn.cursor()
+        version_info = _get_or_create_schema_state(cur, database_engine)
+
+        if version_info:
+            user_version, delta_files, upgraded = version_info
+            _upgrade_existing_database(
+                cur, user_version, delta_files, upgraded, database_engine
+            )
+        else:
+            _setup_new_database(cur, database_engine)
+
+        # cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,))
+
+        cur.close()
+        db_conn.commit()
+    except:
+        db_conn.rollback()
+        raise
+
+
+def _setup_new_database(cur, database_engine):
+    """Sets up the database by finding a base set of "full schemas" and then
+    applying any necessary deltas.
+
+    The "full_schemas" directory has subdirectories named after versions. This
+    function searches for the highest version less than or equal to
+    `SCHEMA_VERSION` and executes all .sql files in that directory.
+
+    The function will then apply all deltas for all versions after the base
+    version.
+
+    Example directory structure:
+
+        schema/
+            delta/
+                ...
+            full_schemas/
+                3/
+                    test.sql
+                    ...
+                11/
+                    foo.sql
+                    bar.sql
+                ...
+
+    In the example foo.sql and bar.sql would be run, and then any delta files
+    for versions strictly greater than 11.
+    """
+    current_dir = os.path.join(dir_path, "schema", "full_schemas")
+    directory_entries = os.listdir(current_dir)
+
+    valid_dirs = []
+    pattern = re.compile(r"^\d+(\.sql)?$")
+    for filename in directory_entries:
+        match = pattern.match(filename)
+        abs_path = os.path.join(current_dir, filename)
+        if match and os.path.isdir(abs_path):
+            ver = int(match.group(0))
+            if ver <= SCHEMA_VERSION:
+                valid_dirs.append((ver, abs_path))
+        else:
+            logger.warn("Unexpected entry in 'full_schemas': %s", filename)
+
+    if not valid_dirs:
+        raise PrepareDatabaseException(
+            "Could not find a suitable base set of full schemas"
+        )
+
+    max_current_ver, sql_dir = max(valid_dirs, key=lambda x: x[0])
+
+    logger.debug("Initialising schema v%d", max_current_ver)
+
+    directory_entries = os.listdir(sql_dir)
+
+    for filename in fnmatch.filter(directory_entries, "*.sql"):
+        sql_loc = os.path.join(sql_dir, filename)
+        logger.debug("Applying schema %s", sql_loc)
+        executescript(cur, sql_loc)
+
+    cur.execute(
+        database_engine.convert_param_style(
+            "INSERT INTO schema_version (version, upgraded)"
+            " VALUES (?,?)"
+        ),
+        (max_current_ver, False,)
+    )
+
+    _upgrade_existing_database(
+        cur,
+        current_version=max_current_ver,
+        applied_delta_files=[],
+        upgraded=False,
+        database_engine=database_engine,
+    )
+
+
+def _upgrade_existing_database(cur, current_version, applied_delta_files,
+                               upgraded, database_engine):
+    """Upgrades an existing database.
+
+    Delta files can either be SQL stored in *.sql files, or python modules
+    in *.py.
+
+    There can be multiple delta files per version. Synapse will keep track of
+    which delta files have been applied, and will apply any that have not yet
+    been applied, even if there has been no version bump. This is useful for
+    development, where orthogonal schema changes may happen on separate
+    branches.
+
+    Different delta files for the same version *must* be orthogonal and give
+    the same result when applied in any order. No guarantees are made on the
+    order of execution of these scripts.
+
+    This is a no-op if current_version == SCHEMA_VERSION.
+
+    Example directory structure:
+
+        schema/
+            delta/
+                11/
+                    foo.sql
+                    ...
+                12/
+                    foo.sql
+                    bar.py
+                ...
+            full_schemas/
+                ...
+
+    In the example, if current_version is 11, then the foo.sql in 11/ will be
+    run if and only if `upgraded` is True. Then the foo.sql and bar.py in 12/
+    would be run in some arbitrary order.
+
+    Args:
+        cur (Cursor)
+        current_version (int): The current version of the schema.
+        applied_delta_files (list): A list of deltas that have already been
+            applied.
+        upgraded (bool): Whether the current version was generated by having
+            applied deltas or from a full schema file. If `False` the function
+            will never apply delta files for the given `current_version`, since
+            the full schema for that version already includes those deltas.
+    """
+
+    if current_version > SCHEMA_VERSION:
+        raise ValueError(
+            "Cannot use this database as it is too " +
+            "new for the server to understand"
+        )
+
+    start_ver = current_version
+    if not upgraded:
+        start_ver += 1
+
+    logger.debug("applied_delta_files: %s", applied_delta_files)
+
+    for v in range(start_ver, SCHEMA_VERSION + 1):
+        logger.debug("Upgrading schema to v%d", v)
+
+        delta_dir = os.path.join(dir_path, "schema", "delta", str(v))
+
+        try:
+            directory_entries = os.listdir(delta_dir)
+        except OSError:
+            logger.exception("Could not open delta dir for version %d", v)
+            raise UpgradeDatabaseException(
+                "Could not open delta dir for version %d" % (v,)
+            )
+
+        directory_entries.sort()
+        for file_name in directory_entries:
+            relative_path = os.path.join(str(v), file_name)
+            logger.debug("Found file: %s", relative_path)
+            if relative_path in applied_delta_files:
+                continue
+
+            absolute_path = os.path.join(
+                dir_path, "schema", "delta", relative_path,
+            )
+            root_name, ext = os.path.splitext(file_name)
+            if ext == ".py":
+                # This is a python upgrade module. We need to import it into
+                # some package and then execute its `run_upgrade` function.
+                module_name = "synapse.storage.v%d_%s" % (
+                    v, root_name
+                )
+                with open(absolute_path) as python_file:
+                    module = imp.load_source(
+                        module_name, absolute_path, python_file
+                    )
+                logger.debug("Running script %s", relative_path)
+                module.run_upgrade(cur, database_engine)
+            elif ext == ".pyc":
+                # Sometimes .pyc files turn up anyway even though we've
+                # disabled their generation; e.g. from distribution package
+                # installers. Silently skip it
+                pass
+            elif ext == ".sql":
+                # A plain old .sql file, just read and execute it
+                logger.debug("Applying schema %s", relative_path)
+                executescript(cur, absolute_path)
+            else:
+                # Not a valid delta file.
+                logger.warn(
+                    "Found directory entry that did not end in .py or"
+                    " .sql: %s",
+                    relative_path,
+                )
+                continue
+
+            # Mark as done.
+            cur.execute(
+                database_engine.convert_param_style(
+                    "INSERT INTO applied_schema_deltas (version, file)"
+                    " VALUES (?,?)",
+                ),
+                (v, relative_path)
+            )
+
+            cur.execute("DELETE FROM schema_version")
+            cur.execute(
+                database_engine.convert_param_style(
+                    "INSERT INTO schema_version (version, upgraded)"
+                    " VALUES (?,?)",
+                ),
+                (v, True)
+            )
+
+
+def get_statements(f):
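+    """Generator that splits an iterable of SQL lines into individual
+    statements, stripping block (/* ... */) and line (--, //) comments.
+    Text after the last semicolon is buffered until the next semicolon
+    arrives; a trailing statement with no semicolon is silently discarded.
+    """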
+    statement_buffer = ""
+    in_comment = False  # If we're in a /* ... */ style comment
+
+    for line in f:
+        line = line.strip()
+
+        if in_comment:
+            # Check if this line contains an end to the comment
+            comments = line.split("*/", 1)
+            if len(comments) == 1:
+                continue
+            line = comments[1]
+            in_comment = False
+
+        # Remove inline block comments
+        line = re.sub(r"/\*.*\*/", " ", line)
+
+        # Does this line start a comment?
+        comments = line.split("/*", 1)
+        if len(comments) > 1:
+            line = comments[0]
+            in_comment = True
+
+        # Deal with line comments
+        line = line.split("--", 1)[0]
+        line = line.split("//", 1)[0]
+
+        # Find *all* semicolons. We need to treat first and last entry
+        # specially.
+        statements = line.split(";")
+
+        # We must prepend statement_buffer to the first statement
+        first_statement = "%s %s" % (
+            statement_buffer.strip(),
+            statements[0].strip()
+        )
+        statements[0] = first_statement
+
+        # Every entry, except the last, is a full statement
+        for statement in statements[:-1]:
+            yield statement.strip()
+
+        # The last entry did *not* end in a semicolon, so we store it for the
+        # next semicolon we find
+        statement_buffer = statements[-1].strip()
+
+
+def executescript(txn, schema_path):
+    with open(schema_path, 'r') as f:
+        for statement in get_statements(f):
+            txn.execute(statement)
+
+
+def _get_or_create_schema_state(txn, database_engine):
+    # Bluntly try creating the schema_version tables.
+    schema_path = os.path.join(
+        dir_path, "schema", "schema_version.sql",
+    )
+    executescript(txn, schema_path)
+
+    txn.execute("SELECT version, upgraded FROM schema_version")
+    row = txn.fetchone()
+    current_version = int(row[0]) if row else None
+    upgraded = bool(row[1]) if row else None
+
+    if current_version:
+        txn.execute(
+            database_engine.convert_param_style(
+                "SELECT file FROM applied_schema_deltas WHERE version >= ?"
+            ),
+            (current_version,)
+        )
+        applied_deltas = [d for d, in txn.fetchall()]
+        return current_version, applied_deltas, upgraded
+
+    return None
+
+
+def prepare_sqlite3_database(db_conn):
+    """This function should be called before `prepare_database` on sqlite3
+    databases.
+
+    Since we changed the way we store the current schema version and handle
+    updates to schemas, we need a way to upgrade from the old method to the
+    new. This only affects sqlite databases since they were the only ones
+    supported at the time.
+    """
+    with db_conn:
+        schema_path = os.path.join(
+            dir_path, "schema", "schema_version.sql",
+        )
+        create_schema = read_schema(schema_path)
+        db_conn.executescript(create_schema)
+
+        c = db_conn.execute("SELECT * FROM schema_version")
+        rows = c.fetchall()
+        c.close()
+
+        if not rows:
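+            # No schema_version row yet: migrate the legacy PRAGMA
+            # user_version across, marking it as a full-schema install.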
+            c = db_conn.execute("PRAGMA user_version")
+            row = c.fetchone()
+            c.close()
+
+            if row and row[0]:
+                db_conn.execute(
+                    "REPLACE INTO schema_version (version, upgraded)"
+                    " VALUES (?,?)",
+                    (row[0], False)
+                )
diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py
index 08ea62681b..345c4e1104 100644
--- a/synapse/storage/pusher.py
+++ b/synapse/storage/pusher.py
@@ -13,12 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from ._base import SQLBaseStore, Table
+from ._base import SQLBaseStore
 from twisted.internet import defer
 
 from synapse.api.errors import StoreError
 
-from syutil.jsonutil import encode_canonical_json
+from canonicaljson import encode_canonical_json
 
 import logging
 import simplejson as json
@@ -149,5 +149,5 @@ class PusherStore(SQLBaseStore):
         )
 
 
-class PushersTable(Table):
+class PushersTable(object):
     table_name = "pushers"
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 586628579d..2e5eddd259 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -51,6 +51,28 @@ class RegistrationStore(SQLBaseStore):
         )
 
     @defer.inlineCallbacks
+    def add_refresh_token_to_user(self, user_id, token):
+        """Adds a refresh token for the given user.
+
+        Args:
+            user_id (str): The user ID.
+            token (str): The new refresh token to add.
+        Raises:
+            StoreError if there was a problem adding this.
+        """
+        next_id = yield self._refresh_tokens_id_gen.get_next()
+
+        yield self._simple_insert(
+            "refresh_tokens",
+            {
+                "id": next_id,
+                "user_id": user_id,
+                "token": token
+            },
+            desc="add_refresh_token_to_user",
+        )
+
+    @defer.inlineCallbacks
     def register(self, user_id, token, password_hash):
         """Attempts to register an account.
 
@@ -80,13 +102,14 @@ class RegistrationStore(SQLBaseStore):
                 400, "User ID already taken.", errcode=Codes.USER_IN_USE
             )
 
-        # it's possible for this to get a conflict, but only for a single user
-        # since tokens are namespaced based on their user ID
-        txn.execute(
-            "INSERT INTO access_tokens(id, user_id, token)"
-            " VALUES (?,?,?)",
-            (next_id, user_id, token,)
-        )
+        if token:
+            # it's possible for this to get a conflict, but only for a single user
+            # since tokens are namespaced based on their user ID
+            txn.execute(
+                "INSERT INTO access_tokens(id, user_id, token)"
+                " VALUES (?,?,?)",
+                (next_id, user_id, token,)
+            )
 
     def get_user_by_id(self, user_id):
         return self._simple_select_one(
@@ -146,26 +169,65 @@ class RegistrationStore(SQLBaseStore):
             user_id
         )
         for r in rows:
-            self.get_user_by_token.invalidate((r,))
+            self.get_user_by_access_token.invalidate((r,))
 
     @cached()
-    def get_user_by_token(self, token):
+    def get_user_by_access_token(self, token):
         """Get a user from the given access token.
 
         Args:
             token (str): The access token of a user.
         Returns:
-            dict: Including the name (user_id), device_id and whether they are
-                an admin.
+            dict: Including the name (user_id) and the ID of their access token.
         Raises:
             StoreError if no user was found.
         """
         return self.runInteraction(
-            "get_user_by_token",
+            "get_user_by_access_token",
             self._query_for_auth,
             token
         )
 
+    def exchange_refresh_token(self, refresh_token, token_generator):
+        """Exchange a refresh token for a new access token and refresh token.
+
+        Doing so invalidates the old refresh token - refresh tokens are single
+        use.
+
+        Args:
+            refresh_token (str): The refresh token of a user.
+            token_generator (fn: str -> str): Function which, when given a
+                user ID, returns a unique refresh token for that user. This
+                function must never return the same value twice.
+        Returns:
+            tuple of (user_id, refresh_token)
+        Raises:
+            StoreError if no user was found with that refresh token.
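+
+        Illustrative usage (the generator function here is an assumption,
+        not part of this change):
+
+            user_id, new_token = yield store.exchange_refresh_token(
+                old_token, generate_refresh_token
+            )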
+        """
+        return self.runInteraction(
+            "exchange_refresh_token",
+            self._exchange_refresh_token,
+            refresh_token,
+            token_generator
+        )
+
+    def _exchange_refresh_token(self, txn, old_token, token_generator):
+        sql = "SELECT user_id FROM refresh_tokens WHERE token = ?"
+        txn.execute(sql, (old_token,))
+        rows = self.cursor_to_dict(txn)
+        if not rows:
+            raise StoreError(403, "Did not recognize refresh token")
+        user_id = rows[0]["user_id"]
+
+        # TODO(danielwh): Maybe perform a validation on the macaroon that
+        # macaroon.user_id == user_id.
+
+        new_token = token_generator(user_id)
+        sql = "UPDATE refresh_tokens SET token = ? WHERE token = ?"
+        txn.execute(sql, (new_token, old_token,))
+
+        return user_id, new_token
+
     @defer.inlineCallbacks
     def is_server_admin(self, user):
         res = yield self._simple_select_one_onecol(
@@ -180,8 +242,7 @@ class RegistrationStore(SQLBaseStore):
 
     def _query_for_auth(self, txn, token):
         sql = (
-            "SELECT users.name, users.admin,"
-            " access_tokens.device_id, access_tokens.id as token_id"
+            "SELECT users.name, access_tokens.id as token_id"
             " FROM users"
             " INNER JOIN access_tokens on users.name = access_tokens.user_id"
             " WHERE token = ?"
@@ -229,3 +290,16 @@ class RegistrationStore(SQLBaseStore):
         if ret:
             defer.returnValue(ret['user_id'])
         defer.returnValue(None)
+
+    @defer.inlineCallbacks
+    def count_all_users(self):
+        """Counts all users registered on the homeserver."""
+        def _count_users(txn):
+            txn.execute("SELECT COUNT(*) AS users FROM users")
+            rows = self.cursor_to_dict(txn)
+            if rows:
+                return rows[0]["users"]
+            return 0
+
+        ret = yield self.runInteraction("count_users", _count_users)
+        defer.returnValue(ret)
diff --git a/synapse/storage/room.py b/synapse/storage/room.py
index 5e07b7e0e5..4f08df478c 100644
--- a/synapse/storage/room.py
+++ b/synapse/storage/room.py
@@ -19,6 +19,7 @@ from synapse.api.errors import StoreError
 
 from ._base import SQLBaseStore
 from synapse.util.caches.descriptors import cachedInlineCallbacks
+from .engines import PostgresEngine, Sqlite3Engine
 
 import collections
 import logging
@@ -98,34 +99,39 @@ class RoomStore(SQLBaseStore):
         """
 
         def f(txn):
-            topic_subquery = (
-                "SELECT topics.event_id as event_id, "
-                "topics.room_id as room_id, topic "
-                "FROM topics "
-                "INNER JOIN current_state_events as c "
-                "ON c.event_id = topics.event_id "
-            )
-
-            name_subquery = (
-                "SELECT room_names.event_id as event_id, "
-                "room_names.room_id as room_id, name "
-                "FROM room_names "
-                "INNER JOIN current_state_events as c "
-                "ON c.event_id = room_names.event_id "
-            )
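+            # Build a subquery that joins a state-content table (e.g.
+            # "topics") to current_state_events, so only the room's current
+            # value for that piece of state is returned.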
+            def subquery(table_name, column_name=None):
+                column_name = column_name or table_name
+                return (
+                    "SELECT %(table_name)s.event_id as event_id, "
+                    "%(table_name)s.room_id as room_id, %(column_name)s "
+                    "FROM %(table_name)s "
+                    "INNER JOIN current_state_events as c "
+                    "ON c.event_id = %(table_name)s.event_id " % {
+                        "column_name": column_name,
+                        "table_name": table_name,
+                    }
+                )
 
-            # We use non printing ascii character US (\x1F) as a separator
             sql = (
-                "SELECT r.room_id, max(n.name), max(t.topic)"
+                "SELECT"
+                "    r.room_id,"
+                "    max(n.name),"
+                "    max(t.topic),"
+                "    max(v.history_visibility),"
+                "    max(g.guest_access)"
                 " FROM rooms AS r"
                 " LEFT JOIN (%(topic)s) AS t ON t.room_id = r.room_id"
                 " LEFT JOIN (%(name)s) AS n ON n.room_id = r.room_id"
+                " LEFT JOIN (%(history_visibility)s) AS v ON v.room_id = r.room_id"
+                " LEFT JOIN (%(guest_access)s) AS g ON g.room_id = r.room_id"
                 " WHERE r.is_public = ?"
-                " GROUP BY r.room_id"
-            ) % {
-                "topic": topic_subquery,
-                "name": name_subquery,
-            }
+                " GROUP BY r.room_id" % {
+                    "topic": subquery("topics", "topic"),
+                    "name": subquery("room_names", "name"),
+                    "history_visibility": subquery("history_visibility"),
+                    "guest_access": subquery("guest_access"),
+                }
+            )
 
             txn.execute(sql, (is_public,))
 
@@ -155,10 +161,12 @@ class RoomStore(SQLBaseStore):
                 "room_id": r[0],
                 "name": r[1],
                 "topic": r[2],
-                "aliases": r[3],
+                "world_readable": r[3] == "world_readable",
+                "guest_can_join": r[4] == "can_join",
+                "aliases": r[5],
             }
             for r in rows
-            if r[3]  # We only return rooms that have at least one alias.
+            if r[5]  # We only return rooms that have at least one alias.
         ]
 
         defer.returnValue(ret)
@@ -175,6 +183,10 @@ class RoomStore(SQLBaseStore):
                 },
             )
 
+            self._store_event_search_txn(
+                txn, event, "content.topic", event.content["topic"]
+            )
+
     def _store_room_name_txn(self, txn, event):
         if hasattr(event, "content") and "name" in event.content:
             self._simple_insert_txn(
@@ -187,6 +199,52 @@ class RoomStore(SQLBaseStore):
                 }
             )
 
+            self._store_event_search_txn(
+                txn, event, "content.name", event.content["name"]
+            )
+
+    def _store_room_message_txn(self, txn, event):
+        if hasattr(event, "content") and "body" in event.content:
+            self._store_event_search_txn(
+                txn, event, "content.body", event.content["body"]
+            )
+
+    def _store_history_visibility_txn(self, txn, event):
+        self._store_content_index_txn(txn, event, "history_visibility")
+
+    def _store_guest_access_txn(self, txn, event):
+        self._store_content_index_txn(txn, event, "guest_access")
+
+    def _store_content_index_txn(self, txn, event, key):
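+        # Mirror one key of the event content into its own table (e.g.
+        # guest_access) so it can be joined against in SELECT statements.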
+        if hasattr(event, "content") and key in event.content:
+            sql = (
+                "INSERT INTO %(key)s"
+                " (event_id, room_id, %(key)s)"
+                " VALUES (?, ?, ?)" % {"key": key}
+            )
+            txn.execute(sql, (
+                event.event_id,
+                event.room_id,
+                event.content[key]
+            ))
+
+    def _store_event_search_txn(self, txn, event, key, value):
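+        # Index this key/value pair for full text search: postgres stores
+        # a tsvector, while sqlite writes to the fts4 virtual table.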
+        if isinstance(self.database_engine, PostgresEngine):
+            sql = (
+                "INSERT INTO event_search (event_id, room_id, key, vector)"
+                " VALUES (?,?,?,to_tsvector('english', ?))"
+            )
+        elif isinstance(self.database_engine, Sqlite3Engine):
+            sql = (
+                "INSERT INTO event_search (event_id, room_id, key, value)"
+                " VALUES (?,?,?,?)"
+            )
+        else:
+            # This should be unreachable.
+            raise Exception("Unrecognized database engine")
+
+        txn.execute(sql, (event.event_id, event.room_id, key, value,))
+
     @cachedInlineCallbacks()
     def get_room_name_and_aliases(self, room_id):
         def f(txn):
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index 8eee2dfbcc..ae1ad56d9a 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -30,7 +30,7 @@ logger = logging.getLogger(__name__)
 
 RoomsForUser = namedtuple(
     "RoomsForUser",
-    ("room_id", "sender", "membership")
+    ("room_id", "sender", "membership", "event_id", "stream_ordering")
 )
 
 
@@ -110,6 +110,33 @@ class RoomMemberStore(SQLBaseStore):
             membership=membership,
         ).addCallback(self._get_events)
 
+    def get_invites_for_user(self, user_id):
+        """ Get all the invite events for a user
+        Args:
+            user_id (str): The user ID.
+        Returns:
+            A deferred list of event objects.
+        """
+
+        return self.get_rooms_for_user_where_membership_is(
+            user_id, [Membership.INVITE]
+        ).addCallback(lambda invites: self._get_events([
+            invite.event_id for invite in invites
+        ]))
+
+    def get_leave_and_ban_events_for_user(self, user_id):
+        """ Get all the leave events for a user
+        Args:
+            user_id (str): The user ID.
+        Returns:
+            A deferred list of event objects.
+        """
+        return self.get_rooms_for_user_where_membership_is(
+            user_id, (Membership.LEAVE, Membership.BAN)
+        ).addCallback(lambda leaves: self._get_events([
+            leave.event_id for leave in leaves
+        ]))
+
     def get_rooms_for_user_where_membership_is(self, user_id, membership_list):
         """ Get all the rooms for this user where the membership for this user
         matches one in the membership list.
@@ -141,11 +168,13 @@ class RoomMemberStore(SQLBaseStore):
         args.extend(membership_list)
 
         sql = (
-            "SELECT m.room_id, m.sender, m.membership"
-            " FROM room_memberships as m"
-            " INNER JOIN current_state_events as c"
-            " ON m.event_id = c.event_id "
-            " AND m.room_id = c.room_id "
+            "SELECT m.room_id, m.sender, m.membership, m.event_id, e.stream_ordering"
+            " FROM current_state_events as c"
+            " INNER JOIN room_memberships as m"
+            " ON m.event_id = c.event_id"
+            " INNER JOIN events as e"
+            " ON e.event_id = c.event_id"
+            " AND m.room_id = c.room_id"
             " AND m.user_id = c.state_key"
             " WHERE %s"
         ) % (where_clause,)
@@ -176,12 +205,6 @@ class RoomMemberStore(SQLBaseStore):
 
         return joined_domains
 
-    def _get_members_query(self, where_clause, where_values):
-        return self.runInteraction(
-            "get_members_query", self._get_members_events_txn,
-            where_clause, where_values
-        ).addCallbacks(self._get_events)
-
     def _get_members_events_txn(self, txn, room_id, membership=None, user_id=None):
         rows = self._get_members_rows_txn(
             txn,
diff --git a/synapse/storage/schema/delta/23/drop_state_index.sql b/synapse/storage/schema/delta/23/drop_state_index.sql
new file mode 100644
index 0000000000..07d0ea5cb2
--- /dev/null
+++ b/synapse/storage/schema/delta/23/drop_state_index.sql
@@ -0,0 +1,16 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+DROP INDEX IF EXISTS state_groups_state_tuple;
diff --git a/synapse/storage/schema/delta/23/refresh_tokens.sql b/synapse/storage/schema/delta/23/refresh_tokens.sql
new file mode 100644
index 0000000000..437b1ac1be
--- /dev/null
+++ b/synapse/storage/schema/delta/23/refresh_tokens.sql
@@ -0,0 +1,21 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE IF NOT EXISTS refresh_tokens(
+    id INTEGER PRIMARY KEY,
+    token TEXT NOT NULL,
+    user_id TEXT NOT NULL,
+    UNIQUE (token)
+);
diff --git a/synapse/storage/schema/delta/24/stats_reporting.sql b/synapse/storage/schema/delta/24/stats_reporting.sql
new file mode 100644
index 0000000000..e9165d2917
--- /dev/null
+++ b/synapse/storage/schema/delta/24/stats_reporting.sql
@@ -0,0 +1,22 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- Should only ever contain one row
+CREATE TABLE IF NOT EXISTS stats_reporting(
+  -- The stream ordering token which was most recently reported as stats
+  reported_stream_token INTEGER,
+  -- The time (seconds since epoch) stats were most recently reported
+  reported_time BIGINT
+);
diff --git a/synapse/storage/schema/delta/25/00background_updates.sql b/synapse/storage/schema/delta/25/00background_updates.sql
new file mode 100644
index 0000000000..41a9b59b1b
--- /dev/null
+++ b/synapse/storage/schema/delta/25/00background_updates.sql
@@ -0,0 +1,21 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+CREATE TABLE IF NOT EXISTS background_updates(
+    update_name TEXT NOT NULL, -- The name of the background update.
+    progress_json TEXT NOT NULL, -- The current progress of the update as JSON.
+    CONSTRAINT background_updates_uniqueness UNIQUE (update_name)
+);
diff --git a/synapse/storage/schema/delta/25/fts.py b/synapse/storage/schema/delta/25/fts.py
new file mode 100644
index 0000000000..5239d69073
--- /dev/null
+++ b/synapse/storage/schema/delta/25/fts.py
@@ -0,0 +1,78 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from synapse.storage.prepare_database import get_statements
+from synapse.storage.engines import PostgresEngine, Sqlite3Engine
+
+import ujson
+
+logger = logging.getLogger(__name__)
+
+
+POSTGRES_TABLE = """
+CREATE TABLE IF NOT EXISTS event_search (
+    event_id TEXT,
+    room_id TEXT,
+    sender TEXT,
+    key TEXT,
+    vector tsvector
+);
+
+CREATE INDEX event_search_fts_idx ON event_search USING gin(vector);
+CREATE INDEX event_search_ev_idx ON event_search(event_id);
+CREATE INDEX event_search_ev_ridx ON event_search(room_id);
+"""
+
+
+SQLITE_TABLE = (
+    "CREATE VIRTUAL TABLE IF NOT EXISTS event_search"
+    " USING fts4 ( event_id, room_id, sender, key, value )"
+)
+
+
+def run_upgrade(cur, database_engine, *args, **kwargs):
+    if isinstance(database_engine, PostgresEngine):
+        for statement in get_statements(POSTGRES_TABLE.splitlines()):
+            cur.execute(statement)
+    elif isinstance(database_engine, Sqlite3Engine):
+        cur.execute(SQLITE_TABLE)
+    else:
+        raise Exception("Unrecognized database engine")
+
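+    # Seed a background update that will backfill event_search from the
+    # existing events table; the reindex itself runs later, in batches,
+    # driven by the background_updates table.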
+    cur.execute("SELECT MIN(stream_ordering) FROM events")
+    rows = cur.fetchall()
+    min_stream_id = rows[0][0]
+
+    cur.execute("SELECT MAX(stream_ordering) FROM events")
+    rows = cur.fetchall()
+    max_stream_id = rows[0][0]
+
+    if min_stream_id is not None and max_stream_id is not None:
+        progress = {
+            "target_min_stream_id_inclusive": min_stream_id,
+            "max_stream_id_exclusive": max_stream_id + 1,
+            "rows_inserted": 0,
+        }
+        progress_json = ujson.dumps(progress)
+
+        sql = (
+            "INSERT into background_updates (update_name, progress_json)"
+            " VALUES (?, ?)"
+        )
+
+        sql = database_engine.convert_param_style(sql)
+
+        cur.execute(sql, ("event_search", progress_json))
diff --git a/synapse/storage/schema/delta/25/guest_access.sql b/synapse/storage/schema/delta/25/guest_access.sql
new file mode 100644
index 0000000000..bdb90e7118
--- /dev/null
+++ b/synapse/storage/schema/delta/25/guest_access.sql
@@ -0,0 +1,25 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * This is a manual index of the guest_access content of state events,
+ * so that we can join on it in SELECT statements.
+ */
+CREATE TABLE IF NOT EXISTS guest_access(
+    event_id TEXT NOT NULL,
+    room_id TEXT NOT NULL,
+    guest_access TEXT NOT NULL,
+    UNIQUE (event_id)
+);
diff --git a/synapse/storage/schema/delta/25/history_visibility.sql b/synapse/storage/schema/delta/25/history_visibility.sql
new file mode 100644
index 0000000000..532cb05151
--- /dev/null
+++ b/synapse/storage/schema/delta/25/history_visibility.sql
@@ -0,0 +1,25 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * This is a manual index of the history_visibility content of state events,
+ * so that we can join on it in SELECT statements.
+ */
+CREATE TABLE IF NOT EXISTS history_visibility(
+    event_id TEXT NOT NULL,
+    room_id TEXT NOT NULL,
+    history_visibility TEXT NOT NULL,
+    UNIQUE (event_id)
+);
diff --git a/synapse/storage/schema/delta/25/tags.sql b/synapse/storage/schema/delta/25/tags.sql
new file mode 100644
index 0000000000..527424c998
--- /dev/null
+++ b/synapse/storage/schema/delta/25/tags.sql
@@ -0,0 +1,38 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+CREATE TABLE IF NOT EXISTS room_tags(
+    user_id TEXT NOT NULL,
+    room_id TEXT NOT NULL,
+    tag     TEXT NOT NULL,  -- The name of the tag.
+    content TEXT NOT NULL,  -- The JSON content of the tag.
+    CONSTRAINT room_tag_uniqueness UNIQUE (user_id, room_id, tag)
+);
+
+CREATE TABLE IF NOT EXISTS room_tags_revisions (
+    user_id TEXT NOT NULL,
+    room_id TEXT NOT NULL,
+    stream_id BIGINT NOT NULL, -- The current version of the room tags.
+    CONSTRAINT room_tag_revisions_uniqueness UNIQUE (user_id, room_id)
+);
+
+CREATE TABLE IF NOT EXISTS private_user_data_max_stream_id(
+    Lock CHAR(1) NOT NULL DEFAULT 'X' UNIQUE,  -- Makes sure this table only has one row.
+    stream_id  BIGINT NOT NULL,
+    CHECK (Lock='X')
+);
+
+INSERT INTO private_user_data_max_stream_id (stream_id) VALUES (0);
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
new file mode 100644
index 0000000000..380270b009
--- /dev/null
+++ b/synapse/storage/search.py
@@ -0,0 +1,307 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from .background_updates import BackgroundUpdateStore
+from synapse.api.errors import SynapseError
+from synapse.storage.engines import PostgresEngine, Sqlite3Engine
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchStore(BackgroundUpdateStore):
+
+    EVENT_SEARCH_UPDATE_NAME = "event_search"
+
+    def __init__(self, hs):
+        super(SearchStore, self).__init__(hs)
+        self.register_background_update_handler(
+            self.EVENT_SEARCH_UPDATE_NAME, self._background_reindex_search
+        )
+
+    @defer.inlineCallbacks
+    def _background_reindex_search(self, progress, batch_size):
+        target_min_stream_id = progress["target_min_stream_id_inclusive"]
+        max_stream_id = progress["max_stream_id_exclusive"]
+        rows_inserted = progress.get("rows_inserted", 0)
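+
+        # Walk the events table backwards in batches, indexing name,
+        # message and topic events; progress is checkpointed after each
+        # batch so the update can resume across restarts.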
+
+        INSERT_CLUMP_SIZE = 1000
+        TYPES = ["m.room.name", "m.room.message", "m.room.topic"]
+
+        def reindex_search_txn(txn):
+            sql = (
+                "SELECT stream_ordering, event_id FROM events"
+                " WHERE ? <= stream_ordering AND stream_ordering < ?"
+                " AND (%s)"
+                " ORDER BY stream_ordering DESC"
+                " LIMIT ?"
+            ) % (" OR ".join("type = '%s'" % (t,) for t in TYPES),)
+
+            txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
+
+            rows = txn.fetchall()
+            if not rows:
+                return 0
+
+            min_stream_id = rows[-1][0]
+            event_ids = [row[1] for row in rows]
+
+            events = self._get_events_txn(txn, event_ids)
+
+            event_search_rows = []
+            for event in events:
+                try:
+                    event_id = event.event_id
+                    room_id = event.room_id
+                    content = event.content
+                    if event.type == "m.room.message":
+                        key = "content.body"
+                        value = content["body"]
+                    elif event.type == "m.room.topic":
+                        key = "content.topic"
+                        value = content["topic"]
+                    elif event.type == "m.room.name":
+                        key = "content.name"
+                        value = content["name"]
+                except (KeyError, AttributeError):
+                    # If the event is missing a necessary field then
+                    # skip over it.
+                    continue
+
+                event_search_rows.append((event_id, room_id, key, value))
+
+            if isinstance(self.database_engine, PostgresEngine):
+                sql = (
+                    "INSERT INTO event_search (event_id, room_id, key, vector)"
+                    " VALUES (?,?,?,to_tsvector('english', ?))"
+                )
+            elif isinstance(self.database_engine, Sqlite3Engine):
+                sql = (
+                    "INSERT INTO event_search (event_id, room_id, key, value)"
+                    " VALUES (?,?,?,?)"
+                )
+            else:
+                # This should be unreachable.
+                raise Exception("Unrecognized database engine")
+
+            for index in range(0, len(event_search_rows), INSERT_CLUMP_SIZE):
+                clump = event_search_rows[index:index + INSERT_CLUMP_SIZE]
+                txn.executemany(sql, clump)
+
+            progress = {
+                "target_min_stream_id_inclusive": target_min_stream_id,
+                "max_stream_id_exclusive": min_stream_id,
+                "rows_inserted": rows_inserted + len(event_search_rows)
+            }
+
+            self._background_update_progress_txn(
+                txn, self.EVENT_SEARCH_UPDATE_NAME, progress
+            )
+
+            return len(event_search_rows)
+
+        result = yield self.runInteraction(
+            self.EVENT_SEARCH_UPDATE_NAME, reindex_search_txn
+        )
+
+        if not result:
+            yield self._end_background_update(self.EVENT_SEARCH_UPDATE_NAME)
+
+        defer.returnValue(result)
+
+    @defer.inlineCallbacks
+    def search_msgs(self, room_ids, search_term, keys):
+        """Performs a full text search over events with given keys.
+
+        Args:
+            room_ids (list): List of room ids to search in
+            search_term (str): Search term to search for
+            keys (list): List of keys to search in, currently supports
+                "content.body", "content.name", "content.topic"
+
+        Returns:
+            A deferred list of dicts, each with "event" and "rank" keys.
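+
+        Illustrative usage (the room id is a placeholder):
+
+            results = yield store.search_msgs(
+                ["!room:example.com"], "dinner", ["content.body"]
+            )
+            best = results[0]["event"] if results else None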
+        """
+        clauses = []
+        args = []
+
+        # Make sure we don't explode because the person is in too many rooms.
+        # We filter the results below regardless.
+        if len(room_ids) < 500:
+            clauses.append(
+                "room_id IN (%s)" % (",".join(["?"] * len(room_ids)),)
+            )
+            args.extend(room_ids)
+
+        local_clauses = []
+        for key in keys:
+            local_clauses.append("key = ?")
+            args.append(key)
+
+        clauses.append(
+            "(%s)" % (" OR ".join(local_clauses),)
+        )
+
+        if isinstance(self.database_engine, PostgresEngine):
+            sql = (
+                "SELECT ts_rank_cd(vector, query) AS rank, room_id, event_id"
+                " FROM plainto_tsquery('english', ?) as query, event_search"
+                " WHERE vector @@ query"
+            )
+        elif isinstance(self.database_engine, Sqlite3Engine):
+            sql = (
+                "SELECT rank(matchinfo(event_search)) as rank, room_id, event_id"
+                " FROM event_search"
+                " WHERE value MATCH ?"
+            )
+        else:
+            # This should be unreachable.
+            raise Exception("Unrecognized database engine")
+
+        for clause in clauses:
+            sql += " AND " + clause
+
+        # We add an arbitrary limit here to ensure we don't try to pull the
+        # entire table from the database.
+        sql += " ORDER BY rank DESC LIMIT 500"
+
+        results = yield self._execute(
+            "search_msgs", self.cursor_to_dict, sql, *([search_term] + args)
+        )
+
+        results = filter(lambda row: row["room_id"] in room_ids, results)
+
+        events = yield self._get_events([r["event_id"] for r in results])
+
+        event_map = {
+            ev.event_id: ev
+            for ev in events
+        }
+
+        defer.returnValue([
+            {
+                "event": event_map[r["event_id"]],
+                "rank": r["rank"],
+            }
+            for r in results
+            if r["event_id"] in event_map
+        ])
+
+    @defer.inlineCallbacks
+    def search_room(self, room_id, search_term, keys, limit, pagination_token=None):
+        """Performs a full text search over events with given keys.
+
+        Args:
+            room_id (str): The room_id to search in
+            search_term (str): Search term to search for
+            keys (list): List of keys to search in, currently supports
+                "content.body", "content.name", "content.topic"
+            limit (int): The maximum number of results to return
+            pagination_token (str): A pagination token previously returned by
+                this method, of the form
+                "<topological_ordering>,<stream_ordering>"
+
+        Returns:
+            A deferred list of dicts, each with "event", "rank" and
+            "pagination_token" keys.
+        """
+        clauses = []
+        args = [search_term, room_id]
+
+        local_clauses = []
+        for key in keys:
+            local_clauses.append("key = ?")
+            args.append(key)
+
+        clauses.append(
+            "(%s)" % (" OR ".join(local_clauses),)
+        )
+
+        if pagination_token:
+            try:
+                topo, stream = pagination_token.split(",")
+                topo = int(topo)
+                stream = int(stream)
+            except ValueError:
+                raise SynapseError(400, "Invalid pagination token")
+
+            clauses.append(
+                "(topological_ordering < ?"
+                " OR (topological_ordering = ? AND stream_ordering < ?))"
+            )
+            args.extend([topo, topo, stream])
+
+        if isinstance(self.database_engine, PostgresEngine):
+            sql = (
+                "SELECT ts_rank_cd(vector, query) as rank,"
+                " topological_ordering, stream_ordering, room_id, event_id"
+                " FROM plainto_tsquery('english', ?) as query, event_search"
+                " NATURAL JOIN events"
+                " WHERE vector @@ query AND room_id = ?"
+            )
+        elif isinstance(self.database_engine, Sqlite3Engine):
+            # We use CROSS JOIN here to ensure we use the right indexes.
+            # https://sqlite.org/optoverview.html#crossjoin
+            #
+            # We want to use the full text search index on event_search to
+            # extract all possible matches first, then lookup those matches
+            # in the events table to get the topological ordering. We need
+            # to use the indexes in this order because sqlite refuses to
+            # MATCH unless it uses the full text search index
+            sql = (
+                "SELECT rank(matchinfo) as rank, room_id, event_id,"
+                " topological_ordering, stream_ordering"
+                " FROM (SELECT key, event_id, matchinfo(event_search) as matchinfo"
+                " FROM event_search"
+                " WHERE value MATCH ?"
+                " )"
+                " CROSS JOIN events USING (event_id)"
+                " WHERE room_id = ?"
+            )
+        else:
+            # This should be unreachable.
+            raise Exception("Unrecognized database engine")
+
+        for clause in clauses:
+            sql += " AND " + clause
+
+        # We add an arbitrary limit here to ensure we don't try to pull the
+        # entire table from the database.
+        sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"
+
+        args.append(limit)
+
+        results = yield self._execute(
+            "search_rooms", self.cursor_to_dict, sql, *args
+        )
+
+        events = yield self._get_events([r["event_id"] for r in results])
+
+        event_map = {
+            ev.event_id: ev
+            for ev in events
+        }
+
+        defer.returnValue([
+            {
+                "event": event_map[r["event_id"]],
+                "rank": r["rank"],
+                "pagination_token": "%s,%s" % (
+                    r["topological_ordering"], r["stream_ordering"]
+                ),
+            }
+            for r in results
+            if r["event_id"] in event_map
+        ])
diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py
index 4f15e534b4..b070be504d 100644
--- a/synapse/storage/signatures.py
+++ b/synapse/storage/signatures.py
@@ -17,48 +17,13 @@ from twisted.internet import defer
 
 from _base import SQLBaseStore
 
-from syutil.base64util import encode_base64
+from unpaddedbase64 import encode_base64
 from synapse.crypto.event_signing import compute_event_reference_hash
 
 
 class SignatureStore(SQLBaseStore):
     """Persistence for event signatures and hashes"""
 
-    def _get_event_content_hashes_txn(self, txn, event_id):
-        """Get all the hashes for a given Event.
-        Args:
-            txn (cursor):
-            event_id (str): Id for the Event.
-        Returns:
-            A dict of algorithm -> hash.
-        """
-        query = (
-            "SELECT algorithm, hash"
-            " FROM event_content_hashes"
-            " WHERE event_id = ?"
-        )
-        txn.execute(query, (event_id, ))
-        return dict(txn.fetchall())
-
-    def _store_event_content_hash_txn(self, txn, event_id, algorithm,
-                                      hash_bytes):
-        """Store a hash for a Event
-        Args:
-            txn (cursor):
-            event_id (str): Id for the Event.
-            algorithm (str): Hashing algorithm.
-            hash_bytes (bytes): Hash function output bytes.
-        """
-        self._simple_insert_txn(
-            txn,
-            "event_content_hashes",
-            {
-                "event_id": event_id,
-                "algorithm": algorithm,
-                "hash": buffer(hash_bytes),
-            },
-        )
-
     def get_event_reference_hashes(self, event_ids):
         def f(txn):
             return [
@@ -123,80 +88,3 @@ class SignatureStore(SQLBaseStore):
             table="event_reference_hashes",
             values=vals,
         )
-
-    def _get_event_signatures_txn(self, txn, event_id):
-        """Get all the signatures for a given PDU.
-        Args:
-            txn (cursor):
-            event_id (str): Id for the Event.
-        Returns:
-            A dict of sig name -> dict(key_id -> signature_bytes)
-        """
-        query = (
-            "SELECT signature_name, key_id, signature"
-            " FROM event_signatures"
-            " WHERE event_id = ? "
-        )
-        txn.execute(query, (event_id, ))
-        rows = txn.fetchall()
-
-        res = {}
-
-        for name, key, sig in rows:
-            res.setdefault(name, {})[key] = sig
-
-        return res
-
-    def _store_event_signature_txn(self, txn, event_id, signature_name, key_id,
-                                   signature_bytes):
-        """Store a signature from the origin server for a PDU.
-        Args:
-            txn (cursor):
-            event_id (str): Id for the Event.
-            origin (str): origin of the Event.
-            key_id (str): Id for the signing key.
-            signature (bytes): The signature.
-        """
-        self._simple_insert_txn(
-            txn,
-            "event_signatures",
-            {
-                "event_id": event_id,
-                "signature_name": signature_name,
-                "key_id": key_id,
-                "signature": buffer(signature_bytes),
-            },
-        )
-
-    def _get_prev_event_hashes_txn(self, txn, event_id):
-        """Get all the hashes for previous PDUs of a PDU
-        Args:
-            txn (cursor):
-            event_id (str): Id for the Event.
-        Returns:
-            dict of (pdu_id, origin) -> dict of algorithm -> hash_bytes.
-        """
-        query = (
-            "SELECT prev_event_id, algorithm, hash"
-            " FROM event_edge_hashes"
-            " WHERE event_id = ?"
-        )
-        txn.execute(query, (event_id, ))
-        results = {}
-        for prev_event_id, algorithm, hash_bytes in txn.fetchall():
-            hashes = results.setdefault(prev_event_id, {})
-            hashes[algorithm] = hash_bytes
-        return results
-
-    def _store_prev_event_hash_txn(self, txn, event_id, prev_event_id,
-                                   algorithm, hash_bytes):
-        self._simple_insert_txn(
-            txn,
-            "event_edge_hashes",
-            {
-                "event_id": event_id,
-                "prev_event_id": prev_event_id,
-                "algorithm": algorithm,
-                "hash": buffer(hash_bytes),
-            },
-        )
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index 9630efcfcc..80e9b63f50 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -20,8 +20,6 @@ from synapse.util.caches.descriptors import (
 
 from twisted.internet import defer
 
-from synapse.util.stringutils import random_string
-
 import logging
 
 logger = logging.getLogger(__name__)
@@ -56,7 +54,7 @@ class StateStore(SQLBaseStore):
             defer.returnValue({})
 
         event_to_groups = yield self._get_state_group_for_events(
-            room_id, event_ids,
+            event_ids,
         )
 
         groups = set(event_to_groups.values())
@@ -210,13 +208,12 @@ class StateStore(SQLBaseStore):
         )
 
     @defer.inlineCallbacks
-    def get_state_for_events(self, room_id, event_ids, types):
+    def get_state_for_events(self, event_ids, types):
         """Given a list of event_ids and type tuples, return a list of state
         dicts for each event. The state dicts will only have the type/state_keys
         that are in the `types` list.
 
         Args:
-            room_id (str)
             event_ids (list)
             types (list): List of (type, state_key) tuples which are used to
                 filter the state fetched. `state_key` may be None, which matches
@@ -227,7 +224,7 @@ class StateStore(SQLBaseStore):
             The dicts are mappings from (type, state_key) -> state_events
         """
         event_to_groups = yield self._get_state_group_for_events(
-            room_id, event_ids,
+            event_ids,
         )
 
         groups = set(event_to_groups.values())
@@ -240,6 +237,20 @@ class StateStore(SQLBaseStore):
 
         defer.returnValue({event: event_to_state[event] for event in event_ids})
 
+    @defer.inlineCallbacks
+    def get_state_for_event(self, event_id, types=None):
+        """
+        Get the state dict corresponding to a particular event
+
+        :param str event_id: event whose state should be returned
+        :param list[(str, str)]|None types: List of (type, state_key) tuples
+            which are used to filter the state fetched. May be None, which
+            matches any key
+        :return: a deferred dict from (type, state_key) -> state_event
+        """
+        state_map = yield self.get_state_for_events([event_id], types)
+        defer.returnValue(state_map[event_id])
+
     @cached(num_args=2, lru=True, max_entries=10000)
     def _get_state_group_for_event(self, room_id, event_id):
         return self._simple_select_one_onecol(
@@ -253,8 +264,8 @@ class StateStore(SQLBaseStore):
         )
 
     @cachedList(cache=_get_state_group_for_event.cache, list_name="event_ids",
-                num_args=2)
-    def _get_state_group_for_events(self, room_id, event_ids):
+                num_args=1)
+    def _get_state_group_for_events(self, event_ids):
         """Returns mapping event_id -> state_group
         """
         def f(txn):
@@ -428,7 +439,3 @@ class StateStore(SQLBaseStore):
             }
 
         defer.returnValue(results)
-
-
-def _make_group_id(clock):
-    return str(int(clock.time_msec())) + random_string(5)
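The new get_state_for_event above is a thin convenience wrapper over the batch get_state_for_events API. A minimal, hedged usage sketch (the `store` name and calling context are assumptions, not part of this patch):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def member_state_for(store, event_id):
        # Restrict to m.room.member state; a None state_key matches any key.
        state = yield store.get_state_for_event(
            event_id, types=[("m.room.member", None)],
        )
        # state maps (type, state_key) -> state_event
        defer.returnValue(state)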
diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py
index d7fe423f5a..be8ba76aae 100644
--- a/synapse/storage/stream.py
+++ b/synapse/storage/stream.py
@@ -23,7 +23,7 @@ paginate bacwards.
 
 This is implemented by keeping two ordering columns: stream_ordering and
 topological_ordering. Stream ordering is basically insertion/received order
-(except for events from backfill requests). The topolgical_ordering is a
+(except for events from backfill requests). The topological_ordering is a
 weak ordering of events based on the pdu graph.
 
 This means that we have to have two different types of tokens, depending on
@@ -158,16 +158,40 @@ class StreamStore(SQLBaseStore):
         defer.returnValue(results)
 
     @log_function
-    def get_room_events_stream(self, user_id, from_key, to_key, room_id,
-                               limit=0, with_feedback=False):
-        # TODO (erikj): Handle compressed feedback
-
-        current_room_membership_sql = (
-            "SELECT m.room_id FROM room_memberships as m "
-            " INNER JOIN current_state_events as c"
-            " ON m.event_id = c.event_id AND c.state_key = m.user_id"
-            " WHERE m.user_id = ? AND m.membership = 'join'"
-        )
+    def get_room_events_stream(
+        self,
+        user_id,
+        from_key,
+        to_key,
+        limit=0,
+        is_guest=False,
+        room_ids=None
+    ):
+        room_ids = room_ids or []
+        room_ids = list(room_ids)
+        if is_guest:
+            current_room_membership_sql = (
+                "SELECT c.room_id FROM history_visibility AS h"
+                " INNER JOIN current_state_events AS c"
+                " ON h.event_id = c.event_id"
+                " WHERE c.room_id IN (%s) AND h.history_visibility = 'world_readable'" % (
+                    ",".join(map(lambda _: "?", room_ids))
+                )
+            )
+            current_room_membership_args = room_ids
+        else:
+            current_room_membership_sql = (
+                "SELECT m.room_id FROM room_memberships as m "
+                " INNER JOIN current_state_events as c"
+                " ON m.event_id = c.event_id AND c.state_key = m.user_id"
+                " WHERE m.user_id = ? AND m.membership = 'join'"
+            )
+            current_room_membership_args = [user_id]
+            if room_ids:
+                current_room_membership_sql += " AND m.room_id in (%s)" % (
+                    ",".join(map(lambda _: "?", room_ids))
+                )
+                current_room_membership_args = [user_id] + room_ids
 
         # We also want to get any membership events about that user, e.g.
         # invites or leave notifications.
@@ -176,6 +200,7 @@ class StreamStore(SQLBaseStore):
             "INNER JOIN current_state_events as c ON m.event_id = c.event_id "
             "WHERE m.user_id = ? "
         )
+        membership_args = [user_id]
 
         if limit:
             limit = max(limit, MAX_STREAM_SIZE)
@@ -202,7 +227,9 @@ class StreamStore(SQLBaseStore):
         }
 
         def f(txn):
-            txn.execute(sql, (False, user_id, user_id, from_id.stream, to_id.stream,))
+            args = ([False] + current_room_membership_args + membership_args +
+                    [from_id.stream, to_id.stream])
+            txn.execute(sql, args)
 
             rows = self.cursor_to_dict(txn)
 
@@ -227,10 +254,7 @@ class StreamStore(SQLBaseStore):
 
     @defer.inlineCallbacks
     def paginate_room_events(self, room_id, from_key, to_key=None,
-                             direction='b', limit=-1,
-                             with_feedback=False):
-        # TODO (erikj): Handle compressed feedback
-
+                             direction='b', limit=-1):
         # Tokens really represent positions between elements, but we use
         # the convention of pointing to the event before the gap. Hence
         # we have a bit of asymmetry when it comes to equalities.
@@ -302,7 +326,6 @@ class StreamStore(SQLBaseStore):
 
     @cachedInlineCallbacks(num_args=4)
     def get_recent_events_for_room(self, room_id, limit, end_token, from_token=None):
-        # TODO (erikj): Handle compressed feedback
 
         end_token = RoomStreamToken.parse_stream_token(end_token)
 
@@ -379,6 +402,38 @@ class StreamStore(SQLBaseStore):
             )
             defer.returnValue("t%d-%d" % (topo, token))
 
+    def get_stream_token_for_event(self, event_id):
+        """The stream token for an event
+        Args:
+            event_id(str): The id of the event to look up a stream token for.
+        Raises:
+            StoreError if the event wasn't in the database.
+        Returns:
+            A deferred "s%d" stream token.
+        """
+        return self._simple_select_one_onecol(
+            table="events",
+            keyvalues={"event_id": event_id},
+            retcol="stream_ordering",
+        ).addCallback(lambda row: "s%d" % (row,))
+
+    def get_topological_token_for_event(self, event_id):
+        """The stream token for an event
+        Args:
+            event_id(str): The id of the event to look up a stream token for.
+        Raises:
+            StoreError if the event wasn't in the database.
+        Returns:
+            A deferred "t%d-%d" topological token.
+        """
+        return self._simple_select_one(
+            table="events",
+            keyvalues={"event_id": event_id},
+            retcols=("stream_ordering", "topological_ordering"),
+        ).addCallback(lambda row: "t%d-%d" % (
+            row["topological_ordering"], row["stream_ordering"],)
+        )
+
     def _get_max_topological_txn(self, txn):
         txn.execute(
             "SELECT MAX(topological_ordering) FROM events"
@@ -410,3 +465,138 @@ class StreamStore(SQLBaseStore):
             internal = event.internal_metadata
             internal.before = str(RoomStreamToken(topo, stream - 1))
             internal.after = str(RoomStreamToken(topo, stream))
+
+    @defer.inlineCallbacks
+    def get_events_around(self, room_id, event_id, before_limit, after_limit):
+        """Retrieve events and pagination tokens around a given event in a
+        room.
+
+        Args:
+            room_id (str)
+            event_id (str)
+            before_limit (int)
+            after_limit (int)
+
+        Returns:
+            dict with keys "events_before", "events_after", "start" and "end".
+        """
+
+        results = yield self.runInteraction(
+            "get_events_around", self._get_events_around_txn,
+            room_id, event_id, before_limit, after_limit
+        )
+
+        events_before = yield self._get_events(
+            [e for e in results["before"]["event_ids"]],
+            get_prev_content=True
+        )
+
+        events_after = yield self._get_events(
+            [e for e in results["after"]["event_ids"]],
+            get_prev_content=True
+        )
+
+        defer.returnValue({
+            "events_before": events_before,
+            "events_after": events_after,
+            "start": results["before"]["token"],
+            "end": results["after"]["token"],
+        })
+
+    def _get_events_around_txn(self, txn, room_id, event_id, before_limit, after_limit):
+        """Retrieves event_ids and pagination tokens around a given event in a
+        room.
+
+        Args:
+            room_id (str)
+            event_id (str)
+            before_limit (int)
+            after_limit (int)
+
+        Returns:
+            dict with keys "before" and "after", each holding "event_ids" and
+            a pagination "token".
+        """
+
+        results = self._simple_select_one_txn(
+            txn,
+            "events",
+            keyvalues={
+                "event_id": event_id,
+                "room_id": room_id,
+            },
+            retcols=["stream_ordering", "topological_ordering"],
+        )
+
+        stream_ordering = results["stream_ordering"]
+        topological_ordering = results["topological_ordering"]
+
+        query_before = (
+            "SELECT topological_ordering, stream_ordering, event_id FROM events"
+            " WHERE room_id = ? AND (topological_ordering < ?"
+            " OR (topological_ordering = ? AND stream_ordering < ?))"
+            " ORDER BY topological_ordering DESC, stream_ordering DESC"
+            " LIMIT ?"
+        )
+
+        query_after = (
+            "SELECT topological_ordering, stream_ordering, event_id FROM events"
+            " WHERE room_id = ? AND (topological_ordering > ?"
+            " OR (topological_ordering = ? AND stream_ordering > ?))"
+            " ORDER BY topological_ordering ASC, stream_ordering ASC"
+            " LIMIT ?"
+        )
+
+        txn.execute(
+            query_before,
+            (
+                room_id, topological_ordering, topological_ordering,
+                stream_ordering, before_limit,
+            )
+        )
+
+        rows = self.cursor_to_dict(txn)
+        events_before = [r["event_id"] for r in rows]
+
+        if rows:
+            start_token = str(RoomStreamToken(
+                rows[0]["topological_ordering"],
+                rows[0]["stream_ordering"] - 1,
+            ))
+        else:
+            start_token = str(RoomStreamToken(
+                topological_ordering,
+                stream_ordering - 1,
+            ))
+
+        txn.execute(
+            query_after,
+            (
+                room_id, topological_ordering, topological_ordering,
+                stream_ordering, after_limit,
+            )
+        )
+
+        rows = self.cursor_to_dict(txn)
+        events_after = [r["event_id"] for r in rows]
+
+        if rows:
+            end_token = str(RoomStreamToken(
+                rows[-1]["topological_ordering"],
+                rows[-1]["stream_ordering"],
+            ))
+        else:
+            end_token = str(RoomStreamToken(
+                topological_ordering,
+                stream_ordering,
+            ))
+
+        return {
+            "before": {
+                "event_ids": events_before,
+                "token": start_token,
+            },
+            "after": {
+                "event_ids": events_after,
+                "token": end_token,
+            },
+        }
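The start and end tokens computed above are positioned just outside the returned window, so they can be fed straight back into paginate_room_events to keep paging. A hedged sketch (`store` and the (events, token) return shape are assumptions based on this file):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def context_then_older(store, room_id, event_id):
        context = yield store.get_events_around(room_id, event_id, 10, 10)
        # Walk further back in time from the window's start token.
        older, next_token = yield store.paginate_room_events(
            room_id, from_key=context["start"], direction='b', limit=20,
        )
        defer.returnValue((context, older, next_token))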
diff --git a/synapse/storage/tags.py b/synapse/storage/tags.py
new file mode 100644
index 0000000000..bf695b7800
--- /dev/null
+++ b/synapse/storage/tags.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import SQLBaseStore
+from synapse.util.caches.descriptors import cached
+from twisted.internet import defer
+from .util.id_generators import StreamIdGenerator
+
+import ujson as json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class TagsStore(SQLBaseStore):
+    def __init__(self, hs):
+        super(TagsStore, self).__init__(hs)
+
+        self._private_user_data_id_gen = StreamIdGenerator(
+            "private_user_data_max_stream_id", "stream_id"
+        )
+
+    def get_max_private_user_data_stream_id(self):
+        """Get the current max stream id for the private user data stream
+
+        Returns:
+            A deferred int.
+        """
+        return self._private_user_data_id_gen.get_max_token(self)
+
+    @cached()
+    def get_tags_for_user(self, user_id):
+        """Get all the tags for a user.
+
+
+        Args:
+            user_id(str): The user to get the tags for.
+        Returns:
+            A deferred dict mapping from room_id strings to lists of tag
+            strings.
+        """
+
+        deferred = self._simple_select_list(
+            "room_tags", {"user_id": user_id}, ["room_id", "tag", "content"]
+        )
+
+        @deferred.addCallback
+        def tags_by_room(rows):
+            tags_by_room = {}
+            for row in rows:
+                room_tags = tags_by_room.setdefault(row["room_id"], {})
+                room_tags[row["tag"]] = json.loads(row["content"])
+            return tags_by_room
+
+        return deferred
+
+    @defer.inlineCallbacks
+    def get_updated_tags(self, user_id, stream_id):
+        """Get all the tags for the rooms where the tags have changed since the
+        given version
+
+        Args:
+            user_id(str): The user to get the tags for.
+            stream_id(int): The earliest update to get for the user.
+        Returns:
+            A deferred dict mapping from room_id strings to dicts mapping
+            tag strings to tag content, for all the rooms that changed
+            since the stream_id token.
+        """
+        def get_updated_tags_txn(txn):
+            sql = (
+                "SELECT room_id from room_tags_revisions"
+                " WHERE user_id = ? AND stream_id > ?"
+            )
+            txn.execute(sql, (user_id, stream_id))
+            room_ids = [row[0] for row in txn.fetchall()]
+            return room_ids
+
+        room_ids = yield self.runInteraction(
+            "get_updated_tags", get_updated_tags_txn
+        )
+
+        results = {}
+        if room_ids:
+            tags_by_room = yield self.get_tags_for_user(user_id)
+            for room_id in room_ids:
+                results[room_id] = tags_by_room.get(room_id, {})
+
+        defer.returnValue(results)
+
+    def get_tags_for_room(self, user_id, room_id):
+        """Get all the tags for the given room
+        Args:
+            user_id(str): The user to get tags for
+            room_id(str): The room to get tags for
+        Returns:
+            A deferred dict mapping tag strings to tag content.
+        """
+        return self._simple_select_list(
+            table="room_tags",
+            keyvalues={"user_id": user_id, "room_id": room_id},
+            retcols=("tag", "content"),
+            desc="get_tags_for_room",
+        ).addCallback(lambda rows: {
+            row["tag"]: json.loads(row["content"]) for row in rows
+        })
+
+    @defer.inlineCallbacks
+    def add_tag_to_room(self, user_id, room_id, tag, content):
+        """Add a tag to a room for a user.
+        Args:
+            user_id(str): The user to add a tag for.
+            room_id(str): The room to add a tag for.
+            tag(str): The tag name to add.
+            content(dict): A json object to associate with the tag.
+        Returns:
+            A deferred that completes once the tag has been added.
+        """
+        content_json = json.dumps(content)
+
+        def add_tag_txn(txn, next_id):
+            self._simple_upsert_txn(
+                txn,
+                table="room_tags",
+                keyvalues={
+                    "user_id": user_id,
+                    "room_id": room_id,
+                    "tag": tag,
+                },
+                values={
+                    "content": content_json,
+                }
+            )
+            self._update_revision_txn(txn, user_id, room_id, next_id)
+
+        with (yield self._private_user_data_id_gen.get_next(self)) as next_id:
+            yield self.runInteraction("add_tag", add_tag_txn, next_id)
+
+        self.get_tags_for_user.invalidate((user_id,))
+
+        result = yield self._private_user_data_id_gen.get_max_token(self)
+        defer.returnValue(result)
+
+    @defer.inlineCallbacks
+    def remove_tag_from_room(self, user_id, room_id, tag):
+        """Remove a tag from a room for a user.
+        Returns:
+            A deferred that completes once the tag has been removed
+        """
+        def remove_tag_txn(txn, next_id):
+            sql = (
+                "DELETE FROM room_tags "
+                " WHERE user_id = ? AND room_id = ? AND tag = ?"
+            )
+            txn.execute(sql, (user_id, room_id, tag))
+            self._update_revision_txn(txn, user_id, room_id, next_id)
+
+        with (yield self._private_user_data_id_gen.get_next(self)) as next_id:
+            yield self.runInteraction("remove_tag", remove_tag_txn, next_id)
+
+        self.get_tags_for_user.invalidate((user_id,))
+
+        result = yield self._private_user_data_id_gen.get_max_token(self)
+        defer.returnValue(result)
+
+    def _update_revision_txn(self, txn, user_id, room_id, next_id):
+        """Update the latest revision of the tags for the given user and room.
+
+        Args:
+            txn: The database cursor
+            user_id(str): The ID of the user.
+            room_id(str): The ID of the room.
+            next_id(int): The revision to advance to.
+        """
+
+        update_max_id_sql = (
+            "UPDATE private_user_data_max_stream_id"
+            " SET stream_id = ?"
+            " WHERE stream_id < ?"
+        )
+        txn.execute(update_max_id_sql, (next_id, next_id))
+
+        update_sql = (
+            "UPDATE room_tags_revisions"
+            " SET stream_id = ?"
+            " WHERE user_id = ?"
+            " AND room_id = ?"
+        )
+        txn.execute(update_sql, (next_id, user_id, room_id))
+
+        if txn.rowcount == 0:
+            insert_sql = (
+                "INSERT INTO room_tags_revisions (user_id, room_id, stream_id)"
+                " VALUES (?, ?, ?)"
+            )
+            try:
+                txn.execute(insert_sql, (user_id, room_id, next_id))
+            except self.database_engine.module.IntegrityError:
+                # Ignore insertion errors. It doesn't matter if the row wasn't
+                # inserted because if two updates happened concurrently the one
+                # with the higher stream_id will not be reported to a client
+                # unless the previous update has completed. It doesn't matter
+                # which stream_id ends up in the table, as long as it is higher
+                # than the id that the client has.
+                pass
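A minimal usage sketch for the new store, assuming `store` mixes in TagsStore and a Twisted reactor is running; tag content is an arbitrary JSON-serialisable dict:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def tag_and_read(store, user_id, room_id):
        # add_tag_to_room bumps the private user data stream and yields
        # the new max stream id.
        stream_id = yield store.add_tag_to_room(
            user_id, room_id, "m.favourite", {"order": 0.5},
        )
        tags = yield store.get_tags_for_room(user_id, room_id)
        # tags == {"m.favourite": {"order": 0.5}}
        defer.returnValue((stream_id, tags))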
diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py
index c8c7e6591a..ad099775eb 100644
--- a/synapse/storage/transactions.py
+++ b/synapse/storage/transactions.py
@@ -18,7 +18,7 @@ from synapse.util.caches.descriptors import cached
 
 from collections import namedtuple
 
-from syutil.jsonutil import encode_canonical_json
+from canonicaljson import encode_canonical_json
 import logging
 
 logger = logging.getLogger(__name__)
@@ -59,7 +59,7 @@ class TransactionStore(SQLBaseStore):
             allow_none=True,
         )
 
-        if result and result.response_code:
+        if result and result["response_code"]:
             return result["response_code"], result["response_json"]
         else:
             return None
@@ -253,16 +253,6 @@ class TransactionStore(SQLBaseStore):
             retry_interval (int) - how long until next retry in ms
         """
 
-        # As this is the new value, we might as well prefill the cache
-        self.get_destination_retry_timings.prefill(
-            destination,
-            {
-                "destination": destination,
-                "retry_last_ts": retry_last_ts,
-                "retry_interval": retry_interval
-            },
-        )
-
         # XXX: we could chose to not bother persisting this if our cache thinks
         # this is a NOOP
         return self.runInteraction(
@@ -275,31 +265,25 @@ class TransactionStore(SQLBaseStore):
 
     def _set_destination_retry_timings(self, txn, destination,
                                        retry_last_ts, retry_interval):
-        query = (
-            "UPDATE destinations"
-            " SET retry_last_ts = ?, retry_interval = ?"
-            " WHERE destination = ?"
-        )
+        txn.call_after(self.get_destination_retry_timings.invalidate, (destination,))
 
-        txn.execute(
-            query,
-            (
-                retry_last_ts, retry_interval, destination,
-            )
+        self._simple_upsert_txn(
+            txn,
+            "destinations",
+            keyvalues={
+                "destination": destination,
+            },
+            values={
+                "retry_last_ts": retry_last_ts,
+                "retry_interval": retry_interval,
+            },
+            insertion_values={
+                "destination": destination,
+                "retry_last_ts": retry_last_ts,
+                "retry_interval": retry_interval,
+            }
         )
 
-        if txn.rowcount == 0:
-            # destination wasn't already in table. Insert it.
-            self._simple_insert_txn(
-                txn,
-                table="destinations",
-                values={
-                    "destination": destination,
-                    "retry_last_ts": retry_last_ts,
-                    "retry_interval": retry_interval,
-                }
-            )
-
     def get_destinations_needing_retry(self):
         """Get all destinations which are due a retry for sending a transaction.
 
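_simple_upsert_txn itself is not shown in this diff; the UPDATE-then-INSERT block it replaces above is roughly the pattern it implements. A hedged, generic sketch of that upsert pattern (illustrative only, not Synapse's actual helper):

    def upsert_txn(txn, table, keyvalues, values):
        # Try an UPDATE first; fall back to INSERT if no row matched.
        set_clause = ", ".join("%s = ?" % k for k in values)
        where_clause = " AND ".join("%s = ?" % k for k in keyvalues)
        txn.execute(
            "UPDATE %s SET %s WHERE %s" % (table, set_clause, where_clause),
            list(values.values()) + list(keyvalues.values()),
        )
        if txn.rowcount == 0:
            allvalues = dict(values)
            allvalues.update(keyvalues)
            txn.execute(
                "INSERT INTO %s (%s) VALUES (%s)" % (
                    table,
                    ", ".join(allvalues),
                    ", ".join("?" for _ in allvalues),
                ),
                list(allvalues.values()),
            )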
diff --git a/synapse/streams/config.py b/synapse/streams/config.py
index 2ec7c5403b..167bfe0de3 100644
--- a/synapse/streams/config.py
+++ b/synapse/streams/config.py
@@ -34,6 +34,11 @@ class SourcePaginationConfig(object):
         self.direction = 'f' if direction == 'f' else 'b'
         self.limit = int(limit) if limit is not None else None
 
+    def __repr__(self):
+        return (
+            "StreamConfig(from_key=%r, to_key=%r, direction=%r, limit=%r)"
+        ) % (self.from_key, self.to_key, self.direction, self.limit)
+
 
 class PaginationConfig(object):
 
@@ -94,10 +99,10 @@ class PaginationConfig(object):
             logger.exception("Failed to create pagination config")
             raise SynapseError(400, "Invalid request.")
 
-    def __str__(self):
+    def __repr__(self):
         return (
-            "<PaginationConfig from_tok=%s, to_tok=%s, "
-            "direction=%s, limit=%s>"
+            "PaginationConfig(from_tok=%r, to_tok=%r,"
+            " direction=%r, limit=%r)"
         ) % (self.from_token, self.to_token, self.direction, self.limit)
 
     def get_source_config(self, source_name):
diff --git a/synapse/streams/events.py b/synapse/streams/events.py
index aaa3609aa5..f0d68b5bf2 100644
--- a/synapse/streams/events.py
+++ b/synapse/streams/events.py
@@ -21,22 +21,7 @@ from synapse.handlers.presence import PresenceEventSource
 from synapse.handlers.room import RoomEventSource
 from synapse.handlers.typing import TypingNotificationEventSource
 from synapse.handlers.receipts import ReceiptEventSource
-
-
-class NullSource(object):
-    """This event source never yields any events and its token remains at
-    zero. It may be useful for unit-testing."""
-    def __init__(self, hs):
-        pass
-
-    def get_new_events_for_user(self, user, from_key, limit):
-        return defer.succeed(([], from_key))
-
-    def get_current_key(self, direction='f'):
-        return defer.succeed(0)
-
-    def get_pagination_rows(self, user, pagination_config, key):
-        return defer.succeed(([], pagination_config.from_key))
+from synapse.handlers.private_user_data import PrivateUserDataEventSource
 
 
 class EventSources(object):
@@ -45,6 +30,7 @@ class EventSources(object):
         "presence": PresenceEventSource,
         "typing": TypingNotificationEventSource,
         "receipt": ReceiptEventSource,
+        "private_user_data": PrivateUserDataEventSource,
     }
 
     def __init__(self, hs):
@@ -68,17 +54,8 @@ class EventSources(object):
             receipt_key=(
                 yield self.sources["receipt"].get_current_key()
             ),
+            private_user_data_key=(
+                yield self.sources["private_user_data"].get_current_key()
+            ),
         )
         defer.returnValue(token)
-
-
-class StreamSource(object):
-    def get_new_events_for_user(self, user, from_key, limit):
-        """from_key is the key within this event source."""
-        raise NotImplementedError("get_new_events_for_user")
-
-    def get_current_key(self):
-        raise NotImplementedError("get_current_key")
-
-    def get_pagination_rows(self, user, pagination_config, key):
-        raise NotImplementedError("get_rows")
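With the private_user_data source registered, the combined stream token gains a matching position. A hedged sketch of reading it (`hs` and the method names are assumptions based on this hunk):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def snapshot_token(hs):
        sources = hs.get_event_sources()
        token = yield sources.get_current_token()
        # token.private_user_data_key now sits alongside the room,
        # presence, typing and receipt keys (see synapse/types.py below).
        defer.returnValue(token)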
diff --git a/synapse/types.py b/synapse/types.py
index e190374cbd..28344d8b36 100644
--- a/synapse/types.py
+++ b/synapse/types.py
@@ -47,7 +47,7 @@ class DomainSpecificString(
     @classmethod
     def from_string(cls, s):
         """Parse the string given by 's' into a structure object."""
-        if s[0] != cls.SIGIL:
+        if len(s) < 1 or s[0] != cls.SIGIL:
             raise SynapseError(400, "Expected %s string to start with '%s'" % (
                 cls.__name__, cls.SIGIL,
             ))
@@ -98,10 +98,13 @@ class EventID(DomainSpecificString):
 
 
 class StreamToken(
-    namedtuple(
-        "Token",
-        ("room_key", "presence_key", "typing_key", "receipt_key")
-    )
+    namedtuple("Token", (
+        "room_key",
+        "presence_key",
+        "typing_key",
+        "receipt_key",
+        "private_user_data_key",
+    ))
 ):
     _SEPARATOR = "_"
 
@@ -109,7 +112,7 @@ class StreamToken(
     def from_string(cls, string):
         try:
             keys = string.split(cls._SEPARATOR)
-            if len(keys) == len(cls._fields) - 1:
+            while len(keys) < len(cls._fields):
                 # i.e. old token from before receipt_key
                 keys.append("0")
             return cls(*keys)
@@ -128,13 +131,14 @@ class StreamToken(
         else:
             return int(self.room_key[1:].split("-")[-1])
 
-    def is_after(self, other_token):
+    def is_after(self, other):
         """Does this token contain events that the other doesn't?"""
         return (
-            (other_token.room_stream_id < self.room_stream_id)
-            or (int(other_token.presence_key) < int(self.presence_key))
-            or (int(other_token.typing_key) < int(self.typing_key))
-            or (int(other_token.receipt_key) < int(self.receipt_key))
+            (other.room_stream_id < self.room_stream_id)
+            or (int(other.presence_key) < int(self.presence_key))
+            or (int(other.typing_key) < int(self.typing_key))
+            or (int(other.receipt_key) < int(self.receipt_key))
+            or (int(other.private_user_data_key) < int(self.private_user_data_key))
         )
 
     def copy_and_advance(self, key, new_value):
@@ -209,7 +213,3 @@ class RoomStreamToken(namedtuple("_StreamToken", "topological stream")):
             return "t%d-%d" % (self.topological, self.stream)
         else:
             return "s%d" % (self.stream,)
-
-
-# token_id is the primary key ID of the access token, not the access token itself.
-ClientInfo = namedtuple("ClientInfo", ("device_id", "token_id"))
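Because from_string now pads missing trailing keys with "0", tokens minted before private_user_data_key existed still parse. For illustration:

    from synapse.types import StreamToken

    # An old-style token with only four parts (pre private_user_data_key):
    tok = StreamToken.from_string("s26_217_30_1")
    assert tok.room_key == "s26"
    assert tok.private_user_data_key == "0"   # padded default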
diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index 07ff25cef3..d69c7cb991 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -29,34 +29,6 @@ def unwrapFirstError(failure):
     return failure.value.subFailure
 
 
-def unwrap_deferred(d):
-    """Given a deferred that we know has completed, return its value or raise
-    the failure as an exception
-    """
-    if not d.called:
-        raise RuntimeError("deferred has not finished")
-
-    res = []
-
-    def f(r):
-        res.append(r)
-        return r
-    d.addCallback(f)
-
-    if res:
-        return res[0]
-
-    def f(r):
-        res.append(r)
-        return r
-    d.addErrback(f)
-
-    if res:
-        res[0].raiseException()
-    else:
-        raise RuntimeError("deferred did not call callbacks")
-
-
 class Clock(object):
     """A small utility that obtains current time-of-day so that time may be
     mocked during unit-tests.
@@ -81,6 +53,14 @@ class Clock(object):
         loop.stop()
 
     def call_later(self, delay, callback, *args, **kwargs):
+        """Call something later
+
+        Args:
+            delay(float): How long to wait in seconds.
+            callback(function): Function to call
+            *args: Positional arguments to pass to function.
+            **kwargs: Keyword arguments to pass to function.
+        """
         current_context = LoggingContext.current_context()
 
         def wrapped_callback(*args, **kwargs):
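The docstring above documents existing behaviour; for illustration, a hedged usage sketch (requires a running Twisted reactor; the names are made up):

    clock = Clock()

    def poke(destination):
        print("retrying %s" % (destination,))

    # Fire poke("example.com") after roughly 30 seconds, preserving the
    # current logging context around the callback.
    clock.call_later(30.0, poke, "example.com")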
diff --git a/synapse/util/debug.py b/synapse/util/debug.py
new file mode 100644
index 0000000000..f6a5a841a4
--- /dev/null
+++ b/synapse/util/debug.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer, reactor
+from functools import wraps
+from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
+
+
+def debug_deferreds():
+    """Cause all deferreds to wait for a reactor tick before running their
+    callbacks. This increases the chance of getting a stack trace out of
+    a defer.inlineCallbacks since the code waiting on the deferred will get
+    a chance to add an errback before the deferred runs."""
+
+    # Helper method for retrieving and restoring the current logging context
+    # around a callback.
+    def with_logging_context(fn):
+        context = LoggingContext.current_context()
+
+        def restore_context_callback(x):
+            with PreserveLoggingContext():
+                LoggingContext.thread_local.current_context = context
+                return fn(x)
+
+        return restore_context_callback
+
+    # We are going to modify the __init__ method of defer.Deferred so we
+    # need to get a copy of the old method so we can still call it.
+    old__init__ = defer.Deferred.__init__
+
+    # We need to create a deferred to bounce the callbacks through the reactor
+    # but we don't want to add a callback when we create that deferred, so
+    # we create a new type of deferred that uses the old __init__ method.
+    # This is safe as long as the old __init__ method doesn't invoke an
+    # __init__ using super.
+    class Bouncer(defer.Deferred):
+        __init__ = old__init__
+
+    # We'll add this as a callback to all Deferreds. Twisted will wait until
+    # the bouncer deferred resolves before calling the callbacks of the
+    # original deferred.
+    def bounce_callback(x):
+        bouncer = Bouncer()
+        reactor.callLater(0, with_logging_context(bouncer.callback), x)
+        return bouncer
+
+    # We'll add this as an errback to all Deferreds. Twisted will wait until
+    # the bouncer deferred resolves before calling the errbacks of the
+    # original deferred.
+    def bounce_errback(x):
+        bouncer = Bouncer()
+        reactor.callLater(0, with_logging_context(bouncer.errback), x)
+        return bouncer
+
+    @wraps(old__init__)
+    def new__init__(self, *args, **kwargs):
+        old__init__(self, *args, **kwargs)
+        self.addCallbacks(bounce_callback, bounce_errback)
+
+    defer.Deferred.__init__ = new__init__
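The hook is process-global and intended purely for debugging. A hedged sketch of enabling it at startup (the config flag name is an assumption, not shown in this diff):

    from synapse.util.debug import debug_deferreds

    if config.full_twisted_stacktraces:   # assumed debug flag
        # Must run before any Deferreds are created to catch everything.
        debug_deferreds()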
diff --git a/synapse/util/emailutils.py b/synapse/util/emailutils.py
deleted file mode 100644
index 7f9a77bf44..0000000000
--- a/synapse/util/emailutils.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014, 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-""" This module allows you to send out emails.
-"""
-import email.utils
-import smtplib
-import twisted.python.log
-from email.mime.text import MIMEText
-from email.mime.multipart import MIMEMultipart
-
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class EmailException(Exception):
-    pass
-
-
-def send_email(smtp_server, from_addr, to_addr, subject, body):
-    """Sends an email.
-
-    Args:
-        smtp_server(str): The SMTP server to use.
-        from_addr(str): The address to send from.
-        to_addr(str): The address to send to.
-        subject(str): The subject of the email.
-        body(str): The plain text body of the email.
-    Raises:
-        EmailException if there was a problem sending the mail.
-    """
-    if not smtp_server or not from_addr or not to_addr:
-        raise EmailException("Need SMTP server, from and to addresses. Check"
-                             " the config to set these.")
-
-    msg = MIMEMultipart('alternative')
-    msg['Subject'] = subject
-    msg['From'] = from_addr
-    msg['To'] = to_addr
-    plain_part = MIMEText(body)
-    msg.attach(plain_part)
-
-    raw_from = email.utils.parseaddr(from_addr)[1]
-    raw_to = email.utils.parseaddr(to_addr)[1]
-    if not raw_from or not raw_to:
-        raise EmailException("Couldn't parse from/to address.")
-
-    logger.info("Sending email to %s on server %s with subject %s",
-                to_addr, smtp_server, subject)
-
-    try:
-        smtp = smtplib.SMTP(smtp_server)
-        smtp.sendmail(raw_from, raw_to, msg.as_string())
-        smtp.quit()
-    except Exception as origException:
-        twisted.python.log.err()
-        ese = EmailException()
-        ese.cause = origException
-        raise ese
diff --git a/synapse/util/lockutils.py b/synapse/util/lockutils.py
deleted file mode 100644
index 33edc5c20e..0000000000
--- a/synapse/util/lockutils.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014, 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from twisted.internet import defer
-
-import logging
-
-
-logger = logging.getLogger(__name__)
-
-
-class Lock(object):
-
-    def __init__(self, deferred, key):
-        self._deferred = deferred
-        self.released = False
-        self.key = key
-
-    def release(self):
-        self.released = True
-        self._deferred.callback(None)
-
-    def __del__(self):
-        if not self.released:
-            logger.critical("Lock was destructed but never released!")
-            self.release()
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, type, value, traceback):
-        logger.debug("Releasing lock for key=%r", self.key)
-        self.release()
-
-
-class LockManager(object):
-    """ Utility class that allows us to lock based on a `key` """
-
-    def __init__(self):
-        self._lock_deferreds = {}
-
-    @defer.inlineCallbacks
-    def lock(self, key):
-        """ Allows us to block until it is our turn.
-        Args:
-            key (str)
-        Returns:
-            Lock
-        """
-        new_deferred = defer.Deferred()
-        old_deferred = self._lock_deferreds.get(key)
-        self._lock_deferreds[key] = new_deferred
-
-        if old_deferred:
-            logger.debug("Queueing on lock for key=%r", key)
-            yield old_deferred
-            logger.debug("Obtained lock for key=%r", key)
-        else:
-            logger.debug("Entering uncontended lock for key=%r", key)
-
-        defer.returnValue(Lock(new_deferred, key))
diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py
index a42138f556..2fe6814807 100644
--- a/synapse/util/retryutils.py
+++ b/synapse/util/retryutils.py
@@ -18,6 +18,7 @@ from twisted.internet import defer
 from synapse.api.errors import CodeMessageException
 
 import logging
+import random
 
 
 logger = logging.getLogger(__name__)
@@ -85,8 +86,9 @@ def get_retry_limiter(destination, clock, store, **kwargs):
 
 class RetryDestinationLimiter(object):
     def __init__(self, destination, clock, store, retry_interval,
-                 min_retry_interval=5000, max_retry_interval=60 * 60 * 1000,
-                 multiplier_retry_interval=2,):
+                 min_retry_interval=10 * 60 * 1000,
+                 max_retry_interval=24 * 60 * 60 * 1000,
+                 multiplier_retry_interval=5,):
         """Marks the destination as "down" if an exception is thrown in the
         context, except for CodeMessageException with code < 500.
 
@@ -140,6 +142,7 @@ class RetryDestinationLimiter(object):
             # We couldn't connect.
             if self.retry_interval:
                 self.retry_interval *= self.multiplier_retry_interval
+                self.retry_interval = int(
+                    self.retry_interval * random.uniform(0.8, 1.4)
+                )
 
                 if self.retry_interval >= self.max_retry_interval:
                     self.retry_interval = self.max_retry_interval
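With the new defaults, and the jitter applied to the interval itself (adjusted above so it scales the interval rather than truncating the random factor), a destination that keeps failing backs off from 10 minutes by roughly 4-7x per attempt until hitting the 24 hour cap. A standalone sketch of the schedule:

    import random

    interval = 10 * 60 * 1000                 # min_retry_interval (ms)
    cap = 24 * 60 * 60 * 1000                 # max_retry_interval (ms)
    for attempt in range(5):
        interval = int(interval * 5 * random.uniform(0.8, 1.4))
        interval = min(interval, cap)
        print("attempt %d: retry in %dms" % (attempt, interval))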
diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py
index 4f83db5e84..70d928defe 100644
--- a/tests/api/test_auth.py
+++ b/tests/api/test_auth.py
@@ -19,17 +19,21 @@ from mock import Mock
 
 from synapse.api.auth import Auth
 from synapse.api.errors import AuthError
+from synapse.types import UserID
+from tests.utils import setup_test_homeserver
+
+import pymacaroons
 
 
 class AuthTestCase(unittest.TestCase):
 
+    @defer.inlineCallbacks
     def setUp(self):
         self.state_handler = Mock()
         self.store = Mock()
 
-        self.hs = Mock()
+        self.hs = yield setup_test_homeserver(handlers=None)
         self.hs.get_datastore = Mock(return_value=self.store)
-        self.hs.get_state_handler = Mock(return_value=self.state_handler)
         self.auth = Auth(self.hs)
 
         self.test_user = "@foo:bar"
@@ -40,21 +44,19 @@ class AuthTestCase(unittest.TestCase):
         self.store.get_app_service_by_token = Mock(return_value=None)
         user_info = {
             "name": self.test_user,
-            "device_id": "nothing",
             "token_id": "ditto",
-            "admin": False
         }
-        self.store.get_user_by_token = Mock(return_value=user_info)
+        self.store.get_user_by_access_token = Mock(return_value=user_info)
 
         request = Mock(args={})
         request.args["access_token"] = [self.test_token]
         request.requestHeaders.getRawHeaders = Mock(return_value=[""])
-        (user, info) = yield self.auth.get_user_by_req(request)
+        (user, _, _) = yield self.auth.get_user_by_req(request)
         self.assertEquals(user.to_string(), self.test_user)
 
     def test_get_user_by_req_user_bad_token(self):
         self.store.get_app_service_by_token = Mock(return_value=None)
-        self.store.get_user_by_token = Mock(return_value=None)
+        self.store.get_user_by_access_token = Mock(return_value=None)
 
         request = Mock(args={})
         request.args["access_token"] = [self.test_token]
@@ -66,11 +68,9 @@ class AuthTestCase(unittest.TestCase):
         self.store.get_app_service_by_token = Mock(return_value=None)
         user_info = {
             "name": self.test_user,
-            "device_id": "nothing",
             "token_id": "ditto",
-            "admin": False
         }
-        self.store.get_user_by_token = Mock(return_value=user_info)
+        self.store.get_user_by_access_token = Mock(return_value=user_info)
 
         request = Mock(args={})
         request.requestHeaders.getRawHeaders = Mock(return_value=[""])
@@ -81,17 +81,17 @@ class AuthTestCase(unittest.TestCase):
     def test_get_user_by_req_appservice_valid_token(self):
         app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
         self.store.get_app_service_by_token = Mock(return_value=app_service)
-        self.store.get_user_by_token = Mock(return_value=None)
+        self.store.get_user_by_access_token = Mock(return_value=None)
 
         request = Mock(args={})
         request.args["access_token"] = [self.test_token]
         request.requestHeaders.getRawHeaders = Mock(return_value=[""])
-        (user, info) = yield self.auth.get_user_by_req(request)
+        (user, _, _) = yield self.auth.get_user_by_req(request)
         self.assertEquals(user.to_string(), self.test_user)
 
     def test_get_user_by_req_appservice_bad_token(self):
         self.store.get_app_service_by_token = Mock(return_value=None)
-        self.store.get_user_by_token = Mock(return_value=None)
+        self.store.get_user_by_access_token = Mock(return_value=None)
 
         request = Mock(args={})
         request.args["access_token"] = [self.test_token]
@@ -102,7 +102,7 @@ class AuthTestCase(unittest.TestCase):
     def test_get_user_by_req_appservice_missing_token(self):
         app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
         self.store.get_app_service_by_token = Mock(return_value=app_service)
-        self.store.get_user_by_token = Mock(return_value=None)
+        self.store.get_user_by_access_token = Mock(return_value=None)
 
         request = Mock(args={})
         request.requestHeaders.getRawHeaders = Mock(return_value=[""])
@@ -115,13 +115,13 @@ class AuthTestCase(unittest.TestCase):
         app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
         app_service.is_interested_in_user = Mock(return_value=True)
         self.store.get_app_service_by_token = Mock(return_value=app_service)
-        self.store.get_user_by_token = Mock(return_value=None)
+        self.store.get_user_by_access_token = Mock(return_value=None)
 
         request = Mock(args={})
         request.args["access_token"] = [self.test_token]
         request.args["user_id"] = [masquerading_user_id]
         request.requestHeaders.getRawHeaders = Mock(return_value=[""])
-        (user, info) = yield self.auth.get_user_by_req(request)
+        (user, _, _) = yield self.auth.get_user_by_req(request)
         self.assertEquals(user.to_string(), masquerading_user_id)
 
     def test_get_user_by_req_appservice_valid_token_bad_user_id(self):
@@ -129,7 +129,7 @@ class AuthTestCase(unittest.TestCase):
         app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
         app_service.is_interested_in_user = Mock(return_value=False)
         self.store.get_app_service_by_token = Mock(return_value=app_service)
-        self.store.get_user_by_token = Mock(return_value=None)
+        self.store.get_user_by_access_token = Mock(return_value=None)
 
         request = Mock(args={})
         request.args["access_token"] = [self.test_token]
@@ -137,3 +137,159 @@ class AuthTestCase(unittest.TestCase):
         request.requestHeaders.getRawHeaders = Mock(return_value=[""])
         d = self.auth.get_user_by_req(request)
         self.failureResultOf(d, AuthError)
+
+    @defer.inlineCallbacks
+    def test_get_user_from_macaroon(self):
+        # TODO(danielwh): Remove this mock when we remove the
+        # get_user_by_access_token fallback.
+        self.store.get_user_by_access_token = Mock(
+            return_value={"name": "@baldrick:matrix.org"}
+        )
+
+        user_id = "@baldrick:matrix.org"
+        macaroon = pymacaroons.Macaroon(
+            location=self.hs.config.server_name,
+            identifier="key",
+            key=self.hs.config.macaroon_secret_key)
+        macaroon.add_first_party_caveat("gen = 1")
+        macaroon.add_first_party_caveat("type = access")
+        macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
+        user_info = yield self.auth._get_user_from_macaroon(macaroon.serialize())
+        user = user_info["user"]
+        self.assertEqual(UserID.from_string(user_id), user)
+
+    @defer.inlineCallbacks
+    def test_get_guest_user_from_macaroon(self):
+        user_id = "@baldrick:matrix.org"
+        macaroon = pymacaroons.Macaroon(
+            location=self.hs.config.server_name,
+            identifier="key",
+            key=self.hs.config.macaroon_secret_key)
+        macaroon.add_first_party_caveat("gen = 1")
+        macaroon.add_first_party_caveat("type = access")
+        macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
+        macaroon.add_first_party_caveat("guest = true")
+        serialized = macaroon.serialize()
+
+        user_info = yield self.auth._get_user_from_macaroon(serialized)
+        user = user_info["user"]
+        is_guest = user_info["is_guest"]
+        self.assertEqual(UserID.from_string(user_id), user)
+        self.assertTrue(is_guest)
+
+    @defer.inlineCallbacks
+    def test_get_user_from_macaroon_user_db_mismatch(self):
+        self.store.get_user_by_access_token = Mock(
+            return_value={"name": "@percy:matrix.org"}
+        )
+
+        user = "@baldrick:matrix.org"
+        macaroon = pymacaroons.Macaroon(
+            location=self.hs.config.server_name,
+            identifier="key",
+            key=self.hs.config.macaroon_secret_key)
+        macaroon.add_first_party_caveat("gen = 1")
+        macaroon.add_first_party_caveat("type = access")
+        macaroon.add_first_party_caveat("user_id = %s" % (user,))
+        with self.assertRaises(AuthError) as cm:
+            yield self.auth._get_user_from_macaroon(macaroon.serialize())
+        self.assertEqual(401, cm.exception.code)
+        self.assertIn("User mismatch", cm.exception.msg)
+
+    @defer.inlineCallbacks
+    def test_get_user_from_macaroon_missing_caveat(self):
+        # TODO(danielwh): Remove this mock when we remove the
+        # get_user_by_access_token fallback.
+        self.store.get_user_by_access_token = Mock(
+            return_value={"name": "@baldrick:matrix.org"}
+        )
+
+        macaroon = pymacaroons.Macaroon(
+            location=self.hs.config.server_name,
+            identifier="key",
+            key=self.hs.config.macaroon_secret_key)
+        macaroon.add_first_party_caveat("gen = 1")
+        macaroon.add_first_party_caveat("type = access")
+
+        with self.assertRaises(AuthError) as cm:
+            yield self.auth._get_user_from_macaroon(macaroon.serialize())
+        self.assertEqual(401, cm.exception.code)
+        self.assertIn("No user caveat", cm.exception.msg)
+
+    @defer.inlineCallbacks
+    def test_get_user_from_macaroon_wrong_key(self):
+        # TODO(danielwh): Remove this mock when we remove the
+        # get_user_by_access_token fallback.
+        self.store.get_user_by_access_token = Mock(
+            return_value={"name": "@baldrick:matrix.org"}
+        )
+
+        user = "@baldrick:matrix.org"
+        macaroon = pymacaroons.Macaroon(
+            location=self.hs.config.server_name,
+            identifier="key",
+            key=self.hs.config.macaroon_secret_key + "wrong")
+        macaroon.add_first_party_caveat("gen = 1")
+        macaroon.add_first_party_caveat("type = access")
+        macaroon.add_first_party_caveat("user_id = %s" % (user,))
+
+        with self.assertRaises(AuthError) as cm:
+            yield self.auth._get_user_from_macaroon(macaroon.serialize())
+        self.assertEqual(401, cm.exception.code)
+        self.assertIn("Invalid macaroon", cm.exception.msg)
+
+    @defer.inlineCallbacks
+    def test_get_user_from_macaroon_unknown_caveat(self):
+        # TODO(danielwh): Remove this mock when we remove the
+        # get_user_by_access_token fallback.
+        self.store.get_user_by_access_token = Mock(
+            return_value={"name": "@baldrick:matrix.org"}
+        )
+
+        user = "@baldrick:matrix.org"
+        macaroon = pymacaroons.Macaroon(
+            location=self.hs.config.server_name,
+            identifier="key",
+            key=self.hs.config.macaroon_secret_key)
+        macaroon.add_first_party_caveat("gen = 1")
+        macaroon.add_first_party_caveat("type = access")
+        macaroon.add_first_party_caveat("user_id = %s" % (user,))
+        macaroon.add_first_party_caveat("cunning > fox")
+
+        with self.assertRaises(AuthError) as cm:
+            yield self.auth._get_user_from_macaroon(macaroon.serialize())
+        self.assertEqual(401, cm.exception.code)
+        self.assertIn("Invalid macaroon", cm.exception.msg)
+
+    @defer.inlineCallbacks
+    def test_get_user_from_macaroon_expired(self):
+        # TODO(danielwh): Remove this mock when we remove the
+        # get_user_by_access_token fallback.
+        self.store.get_user_by_access_token = Mock(
+            return_value={"name": "@baldrick:matrix.org"}
+        )
+
+        user = "@baldrick:matrix.org"
+        macaroon = pymacaroons.Macaroon(
+            location=self.hs.config.server_name,
+            identifier="key",
+            key=self.hs.config.macaroon_secret_key)
+        macaroon.add_first_party_caveat("gen = 1")
+        macaroon.add_first_party_caveat("type = access")
+        macaroon.add_first_party_caveat("user_id = %s" % (user,))
+        macaroon.add_first_party_caveat("time < 1") # ms
+
+        self.hs.clock.now = 5000 # seconds
+
+        yield self.auth._get_user_from_macaroon(macaroon.serialize())
+        # TODO(daniel): Turn on the check that we validate expiration, when we
+        # validate expiration (and remove the above line, which will start
+        # throwing).
+        # with self.assertRaises(AuthError) as cm:
+        #     yield self.auth._get_user_from_macaroon(macaroon.serialize())
+        # self.assertEqual(401, cm.exception.code)
+        # self.assertIn("Invalid macaroon", cm.exception.msg)
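These tests construct macaroons directly; for reference, a hedged sketch of how such caveats can be checked with the pymacaroons API the tests rely on (illustrative only, not Synapse's actual validation code):

    import pymacaroons

    def looks_valid(serialized, secret_key, expected_user_id):
        macaroon = pymacaroons.Macaroon.deserialize(serialized)
        v = pymacaroons.Verifier()
        v.satisfy_exact("gen = 1")
        v.satisfy_exact("type = access")
        v.satisfy_exact("user_id = %s" % (expected_user_id,))
        v.satisfy_exact("guest = true")
        v.satisfy_general(lambda caveat: caveat.startswith("time < "))
        # verify() raises on signature or caveat failure.
        return v.verify(macaroon, secret_key)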
diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py
index 65b2f590c8..9f9af2d783 100644
--- a/tests/api/test_filtering.py
+++ b/tests/api/test_filtering.py
@@ -23,10 +23,17 @@ from tests.utils import (
 )
 
 from synapse.types import UserID
-from synapse.api.filtering import Filter
+from synapse.api.filtering import FilterCollection, Filter
 
 user_localpart = "test_user"
-MockEvent = namedtuple("MockEvent", "sender type room_id")
+
+
+def MockEvent(**kwargs):
+    ev = NonCallableMock(spec_set=kwargs.keys())
+    ev.configure_mock(**kwargs)
+    return ev
+
 
 class FilteringTestCase(unittest.TestCase):
 
@@ -44,7 +51,6 @@ class FilteringTestCase(unittest.TestCase):
         )
 
         self.filtering = hs.get_filtering()
-        self.filter = Filter({})
 
         self.datastore = hs.get_datastore()
 
@@ -57,8 +63,9 @@ class FilteringTestCase(unittest.TestCase):
             type="m.room.message",
             room_id="!foo:bar"
         )
+
         self.assertTrue(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_types_works_with_wildcards(self):
@@ -71,7 +78,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertTrue(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_types_works_with_unknowns(self):
@@ -84,7 +91,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_types_works_with_literals(self):
@@ -97,7 +104,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_types_works_with_wildcards(self):
@@ -110,7 +117,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_types_works_with_unknowns(self):
@@ -123,7 +130,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertTrue(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_types_takes_priority_over_types(self):
@@ -137,7 +144,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_senders_works_with_literals(self):
@@ -150,7 +157,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertTrue(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_senders_works_with_unknowns(self):
@@ -163,7 +170,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_senders_works_with_literals(self):
@@ -176,7 +183,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_senders_works_with_unknowns(self):
@@ -189,7 +196,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertTrue(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_senders_takes_priority_over_senders(self):
@@ -203,7 +210,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!foo:bar"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_rooms_works_with_literals(self):
@@ -216,7 +223,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!secretbase:unknown"
         )
         self.assertTrue(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_rooms_works_with_unknowns(self):
@@ -229,7 +236,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!anothersecretbase:unknown"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_rooms_works_with_literals(self):
@@ -242,7 +249,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!anothersecretbase:unknown"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_rooms_works_with_unknowns(self):
@@ -255,7 +262,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!anothersecretbase:unknown"
         )
         self.assertTrue(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_not_rooms_takes_priority_over_rooms(self):
@@ -269,7 +276,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!secretbase:unknown"
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_combined_event(self):
@@ -287,7 +294,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!stage:unknown"  # yup
         )
         self.assertTrue(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_combined_event_bad_sender(self):
@@ -305,7 +312,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!stage:unknown"  # yup
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_combined_event_bad_room(self):
@@ -323,7 +330,7 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!piggyshouse:muppets"  # nope
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     def test_definition_combined_event_bad_type(self):
@@ -341,13 +348,13 @@ class FilteringTestCase(unittest.TestCase):
             room_id="!stage:unknown"  # yup
         )
         self.assertFalse(
-            self.filter._passes_definition(definition, event)
+            Filter(definition).check(event)
         )
 
     @defer.inlineCallbacks
-    def test_filter_public_user_data_match(self):
+    def test_filter_presence_match(self):
         user_filter_json = {
-            "public_user_data": {
+            "presence": {
                 "types": ["m.*"]
             }
         }
@@ -359,7 +366,6 @@ class FilteringTestCase(unittest.TestCase):
         event = MockEvent(
             sender="@foo:bar",
             type="m.profile",
-            room_id="!foo:bar"
         )
         events = [event]
 
@@ -368,13 +374,13 @@ class FilteringTestCase(unittest.TestCase):
             filter_id=filter_id,
         )
 
-        results = user_filter.filter_public_user_data(events=events)
+        results = user_filter.filter_presence(events=events)
         self.assertEquals(events, results)
 
     @defer.inlineCallbacks
-    def test_filter_public_user_data_no_match(self):
+    def test_filter_presence_no_match(self):
         user_filter_json = {
-            "public_user_data": {
+            "presence": {
                 "types": ["m.*"]
             }
         }
@@ -386,7 +392,6 @@ class FilteringTestCase(unittest.TestCase):
         event = MockEvent(
             sender="@foo:bar",
             type="custom.avatar.3d.crazy",
-            room_id="!foo:bar"
         )
         events = [event]
 
@@ -395,7 +400,7 @@ class FilteringTestCase(unittest.TestCase):
             filter_id=filter_id,
         )
 
-        results = user_filter.filter_public_user_data(events=events)
+        results = user_filter.filter_presence(events=events)
         self.assertEquals([], results)
 
     @defer.inlineCallbacks
diff --git a/tests/crypto/__init__.py b/tests/crypto/__init__.py
new file mode 100644
index 0000000000..9bff9ec169
--- /dev/null
+++ b/tests/crypto/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py
new file mode 100644
index 0000000000..7913472941
--- /dev/null
+++ b/tests/crypto/test_event_signing.py
@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from tests import unittest
+
+from synapse.events.builder import EventBuilder
+from synapse.crypto.event_signing import add_hashes_and_signatures
+
+from unpaddedbase64 import decode_base64
+
+import nacl.signing
+
+
+# Perform these tests using given secret key so we get entirely deterministic
+# signatures output that we can test against.
+SIGNING_KEY_SEED = decode_base64(
+    "YJDBA9Xnr2sVqXD9Vj7XVUnmFZcZrlw8Md7kMW+3XA1"
+)
+
+KEY_ALG = "ed25519"
+KEY_VER = 1
+KEY_NAME = "%s:%d" % (KEY_ALG, KEY_VER)
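+# The key name follows Matrix's "<algorithm>:<version>" convention for
+# identifying signing keys.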
+
+HOSTNAME = "domain"
+
+
+class EventSigningTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.signing_key = nacl.signing.SigningKey(SIGNING_KEY_SEED)
+        self.signing_key.alg = KEY_ALG
+        self.signing_key.version = KEY_VER
+
+    def test_sign_minimal(self):
+        builder = EventBuilder(
+            {
+                'event_id': "$0:domain",
+                'origin': "domain",
+                'origin_server_ts': 1000000,
+                'signatures': {},
+                'type': "X",
+                'unsigned': {'age_ts': 1000000},
+            },
+        )
+
+        add_hashes_and_signatures(builder, HOSTNAME, self.signing_key)
+
+        event = builder.build()
+
+        self.assertTrue(hasattr(event, 'hashes'))
+        self.assertIn('sha256', event.hashes)
+        self.assertEquals(
+            event.hashes['sha256'],
+            "6tJjLpXtggfke8UxFhAKg82QVkJzvKOVOOSjUDK4ZSI",
+        )
+
+        self.assertTrue(hasattr(event, 'signatures'))
+        self.assertIn(HOSTNAME, event.signatures)
+        self.assertIn(KEY_NAME, event.signatures["domain"])
+        self.assertEquals(
+            event.signatures[HOSTNAME][KEY_NAME],
+            "2Wptgo4CwmLo/Y8B8qinxApKaCkBG2fjTWB7AbP5Uy+"
+            "aIbygsSdLOFzvdDjww8zUVKCmI02eP9xtyJxc/cLiBA",
+        )
+
+    def test_sign_message(self):
+        builder = EventBuilder(
+            {
+                'content': {
+                    'body': "Here is the message content",
+                },
+                'event_id': "$0:domain",
+                'origin': "domain",
+                'origin_server_ts': 1000000,
+                'type': "m.room.message",
+                'room_id': "!r:domain",
+                'sender': "@u:domain",
+                'signatures': {},
+                'unsigned': {'age_ts': 1000000},
+            }
+        )
+
+        add_hashes_and_signatures(builder, HOSTNAME, self.signing_key)
+
+        event = builder.build()
+
+        self.assertTrue(hasattr(event, 'hashes'))
+        self.assertIn('sha256', event.hashes)
+        self.assertEquals(
+            event.hashes['sha256'],
+            "onLKD1bGljeBWQhWZ1kaP9SorVmRQNdN5aM2JYU2n/g",
+        )
+
+        self.assertTrue(hasattr(event, 'signatures'))
+        self.assertIn(HOSTNAME, event.signatures)
+        self.assertIn(KEY_NAME, event.signatures["domain"])
+        self.assertEquals(
+            event.signatures[HOSTNAME][KEY_NAME],
+            "Wm+VzmOUOz08Ds+0NTWb1d4CZrVsJSikkeRxh6aCcUw"
+            "u6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA"
+        )
diff --git a/tests/events/__init__.py b/tests/events/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/tests/events/__init__.py
diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py
new file mode 100644
index 0000000000..16179921f0
--- /dev/null
+++ b/tests/events/test_utils.py
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .. import unittest
+
+from synapse.events import FrozenEvent
+from synapse.events.utils import prune_event
+
+class PruneEventTestCase(unittest.TestCase):
+    """ Asserts that a new event constructed with `evdict` will look like
+    `matchdict` when it is redacted. """
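+    # prune_event implements redaction: all keys are stripped except a
+    # whitelist (plus type-specific content keys such as 'creator' for
+    # m.room.create); fields like 'age_ts' survive inside 'unsigned'.
+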
+    def run_test(self, evdict, matchdict):
+        self.assertEquals(
+            prune_event(FrozenEvent(evdict)).get_dict(),
+            matchdict
+        )
+
+    def test_minimal(self):
+        self.run_test(
+            {'type': 'A'},
+            {
+                'type': 'A',
+                'content': {},
+                'signatures': {},
+                'unsigned': {},
+            }
+        )
+
+    def test_basic_keys(self):
+        self.run_test(
+            {
+                'type': 'A',
+                'room_id': '!1:domain',
+                'sender': '@2:domain',
+                'event_id': '$3:domain',
+                'origin': 'domain',
+            },
+            {
+                'type': 'A',
+                'room_id': '!1:domain',
+                'sender': '@2:domain',
+                'event_id': '$3:domain',
+                'origin': 'domain',
+                'content': {},
+                'signatures': {},
+                'unsigned': {},
+            }
+        )
+
+    def test_unsigned_age_ts(self):
+        self.run_test(
+            {
+                'type': 'B',
+                'unsigned': {'age_ts': 20},
+            },
+            {
+                'type': 'B',
+                'content': {},
+                'signatures': {},
+                'unsigned': {'age_ts': 20},
+            }
+        )
+
+        self.run_test(
+            {
+                'type': 'B',
+                'unsigned': {'other_key': 'here'},
+            },
+            {
+                'type': 'B',
+                'content': {},
+                'signatures': {},
+                'unsigned': {},
+            }
+        )
+
+    def test_content(self):
+        self.run_test(
+            {
+                'type': 'C',
+                'content': {'things': 'here'},
+            },
+            {
+                'type': 'C',
+                'content': {},
+                'signatures': {},
+                'unsigned': {},
+            }
+        )
+
+        self.run_test(
+            {
+                'type': 'm.room.create',
+                'content': {'creator': '@2:domain', 'other_field': 'here'},
+            },
+            {
+                'type': 'm.room.create',
+                'content': {'creator': '@2:domain'},
+                'signatures': {},
+                'unsigned': {},
+            }
+        )
diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py
new file mode 100644
index 0000000000..978e4d0d2e
--- /dev/null
+++ b/tests/handlers/test_auth.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pymacaroons
+
+from synapse.handlers.auth import AuthHandler
+from tests import unittest
+from tests.utils import setup_test_homeserver
+from twisted.internet import defer
+
+
+class AuthHandlers(object):
+    def __init__(self, hs):
+        self.auth_handler = AuthHandler(hs)
+
+
+class AuthTestCase(unittest.TestCase):
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.hs = yield setup_test_homeserver(handlers=None)
+        self.hs.handlers = AuthHandlers(self.hs)
+
+    def test_token_is_a_macaroon(self):
+        self.hs.config.macaroon_secret_key = "this key is a huge secret"
+
+        token = self.hs.handlers.auth_handler.generate_access_token("some_user")
+        # Check that we can parse the thing with pymacaroons
+        macaroon = pymacaroons.Macaroon.deserialize(token)
+        # The most basic of sanity checks
+        if "some_user" not in macaroon.inspect():
+            self.fail("some_user was not in %s" % macaroon.inspect())
+
+    def test_macaroon_caveats(self):
+        self.hs.config.macaroon_secret_key = "this key is a massive secret"
+        self.hs.clock.now = 5000
+
+        token = self.hs.handlers.auth_handler.generate_access_token("a_user")
+        macaroon = pymacaroons.Macaroon.deserialize(token)
+
+        def verify_gen(caveat):
+            return caveat == "gen = 1"
+
+        def verify_user(caveat):
+            return caveat == "user_id = a_user"
+
+        def verify_type(caveat):
+            return caveat == "type = access"
+
+        def verify_expiry(caveat):
+            return caveat == "time < 8600000"
+
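+        # Every first-party caveat must be approved by at least one of the
+        # general verifiers below; verify() also checks the macaroon's HMAC
+        # against the secret key.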
+        v = pymacaroons.Verifier()
+        v.satisfy_general(verify_gen)
+        v.satisfy_general(verify_user)
+        v.satisfy_general(verify_type)
+        v.satisfy_general(verify_expiry)
+        v.verify(macaroon, self.hs.config.macaroon_secret_key)
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 29372d488a..10d4482cce 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -650,9 +650,30 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
             {"presence": ONLINE}
         )
 
+        # Apple sees self-reflection even without room_id
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_apple,
+            from_key=0,
+        )
+
+        self.assertEquals(self.event_source.get_current_key(), 1)
+        self.assertEquals(events,
+            [
+                {"type": "m.presence",
+                 "content": {
+                    "user_id": "@apple:test",
+                    "presence": ONLINE,
+                    "last_active_ago": 0,
+                }},
+            ],
+            msg="Presence event should be visible to self-reflection"
+        )
+
         # Apple sees self-reflection
-        (events, _) = yield self.event_source.get_new_events_for_user(
-            self.u_apple, 0, None
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_apple,
+            from_key=0,
+            room_ids=[self.room_id],
         )
 
         self.assertEquals(self.event_source.get_current_key(), 1)
@@ -684,8 +705,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
         )
 
         # Banana sees it because of presence subscription
-        (events, _) = yield self.event_source.get_new_events_for_user(
-            self.u_banana, 0, None
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_banana,
+            from_key=0,
+            room_ids=[self.room_id],
         )
 
         self.assertEquals(self.event_source.get_current_key(), 1)
@@ -702,8 +725,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
         )
 
         # Elderberry sees it because of same room
-        (events, _) = yield self.event_source.get_new_events_for_user(
-            self.u_elderberry, 0, None
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_elderberry,
+            from_key=0,
+            room_ids=[self.room_id],
         )
 
         self.assertEquals(self.event_source.get_current_key(), 1)
@@ -720,8 +745,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
         )
 
         # Durian is not in the room, should not see this event
-        (events, _) = yield self.event_source.get_new_events_for_user(
-            self.u_durian, 0, None
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_durian,
+            from_key=0,
+            room_ids=[],
         )
 
         self.assertEquals(self.event_source.get_current_key(), 1)
@@ -767,8 +794,9 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
                  "accepted": True},
         ], presence)
 
-        (events, _) = yield self.event_source.get_new_events_for_user(
-            self.u_apple, 1, None
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_apple,
+            from_key=1,
         )
 
         self.assertEquals(self.event_source.get_current_key(), 2)
@@ -858,8 +886,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
             )
         )
 
-        (events, _) = yield self.event_source.get_new_events_for_user(
-            self.u_apple, 0, None
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_apple,
+            from_key=0,
+            room_ids=[self.room_id],
         )
 
         self.assertEquals(self.event_source.get_current_key(), 1)
@@ -905,8 +935,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
 
         self.assertEquals(self.event_source.get_current_key(), 1)
 
-        (events, _) = yield self.event_source.get_new_events_for_user(
-            self.u_apple, 0, None
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_apple,
+            from_key=0,
+            room_ids=[self.room_id],
         )
         self.assertEquals(events,
             [
@@ -932,8 +964,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
 
         self.assertEquals(self.event_source.get_current_key(), 2)
 
-        (events, _) = yield self.event_source.get_new_events_for_user(
-            self.u_apple, 0, None
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_apple,
+            from_key=0,
+            room_ids=[self.room_id],
         )
         self.assertEquals(events,
             [
@@ -966,8 +1000,9 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
 
         self.room_members.append(self.u_clementine)
 
-        (events, _) = yield self.event_source.get_new_events_for_user(
-            self.u_apple, 0, None
+        (events, _) = yield self.event_source.get_new_events(
+            user=self.u_apple,
+            from_key=0,
         )
 
         self.assertEquals(self.event_source.get_current_key(), 1)
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py
index 41bb08b7ca..2d7ba43561 100644
--- a/tests/handlers/test_typing.py
+++ b/tests/handlers/test_typing.py
@@ -187,7 +187,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
         ])
 
         self.assertEquals(self.event_source.get_current_key(), 1)
-        events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+        events = yield self.event_source.get_new_events(
+            room_ids=[self.room_id],
+            from_key=0,
+        )
         self.assertEquals(
             events[0],
             [
@@ -250,7 +253,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
         ])
 
         self.assertEquals(self.event_source.get_current_key(), 1)
-        events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+        events = yield self.event_source.get_new_events(
+            room_ids=[self.room_id],
+            from_key=0
+        )
         self.assertEquals(
             events[0],
             [
@@ -306,7 +312,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
         yield put_json.await_calls()
 
         self.assertEquals(self.event_source.get_current_key(), 1)
-        events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+        events = yield self.event_source.get_new_events(
+            room_ids=[self.room_id],
+            from_key=0,
+        )
         self.assertEquals(
             events[0],
             [
@@ -337,7 +346,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
         self.on_new_event.reset_mock()
 
         self.assertEquals(self.event_source.get_current_key(), 1)
-        events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+        events = yield self.event_source.get_new_events(
+            room_ids=[self.room_id],
+            from_key=0,
+        )
         self.assertEquals(
             events[0],
             [
@@ -356,7 +368,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
         ])
 
         self.assertEquals(self.event_source.get_current_key(), 2)
-        events = yield self.event_source.get_new_events_for_user(self.u_apple, 1, None)
+        events = yield self.event_source.get_new_events(
+            room_ids=[self.room_id],
+            from_key=1,
+        )
         self.assertEquals(
             events[0],
             [
@@ -383,7 +398,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
         self.on_new_event.reset_mock()
 
         self.assertEquals(self.event_source.get_current_key(), 3)
-        events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+        events = yield self.event_source.get_new_events(
+            room_ids=[self.room_id],
+            from_key=0,
+        )
         self.assertEquals(
             events[0],
             [
diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/v1/test_presence.py
index 089a71568c..8581796f72 100644
--- a/tests/rest/client/v1/test_presence.py
+++ b/tests/rest/client/v1/test_presence.py
@@ -41,6 +41,29 @@ myid = "@apple:test"
 PATH_PREFIX = "/_matrix/client/api/v1"
 
 
+class NullSource(object):
+    """This event source never yields any events and its token remains at
+    zero. It may be useful for unit-testing."""
+    def __init__(self, hs):
+        pass
+
+    def get_new_events(
+            self,
+            user,
+            from_key,
+            room_ids=None,
+            limit=None,
+            is_guest=None
+    ):
+        return defer.succeed(([], from_key))
+
+    def get_current_key(self, direction='f'):
+        return defer.succeed(0)
+
+    def get_pagination_rows(self, user, pagination_config, key):
+        return defer.succeed(([], pagination_config.from_key))
+
+
 class JustPresenceHandlers(object):
     def __init__(self, hs):
         self.presence_handler = PresenceHandler(hs)
@@ -70,15 +93,14 @@ class PresenceStateTestCase(unittest.TestCase):
             return defer.succeed([])
         self.datastore.get_presence_list = get_presence_list
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(myid),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
 
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         room_member_handler = hs.handlers.room_member_handler = Mock(
             spec=[
@@ -159,12 +181,11 @@ class PresenceListTestCase(unittest.TestCase):
             )
         self.datastore.has_presence_state = has_presence_state
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(myid),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
 
         hs.handlers.room_member_handler = Mock(
@@ -173,7 +194,7 @@ class PresenceListTestCase(unittest.TestCase):
             ]
         )
 
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         presence.register_servlets(hs, self.mock_resource)
 
@@ -247,7 +268,7 @@ class PresenceEventStreamTestCase(unittest.TestCase):
         # HIDEOUS HACKERY
         # TODO(paul): This should be injected in via the HomeServer DI system
         from synapse.streams.events import (
-            PresenceEventSource, NullSource, EventSources
+            PresenceEventSource, EventSources
         )
 
         old_SOURCE_TYPES = EventSources.SOURCE_TYPES
@@ -279,8 +300,8 @@ class PresenceEventStreamTestCase(unittest.TestCase):
 
         hs.get_clock().time_msec.return_value = 1000000
 
-        def _get_user_by_req(req=None):
-            return (UserID.from_string(myid), "")
+        def _get_user_by_req(req=None, allow_guest=False):
+            return (UserID.from_string(myid), "", False)
 
         hs.get_v1auth().get_user_by_req = _get_user_by_req
 
@@ -300,6 +321,9 @@ class PresenceEventStreamTestCase(unittest.TestCase):
         hs.handlers.room_member_handler.get_room_members = (
             lambda r: self.room_members if r == "a-room" else []
         )
+        hs.handlers.room_member_handler._filter_events_for_client = (
+            lambda user_id, events, **kwargs: events
+        )
 
         self.mock_datastore = hs.get_datastore()
         self.mock_datastore.get_app_service_by_token = Mock(return_value=None)
@@ -357,7 +381,7 @@ class PresenceEventStreamTestCase(unittest.TestCase):
         # all be ours
 
         # I'll already get my own presence state change
-        self.assertEquals({"start": "0_1_0_0", "end": "0_1_0_0", "chunk": []},
+        self.assertEquals({"start": "0_1_0_0_0", "end": "0_1_0_0_0", "chunk": []},
             response
         )
 
@@ -376,7 +400,7 @@ class PresenceEventStreamTestCase(unittest.TestCase):
                 "/events?from=s0_1_0&timeout=0", None)
 
         self.assertEquals(200, code)
-        self.assertEquals({"start": "s0_1_0_0", "end": "s0_2_0_0", "chunk": [
+        self.assertEquals({"start": "s0_1_0_0_0", "end": "s0_2_0_0_0", "chunk": [
             {"type": "m.presence",
              "content": {
                  "user_id": "@banana:test",
diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py
index 929e5e5dd4..adcc1d1969 100644
--- a/tests/rest/client/v1/test_profile.py
+++ b/tests/rest/client/v1/test_profile.py
@@ -52,8 +52,8 @@ class ProfileTestCase(unittest.TestCase):
             replication_layer=Mock(),
         )
 
-        def _get_user_by_req(request=None):
-            return (UserID.from_string(myid), "")
+        def _get_user_by_req(request=None, allow_guest=False):
+            return (UserID.from_string(myid), "", False)
 
         hs.get_v1auth().get_user_by_req = _get_user_by_req
 
diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py
index c83348acf9..7749378064 100644
--- a/tests/rest/client/v1/test_rooms.py
+++ b/tests/rest/client/v1/test_rooms.py
@@ -54,14 +54,13 @@ class RoomPermissionsTestCase(RestTestCase):
 
         hs.get_handlers().federation_handler = Mock()
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(self.auth_user_id),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         def _insert_client_ip(*args, **kwargs):
             return defer.succeed(None)
@@ -241,7 +240,7 @@ class RoomPermissionsTestCase(RestTestCase):
                            "PUT", topic_path, topic_content)
         self.assertEquals(403, code, msg=str(response))
         (code, response) = yield self.mock_resource.trigger_get(topic_path)
-        self.assertEquals(403, code, msg=str(response))
+        self.assertEquals(200, code, msg=str(response))
 
         # get topic in PUBLIC room, not joined, expect 403
         (code, response) = yield self.mock_resource.trigger_get(
@@ -279,10 +278,10 @@ class RoomPermissionsTestCase(RestTestCase):
                           expect_code=403)
 
         # set [invite/join/left] of self, set [invite/join/left] of other,
-        # expect all 403s
+        # expect all 404s because room doesn't exist on any server
         for usr in [self.user_id, self.rmcreator_id]:
             yield self.join(room=room, user=usr, expect_code=404)
-            yield self.leave(room=room, user=usr, expect_code=403)
+            yield self.leave(room=room, user=usr, expect_code=404)
 
     @defer.inlineCallbacks
     def test_membership_private_room_perms(self):
@@ -303,11 +302,11 @@ class RoomPermissionsTestCase(RestTestCase):
             room=room, expect_code=200)
 
         # get membership of self, get membership of other, private room + left
-        # expect all 403s
+        # expect all 200s
         yield self.leave(room=room, user=self.user_id)
         yield self._test_get_membership(
             members=[self.user_id, self.rmcreator_id],
-            room=room, expect_code=403)
+            room=room, expect_code=200)
 
     @defer.inlineCallbacks
     def test_membership_public_room_perms(self):
@@ -328,11 +327,11 @@ class RoomPermissionsTestCase(RestTestCase):
             room=room, expect_code=200)
 
         # get membership of self, get membership of other, public room + left
-        # expect 403.
+        # expect 200.
         yield self.leave(room=room, user=self.user_id)
         yield self._test_get_membership(
             members=[self.user_id, self.rmcreator_id],
-            room=room, expect_code=403)
+            room=room, expect_code=200)
 
     @defer.inlineCallbacks
     def test_invited_permissions(self):
@@ -441,14 +440,13 @@ class RoomsMemberListTestCase(RestTestCase):
 
         self.auth_user_id = self.user_id
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(self.auth_user_id),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         def _insert_client_ip(*args, **kwargs):
             return defer.succeed(None)
@@ -496,9 +494,9 @@ class RoomsMemberListTestCase(RestTestCase):
         self.assertEquals(200, code, msg=str(response))
 
         yield self.leave(room=room_id, user=self.user_id)
-        # can no longer see list, you've left.
+        # can see old list once left
         (code, response) = yield self.mock_resource.trigger_get(room_path)
-        self.assertEquals(403, code, msg=str(response))
+        self.assertEquals(200, code, msg=str(response))
 
 
 class RoomsCreateTestCase(RestTestCase):
@@ -521,14 +519,13 @@ class RoomsCreateTestCase(RestTestCase):
 
         hs.get_handlers().federation_handler = Mock()
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(self.auth_user_id),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         def _insert_client_ip(*args, **kwargs):
             return defer.succeed(None)
@@ -614,15 +611,14 @@ class RoomTopicTestCase(RestTestCase):
 
         hs.get_handlers().federation_handler = Mock()
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(self.auth_user_id),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
 
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         def _insert_client_ip(*args, **kwargs):
             return defer.succeed(None)
@@ -721,14 +717,13 @@ class RoomMemberStateTestCase(RestTestCase):
 
         hs.get_handlers().federation_handler = Mock()
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(self.auth_user_id),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         def _insert_client_ip(*args, **kwargs):
             return defer.succeed(None)
@@ -848,14 +843,13 @@ class RoomMessagesTestCase(RestTestCase):
 
         hs.get_handlers().federation_handler = Mock()
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(self.auth_user_id),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         def _insert_client_ip(*args, **kwargs):
             return defer.succeed(None)
@@ -945,14 +939,13 @@ class RoomInitialSyncTestCase(RestTestCase):
 
         hs.get_handlers().federation_handler = Mock()
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(self.auth_user_id),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         def _insert_client_ip(*args, **kwargs):
             return defer.succeed(None)
@@ -1001,3 +994,61 @@
         }
         self.assertTrue(self.user_id in presence_by_user)
         self.assertEquals("m.presence", presence_by_user[self.user_id]["type"])
+
+
+class RoomMessageListTestCase(RestTestCase):
+    """ Tests /rooms/$room_id/messages REST events. """
+    user_id = "@sid1:red"
+
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
+        self.auth_user_id = self.user_id
+
+        hs = yield setup_test_homeserver(
+            "red",
+            http_client=None,
+            replication_layer=Mock(),
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
+        )
+        self.ratelimiter = hs.get_ratelimiter()
+        self.ratelimiter.send_message.return_value = (True, 0)
+
+        hs.get_handlers().federation_handler = Mock()
+
+        def _get_user_by_access_token(token=None, allow_guest=False):
+            return {
+                "user": UserID.from_string(self.auth_user_id),
+                "token_id": 1,
+                "is_guest": False,
+            }
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
+
+        def _insert_client_ip(*args, **kwargs):
+            return defer.succeed(None)
+        hs.get_datastore().insert_client_ip = _insert_client_ip
+
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
+
+        self.room_id = yield self.create_room_as(self.user_id)
+
+    @defer.inlineCallbacks
+    def test_topo_token_is_accepted(self):
+        token = "t1-0_0_0_0_0"
+        (code, response) = yield self.mock_resource.trigger_get(
+            "/rooms/%s/messages?access_token=x&from=%s" %
+            (self.room_id, token))
+        self.assertEquals(200, code)
+        self.assertTrue("start" in response)
+        self.assertEquals(token, response['start'])
+        self.assertTrue("chunk" in response)
+        self.assertTrue("end" in response)
+
+    @defer.inlineCallbacks
+    def test_stream_token_is_rejected(self):
+        (code, response) = yield self.mock_resource.trigger_get(
+            "/rooms/%s/messages?access_token=x&from=s0_0_0_0" %
+            self.room_id)
+        self.assertEquals(400, code)
diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/v1/test_typing.py
index 7d8b1c2683..61b9cc743b 100644
--- a/tests/rest/client/v1/test_typing.py
+++ b/tests/rest/client/v1/test_typing.py
@@ -61,15 +61,14 @@ class RoomTypingTestCase(RestTestCase):
 
         hs.get_handlers().federation_handler = Mock()
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(self.auth_user_id),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
 
-        hs.get_v1auth().get_user_by_token = _get_user_by_token
+        hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
 
         def _insert_client_ip(*args, **kwargs):
             return defer.succeed(None)
@@ -117,7 +116,10 @@ class RoomTypingTestCase(RestTestCase):
         self.assertEquals(200, code)
 
         self.assertEquals(self.event_source.get_current_key(), 1)
-        events = yield self.event_source.get_new_events_for_user(self.user, 0, None)
+        events = yield self.event_source.get_new_events(
+            from_key=0,
+            room_ids=[self.room_id],
+        )
         self.assertEquals(
             events[0],
             [
diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py
index 579441fb4a..85096a0326 100644
--- a/tests/rest/client/v1/utils.py
+++ b/tests/rest/client/v1/utils.py
@@ -37,9 +37,6 @@ class RestTestCase(unittest.TestCase):
         self.mock_resource = None
         self.auth_user_id = None
 
-    def mock_get_user_by_token(self, token=None):
-        return self.auth_user_id
-
     @defer.inlineCallbacks
     def create_room_as(self, room_creator, is_public=True, tok=None):
         temp_id = self.auth_user_id
diff --git a/tests/rest/client/v2_alpha/__init__.py b/tests/rest/client/v2_alpha/__init__.py
index de5a917e6a..fa9e17ec4f 100644
--- a/tests/rest/client/v2_alpha/__init__.py
+++ b/tests/rest/client/v2_alpha/__init__.py
@@ -43,14 +43,13 @@ class V2AlphaRestTestCase(unittest.TestCase):
             resource_for_federation=self.mock_resource,
         )
 
-        def _get_user_by_token(token=None):
+        def _get_user_by_access_token(token=None, allow_guest=False):
             return {
                 "user": UserID.from_string(self.USER_ID),
-                "admin": False,
-                "device_id": None,
                 "token_id": 1,
+                "is_guest": False,
             }
-        hs.get_auth().get_user_by_token = _get_user_by_token
+        hs.get_auth()._get_user_by_access_token = _get_user_by_access_token
 
         for r in self.TO_REGISTER:
             r.register_servlets(hs, self.mock_resource)
diff --git a/tests/storage/event_injector.py b/tests/storage/event_injector.py
new file mode 100644
index 0000000000..42bd8928bd
--- /dev/null
+++ b/tests/storage/event_injector.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from twisted.internet import defer
+
+from synapse.api.constants import EventTypes
+
+
+class EventInjector:
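+    """Builds events via the message handler and persists them directly to
+    the datastore, bypassing the REST layer; a helper for storage tests."""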
+    def __init__(self, hs):
+        self.hs = hs
+        self.store = hs.get_datastore()
+        self.message_handler = hs.get_handlers().message_handler
+        self.event_builder_factory = hs.get_event_builder_factory()
+
+    @defer.inlineCallbacks
+    def create_room(self, room):
+        builder = self.event_builder_factory.new({
+            "type": EventTypes.Create,
+            "room_id": room.to_string(),
+            "content": {},
+        })
+
+        event, context = yield self.message_handler._create_new_client_event(
+            builder
+        )
+
+        yield self.store.persist_event(event, context)
+
+    @defer.inlineCallbacks
+    def inject_room_member(self, room, user, membership):
+        builder = self.event_builder_factory.new({
+            "type": EventTypes.Member,
+            "sender": user.to_string(),
+            "state_key": user.to_string(),
+            "room_id": room.to_string(),
+            "content": {"membership": membership},
+        })
+
+        event, context = yield self.message_handler._create_new_client_event(
+            builder
+        )
+
+        yield self.store.persist_event(event, context)
+
+        defer.returnValue(event)
+
+    @defer.inlineCallbacks
+    def inject_message(self, room, user, body):
+        builder = self.event_builder_factory.new({
+            "type": EventTypes.Message,
+            "sender": user.to_string(),
+            "state_key": user.to_string(),
+            "room_id": room.to_string(),
+            "content": {"body": body, "msgtype": u"message"},
+        })
+
+        event, context = yield self.message_handler._create_new_client_event(
+            builder
+        )
+
+        yield self.store.persist_event(event, context)
diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py
new file mode 100644
index 0000000000..29289fa9b4
--- /dev/null
+++ b/tests/storage/test_background_update.py
@@ -0,0 +1,75 @@
+from tests import unittest
+from twisted.internet import defer
+
+from tests.utils import setup_test_homeserver
+
+from mock import Mock
+
+class BackgroundUpdateTestCase(unittest.TestCase):
+
+    @defer.inlineCallbacks
+    def setUp(self):
+        hs = yield setup_test_homeserver()
+        self.store = hs.get_datastore()
+        self.clock = hs.get_clock()
+
+        self.update_handler = Mock()
+
+        yield self.store.register_background_update_handler(
+            "test_update", self.update_handler
+        )
+
+    @defer.inlineCallbacks
+    def test_do_background_update(self):
+        desired_count = 1000
+        duration_ms = 42
+
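+        # The store runs the first batch at DEFAULT_BACKGROUND_BATCH_SIZE,
+        # times it, and sizes subsequent batches to fill the target duration.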
+        @defer.inlineCallbacks
+        def update(progress, count):
+            self.clock.advance_time_msec(count * duration_ms)
+            progress = {"my_key": progress["my_key"] + 1}
+            yield self.store.runInteraction(
+                "update_progress",
+                self.store._background_update_progress_txn,
+                "test_update",
+                progress,
+            )
+            defer.returnValue(count)
+
+        self.update_handler.side_effect = update
+
+        yield self.store.start_background_update("test_update", {"my_key": 1})
+
+        self.update_handler.reset_mock()
+        result = yield self.store.do_background_update(
+            duration_ms * desired_count
+        )
+        self.assertIsNotNone(result)
+        self.update_handler.assert_called_once_with(
+            {"my_key": 1}, self.store.DEFAULT_BACKGROUND_BATCH_SIZE
+        )
+
+        @defer.inlineCallbacks
+        def update(progress, count):
+            yield self.store._end_background_update("test_update")
+            defer.returnValue(count)
+
+        self.update_handler.side_effect = update
+
+        self.update_handler.reset_mock()
+        result = yield self.store.do_background_update(
+            duration_ms * desired_count
+        )
+        self.assertIsNotNone(result)
+        self.update_handler.assert_called_once_with(
+            {"my_key": 2}, desired_count
+        )
+
+        self.update_handler.reset_mock()
+        result = yield self.store.do_background_update(
+            duration_ms * desired_count
+        )
+        self.assertIsNone(result)
+        self.assertFalse(self.update_handler.called)
diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py
index 8573f18b55..1ddca1da4c 100644
--- a/tests/storage/test_base.py
+++ b/tests/storage/test_base.py
@@ -186,26 +186,6 @@ class SQLBaseStoreTestCase(unittest.TestCase):
         )
 
     @defer.inlineCallbacks
-    def test_update_one_with_return(self):
-        self.mock_txn.rowcount = 1
-        self.mock_txn.fetchone.return_value = ("Old Value",)
-
-        ret = yield self.datastore._simple_selectupdate_one(
-                table="tablename",
-                keyvalues={"keycol": "TheKey"},
-                updatevalues={"columname": "New Value"},
-                retcols=["columname"]
-        )
-
-        self.assertEquals({"columname": "Old Value"}, ret)
-        self.mock_txn.execute.assert_has_calls([
-                call('SELECT columname FROM tablename WHERE keycol = ?',
-                    ['TheKey']),
-                call("UPDATE tablename SET columname = ? WHERE keycol = ?",
-                    ["New Value", "TheKey"])
-        ])
-
-    @defer.inlineCallbacks
     def test_delete_one(self):
         self.mock_txn.rowcount = 1
 
diff --git a/tests/storage/test_events.py b/tests/storage/test_events.py
new file mode 100644
index 0000000000..313013009e
--- /dev/null
+++ b/tests/storage/test_events.py
@@ -0,0 +1,117 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mock import Mock
+from synapse.types import RoomID, UserID
+
+from tests import unittest
+from twisted.internet import defer
+from tests.storage.event_injector import EventInjector
+
+from tests.utils import setup_test_homeserver
+
+
+class EventsStoreTestCase(unittest.TestCase):
+
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.hs = yield setup_test_homeserver(
+            resource_for_federation=Mock(),
+            http_client=None,
+        )
+        self.store = self.hs.get_datastore()
+        self.db_pool = self.hs.get_db_pool()
+        self.message_handler = self.hs.get_handlers().message_handler
+        self.event_injector = EventInjector(self.hs)
+
+    @defer.inlineCallbacks
+    def test_count_daily_messages(self):
+        self.db_pool.runQuery("DELETE FROM stats_reporting")
+
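+        # count_daily_messages only returns a count when the previous report
+        # is roughly a day old; gaps too short or too long return None.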
+        self.hs.clock.now = 100
+
+        # Never reported before, and nothing which could be reported
+        count = yield self.store.count_daily_messages()
+        self.assertIsNone(count)
+        count = yield self.db_pool.runQuery("SELECT COUNT(*) FROM stats_reporting")
+        self.assertEqual([(0,)], count)
+
+        # Create something to report
+        room = RoomID.from_string("!abc123:test")
+        user = UserID.from_string("@raccoonlover:test")
+        yield self.event_injector.create_room(room)
+
+        self.base_event = yield self._get_last_stream_token()
+
+        yield self.event_injector.inject_message(room, user, "Raccoons are really cute")
+
+        # Never reported before, something could be reported, but isn't because
+        # it isn't old enough.
+        count = yield self.store.count_daily_messages()
+        self.assertIsNone(count)
+        yield self._assert_stats_reporting(1, self.hs.clock.now)
+
+        # Already reported yesterday, two new events from today.
+        yield self.event_injector.inject_message(room, user, "Yeah they are!")
+        yield self.event_injector.inject_message(room, user, "Incredibly!")
+        self.hs.clock.now += 60 * 60 * 24
+        count = yield self.store.count_daily_messages()
+        self.assertEqual(2, count)  # 2 since yesterday
+        yield self._assert_stats_reporting(3, self.hs.clock.now)  # 3 ever
+
+        # Last reported too recently.
+        yield self.event_injector.inject_message(room, user, "Who could disagree?")
+        self.hs.clock.now += 60 * 60 * 22
+        count = yield self.store.count_daily_messages()
+        self.assertIsNone(count)
+        yield self._assert_stats_reporting(4, self.hs.clock.now)
+
+        # Last reported too long ago
+        yield self.event_injector.inject_message(room, user, "No one.")
+        self.hs.clock.now += 60 * 60 * 26
+        count = yield self.store.count_daily_messages()
+        self.assertIsNone(count)
+        yield self._assert_stats_reporting(5, self.hs.clock.now)
+
+        # And now let's actually report something
+        yield self.event_injector.inject_message(room, user, "Indeed.")
+        yield self.event_injector.inject_message(room, user, "Indeed.")
+        yield self.event_injector.inject_message(room, user, "Indeed.")
+        # A little over 24 hours is fine :)
+        self.hs.clock.now += (60 * 60 * 24) + 50
+        count = yield self.store.count_daily_messages()
+        self.assertEqual(3, count)
+        yield self._assert_stats_reporting(8, self.hs.clock.now)
+
+    @defer.inlineCallbacks
+    def _get_last_stream_token(self):
+        rows = yield self.db_pool.runQuery(
+            "SELECT stream_ordering"
+            " FROM events"
+            " ORDER BY stream_ordering DESC"
+            " LIMIT 1"
+        )
+        if not rows:
+            defer.returnValue(0)
+        else:
+            defer.returnValue(rows[0][0])
+
+    @defer.inlineCallbacks
+    def _assert_stats_reporting(self, messages, time):
+        rows = yield self.db_pool.runQuery(
+            "SELECT reported_stream_token, reported_time FROM stats_reporting"
+        )
+        self.assertEqual([(self.base_event + messages, time,)], rows)
diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py
index b57006fcb4..dbf9700e6a 100644
--- a/tests/storage/test_redaction.py
+++ b/tests/storage/test_redaction.py
@@ -120,7 +120,6 @@ class RedactionTestCase(unittest.TestCase):
             self.u_alice.to_string(),
             start,
             end,
-            None,  # Is currently ignored
         )
 
         self.assertEqual(1, len(results))
@@ -149,7 +148,6 @@ class RedactionTestCase(unittest.TestCase):
             self.u_alice.to_string(),
             start,
             end,
-            None,  # Is currently ignored
         )
 
         self.assertEqual(1, len(results))
@@ -199,7 +197,6 @@ class RedactionTestCase(unittest.TestCase):
             self.u_alice.to_string(),
             start,
             end,
-            None,  # Is currently ignored
         )
 
         self.assertEqual(1, len(results))
@@ -228,7 +225,6 @@ class RedactionTestCase(unittest.TestCase):
             self.u_alice.to_string(),
             start,
             end,
-            None,  # Is currently ignored
         )
 
         self.assertEqual(1, len(results))
diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py
index 2702291178..0cce6c37df 100644
--- a/tests/storage/test_registration.py
+++ b/tests/storage/test_registration.py
@@ -17,7 +17,9 @@
 from tests import unittest
 from twisted.internet import defer
 
+from synapse.api.errors import StoreError
 from synapse.storage.registration import RegistrationStore
+from synapse.util import stringutils
 
 from tests.utils import setup_test_homeserver
 
@@ -27,6 +29,7 @@ class RegistrationStoreTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def setUp(self):
         hs = yield setup_test_homeserver()
+        self.db_pool = hs.get_db_pool()
 
         self.store = RegistrationStore(hs)
 
@@ -46,13 +49,11 @@ class RegistrationStoreTestCase(unittest.TestCase):
             (yield self.store.get_user_by_id(self.user_id))
         )
 
-        result = yield self.store.get_user_by_token(self.tokens[0])
+        result = yield self.store.get_user_by_access_token(self.tokens[0])
 
         self.assertDictContainsSubset(
             {
-                "admin": 0,
-                 "device_id": None,
-                 "name": self.user_id,
+                "name": self.user_id,
             },
             result
         )
@@ -64,16 +65,66 @@ class RegistrationStoreTestCase(unittest.TestCase):
         yield self.store.register(self.user_id, self.tokens[0], self.pwhash)
         yield self.store.add_access_token_to_user(self.user_id, self.tokens[1])
 
-        result = yield self.store.get_user_by_token(self.tokens[1])
+        result = yield self.store.get_user_by_access_token(self.tokens[1])
 
         self.assertDictContainsSubset(
             {
-                "admin": 0,
-                 "device_id": None,
-                 "name": self.user_id,
+                "name": self.user_id,
             },
             result
         )
 
         self.assertTrue("token_id" in result)
 
+    @defer.inlineCallbacks
+    def test_exchange_refresh_token_valid(self):
+        uid = stringutils.random_string(32)
+        generator = TokenGenerator()
+        last_token = generator.generate(uid)
+
+        yield self.db_pool.runQuery(
+            "INSERT INTO refresh_tokens(user_id, token) VALUES(?,?)",
+            (uid, last_token,))
+
+        (found_user_id, refresh_token) = yield self.store.exchange_refresh_token(
+            last_token, generator.generate)
+        self.assertEqual(uid, found_user_id)
+
+        rows = yield self.db_pool.runQuery(
+            "SELECT token FROM refresh_tokens WHERE user_id = ?", (uid, ))
+        self.assertEqual([(refresh_token,)], rows)
+        # We issued token 1, then exchanged it for token 2
+        expected_refresh_token = u"%s-%d" % (uid, 2,)
+        self.assertEqual(expected_refresh_token, refresh_token)
+
+    @defer.inlineCallbacks
+    def test_exchange_refresh_token_none(self):
+        uid = stringutils.random_string(32)
+        generator = TokenGenerator()
+        last_token = generator.generate(uid)
+
+        with self.assertRaises(StoreError):
+            yield self.store.exchange_refresh_token(last_token, generator.generate)
+
+    @defer.inlineCallbacks
+    def test_exchange_refresh_token_invalid(self):
+        uid = stringutils.random_string(32)
+        generator = TokenGenerator()
+        last_token = generator.generate(uid)
+        wrong_token = "%s-wrong" % (last_token,)
+
+        yield self.db_pool.runQuery(
+            "INSERT INTO refresh_tokens(user_id, token) VALUES(?,?)",
+            (uid, wrong_token,))
+
+        with self.assertRaises(StoreError):
+            yield self.store.exchange_refresh_token(last_token, generator.generate)
+
+
+class TokenGenerator:
+    def __init__(self):
+        self._last_issued_token = 0
+
+    def generate(self, user_id):
+        self._last_issued_token += 1
+        return u"%s-%d" % (user_id, self._last_issued_token,)
diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py
index ab7625a3ca..91c967548d 100644
--- a/tests/storage/test_room.py
+++ b/tests/storage/test_room.py
@@ -73,6 +73,8 @@ class RoomStoreTestCase(unittest.TestCase):
             "room_id": self.room.to_string(),
             "topic": None,
             "aliases": [self.alias.to_string()],
+            "world_readable": False,
+            "guest_can_join": False,
         }, rooms[0])
 
 
@@ -85,7 +87,7 @@ class RoomEventsStoreTestCase(unittest.TestCase):
         # Room events need the full datastore, for persist_event() and
         # get_room_state()
         self.store = hs.get_datastore()
-        self.event_factory = hs.get_event_factory();
+        self.event_factory = hs.get_event_factory()
 
         self.room = RoomID.from_string("!abcde:test")
 
diff --git a/tests/storage/test_stream.py b/tests/storage/test_stream.py
index 0c9b89d765..e5c2c5cc8e 100644
--- a/tests/storage/test_stream.py
+++ b/tests/storage/test_stream.py
@@ -19,6 +19,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.types import UserID, RoomID
+from tests.storage.event_injector import EventInjector
 
 from tests.utils import setup_test_homeserver
 
@@ -36,6 +37,7 @@ class StreamStoreTestCase(unittest.TestCase):
 
         self.store = hs.get_datastore()
         self.event_builder_factory = hs.get_event_builder_factory()
+        self.event_injector = EventInjector(hs)
         self.handlers = hs.get_handlers()
         self.message_handler = self.handlers.message_handler
 
@@ -45,60 +47,20 @@ class StreamStoreTestCase(unittest.TestCase):
         self.room1 = RoomID.from_string("!abc123:test")
         self.room2 = RoomID.from_string("!xyx987:test")
 
-        self.depth = 1
-
-    @defer.inlineCallbacks
-    def inject_room_member(self, room, user, membership):
-        self.depth += 1
-
-        builder = self.event_builder_factory.new({
-            "type": EventTypes.Member,
-            "sender": user.to_string(),
-            "state_key": user.to_string(),
-            "room_id": room.to_string(),
-            "content": {"membership": membership},
-        })
-
-        event, context = yield self.message_handler._create_new_client_event(
-            builder
-        )
-
-        yield self.store.persist_event(event, context)
-
-        defer.returnValue(event)
-
-    @defer.inlineCallbacks
-    def inject_message(self, room, user, body):
-        self.depth += 1
-
-        builder = self.event_builder_factory.new({
-            "type": EventTypes.Message,
-            "sender": user.to_string(),
-            "state_key": user.to_string(),
-            "room_id": room.to_string(),
-            "content": {"body": body, "msgtype": u"message"},
-        })
-
-        event, context = yield self.message_handler._create_new_client_event(
-            builder
-        )
-
-        yield self.store.persist_event(event, context)
-
     @defer.inlineCallbacks
     def test_event_stream_get_other(self):
         # Both bob and alice join the room
-        yield self.inject_room_member(
+        yield self.event_injector.inject_room_member(
             self.room1, self.u_alice, Membership.JOIN
         )
-        yield self.inject_room_member(
+        yield self.event_injector.inject_room_member(
             self.room1, self.u_bob, Membership.JOIN
         )
 
         # Initial stream key:
         start = yield self.store.get_room_events_max_id()
 
-        yield self.inject_message(self.room1, self.u_alice, u"test")
+        yield self.event_injector.inject_message(self.room1, self.u_alice, u"test")
 
         end = yield self.store.get_room_events_max_id()
 
@@ -106,7 +68,6 @@ class StreamStoreTestCase(unittest.TestCase):
             self.u_bob.to_string(),
             start,
             end,
-            None,  # Is currently ignored
         )
 
         self.assertEqual(1, len(results))
@@ -125,17 +86,17 @@ class StreamStoreTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_event_stream_get_own(self):
         # Both bob and alice join the room
-        yield self.inject_room_member(
+        yield self.event_injector.inject_room_member(
             self.room1, self.u_alice, Membership.JOIN
         )
-        yield self.inject_room_member(
+        yield self.event_injector.inject_room_member(
             self.room1, self.u_bob, Membership.JOIN
         )
 
         # Initial stream key:
         start = yield self.store.get_room_events_max_id()
 
-        yield self.inject_message(self.room1, self.u_alice, u"test")
+        yield self.event_injector.inject_message(self.room1, self.u_alice, u"test")
 
         end = yield self.store.get_room_events_max_id()
 
@@ -143,7 +104,6 @@ class StreamStoreTestCase(unittest.TestCase):
             self.u_alice.to_string(),
             start,
             end,
-            None,  # Is currently ignored
         )
 
         self.assertEqual(1, len(results))
@@ -162,22 +122,22 @@ class StreamStoreTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_event_stream_join_leave(self):
         # Both bob and alice join the room
-        yield self.inject_room_member(
+        yield self.event_injector.inject_room_member(
             self.room1, self.u_alice, Membership.JOIN
         )
-        yield self.inject_room_member(
+        yield self.event_injector.inject_room_member(
             self.room1, self.u_bob, Membership.JOIN
         )
 
         # Then bob leaves.
-        yield self.inject_room_member(
+        yield self.event_injector.inject_room_member(
             self.room1, self.u_bob, Membership.LEAVE
         )
 
         # Initial stream key:
         start = yield self.store.get_room_events_max_id()
 
-        yield self.inject_message(self.room1, self.u_alice, u"test")
+        yield self.event_injector.inject_message(self.room1, self.u_alice, u"test")
 
         end = yield self.store.get_room_events_max_id()
 
@@ -185,7 +145,6 @@ class StreamStoreTestCase(unittest.TestCase):
             self.u_bob.to_string(),
             start,
             end,
-            None,  # Is currently ignored
         )
 
         # We should not get the message, as it happened *after* bob left.
@@ -193,17 +152,17 @@ class StreamStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_event_stream_prev_content(self):
-        yield self.inject_room_member(
+        yield self.event_injector.inject_room_member(
             self.room1, self.u_bob, Membership.JOIN
         )
 
-        event1 = yield self.inject_room_member(
+        event1 = yield self.event_injector.inject_room_member(
             self.room1, self.u_alice, Membership.JOIN
         )
 
         start = yield self.store.get_room_events_max_id()
 
-        event2 = yield self.inject_room_member(
+        event2 = yield self.event_injector.inject_room_member(
             self.room1, self.u_alice, Membership.JOIN,
         )
 
@@ -213,7 +172,6 @@ class StreamStoreTestCase(unittest.TestCase):
             self.u_bob.to_string(),
             start,
             end,
-            None,  # Is currently ignored
         )
 
         # Bob should see Alice's second join, with her first join as its prev_content.
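
The `inject_room_member`/`inject_message` helpers deleted above now live in `tests/storage/event_injector.py` as the shared `EventInjector` (also imported by the stats tests earlier in this diff). Reconstructed from the deleted helpers, it plausibly looks like the sketch below; the actual module may differ in detail, and the unused `self.depth` counter and the stray `state_key` on message events have been dropped here:

```python
from twisted.internet import defer
from synapse.api.constants import EventTypes

class EventInjector:
    def __init__(self, hs):
        self.store = hs.get_datastore()
        self.message_handler = hs.get_handlers().message_handler
        self.event_builder_factory = hs.get_event_builder_factory()

    @defer.inlineCallbacks
    def inject_room_member(self, room, user, membership):
        # Build, contextualise and persist a membership state event.
        builder = self.event_builder_factory.new({
            "type": EventTypes.Member,
            "sender": user.to_string(),
            "state_key": user.to_string(),
            "room_id": room.to_string(),
            "content": {"membership": membership},
        })
        event, context = yield self.message_handler._create_new_client_event(
            builder
        )
        yield self.store.persist_event(event, context)
        defer.returnValue(event)

    @defer.inlineCallbacks
    def inject_message(self, room, user, body):
        # Same flow for a plain (non-state) message event.
        builder = self.event_builder_factory.new({
            "type": EventTypes.Message,
            "sender": user.to_string(),
            "room_id": room.to_string(),
            "content": {"body": body, "msgtype": u"message"},
        })
        event, context = yield self.message_handler._create_new_client_event(
            builder
        )
        yield self.store.persist_event(event, context)
```
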
diff --git a/tests/test_state.py b/tests/test_state.py
index 5845358754..e4e995b756 100644
--- a/tests/test_state.py
+++ b/tests/test_state.py
@@ -35,7 +35,7 @@ def create_event(name=None, type=None, state_key=None, depth=2, event_id=None,
 
     if not event_id:
         _next_event_id += 1
-        event_id = str(_next_event_id)
+        event_id = "$%s:test" % (_next_event_id,)
 
     if not name:
         if state_key is not None:
@@ -204,8 +204,8 @@ class StateTestCase(unittest.TestCase):
             nodes={
                 "START": DictObj(
                     type=EventTypes.Create,
-                    state_key="creator",
-                    content={"membership": "@user_id:example.com"},
+                    state_key="",
+                    content={"creator": "@user_id:example.com"},
                     depth=1,
                 ),
                 "A": DictObj(
@@ -259,8 +259,8 @@ class StateTestCase(unittest.TestCase):
             nodes={
                 "START": DictObj(
                     type=EventTypes.Create,
-                    state_key="creator",
-                    content={"membership": "@user_id:example.com"},
+                    state_key="",
+                    content={"creator": "@user_id:example.com"},
                     depth=1,
                 ),
                 "A": DictObj(
@@ -318,6 +318,99 @@ class StateTestCase(unittest.TestCase):
         )
 
     @defer.inlineCallbacks
+    def test_branch_have_perms_conflict(self):
+        userid1 = "@user_id:example.com"
+        userid2 = "@user_id2:example.com"
+
+        nodes = {
+            "A1": DictObj(
+                type=EventTypes.Create,
+                state_key="",
+                content={"creator": userid1},
+                depth=1,
+            ),
+            "A2": DictObj(
+                type=EventTypes.Member,
+                state_key=userid1,
+                content={"membership": Membership.JOIN},
+                membership=Membership.JOIN,
+            ),
+            "A3": DictObj(
+                type=EventTypes.Member,
+                state_key=userid2,
+                content={"membership": Membership.JOIN},
+                membership=Membership.JOIN,
+            ),
+            "A4": DictObj(
+                type=EventTypes.PowerLevels,
+                state_key="",
+                content={
+                    "events": {"m.room.name": 50},
+                    "users": {userid1: 100,
+                              userid2: 60},
+                },
+            ),
+            "A5": DictObj(
+                type=EventTypes.Name,
+                state_key="",
+            ),
+            "B": DictObj(
+                type=EventTypes.PowerLevels,
+                state_key="",
+                content={
+                    "events": {"m.room.name": 50},
+                    "users": {userid2: 30},
+                },
+            ),
+            "C": DictObj(
+                type=EventTypes.Name,
+                state_key="",
+                sender=userid2,
+            ),
+            "D": DictObj(
+                type=EventTypes.Message,
+            ),
+        }
+        edges = {
+            "A2": ["A1"],
+            "A3": ["A2"],
+            "A4": ["A3"],
+            "A5": ["A4"],
+            "B": ["A5"],
+            "C": ["A5"],
+            "D": ["B", "C"]
+        }
+        self._add_depths(nodes, edges)
+        graph = Graph(nodes, edges)
+
+        store = StateGroupStore()
+        self.store.get_state_groups.side_effect = store.get_state_groups
+
+        context_store = {}
+
+        for event in graph.walk():
+            context = yield self.state.compute_event_context(event)
+            store.store_state_groups(event, context)
+            context_store[event.event_id] = context
+
+        self.assertSetEqual(
+            {"A1", "A2", "A3", "A5", "B"},
+            {e.event_id for e in context_store["D"].current_state.values()}
+        )
+
+    def _add_depths(self, nodes, edges):
+        def _get_depth(ev):
+            node = nodes[ev]
+            if 'depth' not in node:
+                prevs = edges[ev]
+                depth = max(_get_depth(prev) for prev in prevs) + 1
+                node['depth'] = depth
+            return node['depth']
+
+        for n in nodes:
+            _get_depth(n)
+
+    @defer.inlineCallbacks
     def test_annotate_with_old_message(self):
         event = create_event(type="test_message", name="event")
 
@@ -432,13 +525,19 @@ class StateTestCase(unittest.TestCase):
     def test_resolve_message_conflict(self):
         event = create_event(type="test_message", name="event")
 
+        creation = create_event(
+            type=EventTypes.Create, state_key=""
+        )
+
         old_state_1 = [
+            creation,
             create_event(type="test1", state_key="1"),
             create_event(type="test1", state_key="2"),
             create_event(type="test2", state_key=""),
         ]
 
         old_state_2 = [
+            creation,
             create_event(type="test1", state_key="1"),
             create_event(type="test3", state_key="2"),
             create_event(type="test4", state_key=""),
@@ -446,7 +545,7 @@ class StateTestCase(unittest.TestCase):
 
         context = yield self._get_context(event, old_state_1, old_state_2)
 
-        self.assertEqual(len(context.current_state), 5)
+        self.assertEqual(len(context.current_state), 6)
 
         self.assertIsNone(context.state_group)
 
@@ -454,13 +553,19 @@ class StateTestCase(unittest.TestCase):
     def test_resolve_state_conflict(self):
         event = create_event(type="test4", state_key="", name="event")
 
+        creation = create_event(
+            type=EventTypes.Create, state_key=""
+        )
+
         old_state_1 = [
+            creation,
             create_event(type="test1", state_key="1"),
             create_event(type="test1", state_key="2"),
             create_event(type="test2", state_key=""),
         ]
 
         old_state_2 = [
+            creation,
             create_event(type="test1", state_key="1"),
             create_event(type="test3", state_key="2"),
             create_event(type="test4", state_key=""),
@@ -468,7 +573,7 @@ class StateTestCase(unittest.TestCase):
 
         context = yield self._get_context(event, old_state_1, old_state_2)
 
-        self.assertEqual(len(context.current_state), 5)
+        self.assertEqual(len(context.current_state), 6)
 
         self.assertIsNone(context.state_group)
 
@@ -484,36 +589,45 @@ class StateTestCase(unittest.TestCase):
             }
         )
 
+        creation = create_event(
+            type=EventTypes.Create, state_key="",
+            content={"creator": "@foo:bar"}
+        )
+
         old_state_1 = [
+            creation,
             member_event,
             create_event(type="test1", state_key="1", depth=1),
         ]
 
         old_state_2 = [
+            creation,
             member_event,
             create_event(type="test1", state_key="1", depth=2),
         ]
 
         context = yield self._get_context(event, old_state_1, old_state_2)
 
-        self.assertEqual(old_state_2[1], context.current_state[("test1", "1")])
+        self.assertEqual(old_state_2[2], context.current_state[("test1", "1")])
 
         # Reverse the depth to make sure we are actually using the depths
         # during state resolution.
 
         old_state_1 = [
+            creation,
             member_event,
             create_event(type="test1", state_key="1", depth=2),
         ]
 
         old_state_2 = [
+            creation,
             member_event,
             create_event(type="test1", state_key="1", depth=1),
         ]
 
         context = yield self._get_context(event, old_state_1, old_state_2)
 
-        self.assertEqual(old_state_1[1], context.current_state[("test1", "1")])
+        self.assertEqual(old_state_1[2], context.current_state[("test1", "1")])
 
     def _get_context(self, event, old_state_1, old_state_2):
         group_name_1 = "group_name_1"
diff --git a/tests/test_types.py b/tests/test_types.py
index b29a8415b1..495cd20f02 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -15,13 +15,14 @@
 
 from tests import unittest
 
+from synapse.api.errors import SynapseError
 from synapse.server import BaseHomeServer
 from synapse.types import UserID, RoomAlias
 
 mock_homeserver = BaseHomeServer(hostname="my.domain")
 
-class UserIDTestCase(unittest.TestCase):
 
+class UserIDTestCase(unittest.TestCase):
     def test_parse(self):
         user = UserID.from_string("@1234abcd:my.domain")
 
@@ -29,6 +30,11 @@ class UserIDTestCase(unittest.TestCase):
         self.assertEquals("my.domain", user.domain)
         self.assertEquals(True, mock_homeserver.is_mine(user))
 
+    def test_parse_empty(self):
+        with self.assertRaises(SynapseError):
+            UserID.from_string("")
+
+
     def test_build(self):
         user = UserID("5678efgh", "my.domain")
 
@@ -44,7 +50,6 @@ class UserIDTestCase(unittest.TestCase):
 
 
 class RoomAliasTestCase(unittest.TestCase):
-
     def test_parse(self):
         room = RoomAlias.from_string("#channel:my.domain")
 
diff --git a/tests/util/test_lock.py b/tests/util/test_lock.py
deleted file mode 100644
index 6a1e521b1e..0000000000
--- a/tests/util/test_lock.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from twisted.internet import defer
-from tests import unittest
-
-from synapse.util.lockutils import LockManager
-
-
-class LockManagerTestCase(unittest.TestCase):
-
-    def setUp(self):
-        self.lock_manager = LockManager()
-
-    @defer.inlineCallbacks
-    def test_one_lock(self):
-        key = "test"
-        deferred_lock1 = self.lock_manager.lock(key)
-
-        self.assertTrue(deferred_lock1.called)
-
-        lock1 = yield deferred_lock1
-
-        self.assertFalse(lock1.released)
-
-        lock1.release()
-
-        self.assertTrue(lock1.released)
-
-    @defer.inlineCallbacks
-    def test_concurrent_locks(self):
-        key = "test"
-        deferred_lock1 = self.lock_manager.lock(key)
-        deferred_lock2 = self.lock_manager.lock(key)
-
-        self.assertTrue(deferred_lock1.called)
-        self.assertFalse(deferred_lock2.called)
-
-        lock1 = yield deferred_lock1
-
-        self.assertFalse(lock1.released)
-        self.assertFalse(deferred_lock2.called)
-
-        lock1.release()
-
-        self.assertTrue(lock1.released)
-        self.assertTrue(deferred_lock2.called)
-
-        lock2 = yield deferred_lock2
-
-        lock2.release()
-
-    @defer.inlineCallbacks
-    def test_sequential_locks(self):
-        key = "test"
-        deferred_lock1 = self.lock_manager.lock(key)
-
-        self.assertTrue(deferred_lock1.called)
-
-        lock1 = yield deferred_lock1
-
-        self.assertFalse(lock1.released)
-
-        lock1.release()
-
-        self.assertTrue(lock1.released)
-
-        deferred_lock2 = self.lock_manager.lock(key)
-
-        self.assertTrue(deferred_lock2.called)
-
-        lock2 = yield deferred_lock2
-
-        self.assertFalse(lock2.released)
-
-        lock2.release()
-
-        self.assertTrue(lock2.released)
-
-    @defer.inlineCallbacks
-    def test_with_statement(self):
-        key = "test"
-        with (yield self.lock_manager.lock(key)) as lock:
-            self.assertFalse(lock.released)
-
-        self.assertTrue(lock.released)
-
-    @defer.inlineCallbacks
-    def test_two_with_statement(self):
-        key = "test"
-        with (yield self.lock_manager.lock(key)):
-            pass
-
-        with (yield self.lock_manager.lock(key)):
-            pass
diff --git a/tests/utils.py b/tests/utils.py
index eb035cf48f..91040c2efd 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -16,7 +16,7 @@
 from synapse.http.server import HttpServer
 from synapse.api.errors import cs_error, CodeMessageException, StoreError
 from synapse.api.constants import EventTypes
-from synapse.storage import prepare_database
+from synapse.storage.prepare_database import prepare_database
 from synapse.storage.engines import create_engine
 from synapse.server import HomeServer
 
@@ -27,6 +27,7 @@ from twisted.enterprise.adbapi import ConnectionPool
 
 from collections import namedtuple
 from mock import patch, Mock
+import hashlib
 import urllib
 import urlparse
 
@@ -44,6 +45,8 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
         config.signing_key = [MockKey()]
         config.event_cache_size = 1
         config.disable_registration = False
+        config.macaroon_secret_key = "not even a little secret"
+        config.server_name = "server.under.test"
 
     if "clock" not in kargs:
         kargs["clock"] = MockClock()
@@ -65,6 +68,18 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
             **kargs
         )
 
+    # bcrypt is far too slow to run in unit tests
+    def swap_out_hash_for_testing(old_build_handlers):
+        def build_handlers():
+            handlers = old_build_handlers()
+            auth_handler = handlers.auth_handler
+            auth_handler.hash = lambda p: hashlib.md5(p).hexdigest()
+            auth_handler.validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h
+            return handlers
+        return build_handlers
+
+    hs.build_handlers = swap_out_hash_for_testing(hs.build_handlers)
+
     defer.returnValue(hs)
 
 
@@ -228,6 +243,9 @@ class MockClock(object):
             else:
                 self.timers.append(t)
 
+    def advance_time_msec(self, ms):
+        self.advance_time(ms / 1000.)
+
 
 class SQLiteMemoryDbPool(ConnectionPool, object):
     def __init__(self):
@@ -275,12 +293,10 @@ class MemoryDataStore(object):
             raise StoreError(400, "User in use.")
         self.tokens_to_users[token] = user_id
 
-    def get_user_by_token(self, token):
+    def get_user_by_access_token(self, token):
         try:
             return {
                 "name": self.tokens_to_users[token],
-                "admin": 0,
-                "device_id": None,
             }
         except:
             raise StoreError(400, "User does not exist.")
@@ -322,7 +338,7 @@ class MemoryDataStore(object):
         ]
 
     def get_room_events_stream(self, user_id=None, from_key=None, to_key=None,
-                            room_id=None, limit=0, with_feedback=False):
+                            limit=0, with_feedback=False):
         return ([], from_key)  # TODO
 
     def get_joined_hosts_for_room(self, room_id):
@@ -378,7 +394,7 @@ class MemoryDataStore(object):
     def get_ops_levels(self, room_id):
         return defer.succeed((5, 5, 5))
 
-    def insert_client_ip(self, user, device_id, access_token, ip, user_agent):
+    def insert_client_ip(self, user, access_token, ip, user_agent):
         return defer.succeed(None)
 
 
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000000..95424765c3
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,28 @@
+[tox]
+envlist = packaging, py27, pep8
+
+[testenv]
+deps =
+    coverage
+    Twisted>=15.1
+    mock
+    python-subunit
+    junitxml
+setenv =
+    PYTHONDONTWRITEBYTECODE = no_byte_code
+commands =
+    /bin/bash -c "coverage run --source=synapse {envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}"
+    {env:DUMP_COVERAGE_COMMAND:coverage report -m}
+
+[testenv:packaging]
+deps =
+    check-manifest
+commands =
+    check-manifest
+
+[testenv:pep8]
+skip_install = True
+basepython = python2.7
+deps =
+    flake8
+commands = /bin/bash -c "flake8 synapse {env:PEP8SUFFIX:}"