-rw-r--r--  .gitignore | 18
-rw-r--r--  CHANGES.rst | 137
-rw-r--r--  MANIFEST.in | 14
-rw-r--r--  README.rst | 117
-rw-r--r--  UPGRADE.rst | 29
-rw-r--r--  VERSION | 1
-rw-r--r--  contrib/graph/graph2.py | 3
-rw-r--r--  contrib/jitsimeetbridge/jitsimeetbridge.py | 410
-rwxr-xr-x  contrib/vertobot/bridge.pl | 489
-rwxr-xr-x  demo/start.sh | 3
-rw-r--r--  docs/turn-howto.rst | 2
-rw-r--r--  scripts/check_auth.py | 65
-rw-r--r--  scripts/federation_client.py | 7
-rwxr-xr-x  scripts/make_identicons.pl | 39
-rw-r--r--  setup.cfg | 8
-rwxr-xr-x  setup.py | 61
-rw-r--r--  static/client/register/index.html | 32
-rw-r--r--  static/client/register/js/jquery-2.1.3.min.js | 4
-rw-r--r--  static/client/register/js/recaptcha_ajax.js | 195
-rw-r--r--  static/client/register/js/register.js | 117
-rw-r--r--  static/client/register/register_config.sample.js | 3
-rw-r--r--  static/client/register/style.css | 56
-rw-r--r--  synapse/__init__.py | 2
-rw-r--r--  synapse/api/auth.py | 128
-rw-r--r--  synapse/api/constants.py | 7
-rw-r--r--  synapse/api/errors.py | 42
-rw-r--r--  synapse/api/filtering.py | 229
-rw-r--r--  synapse/api/urls.py | 3
-rwxr-xr-x  synapse/app/homeserver.py | 161
-rwxr-xr-x  synapse/app/synctl.py | 2
-rw-r--r--  synapse/appservice/__init__.py | 176
-rw-r--r--  synapse/appservice/api.py | 108
-rw-r--r--  synapse/config/_base.py | 15
-rw-r--r--  synapse/config/database.py | 5
-rw-r--r--  synapse/config/homeserver.py | 3
-rw-r--r--  synapse/config/logger.py | 4
-rw-r--r--  synapse/config/ratelimiting.py | 36
-rw-r--r--  synapse/config/registration.py | 33
-rw-r--r--  synapse/config/server.py | 10
-rw-r--r--  synapse/config/tls.py | 15
-rw-r--r--  synapse/config/voip.py | 2
-rw-r--r--  synapse/crypto/context_factory.py | 5
-rw-r--r--  synapse/crypto/keyclient.py | 13
-rw-r--r--  synapse/crypto/keyring.py | 27
-rw-r--r--  synapse/events/__init__.py | 10
-rw-r--r--  synapse/events/builder.py | 5
-rw-r--r--  synapse/events/snapshot.py | 1
-rw-r--r--  synapse/events/utils.py | 102
-rw-r--r--  synapse/federation/federation_base.py | 133
-rw-r--r--  synapse/federation/federation_client.py | 563
-rw-r--r--  synapse/federation/federation_server.py | 474
-rw-r--r--  synapse/federation/persistence.py | 7
-rw-r--r--  synapse/federation/replication.py | 897
-rw-r--r--  synapse/federation/transaction_queue.py | 359
-rw-r--r--  synapse/federation/transport/__init__.py | 12
-rw-r--r--  synapse/federation/transport/client.py | 67
-rw-r--r--  synapse/federation/transport/server.py | 68
-rw-r--r--  synapse/handlers/__init__.py | 7
-rw-r--r--  synapse/handlers/_base.py | 5
-rw-r--r--  synapse/handlers/appservice.py | 211
-rw-r--r--  synapse/handlers/directory.py | 122
-rw-r--r--  synapse/handlers/events.py | 86
-rw-r--r--  synapse/handlers/federation.py | 637
-rw-r--r--  synapse/handlers/login.py | 33
-rw-r--r--  synapse/handlers/message.py | 77
-rw-r--r--  synapse/handlers/presence.py | 48
-rw-r--r--  synapse/handlers/profile.py | 23
-rw-r--r--  synapse/handlers/register.py | 118
-rw-r--r--  synapse/handlers/room.py | 71
-rw-r--r--  synapse/handlers/sync.py | 439
-rw-r--r--  synapse/handlers/typing.py | 5
-rw-r--r--  synapse/http/client.py | 96
-rw-r--r--  synapse/http/matrixfederationclient.py | 118
-rw-r--r--  synapse/http/server.py | 85
-rw-r--r--  synapse/http/server_key_resource.py | 6
-rw-r--r--  synapse/http/servlet.py | 113
-rw-r--r--  synapse/notifier.py | 102
-rw-r--r--  synapse/push/__init__.py | 427
-rw-r--r--  synapse/push/baserules.py | 209
-rw-r--r--  synapse/push/httppusher.py | 148
-rw-r--r--  synapse/push/pusherpool.py | 154
-rw-r--r--  synapse/push/rulekinds.py | 8
-rw-r--r--  synapse/python_dependencies.py | 50
-rw-r--r--  synapse/rest/__init__.py | 35
-rw-r--r--  synapse/rest/appservice/__init__.py | 14
-rw-r--r--  synapse/rest/appservice/v1/__init__.py | 29
-rw-r--r--  synapse/rest/appservice/v1/base.py | 48
-rw-r--r--  synapse/rest/appservice/v1/register.py | 98
-rw-r--r--  synapse/rest/base.py | 80
-rw-r--r--  synapse/rest/client/__init__.py | 14
-rw-r--r--  synapse/rest/client/v1/__init__.py | 44
-rw-r--r--  synapse/rest/client/v1/admin.py (renamed from synapse/rest/admin.py) | 10
-rw-r--r--  synapse/rest/client/v1/base.py | 52
-rw-r--r--  synapse/rest/client/v1/directory.py (renamed from synapse/rest/directory.py) | 77
-rw-r--r--  synapse/rest/client/v1/events.py (renamed from synapse/rest/events.py) | 18
-rw-r--r--  synapse/rest/client/v1/initial_sync.py (renamed from synapse/rest/initial_sync.py) | 6
-rw-r--r--  synapse/rest/client/v1/login.py (renamed from synapse/rest/login.py) | 10
-rw-r--r--  synapse/rest/client/v1/presence.py (renamed from synapse/rest/presence.py) | 29
-rw-r--r--  synapse/rest/client/v1/profile.py (renamed from synapse/rest/profile.py) | 25
-rw-r--r--  synapse/rest/client/v1/push_rule.py | 456
-rw-r--r--  synapse/rest/client/v1/pusher.py | 89
-rw-r--r--  synapse/rest/client/v1/register.py (renamed from synapse/rest/register.py) | 36
-rw-r--r--  synapse/rest/client/v1/room.py (renamed from synapse/rest/room.py) | 112
-rw-r--r--  synapse/rest/client/v1/transactions.py (renamed from synapse/rest/transactions.py) | 0
-rw-r--r--  synapse/rest/client/v1/voip.py (renamed from synapse/rest/voip.py) | 6
-rw-r--r--  synapse/rest/client/v2_alpha/__init__.py | 34
-rw-r--r--  synapse/rest/client/v2_alpha/_base.py | 38
-rw-r--r--  synapse/rest/client/v2_alpha/filter.py | 104
-rw-r--r--  synapse/rest/client/v2_alpha/sync.py | 207
-rw-r--r--  synapse/rest/media/__init__.py (renamed from synapse/media/__init__.py) | 0
-rw-r--r--  synapse/rest/media/v0/__init__.py (renamed from synapse/media/v0/__init__.py) | 0
-rw-r--r--  synapse/rest/media/v0/content_repository.py (renamed from synapse/media/v0/content_repository.py) | 4
-rw-r--r--  synapse/rest/media/v1/__init__.py (renamed from synapse/media/v1/__init__.py) | 0
-rw-r--r--  synapse/rest/media/v1/base_resource.py (renamed from synapse/media/v1/base_resource.py) | 4
-rw-r--r--  synapse/rest/media/v1/download_resource.py (renamed from synapse/media/v1/download_resource.py) | 0
-rw-r--r--  synapse/rest/media/v1/filepath.py (renamed from synapse/media/v1/filepath.py) | 0
-rw-r--r--  synapse/rest/media/v1/identicon_resource.py | 51
-rw-r--r--  synapse/rest/media/v1/media_repository.py (renamed from synapse/media/v1/media_repository.py) | 3
-rw-r--r--  synapse/rest/media/v1/thumbnail_resource.py (renamed from synapse/media/v1/thumbnail_resource.py) | 0
-rw-r--r--  synapse/rest/media/v1/thumbnailer.py (renamed from synapse/media/v1/thumbnailer.py) | 0
-rw-r--r--  synapse/rest/media/v1/upload_resource.py (renamed from synapse/media/v1/upload_resource.py) | 64
-rw-r--r--  synapse/server.py | 50
-rw-r--r--  synapse/state.py | 255
-rw-r--r--  synapse/storage/__init__.py | 648
-rw-r--r--  synapse/storage/_base.py | 253
-rw-r--r--  synapse/storage/appservice.py | 338
-rw-r--r--  synapse/storage/event_federation.py | 62
-rw-r--r--  synapse/storage/filtering.py | 63
-rw-r--r--  synapse/storage/push_rule.py | 264
-rw-r--r--  synapse/storage/pusher.py | 173
-rw-r--r--  synapse/storage/registration.py | 3
-rw-r--r--  synapse/storage/rejections.py | 43
-rw-r--r--  synapse/storage/room.py | 67
-rw-r--r--  synapse/storage/roommember.py | 83
-rw-r--r--  synapse/storage/schema/delta/11/v11.sql (renamed from synapse/storage/schema/delta/v11.sql) | 0
-rw-r--r--  synapse/storage/schema/delta/12/v12.sql | 67
-rw-r--r--  synapse/storage/schema/delta/13/v13.sql (renamed from synapse/storage/schema/delta/v3.sql) | 25
-rw-r--r--  synapse/storage/schema/delta/14/upgrade_appservice_db.py | 23
-rw-r--r--  synapse/storage/schema/delta/14/v14.sql | 9
-rw-r--r--  synapse/storage/schema/delta/v2.sql | 168
-rw-r--r--  synapse/storage/schema/delta/v4.sql | 26
-rw-r--r--  synapse/storage/schema/delta/v5.sql | 30
-rw-r--r--  synapse/storage/schema/delta/v6.sql | 31
-rw-r--r--  synapse/storage/schema/delta/v8.sql | 34
-rw-r--r--  synapse/storage/schema/delta/v9.sql | 79
-rw-r--r--  synapse/storage/schema/full_schemas/11/event_edges.sql (renamed from synapse/storage/schema/event_edges.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/event_signatures.sql (renamed from synapse/storage/schema/event_signatures.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/im.sql (renamed from synapse/storage/schema/im.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/keys.sql (renamed from synapse/storage/schema/keys.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/media_repository.sql (renamed from synapse/storage/schema/media_repository.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/presence.sql (renamed from synapse/storage/schema/presence.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/profiles.sql (renamed from synapse/storage/schema/profiles.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/redactions.sql (renamed from synapse/storage/schema/redactions.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/room_aliases.sql (renamed from synapse/storage/schema/room_aliases.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/state.sql (renamed from synapse/storage/schema/state.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/transactions.sql (renamed from synapse/storage/schema/transactions.sql) | 0
-rw-r--r--  synapse/storage/schema/full_schemas/11/users.sql (renamed from synapse/storage/schema/users.sql) | 0
-rw-r--r--  synapse/storage/schema/schema_version.sql | 30
-rw-r--r--  synapse/storage/stream.py | 147
-rw-r--r--  synapse/storage/transactions.py | 16
-rw-r--r--  synapse/types.py | 3
-rw-r--r--  synapse/util/__init__.py | 61
-rw-r--r--  synapse/util/expiringcache.py | 115
-rw-r--r--  synapse/util/frozenutils.py | 8
-rw-r--r--  synapse/util/lrucache.py | 117
-rw-r--r--  synapse/util/ratelimitutils.py | 216
-rw-r--r--  synapse/util/retryutils.py | 153
-rw-r--r--  tests/api/test_auth.py | 139
-rw-r--r--  tests/api/test_filtering.py | 502
-rw-r--r--  tests/appservice/__init__.py | 14
-rw-r--r--  tests/appservice/test_appservice.py | 225
-rw-r--r--  tests/federation/test_federation.py | 11
-rw-r--r--  tests/handlers/test_appservice.py | 93
-rw-r--r--  tests/handlers/test_directory.py | 21
-rw-r--r--  tests/handlers/test_federation.py | 16
-rw-r--r--  tests/handlers/test_presence.py | 518
-rw-r--r--  tests/handlers/test_presencelike.py | 48
-rw-r--r--  tests/handlers/test_profile.py | 37
-rw-r--r--  tests/handlers/test_room.py | 28
-rw-r--r--  tests/handlers/test_typing.py | 54
-rw-r--r--  tests/rest/__init__.py | 3
-rw-r--r--  tests/rest/client/__init__.py | 14
-rw-r--r--  tests/rest/client/v1/__init__.py (renamed from synapse/http/agent_name.py) | 5
-rw-r--r--  tests/rest/client/v1/test_events.py (renamed from tests/rest/test_events.py) | 27
-rw-r--r--  tests/rest/client/v1/test_presence.py (renamed from tests/rest/test_presence.py) | 61
-rw-r--r--  tests/rest/client/v1/test_profile.py (renamed from tests/rest/test_profile.py) | 20
-rw-r--r--  tests/rest/client/v1/test_rooms.py (renamed from tests/rest/test_rooms.py) | 141
-rw-r--r--  tests/rest/client/v1/test_typing.py (renamed from tests/rest/test_typing.py) | 23
-rw-r--r--  tests/rest/client/v1/utils.py (renamed from tests/rest/utils.py) | 0
-rw-r--r--  tests/rest/client/v2_alpha/__init__.py | 63
-rw-r--r--  tests/rest/client/v2_alpha/test_filter.py | 95
-rw-r--r--  tests/storage/TESTS_NEEDED_FOR | 5
-rw-r--r--  tests/storage/test__base.py | 110
-rw-r--r--  tests/storage/test_appservice.py | 116
-rw-r--r--  tests/storage/test_base.py | 6
-rw-r--r--  tests/storage/test_directory.py | 16
-rw-r--r--  tests/storage/test_presence.py | 16
-rw-r--r--  tests/storage/test_profile.py | 13
-rw-r--r--  tests/storage/test_redaction.py | 21
-rw-r--r--  tests/storage/test_registration.py | 20
-rw-r--r--  tests/storage/test_room.py | 26
-rw-r--r--  tests/storage/test_roommember.py | 23
-rw-r--r--  tests/storage/test_stream.py | 23
-rw-r--r--  tests/test_state.py | 433
-rw-r--r--  tests/test_types.py | 12
-rw-r--r--  tests/util/test_lrucache.py | 56
-rw-r--r--  tests/utils.py | 31
207 files changed, 13910 insertions, 3564 deletions
diff --git a/.gitignore b/.gitignore
index af90668c89..960183a794 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,17 +26,19 @@ htmlcov
 
 demo/*.db
 demo/*.log
+demo/*.log.*
 demo/*.pid
+demo/media_store.*
 demo/etc
 
-graph/*.svg
-graph/*.png
-graph/*.dot
-
-**/webclient/config.js
-**/webclient/test/coverage/
-**/webclient/test/environment-protractor.js
-
 uploads
 
 .idea/
+media_store/
+
+*.tac
+
+build/
+
+localhost-800*/
+static/client/register/register_config.js
diff --git a/CHANGES.rst b/CHANGES.rst
index 297ae914fd..f89542a2bb 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,54 +1,135 @@
+Changes in synapse v0.8.0 (2015-03-06)
+======================================
+
+General:
+
+* Add support for registration fallback. This is a page hosted on the server
+  which allows a user to register for an account, regardless of what client
+  they are using (e.g. mobile devices).
+
+* Added new default push rules and made them configurable by clients:
+
+  * Suppress all notice messages.
+  * Notify when invited to a new room.
+  * Notify for messages that don't match any rule.
+  * Notify on incoming call.
+
+Federation:
+
+* Added per-host, server-side rate-limiting of incoming federation requests.
+* Added a ``/get_missing_events/`` API to federation to reduce the number of
+  ``/events/`` requests.
+
+Configuration:
+
+* Added configuration option to disable registration:
+  ``disable_registration``.
+* Added configuration option to change soft limit of number of open file
+  descriptors: ``soft_file_limit``.
+* Make ``tls_private_key_path`` optional when running with ``no_tls``.
+
+Application services:
+
+* Application services can now poll the CS API ``/events`` for their events,
+  by providing their application service ``access_token``.
+* Added exclusive namespace support to the application services API.
+
+
+Changes in synapse v0.7.1 (2015-02-19)
+======================================
+
+* Initial alpha implementation of parts of the Application Services API.
+  Including:
+
+  - AS Registration / Unregistration
+  - User Query API
+  - Room Alias Query API
+  - Push transport for receiving events.
+  - User/Alias namespace admin control
+
+* Add cache when fetching events from remote servers to stop repeatedly
+  fetching events with bad signatures.
+* Respect the per remote server retry scheme when fetching both events and
+  server keys to reduce the number of times we send requests to dead servers.
+* Inform remote servers when the local server fails to handle a received event.
+* Turn off python bytecode generation due to problems experienced when
+  upgrading from previous versions.
+
+Changes in synapse v0.7.0 (2015-02-12)
+======================================
+
+* Add initial implementation of the query auth federation API, allowing
+  servers to agree on whether an event should be allowed or rejected.
+* Persist events we have rejected from federation, fixing the bug where
+  servers would keep requesting the same events.
+* Various federation performance improvements, including:
+
+  - Add in memory caches on queries such as:
+
+     * Computing the state of a room at a point in time, used for
+       authorization on federation requests.
+     * Fetching events from the database.
+     * Users' room memberships, used for authorizing presence updates.
+
+  - Upgraded JSON library to improve parsing and serialisation speeds.
+
+* Add default avatars to new user accounts using the pydenticon library.
+* Correctly time out federation requests.
+* Retry federation requests against different servers.
+* Add support for push and push rules.
+* Add alpha versions of proposed new CSv2 APIs, including ``/sync`` API.
+
 Changes in synapse 0.6.1 (2015-01-07)
 =====================================
 
- * Major optimizations to improve performance of initial sync and event sending
-   in large rooms (by up to 10x)
- * Media repository now includes a Content-Length header on media downloads.
- * Improve quality of thumbnails by changing resizing algorithm.
+* Major optimizations to improve performance of initial sync and event sending
+  in large rooms (by up to 10x)
+* Media repository now includes a Content-Length header on media downloads.
+* Improve quality of thumbnails by changing resizing algorithm.
 
 Changes in synapse 0.6.0 (2014-12-16)
 =====================================
 
- * Add new API for media upload and download that supports thumbnailing.
- * Replicate media uploads over multiple homeservers so media is always served
-   to clients from their local homeserver.  This obsoletes the
-   --content-addr parameter and confusion over accessing content directly
-   from remote homeservers.
- * Implement exponential backoff when retrying federation requests when
-   sending to remote homeservers which are offline.
- * Implement typing notifications.
- * Fix bugs where we sent events with invalid signatures due to bugs where
-   we incorrectly persisted events.
- * Improve performance of database queries involving retrieving events.
+* Add new API for media upload and download that supports thumbnailing.
+* Replicate media uploads over multiple homeservers so media is always served
+  to clients from their local homeserver.  This obsoletes the
+  --content-addr parameter and confusion over accessing content directly
+  from remote homeservers.
+* Implement exponential backoff when retrying federation requests when
+  sending to remote homeservers which are offline.
+* Implement typing notifications.
+* Fix bugs where we sent events with invalid signatures due to bugs where
+  we incorrectly persisted events.
+* Improve performance of database queries involving retrieving events.
 
 Changes in synapse 0.5.4a (2014-12-13)
 ======================================
 
- * Fix bug while generating the error message when a file path specified in
-   the config doesn't exist.
+* Fix bug while generating the error message when a file path specified in
+  the config doesn't exist.
 
 Changes in synapse 0.5.4 (2014-12-03)
 =====================================
 
- * Fix presence bug where some rooms did not display presence updates for
-   remote users.
- * Do not log SQL timing log lines when started with "-v"
- * Fix potential memory leak.
+* Fix presence bug where some rooms did not display presence updates for
+  remote users.
+* Do not log SQL timing log lines when started with "-v"
+* Fix potential memory leak.
 
 Changes in synapse 0.5.3c (2014-12-02)
 ======================================
 
- * Change the default value for the `content_addr` option to use the HTTP
-   listener, as by default the HTTPS listener will be using a self-signed
-   certificate.
+* Change the default value for the `content_addr` option to use the HTTP
+  listener, as by default the HTTPS listener will be using a self-signed
+  certificate.
 
 Changes in synapse 0.5.3 (2014-11-27)
 =====================================
 
- * Fix bug that caused joining a remote room to fail if a single event was not
-   signed correctly.
- * Fix bug which caused servers to continuously try and fetch events from other
-   servers.
+* Fix bug that caused joining a remote room to fail if a single event was not
+  signed correctly.
+* Fix bug which caused servers to continuously try and fetch events from other
+  servers.
 
 Changes in synapse 0.5.2 (2014-11-26)
 =====================================
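
A note on the application services change in v0.8.0 above: an AS can now
consume its events via the ordinary v1 ``/events`` long-poll, authenticating
with its ``as_token``. The following is a minimal sketch of that polling
loop, not code from this commit; it assumes the ``requests`` library, and
``HS_URL``/``AS_TOKEN`` are placeholders::

    import requests  # illustrative HTTP client choice; any client works

    HS_URL = "http://localhost:8008"   # placeholder homeserver base URL
    AS_TOKEN = "YOUR_AS_TOKEN"         # the application service's as_token

    def poll_once(from_token=None):
        # Long-poll the v1 client-server event stream, authenticating with
        # the AS token passed as the access_token query parameter.
        params = {"access_token": AS_TOKEN, "timeout": 60000}
        if from_token is not None:
            params["from"] = from_token
        resp = requests.get(HS_URL + "/_matrix/client/api/v1/events",
                            params=params)
        resp.raise_for_status()
        body = resp.json()
        return body.get("chunk", []), body.get("end", from_token)

    token = None
    while True:
        events, token = poll_once(token)
        for ev in events:
            print(ev["type"])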
diff --git a/MANIFEST.in b/MANIFEST.in
index a1a77ff540..8243a942ee 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,14 @@
+include synctl
+include LICENSE
+include VERSION
+include *.rst
+include demo/README
+
+recursive-include synapse/storage/schema *.sql
+
+recursive-include demo *.dh
+recursive-include demo *.py
+recursive-include demo *.sh
 recursive-include docs *
+recursive-include scripts *
 recursive-include tests *.py
-recursive-include synapse/storage/schema *.sql
-recursive-include syweb/webclient *
diff --git a/README.rst b/README.rst
index 768da3df64..c2af7c9332 100644
--- a/README.rst
+++ b/README.rst
@@ -6,7 +6,7 @@ VoIP.  The basics you need to know to get up and running are:
 
 - Everything in Matrix happens in a room.  Rooms are distributed and do not
   exist on any single server.  Rooms can be located using convenience aliases 
-  like ``#matrix:matrix.org`` or ``#test:localhost:8008``.
+  like ``#matrix:matrix.org`` or ``#test:localhost:8448``.
 
 - Matrix user IDs look like ``@matthew:matrix.org`` (although in the future
   you will normally refer to yourself and others using a 3PID: email
@@ -95,27 +95,36 @@ Installing prerequisites on Ubuntu or Debian::
 
     $ sudo apt-get install build-essential python2.7-dev libffi-dev \
                            python-pip python-setuptools sqlite3 \
-                           libssl-dev
+                           libssl-dev python-virtualenv libjpeg-dev
+                           
+Installing prerequisites on ArchLinux::
+
+    $ sudo pacman -S base-devel python2 python-pip \
+                     python-setuptools python-virtualenv sqlite3
 
 Installing prerequisites on Mac OS X::
 
     $ xcode-select --install
+    $ sudo pip install virtualenv
     
 To install the synapse homeserver run::
 
-    $ pip install --user --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
+    $ virtualenv ~/.synapse
+    $ source ~/.synapse/bin/activate
+    $ pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
 
-This installs synapse, along with the libraries it uses, into
-``$HOME/.local/lib/`` on Linux or ``$HOME/Library/Python/2.7/lib/`` on OSX.
+This installs synapse, along with the libraries it uses, into a virtual
+environment under ``~/.synapse``.
 
-Your python may not give priority to locally installed libraries over system
-libraries, in which case you must add your local packages to your python path::
+To set up your homeserver, run (in your virtualenv, as before)::
 
-    $ # on Linux:
-    $ export PYTHONPATH=$HOME/.local/lib/python2.7/site-packages:$PYTHONPATH
+    $ cd ~/.synapse
+    $ python -m synapse.app.homeserver \
+        --server-name machine.my.domain.name \
+        --config-path homeserver.yaml \
+        --generate-config
 
-    $ # on OSX:
-    $ export PYTHONPATH=$HOME/Library/Python/2.7/lib/python/site-packages:$PYTHONPATH
+...substituting your host and domain name as appropriate.
 
 For reliable VoIP calls to be routed via this homeserver, you MUST configure
 a TURN server.  See docs/turn-howto.rst for details.
@@ -128,23 +137,57 @@ you get errors about ``error: no such option: --process-dependency-links`` you
 may need to manually upgrade it::
 
     $ sudo pip install --upgrade pip
-    
+
 If pip crashes mid-installation for reason (e.g. lost terminal), pip may
 refuse to run until you remove the temporary installation directory it
 created. To reset the installation::
 
     $ rm -rf /tmp/pip_install_matrix
-    
+
 pip seems to leak *lots* of memory during installation.  For instance, a Linux 
 host with 512MB of RAM may run out of memory whilst installing Twisted.  If this 
 happens, you will have to individually install the dependencies which are 
 failing, e.g.::
 
-    $ pip install --user twisted
+    $ pip install twisted
 
 On OSX, if you encounter clang: error: unknown argument: '-mno-fused-madd' you
 will need to export CFLAGS=-Qunused-arguments.
 
+ArchLinux
+---------
+
+Installation on ArchLinux may encounter a few hiccups as Arch defaults to
+python 3, while synapse currently requires python 2.7.
+
+pip may be outdated (6.0.7-1) and need upgrading (to 6.0.8-1)::
+
+    $ sudo pip2.7 install --upgrade pip
+    
+You may also need to explicitly specify python 2.7 during the install
+request::
+
+    $ pip2.7 install --process-dependency-links \
+        https://github.com/matrix-org/synapse/tarball/master
+    
+If you encounter an error with the bcrypt library causing a ``Wrong ELF
+Class: ELFCLASS32`` error (on x64 systems), you may need to reinstall
+py-bcrypt so that it is compiled for the right architecture. (This should
+not be needed if installing under virtualenv)::
+
+    $ sudo pip2.7 uninstall py-bcrypt
+    $ sudo pip2.7 install py-bcrypt
+    
+During setup of the homeserver you need to call python2.7 directly again::
+
+    $ cd ~/.synapse
+    $ python2.7 -m synapse.app.homeserver \
+      --server-name machine.my.domain.name \
+      --config-path homeserver.yaml \
+      --generate-config
+        
+...substituting your host and domain name as appropriate.
+
 Windows Install
 ---------------
 Synapse can be installed on Cygwin. It requires the following Cygwin packages:
@@ -155,7 +198,7 @@ Synapse can be installed on Cygwin. It requires the following Cygwin packages:
  - openssl (and openssl-devel, python-openssl)
  - python
  - python-setuptools
- 
+
 The content repository requires additional packages and will be unable to process
 uploads without them:
  - libjpeg8
@@ -182,23 +225,13 @@ Running Your Homeserver
 To actually run your new homeserver, pick a working directory for Synapse to run 
 (e.g. ``~/.synapse``), and::
 
-    $ mkdir ~/.synapse
     $ cd ~/.synapse
-    
-    $ # on Linux
-    $ ~/.local/bin/synctl start
-    
-    $ # on OSX
-    $ ~/Library/Python/2.7/bin/synctl start
+    $ source ./bin/activate
+    $ synctl start
 
 Troubleshooting Running
 -----------------------
 
-If ``synctl`` fails with ``pkg_resources.DistributionNotFound`` errors you may 
-need a newer version of setuptools than that provided by your OS.::
-
-    $ sudo pip install setuptools --upgrade
-
 If synapse fails with ``missing "sodium.h"`` crypto errors, you may need 
 to manually upgrade PyNaCL, as synapse uses NaCl (http://nacl.cr.yp.to/) for 
 encryption and digital signatures.
@@ -214,6 +247,14 @@ fix try re-installing from PyPI or directly from
     $ # Install from github
     $ pip install --user https://github.com/pyca/pynacl/tarball/master
 
+ArchLinux
+---------
+
+If running ``synctl start`` fails with 'returned non-zero exit status 1', you will need to explicitly call Python 2.7 - either running as::
+
+    $ python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml --pid-file homeserver.pid
+    
+...or by editing synctl with the correct python executable.
 
 Homeserver Development
 ======================
@@ -225,13 +266,15 @@ directory of your choice::
     $ cd synapse
 
 The homeserver has a number of external dependencies, that are easiest
-to install by making setup.py do so, in --user mode::
+to install using pip and a virtualenv::
 
-    $ python setup.py develop --user
+    $ virtualenv env
+    $ source env/bin/activate
+    $ python synapse/python_dependencies.py | xargs -n1 pip install
+    $ pip install setuptools_trial mock
 
-This will run a process of downloading and installing into your
-user's .local/lib directory all of the required dependencies that are
-missing.
+This will download and install all of the needed dependencies into the
+virtual env.
 
 Once this is done, you may wish to run the homeserver's unit tests, to
 check that everything is installed as it should be::
@@ -252,7 +295,7 @@ IMPORTANT: Before upgrading an existing homeserver to a new version, please
 refer to UPGRADE.rst for any additional instructions.
 
 Otherwise, simply re-install the new codebase over the current one - e.g.
-by ``pip install --user --process-dependency-links
+by ``pip install --process-dependency-links
 https://github.com/matrix-org/synapse/tarball/master``
 if using pip, or by ``git pull`` if running off a git working copy.
 
@@ -279,9 +322,9 @@ For the first form, simply pass the required hostname (of the machine) as the
 
     $ python -m synapse.app.homeserver \
         --server-name machine.my.domain.name \
-        --config-path homeserver.config \
+        --config-path homeserver.yaml \
         --generate-config
-    $ python -m synapse.app.homeserver --config-path homeserver.config
+    $ python -m synapse.app.homeserver --config-path homeserver.yaml
 
 Alternatively, you can run ``synctl start`` to guide you through the process.
 
@@ -301,9 +344,9 @@ SRV record, as that is the name other machines will expect it to have::
     $ python -m synapse.app.homeserver \
         --server-name YOURDOMAIN \
         --bind-port 8448 \
-        --config-path homeserver.config \
+        --config-path homeserver.yaml \
         --generate-config
-    $ python -m synapse.app.homeserver --config-path homeserver.config
+    $ python -m synapse.app.homeserver --config-path homeserver.yaml
 
 
 You may additionally want to pass one or more "-v" options, in order to
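
A quick way to verify the homeserver came up after the steps above is to hit
the v1 client API, which serves its advertised login flows on a plain GET.
A minimal sketch, assuming the default client listener on port 8008 and the
``requests`` library (neither is part of this commit)::

    import requests

    HS_URL = "http://localhost:8008"  # adjust to your configured listener

    # GET /login on the v1 client API returns the advertised login flows,
    # which doubles as a simple liveness check for a freshly started server.
    resp = requests.get(HS_URL + "/_matrix/client/api/v1/login")
    resp.raise_for_status()
    print(resp.json())  # e.g. {"flows": [{"type": "m.login.password"}]}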
diff --git a/UPGRADE.rst b/UPGRADE.rst
index 0f81f3e11f..87dd6e04a8 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -1,3 +1,32 @@
+Upgrading to v0.8.0
+===================
+
+Servers which use captchas will need to add their public key to::
+
+  static/client/register/register_config.js
+
+    window.matrixRegistrationConfig = {
+        recaptcha_public_key: "YOUR_PUBLIC_KEY"
+    };
+
+This is required in order to support registration fallback (typically used on
+mobile devices).
+
+
+Upgrading to v0.7.0
+===================
+
+New dependencies are:
+
+- pydenticon
+- simplejson
+- syutil
+- matrix-angular-sdk
+
+To pull in these dependencies in a virtual env, run::
+
+    python synapse/python_dependencies.py | xargs -n 1 pip install
+
 Upgrading to v0.6.0
 ===================
 
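For operators scripting the v0.8.0 upgrade step above, the
``register_config.js`` file can be generated from the snippet shown in
UPGRADE.rst. A hypothetical helper sketch (the output path and the
``matrixRegistrationConfig`` variable come from the upgrade notes; the
script itself is illustrative)::

    #!/usr/bin/env python
    # Writes static/client/register/register_config.js from a captcha
    # public key, using the exact snippet shown in UPGRADE.rst above.
    import sys

    TEMPLATE = '''window.matrixRegistrationConfig = {
        recaptcha_public_key: "%s"
    };
    '''

    def write_config(public_key,
                     path="static/client/register/register_config.js"):
        with open(path, "w") as f:
            f.write(TEMPLATE % public_key)

    if __name__ == "__main__":
        write_config(sys.argv[1])  # usage: python gen_register_config.py KEY
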
diff --git a/VERSION b/VERSION
deleted file mode 100644
index 3b3e723172..0000000000
--- a/VERSION
+++ /dev/null
@@ -1 +0,0 @@
-0.6.1b
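In the graph2.py diff just below, the change swaps in ``unfreeze()`` before
serialising event content: ``FrozenEvent`` stores its payload in frozen
(immutable) containers, which the stock ``json`` encoder typically refuses
to serialise. A minimal sketch of the pattern, assuming the ``frozendict``
dependency synapse uses; the example payload is illustrative::

    import json

    from frozendict import frozendict
    from synapse.util.frozenutils import unfreeze

    content = frozendict({"body": "hello", "msgtype": "m.text"})

    # json.dumps(content) would typically raise TypeError, because
    # frozendict is a Mapping rather than a dict subclass; unfreeze()
    # converts it (and any nested frozen containers) back into plain
    # dicts and lists first.
    print(json.dumps(unfreeze(content)))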
diff --git a/contrib/graph/graph2.py b/contrib/graph/graph2.py
index 6b551d42e5..d0d2cfe7c0 100644
--- a/contrib/graph/graph2.py
+++ b/contrib/graph/graph2.py
@@ -21,6 +21,7 @@ import datetime
 import argparse
 
 from synapse.events import FrozenEvent
+from synapse.util.frozenutils import unfreeze
 
 
 def make_graph(db_name, room_id, file_prefix, limit):
@@ -70,7 +71,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
             float(event.origin_server_ts) / 1000
         ).strftime('%Y-%m-%d %H:%M:%S,%f')
 
-        content = json.dumps(event.get_dict()["content"])
+        content = json.dumps(unfreeze(event.get_dict()["content"]))
 
         label = (
             "<"
diff --git a/contrib/jitsimeetbridge/jitsimeetbridge.py b/contrib/jitsimeetbridge/jitsimeetbridge.py
index dbc6f6ffa5..15f8e1c48b 100644
--- a/contrib/jitsimeetbridge/jitsimeetbridge.py
+++ b/contrib/jitsimeetbridge/jitsimeetbridge.py
@@ -39,43 +39,43 @@ ROOMDOMAIN="meet.jit.si"
 #ROOMDOMAIN="conference.jitsi.vuc.me"
 
 class TrivialMatrixClient:
-	def __init__(self, access_token):
-		self.token = None
-		self.access_token = access_token
-
-	def getEvent(self):
-		while True:
-			url = MATRIXBASE+'events?access_token='+self.access_token+"&timeout=60000"
-			if self.token:
-				url += "&from="+self.token
-			req = grequests.get(url)
-			resps = grequests.map([req])
-			obj = json.loads(resps[0].content)
-			print "incoming from matrix",obj
-			if 'end' not in obj:
-				continue
-			self.token = obj['end']
-			if len(obj['chunk']):
-				return obj['chunk'][0]
-
-	def joinRoom(self, roomId):
-		url = MATRIXBASE+'rooms/'+roomId+'/join?access_token='+self.access_token
-		print url
-		headers={ 'Content-Type': 'application/json' }
-		req = grequests.post(url, headers=headers, data='{}')
-		resps = grequests.map([req])
-		obj = json.loads(resps[0].content)
-		print "response: ",obj
-
-	def sendEvent(self, roomId, evType, event):
-		url = MATRIXBASE+'rooms/'+roomId+'/send/'+evType+'?access_token='+self.access_token
-		print url
-		print json.dumps(event)
-		headers={ 'Content-Type': 'application/json' }
-		req = grequests.post(url, headers=headers, data=json.dumps(event))
-		resps = grequests.map([req])
-		obj = json.loads(resps[0].content)
-		print "response: ",obj
+    def __init__(self, access_token):
+        self.token = None
+        self.access_token = access_token
+
+    def getEvent(self):
+        while True:
+            url = MATRIXBASE+'events?access_token='+self.access_token+"&timeout=60000"
+            if self.token:
+                url += "&from="+self.token
+            req = grequests.get(url)
+            resps = grequests.map([req])
+            obj = json.loads(resps[0].content)
+            print "incoming from matrix",obj
+            if 'end' not in obj:
+                continue
+            self.token = obj['end']
+            if len(obj['chunk']):
+                return obj['chunk'][0]
+
+    def joinRoom(self, roomId):
+        url = MATRIXBASE+'rooms/'+roomId+'/join?access_token='+self.access_token
+        print url
+        headers={ 'Content-Type': 'application/json' }
+        req = grequests.post(url, headers=headers, data='{}')
+        resps = grequests.map([req])
+        obj = json.loads(resps[0].content)
+        print "response: ",obj
+
+    def sendEvent(self, roomId, evType, event):
+        url = MATRIXBASE+'rooms/'+roomId+'/send/'+evType+'?access_token='+self.access_token
+        print url
+        print json.dumps(event)
+        headers={ 'Content-Type': 'application/json' }
+        req = grequests.post(url, headers=headers, data=json.dumps(event))
+        resps = grequests.map([req])
+        obj = json.loads(resps[0].content)
+        print "response: ",obj
 
 
 
@@ -83,178 +83,178 @@ xmppClients = {}
 
 
 def matrixLoop():
-	while True:
-		ev = matrixCli.getEvent()
-		print ev
-		if ev['type'] == 'm.room.member':
-			print 'membership event'
-			if ev['membership'] == 'invite' and ev['state_key'] == MYUSERNAME:
-				roomId = ev['room_id']
-				print "joining room %s" % (roomId)
-				matrixCli.joinRoom(roomId)
-		elif ev['type'] == 'm.room.message':
-			if ev['room_id'] in xmppClients:
-				print "already have a bridge for that user, ignoring"
-				continue
-			print "got message, connecting"
-			xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
-			gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
-		elif ev['type'] == 'm.call.invite':
-			print "Incoming call"
-			#sdp = ev['content']['offer']['sdp']
-			#print "sdp: %s" % (sdp)
-			#xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
-			#gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
-		elif ev['type'] == 'm.call.answer':
-			print "Call answered"
-			sdp = ev['content']['answer']['sdp']
-			if ev['room_id'] not in xmppClients:
-				print "We didn't have a call for that room"
-				continue
-			# should probably check call ID too
-			xmppCli = xmppClients[ev['room_id']]
-			xmppCli.sendAnswer(sdp)
-		elif ev['type'] == 'm.call.hangup':
-			if ev['room_id'] in xmppClients:
-				xmppClients[ev['room_id']].stop()
-				del xmppClients[ev['room_id']]
-			
+    while True:
+        ev = matrixCli.getEvent()
+        print ev
+        if ev['type'] == 'm.room.member':
+            print 'membership event'
+            if ev['membership'] == 'invite' and ev['state_key'] == MYUSERNAME:
+                roomId = ev['room_id']
+                print "joining room %s" % (roomId)
+                matrixCli.joinRoom(roomId)
+        elif ev['type'] == 'm.room.message':
+            if ev['room_id'] in xmppClients:
+                print "already have a bridge for that user, ignoring"
+                continue
+            print "got message, connecting"
+            xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
+            gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
+        elif ev['type'] == 'm.call.invite':
+            print "Incoming call"
+            #sdp = ev['content']['offer']['sdp']
+            #print "sdp: %s" % (sdp)
+            #xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
+            #gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
+        elif ev['type'] == 'm.call.answer':
+            print "Call answered"
+            sdp = ev['content']['answer']['sdp']
+            if ev['room_id'] not in xmppClients:
+                print "We didn't have a call for that room"
+                continue
+            # should probably check call ID too
+            xmppCli = xmppClients[ev['room_id']]
+            xmppCli.sendAnswer(sdp)
+        elif ev['type'] == 'm.call.hangup':
+            if ev['room_id'] in xmppClients:
+                xmppClients[ev['room_id']].stop()
+                del xmppClients[ev['room_id']]
+
 class TrivialXmppClient:
-	def __init__(self, matrixRoom, userId):
-		self.rid = 0
-		self.matrixRoom = matrixRoom
-		self.userId = userId
-		self.running = True
-
-	def stop(self):
-		self.running = False
-
-	def nextRid(self):
-		self.rid += 1
-		return '%d' % (self.rid)
-
-	def sendIq(self, xml):
-		fullXml = "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>" % (self.nextRid(), self.sid, xml)
-		#print "\t>>>%s" % (fullXml)
-		return self.xmppPoke(fullXml)
-		
-	def xmppPoke(self, xml):
-		headers = {'Content-Type': 'application/xml'}
-		req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
-		resps = grequests.map([req])
-		obj = BeautifulSoup(resps[0].content)
-		return obj
-
-	def sendAnswer(self, answer):
-		print "sdp from matrix client",answer
-		p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--sdp'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-		jingle, out_err = p.communicate(answer)
-		jingle = jingle % {
-			'tojid': self.callfrom,
-			'action': 'session-accept',
-			'initiator': self.callfrom,
-			'responder': self.jid,
-			'sid': self.callsid
-		}
-		print "answer jingle from sdp",jingle
-		res = self.sendIq(jingle)
-		print "reply from answer: ",res
-		
-		self.ssrcs = {}
-		jingleSoup = BeautifulSoup(jingle)
-		for cont in jingleSoup.iq.jingle.findAll('content'):
-			if cont.description:
-				self.ssrcs[cont['name']] = cont.description['ssrc']
-		print "my ssrcs:",self.ssrcs
-
-		gevent.joinall([
- 		       gevent.spawn(self.advertiseSsrcs)
-		])
-		
-	def advertiseSsrcs(self):
+    def __init__(self, matrixRoom, userId):
+        self.rid = 0
+        self.matrixRoom = matrixRoom
+        self.userId = userId
+        self.running = True
+
+    def stop(self):
+        self.running = False
+
+    def nextRid(self):
+        self.rid += 1
+        return '%d' % (self.rid)
+
+    def sendIq(self, xml):
+        fullXml = "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>" % (self.nextRid(), self.sid, xml)
+        #print "\t>>>%s" % (fullXml)
+        return self.xmppPoke(fullXml)
+
+    def xmppPoke(self, xml):
+        headers = {'Content-Type': 'application/xml'}
+        req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
+        resps = grequests.map([req])
+        obj = BeautifulSoup(resps[0].content)
+        return obj
+
+    def sendAnswer(self, answer):
+        print "sdp from matrix client",answer
+        p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--sdp'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+        jingle, out_err = p.communicate(answer)
+        jingle = jingle % {
+            'tojid': self.callfrom,
+            'action': 'session-accept',
+            'initiator': self.callfrom,
+            'responder': self.jid,
+            'sid': self.callsid
+        }
+        print "answer jingle from sdp",jingle
+        res = self.sendIq(jingle)
+        print "reply from answer: ",res
+
+        self.ssrcs = {}
+        jingleSoup = BeautifulSoup(jingle)
+        for cont in jingleSoup.iq.jingle.findAll('content'):
+            if cont.description:
+                self.ssrcs[cont['name']] = cont.description['ssrc']
+        print "my ssrcs:",self.ssrcs
+
+        gevent.joinall([
+                gevent.spawn(self.advertiseSsrcs)
+        ])
+
+    def advertiseSsrcs(self):
                 time.sleep(7)
-		print "SSRC spammer started"
-		while self.running:
-			ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % { 'tojid': "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid), 'nick': self.userId, 'assrc': self.ssrcs['audio'], 'vssrc': self.ssrcs['video'] }
-			res = self.sendIq(ssrcMsg)
-			print "reply from ssrc announce: ",res
-			time.sleep(10)
-		
-		
-
-	def xmppLoop(self):
-		self.matrixCallId = time.time()
-		res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), HOST))
-
-		print res
-		self.sid = res.body['sid']
-		print "sid %s" % (self.sid)
-
-		res = self.sendIq("<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>")
-
-		res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), self.sid, HOST))
-	
-		res = self.sendIq("<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>")
-		print res
-
-		self.jid = res.body.iq.bind.jid.string
-		print "jid: %s" % (self.jid)
-		self.shortJid = self.jid.split('-')[0]
-
-		res = self.sendIq("<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>")
-
-		#randomthing = res.body.iq['to']
-		#whatsitpart = randomthing.split('-')[0]
-
-		#print "other random bind thing: %s" % (randomthing)
-
-		# advertise preence to the jitsi room, with our nick
-		res = self.sendIq("<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>" % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId))
-		self.muc = {'users': []}
-		for p in res.body.findAll('presence'):
-			u = {}
-			u['shortJid'] = p['from'].split('/')[1]
-			if p.c and p.c.nick:
-				u['nick'] = p.c.nick.string
-			self.muc['users'].append(u)
-		print "muc: ",self.muc
-
-		# wait for stuff
-		while True:
-			print "waiting..."
-			res = self.sendIq("")
-			print "got from stream: ",res
-			if res.body.iq:
-				jingles = res.body.iq.findAll('jingle')
-				if len(jingles):
-					self.callfrom = res.body.iq['from']
-					self.handleInvite(jingles[0])
-			elif 'type' in res.body and res.body['type'] == 'terminate':
-				self.running = False
-				del xmppClients[self.matrixRoom]
-            			return 
-
-	def handleInvite(self, jingle):
-		self.initiator = jingle['initiator']
-		self.callsid = jingle['sid']
-		p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--jingle'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-		print "raw jingle invite",str(jingle)
-		sdp, out_err = p.communicate(str(jingle))
-		print "transformed remote offer sdp",sdp
-		inviteEvent = {
-			'offer': {
-				'type': 'offer',
-				'sdp': sdp
-			},
-			'call_id': self.matrixCallId,
-			'version': 0,
-			'lifetime': 30000
-		}
-		matrixCli.sendEvent(self.matrixRoom, 'm.call.invite', inviteEvent)
-		
+        print "SSRC spammer started"
+        while self.running:
+            ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % { 'tojid': "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid), 'nick': self.userId, 'assrc': self.ssrcs['audio'], 'vssrc': self.ssrcs['video'] }
+            res = self.sendIq(ssrcMsg)
+            print "reply from ssrc announce: ",res
+            time.sleep(10)
+
+
+
+    def xmppLoop(self):
+        self.matrixCallId = time.time()
+        res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), HOST))
+
+        print res
+        self.sid = res.body['sid']
+        print "sid %s" % (self.sid)
+
+        res = self.sendIq("<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>")
+
+        res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), self.sid, HOST))
+
+        res = self.sendIq("<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>")
+        print res
+
+        self.jid = res.body.iq.bind.jid.string
+        print "jid: %s" % (self.jid)
+        self.shortJid = self.jid.split('-')[0]
+
+        res = self.sendIq("<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>")
+
+        #randomthing = res.body.iq['to']
+        #whatsitpart = randomthing.split('-')[0]
+
+        #print "other random bind thing: %s" % (randomthing)
+
+        # advertise presence to the jitsi room, with our nick
+        res = self.sendIq("<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>" % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId))
+        self.muc = {'users': []}
+        for p in res.body.findAll('presence'):
+            u = {}
+            u['shortJid'] = p['from'].split('/')[1]
+            if p.c and p.c.nick:
+                u['nick'] = p.c.nick.string
+            self.muc['users'].append(u)
+        print "muc: ",self.muc
+
+        # wait for stuff
+        while True:
+            print "waiting..."
+            res = self.sendIq("")
+            print "got from stream: ",res
+            if res.body.iq:
+                jingles = res.body.iq.findAll('jingle')
+                if len(jingles):
+                    self.callfrom = res.body.iq['from']
+                    self.handleInvite(jingles[0])
+            elif 'type' in res.body and res.body['type'] == 'terminate':
+                self.running = False
+                del xmppClients[self.matrixRoom]
+                return
+
+    def handleInvite(self, jingle):
+        self.initiator = jingle['initiator']
+        self.callsid = jingle['sid']
+        p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--jingle'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+        print "raw jingle invite",str(jingle)
+        sdp, out_err = p.communicate(str(jingle))
+        print "transformed remote offer sdp",sdp
+        inviteEvent = {
+            'offer': {
+                'type': 'offer',
+                'sdp': sdp
+            },
+            'call_id': self.matrixCallId,
+            'version': 0,
+            'lifetime': 30000
+        }
+        matrixCli.sendEvent(self.matrixRoom, 'm.call.invite', inviteEvent)
+
 matrixCli = TrivialMatrixClient(ACCESS_TOKEN)
 
 gevent.joinall([
-	gevent.spawn(matrixLoop)
+    gevent.spawn(matrixLoop)
 ])
 
diff --git a/contrib/vertobot/bridge.pl b/contrib/vertobot/bridge.pl
new file mode 100755
index 0000000000..e1a07f6659
--- /dev/null
+++ b/contrib/vertobot/bridge.pl
@@ -0,0 +1,489 @@
+#!/usr/bin/env perl 
+
+use strict;
+use warnings;
+use 5.010; # //
+use IO::Socket::SSL qw(SSL_VERIFY_NONE);
+use IO::Async::Loop;
+use Net::Async::WebSocket::Client;
+use Net::Async::HTTP;
+use Net::Async::HTTP::Server;
+use JSON;
+use YAML;
+use Data::UUID;
+use Getopt::Long;
+use Data::Dumper;
+use URI::Encode qw(uri_encode uri_decode);
+    
+binmode STDOUT, ":encoding(UTF-8)";
+binmode STDERR, ":encoding(UTF-8)";
+
+my $msisdn_to_matrix = {
+    '447417892400' => '@matthew:matrix.org',
+};
+
+my $matrix_to_msisdn = {};
+foreach (keys %$msisdn_to_matrix) {
+    $matrix_to_msisdn->{$msisdn_to_matrix->{$_}} = $_;
+}
+
+
+my $loop = IO::Async::Loop->new;
+# Net::Async::HTTP + SSL + IO::Poll doesn't play well. See
+#   https://rt.cpan.org/Ticket/Display.html?id=93107
+# ref $loop eq "IO::Async::Loop::Poll" and
+#     warn "Using SSL with IO::Poll causes known memory-leaks!!\n";
+
+GetOptions(
+   'C|config=s' => \my $CONFIG,
+   'eval-from=s' => \my $EVAL_FROM,
+) or exit 1;
+
+if( defined $EVAL_FROM ) {
+    # An emergency 'eval() this file' hack
+    $SIG{HUP} = sub {
+        my $code = do {
+            open my $fh, "<", $EVAL_FROM or warn( "Cannot read - $!" ), return;
+            local $/; <$fh>
+        };
+
+        eval $code or warn "Cannot eval() - $@";
+    };
+}
+
+defined $CONFIG or die "Must supply --config\n";
+
+my %CONFIG = %{ YAML::LoadFile( $CONFIG ) };
+
+my %MATRIX_CONFIG = %{ $CONFIG{matrix} };
+# No harm in always applying this
+$MATRIX_CONFIG{SSL_verify_mode} = SSL_VERIFY_NONE;
+
+my $bridgestate = {};
+my $roomid_by_callid = {};
+    
+my $sessid = lc new Data::UUID->create_str();    
+my $as_token = $CONFIG{"matrix-bot"}->{as_token};
+my $hs_domain = $CONFIG{"matrix-bot"}->{domain};
+
+my $http = Net::Async::HTTP->new();
+$loop->add( $http );
+
+sub create_virtual_user
+{
+    my ($localpart) = @_;
+    my ( $response ) = $http->do_request(
+        method => "POST",
+        uri => URI->new(
+            $CONFIG{"matrix"}->{server}.
+                "/_matrix/client/api/v1/register?".
+                "access_token=$as_token&user_id=$localpart"
+        ),
+        content_type => "application/json",
+        content => <<EOT
+{
+    "type": "m.login.application_service",
+    "user": "$localpart"
+}
+EOT
+    )->get;    
+    warn $response->as_string if ($response->code != 200);
+}
+    
+my $http_server =  Net::Async::HTTP::Server->new(
+    on_request => sub {
+        my $self = shift;
+        my ( $req ) = @_;
+
+        my $response;
+        my $path = uri_decode($req->path);
+        warn("request: $path");
+        if ($path =~ m#/users/\@(\+.*)#) {
+            # when queried about virtual users, auto-create them in the HS
+            my $localpart = $1;
+            create_virtual_user($localpart);
+            $response = HTTP::Response->new( 200 );
+            $response->add_content('{}');
+            $response->content_type( "application/json" );
+        }
+        elsif ($path =~ m#/transactions/(.*)#) {
+            my $event = JSON->new->decode($req->body);
+            print Dumper($event);
+
+            my $room_id = $event->{room_id};
+            my %dp = %{$CONFIG{'verto-dialog-params'}};
+            $dp{callID} = $bridgestate->{$room_id}->{callid};
+
+            if ($event->{type} eq 'm.room.member') {
+                my $membership = $event->{content}->{membership};
+                my $state_key = $event->{state_key};
+                my $room_id = $event->{room_id};
+                
+                if ($membership eq 'invite') {
+                    # autojoin invites
+                    my ( $response ) = $http->do_request(
+                        method => "POST",
+                        uri => URI->new(
+                            $CONFIG{"matrix"}->{server}.
+                                "/_matrix/client/api/v1/rooms/$room_id/join?".
+                                "access_token=$as_token&user_id=$state_key"
+                        ),
+                        content_type => "application/json",
+                        content => "{}",
+                    )->get;
+                    warn $response->as_string if ($response->code != 200);
+                }
+            }
+            elsif ($event->{type} eq 'm.call.invite') {
+                my $room_id = $event->{room_id};
+                $bridgestate->{$room_id}->{matrix_callid} = $event->{content}->{call_id};
+                $bridgestate->{$room_id}->{callid} = lc new Data::UUID->create_str();
+                $bridgestate->{$room_id}->{sessid} = $sessid;                
+                # $bridgestate->{$room_id}->{offer} = $event->{content}->{offer}->{sdp};
+                my $offer = $event->{content}->{offer}->{sdp};
+                # $bridgestate->{$room_id}->{gathered_candidates} = 0;
+                $roomid_by_callid->{ $bridgestate->{$room_id}->{callid} } = $room_id;
+                # no trickle ICE in verto apparently
+
+                my $f = send_verto_json_request("verto.invite", {
+                    "sdp" => $offer,
+                    "dialogParams" => \%dp,
+                    "sessid" => $bridgestate->{$room_id}->{sessid},
+                });
+                $self->adopt_future($f);
+            }
+            # elsif ($event->{type} eq 'm.call.candidates') {
+            #     # XXX: this could fire for both matrix->verto and verto->matrix calls
+            #     # and races as it collects candidates. much better to just turn off
+            #     # candidate gathering in the webclient entirely for now
+            #     
+            #     my $room_id = $event->{room_id};
+            #     # XXX: compare call IDs
+            #     if (!$bridgestate->{$room_id}->{gathered_candidates}) {
+            #         $bridgestate->{$room_id}->{gathered_candidates} = 1;
+            #         my $offer = $bridgestate->{$room_id}->{offer};
+            #         my $candidate_block = "";
+            #         foreach (@{$event->{content}->{candidates}}) {
+            #             $candidate_block .= "a=" . $_->{candidate} . "\r\n";
+            #         }
+            #         # XXX: collate using the right m= line - for now assume audio call
+            #         $offer =~ s/(a=rtcp.*[\r\n]+)/$1$candidate_block/;
+            #     
+            #         my $f = send_verto_json_request("verto.invite", {
+            #             "sdp" => $offer,
+            #             "dialogParams" => \%dp,
+            #             "sessid" => $bridgestate->{$room_id}->{sessid},
+            #         });
+            #         $self->adopt_future($f);
+            #     }
+            #     else {
+            #         # ignore them, as no trickle ICE, although we might as well
+            #         # batch them up
+            #         # foreach (@{$event->{content}->{candidates}}) {
+            #         #     push @{$bridgestate->{$room_id}->{candidates}}, $_;
+            #         # }
+            #     }
+            # }
+            elsif ($event->{type} eq 'm.call.answer') {
+                # grab the answer and relay it to verto as a verto.answer
+                my $room_id = $event->{room_id};
+                
+                my $answer = $event->{content}->{answer}->{sdp};
+                my $f = send_verto_json_request("verto.answer", {
+                    "sdp" => $answer,
+                    "dialogParams" => \%dp,
+                    "sessid" => $bridgestate->{$room_id}->{sessid},
+                });
+                $self->adopt_future($f);
+            }
+            elsif ($event->{type} eq 'm.call.hangup') {
+                my $room_id = $event->{room_id};
+                if ($bridgestate->{$room_id}->{matrix_callid} eq $event->{content}->{call_id}) {
+                    my $f = send_verto_json_request("verto.bye", {
+                        "dialogParams" => \%dp,
+                        "sessid" => $bridgestate->{$room_id}->{sessid},
+                    });
+                    $self->adopt_future($f);
+                }
+                else {
+                    warn "Ignoring unrecognised callid: ".$event->{content}->{call_id};
+                }
+            }
+            else {
+                warn "Unhandled event: $event->{type}";
+            }
+            
+            $response = HTTP::Response->new( 200 );
+            $response->add_content('{}');
+            $response->content_type( "application/json" );            
+        }
+        else {
+            warn "Unhandled path: $path";
+            $response = HTTP::Response->new( 404 );
+        }
+
+        $req->respond( $response );
+    },
+);
+$loop->add( $http_server );
+
+$http_server->listen(
+    addr => { family => "inet", socktype => "stream", port => 8009 },
+    on_listen_error => sub { die "Cannot listen - $_[-1]\n" },
+);
+
+my $bot_verto = Net::Async::WebSocket::Client->new(
+    on_frame => sub {
+          my ( $self, $frame ) = @_;
+          warn "[Verto] receiving $frame";
+          on_verto_json($frame);
+    },
+);
+$loop->add( $bot_verto );
+
+my $verto_connecting = $loop->new_future;
+$bot_verto->connect(
+    %{ $CONFIG{"verto-bot"} },
+    on_connected => sub {
+        warn("[Verto] connected to websocket");
+        if (not $verto_connecting->is_done) {
+            $verto_connecting->done($bot_verto);
+
+            send_verto_json_request("login", {
+                'login' => $CONFIG{'verto-dialog-params'}{'login'},
+                'passwd' => $CONFIG{'verto-config'}{'passwd'},
+                'sessid' => $sessid,
+            });
+        }
+    },
+    on_connect_error => sub { die "Cannot connect to verto - $_[-1]" },
+    on_resolve_error => sub { die "Cannot resolve verto host - $_[-1]" },
+);
+
+# die Dumper($verto_connecting);
+
+my $as_url = $CONFIG{"matrix-bot"}->{as_url};
+
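+# Register as an application service, claiming every user ID which begins
+# with '@+' (the MSISDN-style virtual users created by the bridge), and wait
+# for both the registration and the verto websocket connection to complete.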
+Future->needs_all(
+    $http->do_request(
+        method => "POST",
+        uri => URI->new( $CONFIG{"matrix"}->{server}."/_matrix/appservice/v1/register" ),
+        content_type => "application/json",
+        content => <<EOT
+{
+    "as_token": "$as_token",
+    "url": "$as_url",
+    "namespaces": { "users": ["\@\\\\+.*"] }
+}
+EOT
+    ),
+    $verto_connecting,
+)->get;
+
+$loop->attach_signal(
+    PIPE => sub { warn "pipe\n" }
+);
+$loop->attach_signal(
+    INT => sub { $loop->stop },
+);
+$loop->attach_signal(
+    TERM => sub { $loop->stop },
+);
+
+eval {
+   $loop->run;
+   1;  # ensure a true return value so 'or die' only fires on an exception
+} or die $@;
+
+exit 0;
+
+{    
+    my $json_id;
+    my $requests;
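+    # Shared state for the JSON-RPC helpers below: each outbound request is
+    # given an incrementing id, and its Future is stashed in $requests so that
+    # on_verto_json can complete it when the matching response arrives.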
+
+    sub send_verto_json_request
+    {
+        $json_id ||= 1;
+        
+        my ($method, $params) = @_;
+        my $json = {
+            jsonrpc => "2.0",
+            method  => $method,
+            params  => $params,
+            id      => $json_id,
+        };
+        my $text = JSON->new->encode( $json );
+        warn "[Verto] sending $text";
+        $bot_verto->send_frame ( $text );
+        my $request = $loop->new_future;
+        $requests->{$json_id} = $request;
+        $json_id++;
+        return $request;
+    }
+    
+    sub send_verto_json_response
+    {
+        my ($result, $id) = @_;
+        my $json = {
+            jsonrpc => "2.0",
+            result  => $result,
+            id      => $id,
+        };
+        my $text = JSON->new->encode( $json );
+        warn "[Verto] sending $text";
+        $bot_verto->send_frame ( $text );
+    }
+    
+    sub on_verto_json
+    {
+        my $json = JSON->new->decode( $_[0] );
+        if ($json->{method}) {
+            if (($json->{method} eq 'verto.answer' && $json->{params}->{sdp}) ||
+                $json->{method} eq 'verto.media') {
+
+                my $caller = $json->{dialogParams}->{caller_id_number};
+                my $callee = $json->{dialogParams}->{destination_number};
+                my $caller_user = '@+' . $caller . ':' . $hs_domain;
+                my $callee_user = $msisdn_to_matrix->{$callee} || warn "unrecognised callee: $callee";
+                my $room_id = $roomid_by_callid->{$json->{params}->{callID}};
+
+                if ($json->{params}->{sdp}) {
+                    $http->do_request(
+                        method => "POST",
+                        uri => URI->new(
+                            $CONFIG{"matrix"}->{server}.
+                                "/_matrix/client/api/v1/rooms/$room_id/send/m.call.answer?".
+                                "access_token=$as_token&user_id=$caller_user"
+                        ),
+                        content_type => "application/json",
+                        content => JSON->new->encode({
+                            call_id => $bridgestate->{$room_id}->{matrix_callid},
+                            version => 0,
+                            answer  => {
+                                sdp => $json->{params}->{sdp},
+                                type => "answer",
+                            },
+                        }),
+                    )->then( sub {
+                        send_verto_json_response( {
+                            method => $json->{method},
+                        }, $json->{id});
+                    })->get;
+                }
+            }
+            elsif ($json->{method} eq 'verto.invite') {
+                my $caller = $json->{dialogParams}->{caller_id_number};
+                my $callee = $json->{dialogParams}->{destination_number};
+                my $caller_user = '@+' . $caller . ':' . $hs_domain;
+                my $callee_user = $msisdn_to_matrix->{$callee} || warn "unrecognised callee: $callee";
+                    
+                my $alias = ($caller lt $callee) ? ($caller.'-'.$callee) : ($callee.'-'.$caller);
+                my $room_id;
+
+                # create a virtual user for the caller if needed.
+                create_virtual_user($caller);
+                
+                # create a room of form #peer-peer and invite the callee
+                $http->do_request(
+                    method => "POST",
+                    uri => URI->new(
+                        $CONFIG{"matrix"}->{server}.
+                            "/_matrix/client/api/v1/createRoom?".
+                            "access_token=$as_token&user_id=$caller_user"
+                    ),
+                    content_type => "application/json",
+                    content => JSON->new->encode({
+                        room_alias_name => $alias,
+                        invite => [ $callee_user ],
+                    }),
+                )->then( sub {
+                    my ( $response ) = @_;
+                    my $resp = JSON->new->decode($response->content);
+                    $room_id = $resp->{room_id};
+                    $roomid_by_callid->{$json->{params}->{callID}} = $room_id;
+                })->get;
+
+                # join it
+                my ($response) = $http->do_request(
+                    method => "POST",
+                    uri => URI->new(
+                        $CONFIG{"matrix"}->{server}.
+                            "/_matrix/client/api/v1/join/$room_id?".
+                            "access_token=$as_token&user_id=$caller_user"
+                    ),
+                    content_type => "application/json",
+                    content => '{}',
+                )->get;
+
+                $bridgestate->{$room_id}->{matrix_callid} = lc Data::UUID->new->create_str();
+                $bridgestate->{$room_id}->{callid} = $json->{params}->{callID};
+                $bridgestate->{$room_id}->{sessid} = $sessid;
+
+                # put the m.call.invite in there
+                $http->do_request(
+                    method => "POST",
+                    uri => URI->new(
+                        $CONFIG{"matrix"}->{server}.
+                            "/_matrix/client/api/v1/rooms/$room_id/send/m.call.invite?".
+                            "access_token=$as_token&user_id=$caller_user"
+                    ),
+                    content_type => "application/json",
+                    content => JSON->new->encode({
+                        call_id => $bridgestate->{$room_id}->{matrix_callid},
+                        version => 0,
+                        offer   => {
+                            sdp => $json->{params}->{sdp},
+                            type => "offer",
+                        },
+                    }),
+                )->then( sub {
+                    # acknowledge the verto
+                    send_verto_json_response( {
+                        method => $json->{method},
+                    }, $json->{id});
+                })->get;
+            }
+            elsif ($json->{method} eq 'verto.bye') {
+                my $caller = $json->{dialogParams}->{caller_id_number};
+                my $callee = $json->{dialogParams}->{destination_number};
+                my $caller_user = '@+' . $caller . ':' . $hs_domain;
+                my $callee_user = $msisdn_to_matrix->{$callee} || warn "unrecognised callee: $callee";
+                my $room_id = $roomid_by_callid->{$json->{params}->{callID}};
+                
+                # put the m.call.hangup into the room
+                $http->do_request(
+                    method => "POST",
+                    uri => URI->new(
+                        $CONFIG{"matrix"}->{server}.
+                            "/_matrix/client/api/v1/rooms/$room_id/send/m.call.hangup?".
+                            "access_token=$as_token&user_id=$caller_user"
+                    ),
+                    content_type => "application/json",
+                    content => JSON->new->encode({
+                        call_id => $bridgestate->{$room_id}->{matrix_callid},
+                        version => 0,
+                    }),
+                )->then( sub {
+                    # acknowledge the verto
+                    send_verto_json_response( {
+                        method => $json->{method},
+                    }, $json->{id});
+                })->get;
+            }
+            else {
+                warn ("[Verto] unhandled method: " . $json->{method});
+                send_verto_json_response( {
+                    method => $json->{method},
+                }, $json->{id});
+            }
+        }
+        elsif ($json->{result}) {
+            $requests->{$json->{id}}->done($json->{result});
+        }
+        elsif ($json->{error}) {
+            $requests->{$json->{id}}->fail($json->{error}->{message}, $json->{error});
+        }
+    }
+}
+
diff --git a/demo/start.sh b/demo/start.sh
index ce3e292486..bb2248770d 100755
--- a/demo/start.sh
+++ b/demo/start.sh
@@ -32,7 +32,8 @@ for port in 8080 8081 8082; do
         -D --pid-file "$DIR/$port.pid" \
         --manhole $((port + 1000)) \
         --tls-dh-params-path "demo/demo.tls.dh" \
-		$PARAMS $SYNAPSE_PARAMS
+        --media-store-path "demo/media_store.$port" \
+        $PARAMS $SYNAPSE_PARAMS
 
     python -m synapse.app.homeserver \
         --config-path "demo/etc/$port.config" \
diff --git a/docs/turn-howto.rst b/docs/turn-howto.rst
index 82b59538c8..e2c73458e2 100644
--- a/docs/turn-howto.rst
+++ b/docs/turn-howto.rst
@@ -81,7 +81,7 @@ Your home server configuration file needs the following extra keys:
 As an example, here is the relevant section of the config file for
 matrix.org::
 
-    turn_uris: turn:turn.matrix.org:3478?transport=udp,turn:turn.matrix.org:3478?transport=tcp
+    turn_uris: [ "turn:turn.matrix.org:3478?transport=udp", "turn:turn.matrix.org:3478?transport=tcp" ]
     turn_shared_secret: n0t4ctuAllymatr1Xd0TorgSshar3d5ecret4obvIousreAsons
     turn_user_lifetime: 86400000
 
diff --git a/scripts/check_auth.py b/scripts/check_auth.py
new file mode 100644
index 0000000000..b889ac7fa7
--- /dev/null
+++ b/scripts/check_auth.py
@@ -0,0 +1,74 @@
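+"""Re-run the auth checks for a chain of events.
+
+Reads a JSON object with "auth_chain" and "pdus" keys (for instance the
+response to a federation /state/ request) from a file or stdin, then replays
+the auth checks against each event, printing details of any event that fails.
+
+Usage: python check_auth.py [events.json]
+"""
+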
+from synapse.events import FrozenEvent
+from synapse.api.auth import Auth
+
+from mock import Mock
+
+import argparse
+import itertools
+import json
+import sys
+
+
+def check_auth(auth, auth_chain, events):
+    auth_chain.sort(key=lambda e: e.depth)
+
+    auth_map = {
+        e.event_id: e
+        for e in auth_chain
+    }
+
+    create_events = {}
+    for e in auth_chain:
+        if e.type == "m.room.create":
+            create_events[e.room_id] = e
+
+    for e in itertools.chain(auth_chain, events):
+        auth_events_list = [auth_map[i] for i, _ in e.auth_events]
+
+        auth_events = {
+            (e.type, e.state_key): e
+            for e in auth_events_list
+        }
+
+        auth_events[("m.room.create", "")] = create_events[e.room_id]
+
+        try:
+            auth.check(e, auth_events=auth_events)
+        except Exception as ex:
+            print "Failed:", e.event_id, e.type, e.state_key
+            print "Auth_events:", auth_events
+            print ex
+            print json.dumps(e.get_dict(), sort_keys=True, indent=4)
+            # raise
+        else:
+            print "Success:", e.event_id, e.type, e.state_key
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument(
+        'json',
+        nargs='?',
+        type=argparse.FileType('r'),
+        default=sys.stdin,
+    )
+
+    args = parser.parse_args()
+
+    js = json.load(args.json)
+
+    auth = Auth(Mock())
+    check_auth(
+        auth,
+        [FrozenEvent(d) for d in js["auth_chain"]],
+        [FrozenEvent(d) for d in js["pdus"]],
+    )
diff --git a/scripts/federation_client.py b/scripts/federation_client.py
index 3139c61761..ea62dceb36 100644
--- a/scripts/federation_client.py
+++ b/scripts/federation_client.py
@@ -97,8 +97,12 @@ def lookup(destination, path):
     if ":" in destination:
         return "https://%s%s" % (destination, path)
     else:
-        srv = srvlookup.lookup("matrix", "tcp", destination)[0]
-        return "https://%s:%d%s" % (srv.host, srv.port, path)
+        try:
+            srv = srvlookup.lookup("matrix", "tcp", destination)[0]
+            return "https://%s:%d%s" % (srv.host, srv.port, path)
+        except Exception:
+            # no SRV record (or lookup failed): fall back to port 8448
+            return "https://%s:%d%s" % (destination, 8448, path)
 
 def get_json(origin_name, origin_key, destination, path):
     request_json = {
diff --git a/scripts/make_identicons.pl b/scripts/make_identicons.pl
new file mode 100755
index 0000000000..cbff63e298
--- /dev/null
+++ b/scripts/make_identicons.pl
@@ -0,0 +1,42 @@
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+
+use DBI;
+use DBD::SQLite;
+use JSON;
+use Getopt::Long;
+
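+# For each user on the homeserver whose avatar is unset, or was previously
+# auto-generated (marked with a '#auto' fragment), fetch an identicon from the
+# media repo, upload it, and set it as the user's avatar.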
+my $db; # = "homeserver.db";
+my $server = "http://localhost:8008";
+my $size = 320;
+
+GetOptions("db|d=s",     \$db,
+           "server|s=s", \$server,
+           "width|w=i",  \$size) or usage();
+
+usage() unless $db;
+
+my $dbh = DBI->connect("dbi:SQLite:dbname=$db","","") || die $DBI::errstr;
+
+my $res = $dbh->selectall_arrayref("select token, name from access_tokens, users where access_tokens.user_id = users.id group by user_id") || die $DBI::errstr;
+
+foreach (@$res) {
+    my ($token, $mxid) = ($_->[0], $_->[1]);
+    my ($user_id) = ($mxid =~ m/@(.*):/);
+    my ($url) = $dbh->selectrow_array("select avatar_url from profiles where user_id=?", undef, $user_id);
+    if (!$url || $url =~ /#auto$/) {
+        `curl -s -o tmp.png "$server/_matrix/media/v1/identicon?name=${mxid}&width=$size&height=$size"`;
+        my $json = `curl -s -X POST -H "Content-Type: image/png" -T "tmp.png" $server/_matrix/media/v1/upload?access_token=$token`;
+        my $content_uri = from_json($json)->{content_uri};
+        `curl -X PUT -H "Content-Type: application/json" --data '{ "avatar_url": "${content_uri}#auto"}' $server/_matrix/client/api/v1/profile/${mxid}/avatar_url?access_token=$token`;
+    }
+}
+
+sub usage {
+    die "usage: ./make_identicons.pl\n\t-d database [e.g. homeserver.db]\n\t-s homeserver (default: http://localhost:8008)\n\t-w identicon size in pixels (default 320)";
+}
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
index 2830831f00..888ad6ed4a 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -8,3 +8,11 @@ test = trial
 
 [trial]
 test_suite = tests
+
+[check-manifest]
+ignore =
+    contrib
+    contrib/*
+    docs/*
+    pylint.cfg
+    tox.ini
diff --git a/setup.py b/setup.py
index 043cd044a7..2d812fa389 100755
--- a/setup.py
+++ b/setup.py
@@ -18,49 +18,44 @@ import os
 from setuptools import setup, find_packages
 
 
-# Utility function to read the README file.
-# Used for the long_description.  It's nice, because now 1) we have a top level
-# README file and 2) it's easier to type in the README file than to put a raw
-# string in below ...
-def read(fname):
-    return open(os.path.join(os.path.dirname(__file__), fname)).read()
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+def read_file(path_segments):
+    """Read a file from the package. Takes a list of strings to join to
+    make the path"""
+    file_path = os.path.join(here, *path_segments)
+    with open(file_path) as f:
+        return f.read()
+
+
+def exec_file(path_segments):
+    """Execute a single python file to get the variables defined in it"""
+    result = {}
+    code = read_file(path_segments)
+    exec(code, result)
+    return result
+
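+# exec'd rather than imported, so that setup.py can read the version and the
+# dependency list before synapse's own dependencies have been installed.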
+version = exec_file(("synapse", "__init__.py"))["__version__"]
+dependencies = exec_file(("synapse", "python_dependencies.py"))
+long_description = read_file(("README.rst",))
 
 setup(
     name="matrix-synapse",
-    version=read("VERSION").strip(),
+    version=version,
     packages=find_packages(exclude=["tests", "tests.*"]),
     description="Reference Synapse Home Server",
-    install_requires=[
-        "syutil==0.0.2",
-        "matrix_angular_sdk==0.6.0",
-        "Twisted>=14.0.0",
-        "service_identity>=1.0.0",
-        "pyopenssl>=0.14",
-        "pyyaml",
-        "pyasn1",
-        "pynacl",
-        "daemonize",
-        "py-bcrypt",
-        "frozendict>=0.4",
-        "pillow",
-    ],
-    dependency_links=[
-        "https://github.com/matrix-org/syutil/tarball/v0.0.2#egg=syutil-0.0.2",
-        "https://github.com/pyca/pynacl/tarball/d4d3175589b892f6ea7c22f466e0e223853516fa#egg=pynacl-0.3.0",
-        "https://github.com/matrix-org/matrix-angular-sdk/tarball/v0.6.0/#egg=matrix_angular_sdk-0.6.0",
-    ],
+    install_requires=dependencies["REQUIREMENTS"].keys(),
     setup_requires=[
+        "Twisted==14.0.2", # Here to override setuptools_trial's dependency on Twisted>=2.4.0
         "setuptools_trial",
-        "setuptools>=1.0.0", # Needs setuptools that supports git+ssh.
-                             # TODO: Do we need this now? we don't use git+ssh.
         "mock"
     ],
+    dependency_links=dependencies["DEPENDENCY_LINKS"],
     include_package_data=True,
     zip_safe=False,
-    long_description=read("README.rst"),
-    entry_points="""
-    [console_scripts]
-    synctl=synapse.app.synctl:main
-    synapse-homeserver=synapse.app.homeserver:main
-    """
+    long_description=long_description,
+    scripts=["synctl"],
 )
diff --git a/static/client/register/index.html b/static/client/register/index.html
new file mode 100644
index 0000000000..600b3ee41e
--- /dev/null
+++ b/static/client/register/index.html
@@ -0,0 +1,32 @@
+<html>
+<head>
+<title> Registration </title>
+<meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0'> 
+<link rel="stylesheet" href="style.css">
+<script src="js/jquery-2.1.3.min.js"></script>
+<script src="js/recaptcha_ajax.js"></script>
+<script src="register_config.js"></script>
+<script src="js/register.js"></script>
+</head>
+<body onload="matrixRegistration.onLoad()">
+<form id="registrationForm" onsubmit="matrixRegistration.signUp(); return false;">
+    <div>
+        Create account:<br/>
+        
+        <div style="text-align: center">
+            <input id="desired_user_id" size="32" type="text" placeholder="Matrix ID (e.g. bob)" autocapitalize="off" autocorrect="off" />
+            <br/>
+            <input id="pwd1" size="32" type="password" placeholder="Type a password"/>
+            <br/>
+            <input id="pwd2" size="32" type="password" placeholder="Confirm your password"/>
+            <br/>
+            <span id="feedback" style="color: #f00"></span>
+            <br/>
+            <div id="regcaptcha"></div>
+
+            <button type="submit" style="margin: 10px">Sign up</button>
+        </div>
+    </div>
+</form>
+</body>
+</html>
diff --git a/static/client/register/js/jquery-2.1.3.min.js b/static/client/register/js/jquery-2.1.3.min.js
new file mode 100644
index 0000000000..25714ed29a
--- /dev/null
+++ b/static/client/register/js/jquery-2.1.3.min.js
@@ -0,0 +1,4 @@
+/*! jQuery v2.1.3 | (c) 2005, 2014 jQuery Foundation, Inc. | jquery.org/license */
+!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l=a.document,m="2.1.3",n=function(a,b){return new n.fn.init(a,b)},o=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,p=/^-ms-/,q=/-([\da-z])/gi,r=function(a,b){return b.toUpperCase()};n.fn=n.prototype={jquery:m,constructor:n,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=n.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return n.each(this,a,b)},map:function(a){return this.pushStack(n.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},n.extend=n.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||n.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(a=arguments[h]))for(b in a)c=g[b],d=a[b],g!==d&&(j&&d&&(n.isPlainObject(d)||(e=n.isArray(d)))?(e?(e=!1,f=c&&n.isArray(c)?c:[]):f=c&&n.isPlainObject(c)?c:{},g[b]=n.extend(j,f,d)):void 0!==d&&(g[b]=d));return g},n.extend({expando:"jQuery"+(m+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===n.type(a)},isArray:Array.isArray,isWindow:function(a){return null!=a&&a===a.window},isNumeric:function(a){return!n.isArray(a)&&a-parseFloat(a)+1>=0},isPlainObject:function(a){return"object"!==n.type(a)||a.nodeType||n.isWindow(a)?!1:a.constructor&&!j.call(a.constructor.prototype,"isPrototypeOf")?!1:!0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(a){var b,c=eval;a=n.trim(a),a&&(1===a.indexOf("use strict")?(b=l.createElement("script"),b.text=a,l.head.appendChild(b).parentNode.removeChild(b)):c(a))},camelCase:function(a){return a.replace(p,"ms-").replace(q,r)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=s(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(o,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(s(Object(a))?n.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){return null==b?-1:g.call(b,a,c)},merge:function(a,b){for(var c=+b.length,d=0,e=a.length;c>d;d++)a[e++]=b[d];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=s(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in 
a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(c=a[b],b=a,a=c),n.isFunction(a)?(e=d.call(arguments,2),f=function(){return a.apply(b||this,e.concat(d.call(arguments)))},f.guid=a.guid=a.guid||n.guid++,f):void 0},now:Date.now,support:k}),n.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function s(a){var b=a.length,c=n.type(a);return"function"===c||n.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var t=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=hb(),z=hb(),A=hb(),B=function(a,b){return a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N=M.replace("w","w#"),O="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+N+"))|)"+L+"*\\]",P=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+O+")*)|.*)\\)|)",Q=new RegExp(L+"+","g"),R=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),S=new RegExp("^"+L+"*,"+L+"*"),T=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),U=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),V=new RegExp(P),W=new RegExp("^"+N+"$"),X={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+O),PSEUDO:new RegExp("^"+P),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ab=/[+~]/,bb=/'|\\/g,cb=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),db=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},eb=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(fb){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function gb(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],k=b.nodeType,"string"!=typeof a||!a||1!==k&&9!==k&&11!==k)return d;if(!e&&p){if(11!==k&&(f=_.exec(a)))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return H.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName)return H.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=1!==k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(bb,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+rb(o[l]);w=ab.test(a)&&pb(b.parentNode)||b,x=o.join(",")}if(x)try{return 
H.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function hb(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ib(a){return a[u]=!0,a}function jb(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function kb(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function lb(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function mb(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function nb(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function ob(a){return ib(function(b){return b=+b,ib(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function pb(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=gb.support={},f=gb.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=gb.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=g.documentElement,e=g.defaultView,e&&e!==e.top&&(e.addEventListener?e.addEventListener("unload",eb,!1):e.attachEvent&&e.attachEvent("onunload",eb)),p=!f(g),c.attributes=jb(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=jb(function(a){return a.appendChild(g.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(g.getElementsByClassName),c.getById=jb(function(a){return o.appendChild(a).id=u,!g.getElementsByName||!g.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(g.querySelectorAll))&&(jb(function(a){o.appendChild(a).innerHTML="<a id='"+u+"'></a><select id='"+u+"-\f]' msallowcapture=''><option selected=''></option></select>",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),jb(function(a){var 
b=g.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&jb(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",P)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===g||a.ownerDocument===v&&t(v,a)?-1:b===g||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,h=[a],i=[b];if(!e||!f)return a===g?-1:b===g?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return lb(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?lb(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},g):n},gb.matches=function(a,b){return gb(a,null,null,b)},gb.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return gb(b,n,null,[a]).length>0},gb.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},gb.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},gb.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},gb.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=gb.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=gb.selectors={cacheLength:50,createPseudo:ib,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(cb,db),a[3]=(a[3]||a[4]||a[5]||"").replace(cb,db),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||gb.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&gb.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return 
X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(cb,db).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=gb.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(Q," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||gb.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ib(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ib(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?ib(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ib(function(a){return function(b){return gb(a,b).length>0}}),contains:ib(function(a){return a=a.replace(cb,db),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ib(function(a){return W.test(a||"")||gb.error("unsupported lang: "+a),a=a.replace(cb,db).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var 
b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:ob(function(){return[0]}),last:ob(function(a,b){return[b-1]}),eq:ob(function(a,b,c){return[0>c?c+b:c]}),even:ob(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:ob(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:ob(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:ob(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},d.pseudos.nth=d.pseudos.eq;for(b in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})d.pseudos[b]=mb(b);for(b in{submit:!0,reset:!0})d.pseudos[b]=nb(b);function qb(){}qb.prototype=d.filters=d.pseudos,d.setFilters=new qb,g=gb.tokenize=function(a,b){var c,e,f,g,h,i,j,k=z[a+" "];if(k)return b?0:k.slice(0);h=a,i=[],j=d.preFilter;while(h){(!c||(e=S.exec(h)))&&(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),c=!1,(e=T.exec(h))&&(c=e.shift(),f.push({value:c,type:e[0].replace(R," ")}),h=h.slice(c.length));for(g in d.filter)!(e=X[g].exec(h))||j[g]&&!(e=j[g](e))||(c=e.shift(),f.push({value:c,type:g,matches:e}),h=h.slice(c.length));if(!c)break}return b?h.length:h?gb.error(a):z(a,i).slice(0)};function rb(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function sb(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function tb(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ub(a,b,c){for(var d=0,e=b.length;e>d;d++)gb(a,b[d],c);return c}function vb(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function wb(a,b,c,d,e,f){return d&&!d[u]&&(d=wb(d)),e&&!e[u]&&(e=wb(e,f)),ib(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ub(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:vb(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=vb(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=vb(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function xb(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=sb(function(a){return a===b},h,!0),l=sb(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[sb(tb(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return wb(i>1&&tb(m),i>1&&rb(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&xb(a.slice(i,e)),f>e&&xb(a=a.slice(e)),f>e&&rb(a))}m.push(c)}return tb(m)}function yb(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var 
l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=F.call(i));s=vb(s)}H.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&gb.uniqueSort(i)}return k&&(w=v,j=t),r};return c?ib(f):f}return h=gb.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=xb(b[c]),f[u]?d.push(f):e.push(f);f=A(a,yb(e,d)),f.selector=a}return f},i=gb.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(cb,db),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=X.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(cb,db),ab.test(j[0].type)&&pb(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&rb(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,ab.test(a)&&pb(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=jb(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),jb(function(a){return a.innerHTML="<a href='#'></a>","#"===a.firstChild.getAttribute("href")})||kb("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&jb(function(a){return a.innerHTML="<input/>",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||kb("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),jb(function(a){return null==a.getAttribute("disabled")})||kb(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),gb}(a);n.find=t,n.expr=t.selectors,n.expr[":"]=n.expr.pseudos,n.unique=t.uniqueSort,n.text=t.getText,n.isXMLDoc=t.isXML,n.contains=t.contains;var u=n.expr.match.needsContext,v=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,w=/^.[^:#\[\.,]*$/;function x(a,b,c){if(n.isFunction(b))return n.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return n.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(w.test(b))return n.filter(b,a,c);b=n.filter(b,a)}return n.grep(a,function(a){return g.call(b,a)>=0!==c})}n.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?n.find.matchesSelector(d,a)?[d]:[]:n.find.matches(a,n.grep(b,function(a){return 1===a.nodeType}))},n.fn.extend({find:function(a){var b,c=this.length,d=[],e=this;if("string"!=typeof a)return this.pushStack(n(a).filter(function(){for(b=0;c>b;b++)if(n.contains(e[b],this))return!0}));for(b=0;c>b;b++)n.find(a,e[b],d);return d=this.pushStack(c>1?n.unique(d):d),d.selector=this.selector?this.selector+" "+a:a,d},filter:function(a){return this.pushStack(x(this,a||[],!1))},not:function(a){return this.pushStack(x(this,a||[],!0))},is:function(a){return!!x(this,"string"==typeof a&&u.test(a)?n(a):a||[],!1).length}});var y,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=n.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof 
a){if(c="<"===a[0]&&">"===a[a.length-1]&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||y).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof n?b[0]:b,n.merge(this,n.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:l,!0)),v.test(c[1])&&n.isPlainObject(b))for(c in b)n.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}return d=l.getElementById(c[2]),d&&d.parentNode&&(this.length=1,this[0]=d),this.context=l,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):n.isFunction(a)?"undefined"!=typeof y.ready?y.ready(a):a(n):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),n.makeArray(a,this))};A.prototype=n.fn,y=n(l);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};n.extend({dir:function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&n(a).is(c))break;d.push(a)}return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),n.fn.extend({has:function(a){var b=n(a,this),c=b.length;return this.filter(function(){for(var a=0;c>a;a++)if(n.contains(this,b[a]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=u.test(a)||"string"!=typeof a?n(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&n.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?n.unique(f):f)},index:function(a){return a?"string"==typeof a?g.call(n(a),this[0]):g.call(this,a.jquery?a[0]:a):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(n.unique(n.merge(this.get(),n(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){while((a=a[b])&&1!==a.nodeType);return a}n.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return n.dir(a,"parentNode")},parentsUntil:function(a,b,c){return n.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return n.dir(a,"nextSibling")},prevAll:function(a){return n.dir(a,"previousSibling")},nextUntil:function(a,b,c){return n.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return n.dir(a,"previousSibling",c)},siblings:function(a){return n.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return n.sibling(a.firstChild)},contents:function(a){return a.contentDocument||n.merge([],a.childNodes)}},function(a,b){n.fn[a]=function(c,d){var e=n.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=n.filter(d,e)),this.length>1&&(C[a]||n.unique(e),B.test(a)&&e.reverse()),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return n.each(a.match(E)||[],function(a,c){b[c]=!0}),b}n.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):n.extend({},a);var b,c,d,e,f,g,h=[],i=!a.once&&[],j=function(l){for(b=a.memory&&l,c=!0,g=e||0,e=0,f=h.length,d=!0;h&&f>g;g++)if(h[g].apply(l[0],l[1])===!1&&a.stopOnFalse){b=!1;break}d=!1,h&&(i?i.length&&j(i.shift()):b?h=[]:k.disable())},k={add:function(){if(h){var c=h.length;!function g(b){n.each(b,function(b,c){var d=n.type(c);"function"===d?a.unique&&k.has(c)||h.push(c):c&&c.length&&"string"!==d&&g(c)})}(arguments),d?f=h.length:b&&(e=c,j(b))}return this},remove:function(){return h&&n.each(arguments,function(a,b){var 
c;while((c=n.inArray(b,h,c))>-1)h.splice(c,1),d&&(f>=c&&f--,g>=c&&g--)}),this},has:function(a){return a?n.inArray(a,h)>-1:!(!h||!h.length)},empty:function(){return h=[],f=0,this},disable:function(){return h=i=b=void 0,this},disabled:function(){return!h},lock:function(){return i=void 0,b||k.disable(),this},locked:function(){return!i},fireWith:function(a,b){return!h||c&&!i||(b=b||[],b=[a,b.slice?b.slice():b],d?i.push(b):j(b)),this},fire:function(){return k.fireWith(this,arguments),this},fired:function(){return!!c}};return k},n.extend({Deferred:function(a){var b=[["resolve","done",n.Callbacks("once memory"),"resolved"],["reject","fail",n.Callbacks("once memory"),"rejected"],["notify","progress",n.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return n.Deferred(function(c){n.each(b,function(b,f){var g=n.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&n.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?n.extend(a,d):d}},e={};return d.pipe=d.then,n.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=d.call(arguments),e=c.length,f=1!==e||a&&n.isFunction(a.promise)?e:0,g=1===f?a:n.Deferred(),h=function(a,b,c){return function(e){b[a]=this,c[a]=arguments.length>1?d.call(arguments):e,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(e>1)for(i=new Array(e),j=new Array(e),k=new Array(e);e>b;b++)c[b]&&n.isFunction(c[b].promise)?c[b].promise().done(h(b,k,c)).fail(g.reject).progress(h(b,j,i)):--f;return f||g.resolveWith(k,c),g.promise()}});var H;n.fn.ready=function(a){return n.ready.promise().done(a),this},n.extend({isReady:!1,readyWait:1,holdReady:function(a){a?n.readyWait++:n.ready(!0)},ready:function(a){(a===!0?--n.readyWait:n.isReady)||(n.isReady=!0,a!==!0&&--n.readyWait>0||(H.resolveWith(l,[n]),n.fn.triggerHandler&&(n(l).triggerHandler("ready"),n(l).off("ready"))))}});function I(){l.removeEventListener("DOMContentLoaded",I,!1),a.removeEventListener("load",I,!1),n.ready()}n.ready.promise=function(b){return H||(H=n.Deferred(),"complete"===l.readyState?setTimeout(n.ready):(l.addEventListener("DOMContentLoaded",I,!1),a.addEventListener("load",I,!1))),H.promise(b)},n.ready.promise();var J=n.access=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===n.type(c)){e=!0;for(h in c)n.access(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,n.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(n(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f};n.acceptData=function(a){return 1===a.nodeType||9===a.nodeType||!+a.nodeType};function K(){Object.defineProperty(this.cache={},0,{get:function(){return{}}}),this.expando=n.expando+K.uid++}K.uid=1,K.accepts=n.acceptData,K.prototype={key:function(a){if(!K.accepts(a))return 0;var b={},c=a[this.expando];if(!c){c=K.uid++;try{b[this.expando]={value:c},Object.defineProperties(a,b)}catch(d){b[this.expando]=c,n.extend(a,b)}}return this.cache[c]||(this.cache[c]={}),c},set:function(a,b,c){var d,e=this.key(a),f=this.cache[e];if("string"==typeof b)f[b]=c;else 
if(n.isEmptyObject(f))n.extend(this.cache[e],b);else for(d in b)f[d]=b[d];return f},get:function(a,b){var c=this.cache[this.key(a)];return void 0===b?c:c[b]},access:function(a,b,c){var d;return void 0===b||b&&"string"==typeof b&&void 0===c?(d=this.get(a,b),void 0!==d?d:this.get(a,n.camelCase(b))):(this.set(a,b,c),void 0!==c?c:b)},remove:function(a,b){var c,d,e,f=this.key(a),g=this.cache[f];if(void 0===b)this.cache[f]={};else{n.isArray(b)?d=b.concat(b.map(n.camelCase)):(e=n.camelCase(b),b in g?d=[b,e]:(d=e,d=d in g?[d]:d.match(E)||[])),c=d.length;while(c--)delete g[d[c]]}},hasData:function(a){return!n.isEmptyObject(this.cache[a[this.expando]]||{})},discard:function(a){a[this.expando]&&delete this.cache[a[this.expando]]}};var L=new K,M=new K,N=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,O=/([A-Z])/g;function P(a,b,c){var d;if(void 0===c&&1===a.nodeType)if(d="data-"+b.replace(O,"-$1").toLowerCase(),c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:N.test(c)?n.parseJSON(c):c}catch(e){}M.set(a,b,c)}else c=void 0;return c}n.extend({hasData:function(a){return M.hasData(a)||L.hasData(a)},data:function(a,b,c){return M.access(a,b,c)
+},removeData:function(a,b){M.remove(a,b)},_data:function(a,b,c){return L.access(a,b,c)},_removeData:function(a,b){L.remove(a,b)}}),n.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=M.get(f),1===f.nodeType&&!L.get(f,"hasDataAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=n.camelCase(d.slice(5)),P(f,d,e[d])));L.set(f,"hasDataAttrs",!0)}return e}return"object"==typeof a?this.each(function(){M.set(this,a)}):J(this,function(b){var c,d=n.camelCase(a);if(f&&void 0===b){if(c=M.get(f,a),void 0!==c)return c;if(c=M.get(f,d),void 0!==c)return c;if(c=P(f,d,void 0),void 0!==c)return c}else this.each(function(){var c=M.get(this,d);M.set(this,d,b),-1!==a.indexOf("-")&&void 0!==c&&M.set(this,a,b)})},null,b,arguments.length>1,null,!0)},removeData:function(a){return this.each(function(){M.remove(this,a)})}}),n.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=L.get(a,b),c&&(!d||n.isArray(c)?d=L.access(a,b,n.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=n.queue(a,b),d=c.length,e=c.shift(),f=n._queueHooks(a,b),g=function(){n.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return L.get(a,c)||L.access(a,c,{empty:n.Callbacks("once memory").add(function(){L.remove(a,[b+"queue",c])})})}}),n.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length<c?n.queue(this[0],a):void 0===b?this:this.each(function(){var c=n.queue(this,a,b);n._queueHooks(this,a),"fx"===a&&"inprogress"!==c[0]&&n.dequeue(this,a)})},dequeue:function(a){return this.each(function(){n.dequeue(this,a)})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,b){var c,d=1,e=n.Deferred(),f=this,g=this.length,h=function(){--d||e.resolveWith(f,[f])};"string"!=typeof a&&(b=a,a=void 0),a=a||"fx";while(g--)c=L.get(f[g],a+"queueHooks"),c&&c.empty&&(d++,c.empty.add(h));return h(),e.promise(b)}});var Q=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,R=["Top","Right","Bottom","Left"],S=function(a,b){return a=b||a,"none"===n.css(a,"display")||!n.contains(a.ownerDocument,a)},T=/^(?:checkbox|radio)$/i;!function(){var a=l.createDocumentFragment(),b=a.appendChild(l.createElement("div")),c=l.createElement("input");c.setAttribute("type","radio"),c.setAttribute("checked","checked"),c.setAttribute("name","t"),b.appendChild(c),k.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,b.innerHTML="<textarea>x</textarea>",k.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue}();var U="undefined";k.focusinBubbles="onfocusin"in a;var V=/^key/,W=/^(?:mouse|pointer|contextmenu)|click/,X=/^(?:focusinfocus|focusoutblur)$/,Y=/^([^.]*)(?:\.(.+)|)$/;function Z(){return!0}function $(){return!1}function _(){try{return l.activeElement}catch(a){}}n.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=L.get(a);if(r){c.handler&&(f=c,c=f.handler,e=f.selector),c.guid||(c.guid=n.guid++),(i=r.events)||(i=r.events={}),(g=r.handle)||(g=r.handle=function(b){return typeof n!==U&&n.event.triggered!==b.type?n.event.dispatch.apply(a,arguments):void 
0}),b=(b||"").match(E)||[""],j=b.length;while(j--)h=Y.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o&&(l=n.event.special[o]||{},o=(e?l.delegateType:l.bindType)||o,l=n.event.special[o]||{},k=n.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&n.expr.match.needsContext.test(e),namespace:p.join(".")},f),(m=i[o])||(m=i[o]=[],m.delegateCount=0,l.setup&&l.setup.call(a,d,p,g)!==!1||a.addEventListener&&a.addEventListener(o,g,!1)),l.add&&(l.add.call(a,k),k.handler.guid||(k.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,k):m.push(k),n.event.global[o]=!0)}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=L.hasData(a)&&L.get(a);if(r&&(i=r.events)){b=(b||"").match(E)||[""],j=b.length;while(j--)if(h=Y.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=n.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,m=i[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),g=f=m.length;while(f--)k=m[f],!e&&q!==k.origType||c&&c.guid!==k.guid||h&&!h.test(k.namespace)||d&&d!==k.selector&&("**"!==d||!k.selector)||(m.splice(f,1),k.selector&&m.delegateCount--,l.remove&&l.remove.call(a,k));g&&!m.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||n.removeEvent(a,o,r.handle),delete i[o])}else for(o in i)n.event.remove(a,o+b[j],c,d,!0);n.isEmptyObject(i)&&(delete r.handle,L.remove(a,"events"))}},trigger:function(b,c,d,e){var f,g,h,i,k,m,o,p=[d||l],q=j.call(b,"type")?b.type:b,r=j.call(b,"namespace")?b.namespace.split("."):[];if(g=h=d=d||l,3!==d.nodeType&&8!==d.nodeType&&!X.test(q+n.event.triggered)&&(q.indexOf(".")>=0&&(r=q.split("."),q=r.shift(),r.sort()),k=q.indexOf(":")<0&&"on"+q,b=b[n.expando]?b:new n.Event(q,"object"==typeof b&&b),b.isTrigger=e?2:3,b.namespace=r.join("."),b.namespace_re=b.namespace?new RegExp("(^|\\.)"+r.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 0,b.target||(b.target=d),c=null==c?[b]:n.makeArray(c,[b]),o=n.event.special[q]||{},e||!o.trigger||o.trigger.apply(d,c)!==!1)){if(!e&&!o.noBubble&&!n.isWindow(d)){for(i=o.delegateType||q,X.test(i+q)||(g=g.parentNode);g;g=g.parentNode)p.push(g),h=g;h===(d.ownerDocument||l)&&p.push(h.defaultView||h.parentWindow||a)}f=0;while((g=p[f++])&&!b.isPropagationStopped())b.type=f>1?i:o.bindType||q,m=(L.get(g,"events")||{})[b.type]&&L.get(g,"handle"),m&&m.apply(g,c),m=k&&g[k],m&&m.apply&&n.acceptData(g)&&(b.result=m.apply(g,c),b.result===!1&&b.preventDefault());return b.type=q,e||b.isDefaultPrevented()||o._default&&o._default.apply(p.pop(),c)!==!1||!n.acceptData(d)||k&&n.isFunction(d[q])&&!n.isWindow(d)&&(h=d[k],h&&(d[k]=null),n.event.triggered=q,d[q](),n.event.triggered=void 0,h&&(d[k]=h)),b.result}},dispatch:function(a){a=n.event.fix(a);var b,c,e,f,g,h=[],i=d.call(arguments),j=(L.get(this,"events")||{})[a.type]||[],k=n.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=n.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,c=0;while((g=f.handlers[c++])&&!a.isImmediatePropagationStopped())(!a.namespace_re||a.namespace_re.test(g.namespace))&&(a.handleObj=g,a.data=g.data,e=((n.event.special[g.origType]||{}).handle||g.handler).apply(f.elem,i),void 0!==e&&(a.result=e)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var 
c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&(!a.button||"click"!==a.type))for(;i!==this;i=i.parentNode||this)if(i.disabled!==!0||"click"!==a.type){for(d=[],c=0;h>c;c++)f=b[c],e=f.selector+" ",void 0===d[e]&&(d[e]=f.needsContext?n(e,this).index(i)>=0:n.find(e,this,null,[i]).length),d[e]&&d.push(f);d.length&&g.push({elem:i,handlers:d})}return h<b.length&&g.push({elem:this,handlers:b.slice(h)}),g},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return null==a.which&&(a.which=null!=b.charCode?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,b){var c,d,e,f=b.button;return null==a.pageX&&null!=b.clientX&&(c=a.target.ownerDocument||l,d=c.documentElement,e=c.body,a.pageX=b.clientX+(d&&d.scrollLeft||e&&e.scrollLeft||0)-(d&&d.clientLeft||e&&e.clientLeft||0),a.pageY=b.clientY+(d&&d.scrollTop||e&&e.scrollTop||0)-(d&&d.clientTop||e&&e.clientTop||0)),a.which||void 0===f||(a.which=1&f?1:2&f?3:4&f?2:0),a}},fix:function(a){if(a[n.expando])return a;var b,c,d,e=a.type,f=a,g=this.fixHooks[e];g||(this.fixHooks[e]=g=W.test(e)?this.mouseHooks:V.test(e)?this.keyHooks:{}),d=g.props?this.props.concat(g.props):this.props,a=new n.Event(f),b=d.length;while(b--)c=d[b],a[c]=f[c];return a.target||(a.target=l),3===a.target.nodeType&&(a.target=a.target.parentNode),g.filter?g.filter(a,f):a},special:{load:{noBubble:!0},focus:{trigger:function(){return this!==_()&&this.focus?(this.focus(),!1):void 0},delegateType:"focusin"},blur:{trigger:function(){return this===_()&&this.blur?(this.blur(),!1):void 0},delegateType:"focusout"},click:{trigger:function(){return"checkbox"===this.type&&this.click&&n.nodeName(this,"input")?(this.click(),!1):void 0},_default:function(a){return n.nodeName(a.target,"a")}},beforeunload:{postDispatch:function(a){void 0!==a.result&&a.originalEvent&&(a.originalEvent.returnValue=a.result)}}},simulate:function(a,b,c,d){var e=n.extend(new n.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?n.event.trigger(e,null,b):n.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},n.removeEvent=function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)},n.Event=function(a,b){return this instanceof n.Event?(a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||void 0===a.defaultPrevented&&a.returnValue===!1?Z:$):this.type=a,b&&n.extend(this,b),this.timeStamp=a&&a.timeStamp||n.now(),void(this[n.expando]=!0)):new n.Event(a,b)},n.Event.prototype={isDefaultPrevented:$,isPropagationStopped:$,isImmediatePropagationStopped:$,preventDefault:function(){var a=this.originalEvent;this.isDefaultPrevented=Z,a&&a.preventDefault&&a.preventDefault()},stopPropagation:function(){var a=this.originalEvent;this.isPropagationStopped=Z,a&&a.stopPropagation&&a.stopPropagation()},stopImmediatePropagation:function(){var a=this.originalEvent;this.isImmediatePropagationStopped=Z,a&&a.stopImmediatePropagation&&a.stopImmediatePropagation(),this.stopPropagation()}},n.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(a,b){n.event.special[a]={delegateType:b,bindType:b,handle:function(a){var 
c,d=this,e=a.relatedTarget,f=a.handleObj;return(!e||e!==d&&!n.contains(d,e))&&(a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b),c}}}),k.focusinBubbles||n.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){n.event.simulate(b,a.target,n.event.fix(a),!0)};n.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=L.access(d,b);e||d.addEventListener(a,c,!0),L.access(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=L.access(d,b)-1;e?L.access(d,b,e):(d.removeEventListener(a,c,!0),L.remove(d,b))}}}),n.fn.extend({on:function(a,b,c,d,e){var f,g;if("object"==typeof a){"string"!=typeof b&&(c=c||b,b=void 0);for(g in a)this.on(g,b,c,a[g],e);return this}if(null==c&&null==d?(d=b,c=b=void 0):null==d&&("string"==typeof b?(d=c,c=void 0):(d=c,c=b,b=void 0)),d===!1)d=$;else if(!d)return this;return 1===e&&(f=d,d=function(a){return n().off(a),f.apply(this,arguments)},d.guid=f.guid||(f.guid=n.guid++)),this.each(function(){n.event.add(this,a,d,c,b)})},one:function(a,b,c,d){return this.on(a,b,c,d,1)},off:function(a,b,c){var d,e;if(a&&a.preventDefault&&a.handleObj)return d=a.handleObj,n(a.delegateTarget).off(d.namespace?d.origType+"."+d.namespace:d.origType,d.selector,d.handler),this;if("object"==typeof a){for(e in a)this.off(e,b,a[e]);return this}return(b===!1||"function"==typeof b)&&(c=b,b=void 0),c===!1&&(c=$),this.each(function(){n.event.remove(this,a,c,b)})},trigger:function(a,b){return this.each(function(){n.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];return c?n.event.trigger(a,b,c,!0):void 0}});var ab=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bb=/<([\w:]+)/,cb=/<|&#?\w+;/,db=/<(?:script|style|link)/i,eb=/checked\s*(?:[^=]|=\s*.checked.)/i,fb=/^$|\/(?:java|ecma)script/i,gb=/^true\/(.*)/,hb=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,ib={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};ib.optgroup=ib.option,ib.tbody=ib.tfoot=ib.colgroup=ib.caption=ib.thead,ib.th=ib.td;function jb(a,b){return n.nodeName(a,"table")&&n.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function kb(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}function lb(a){var b=gb.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function mb(a,b){for(var c=0,d=a.length;d>c;c++)L.set(a[c],"globalEval",!b||L.get(b[c],"globalEval"))}function nb(a,b){var c,d,e,f,g,h,i,j;if(1===b.nodeType){if(L.hasData(a)&&(f=L.access(a),g=L.set(b,f),j=f.events)){delete g.handle,g.events={};for(e in j)for(c=0,d=j[e].length;d>c;c++)n.event.add(b,e,j[e][c])}M.hasData(a)&&(h=M.access(a),i=n.extend({},h),M.set(b,i))}}function ob(a,b){var c=a.getElementsByTagName?a.getElementsByTagName(b||"*"):a.querySelectorAll?a.querySelectorAll(b||"*"):[];return void 0===b||b&&n.nodeName(a,b)?n.merge([a],c):c}function pb(a,b){var c=b.nodeName.toLowerCase();"input"===c&&T.test(a.type)?b.checked=a.checked:("input"===c||"textarea"===c)&&(b.defaultValue=a.defaultValue)}n.extend({clone:function(a,b,c){var 
d,e,f,g,h=a.cloneNode(!0),i=n.contains(a.ownerDocument,a);if(!(k.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||n.isXMLDoc(a)))for(g=ob(h),f=ob(a),d=0,e=f.length;e>d;d++)pb(f[d],g[d]);if(b)if(c)for(f=f||ob(a),g=g||ob(h),d=0,e=f.length;e>d;d++)nb(f[d],g[d]);else nb(a,h);return g=ob(h,"script"),g.length>0&&mb(g,!i&&ob(a,"script")),h},buildFragment:function(a,b,c,d){for(var e,f,g,h,i,j,k=b.createDocumentFragment(),l=[],m=0,o=a.length;o>m;m++)if(e=a[m],e||0===e)if("object"===n.type(e))n.merge(l,e.nodeType?[e]:e);else if(cb.test(e)){f=f||k.appendChild(b.createElement("div")),g=(bb.exec(e)||["",""])[1].toLowerCase(),h=ib[g]||ib._default,f.innerHTML=h[1]+e.replace(ab,"<$1></$2>")+h[2],j=h[0];while(j--)f=f.lastChild;n.merge(l,f.childNodes),f=k.firstChild,f.textContent=""}else l.push(b.createTextNode(e));k.textContent="",m=0;while(e=l[m++])if((!d||-1===n.inArray(e,d))&&(i=n.contains(e.ownerDocument,e),f=ob(k.appendChild(e),"script"),i&&mb(f),c)){j=0;while(e=f[j++])fb.test(e.type||"")&&c.push(e)}return k},cleanData:function(a){for(var b,c,d,e,f=n.event.special,g=0;void 0!==(c=a[g]);g++){if(n.acceptData(c)&&(e=c[L.expando],e&&(b=L.cache[e]))){if(b.events)for(d in b.events)f[d]?n.event.remove(c,d):n.removeEvent(c,d,b.handle);L.cache[e]&&delete L.cache[e]}delete M.cache[c[M.expando]]}}}),n.fn.extend({text:function(a){return J(this,function(a){return void 0===a?n.text(this):this.empty().each(function(){(1===this.nodeType||11===this.nodeType||9===this.nodeType)&&(this.textContent=a)})},null,a,arguments.length)},append:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=jb(this,a);b.appendChild(a)}})},prepend:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=jb(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},remove:function(a,b){for(var c,d=a?n.filter(a,this):this,e=0;null!=(c=d[e]);e++)b||1!==c.nodeType||n.cleanData(ob(c)),c.parentNode&&(b&&n.contains(c.ownerDocument,c)&&mb(ob(c,"script")),c.parentNode.removeChild(c));return this},empty:function(){for(var a,b=0;null!=(a=this[b]);b++)1===a.nodeType&&(n.cleanData(ob(a,!1)),a.textContent="");return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return n.clone(this,a,b)})},html:function(a){return J(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a&&1===b.nodeType)return b.innerHTML;if("string"==typeof a&&!db.test(a)&&!ib[(bb.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(ab,"<$1></$2>");try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(n.cleanData(ob(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=arguments[0];return this.domManip(arguments,function(b){a=this.parentNode,n.cleanData(ob(this)),a&&a.replaceChild(b,this)}),a&&(a.length||a.nodeType)?this:this.remove()},detach:function(a){return this.remove(a,!0)},domManip:function(a,b){a=e.apply([],a);var c,d,f,g,h,i,j=0,l=this.length,m=this,o=l-1,p=a[0],q=n.isFunction(p);if(q||l>1&&"string"==typeof p&&!k.checkClone&&eb.test(p))return this.each(function(c){var 
d=m.eq(c);q&&(a[0]=p.call(this,c,d.html())),d.domManip(a,b)});if(l&&(c=n.buildFragment(a,this[0].ownerDocument,!1,this),d=c.firstChild,1===c.childNodes.length&&(c=d),d)){for(f=n.map(ob(c,"script"),kb),g=f.length;l>j;j++)h=c,j!==o&&(h=n.clone(h,!0,!0),g&&n.merge(f,ob(h,"script"))),b.call(this[j],h,j);if(g)for(i=f[f.length-1].ownerDocument,n.map(f,lb),j=0;g>j;j++)h=f[j],fb.test(h.type||"")&&!L.access(h,"globalEval")&&n.contains(i,h)&&(h.src?n._evalUrl&&n._evalUrl(h.src):n.globalEval(h.textContent.replace(hb,"")))}return this}}),n.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){n.fn[a]=function(a){for(var c,d=[],e=n(a),g=e.length-1,h=0;g>=h;h++)c=h===g?this:this.clone(!0),n(e[h])[b](c),f.apply(d,c.get());return this.pushStack(d)}});var qb,rb={};function sb(b,c){var d,e=n(c.createElement(b)).appendTo(c.body),f=a.getDefaultComputedStyle&&(d=a.getDefaultComputedStyle(e[0]))?d.display:n.css(e[0],"display");return e.detach(),f}function tb(a){var b=l,c=rb[a];return c||(c=sb(a,b),"none"!==c&&c||(qb=(qb||n("<iframe frameborder='0' width='0' height='0'/>")).appendTo(b.documentElement),b=qb[0].contentDocument,b.write(),b.close(),c=sb(a,b),qb.detach()),rb[a]=c),c}var ub=/^margin/,vb=new RegExp("^("+Q+")(?!px)[a-z%]+$","i"),wb=function(b){return b.ownerDocument.defaultView.opener?b.ownerDocument.defaultView.getComputedStyle(b,null):a.getComputedStyle(b,null)};function xb(a,b,c){var d,e,f,g,h=a.style;return c=c||wb(a),c&&(g=c.getPropertyValue(b)||c[b]),c&&(""!==g||n.contains(a.ownerDocument,a)||(g=n.style(a,b)),vb.test(g)&&ub.test(b)&&(d=h.width,e=h.minWidth,f=h.maxWidth,h.minWidth=h.maxWidth=h.width=g,g=c.width,h.width=d,h.minWidth=e,h.maxWidth=f)),void 0!==g?g+"":g}function yb(a,b){return{get:function(){return a()?void delete this.get:(this.get=b).apply(this,arguments)}}}!function(){var b,c,d=l.documentElement,e=l.createElement("div"),f=l.createElement("div");if(f.style){f.style.backgroundClip="content-box",f.cloneNode(!0).style.backgroundClip="",k.clearCloneStyle="content-box"===f.style.backgroundClip,e.style.cssText="border:0;width:0;height:0;top:0;left:-9999px;margin-top:1px;position:absolute",e.appendChild(f);function g(){f.style.cssText="-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;display:block;margin-top:1%;top:1%;border:1px;padding:1px;width:4px;position:absolute",f.innerHTML="",d.appendChild(e);var g=a.getComputedStyle(f,null);b="1%"!==g.top,c="4px"===g.width,d.removeChild(e)}a.getComputedStyle&&n.extend(k,{pixelPosition:function(){return g(),b},boxSizingReliable:function(){return null==c&&g(),c},reliableMarginRight:function(){var b,c=f.appendChild(l.createElement("div"));return c.style.cssText=f.style.cssText="-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:0",c.style.marginRight=c.style.width="0",f.style.width="1px",d.appendChild(e),b=!parseFloat(a.getComputedStyle(c,null).marginRight),d.removeChild(e),f.removeChild(c),b}})}}(),n.swap=function(a,b,c,d){var e,f,g={};for(f in b)g[f]=a.style[f],a.style[f]=b[f];e=c.apply(a,d||[]);for(f in b)a.style[f]=g[f];return e};var zb=/^(none|table(?!-c[ea]).+)/,Ab=new RegExp("^("+Q+")(.*)$","i"),Bb=new RegExp("^([+-])=("+Q+")","i"),Cb={position:"absolute",visibility:"hidden",display:"block"},Db={letterSpacing:"0",fontWeight:"400"},Eb=["Webkit","O","Moz","ms"];function Fb(a,b){if(b in a)return b;var c=b[0].toUpperCase()+b.slice(1),d=b,e=Eb.length;while(e--)if(b=Eb[e]+c,b in 
a)return b;return d}function Gb(a,b,c){var d=Ab.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function Hb(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===b?1:0,g=0;4>f;f+=2)"margin"===c&&(g+=n.css(a,c+R[f],!0,e)),d?("content"===c&&(g-=n.css(a,"padding"+R[f],!0,e)),"margin"!==c&&(g-=n.css(a,"border"+R[f]+"Width",!0,e))):(g+=n.css(a,"padding"+R[f],!0,e),"padding"!==c&&(g+=n.css(a,"border"+R[f]+"Width",!0,e)));return g}function Ib(a,b,c){var d=!0,e="width"===b?a.offsetWidth:a.offsetHeight,f=wb(a),g="border-box"===n.css(a,"boxSizing",!1,f);if(0>=e||null==e){if(e=xb(a,b,f),(0>e||null==e)&&(e=a.style[b]),vb.test(e))return e;d=g&&(k.boxSizingReliable()||e===a.style[b]),e=parseFloat(e)||0}return e+Hb(a,b,c||(g?"border":"content"),d,f)+"px"}function Jb(a,b){for(var c,d,e,f=[],g=0,h=a.length;h>g;g++)d=a[g],d.style&&(f[g]=L.get(d,"olddisplay"),c=d.style.display,b?(f[g]||"none"!==c||(d.style.display=""),""===d.style.display&&S(d)&&(f[g]=L.access(d,"olddisplay",tb(d.nodeName)))):(e=S(d),"none"===c&&e||L.set(d,"olddisplay",e?c:n.css(d,"display"))));for(g=0;h>g;g++)d=a[g],d.style&&(b&&"none"!==d.style.display&&""!==d.style.display||(d.style.display=b?f[g]||"":"none"));return a}n.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=xb(a,"opacity");return""===c?"1":c}}}},cssNumber:{columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":"cssFloat"},style:function(a,b,c,d){if(a&&3!==a.nodeType&&8!==a.nodeType&&a.style){var e,f,g,h=n.camelCase(b),i=a.style;return b=n.cssProps[h]||(n.cssProps[h]=Fb(i,h)),g=n.cssHooks[b]||n.cssHooks[h],void 0===c?g&&"get"in g&&void 0!==(e=g.get(a,!1,d))?e:i[b]:(f=typeof c,"string"===f&&(e=Bb.exec(c))&&(c=(e[1]+1)*e[2]+parseFloat(n.css(a,b)),f="number"),null!=c&&c===c&&("number"!==f||n.cssNumber[h]||(c+="px"),k.clearCloneStyle||""!==c||0!==b.indexOf("background")||(i[b]="inherit"),g&&"set"in g&&void 0===(c=g.set(a,c,d))||(i[b]=c)),void 0)}},css:function(a,b,c,d){var e,f,g,h=n.camelCase(b);return b=n.cssProps[h]||(n.cssProps[h]=Fb(a.style,h)),g=n.cssHooks[b]||n.cssHooks[h],g&&"get"in g&&(e=g.get(a,!0,c)),void 0===e&&(e=xb(a,b,d)),"normal"===e&&b in Db&&(e=Db[b]),""===c||c?(f=parseFloat(e),c===!0||n.isNumeric(f)?f||0:e):e}}),n.each(["height","width"],function(a,b){n.cssHooks[b]={get:function(a,c,d){return c?zb.test(n.css(a,"display"))&&0===a.offsetWidth?n.swap(a,Cb,function(){return Ib(a,b,d)}):Ib(a,b,d):void 0},set:function(a,c,d){var e=d&&wb(a);return Gb(a,c,d?Hb(a,b,d,"border-box"===n.css(a,"boxSizing",!1,e),e):0)}}}),n.cssHooks.marginRight=yb(k.reliableMarginRight,function(a,b){return b?n.swap(a,{display:"inline-block"},xb,[a,"marginRight"]):void 0}),n.each({margin:"",padding:"",border:"Width"},function(a,b){n.cssHooks[a+b]={expand:function(c){for(var d=0,e={},f="string"==typeof c?c.split(" "):[c];4>d;d++)e[a+R[d]+b]=f[d]||f[d-2]||f[0];return e}},ub.test(a)||(n.cssHooks[a+b].set=Gb)}),n.fn.extend({css:function(a,b){return J(this,function(a,b,c){var d,e,f={},g=0;if(n.isArray(b)){for(d=wb(a),e=b.length;e>g;g++)f[b[g]]=n.css(a,b[g],!1,d);return f}return void 0!==c?n.style(a,b,c):n.css(a,b)},a,b,arguments.length>1)},show:function(){return Jb(this,!0)},hide:function(){return Jb(this)},toggle:function(a){return"boolean"==typeof a?a?this.show():this.hide():this.each(function(){S(this)?n(this).show():n(this).hide()})}});function Kb(a,b,c,d,e){return new 
Kb.prototype.init(a,b,c,d,e)}n.Tween=Kb,Kb.prototype={constructor:Kb,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||"swing",this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(n.cssNumber[c]?"":"px")},cur:function(){var a=Kb.propHooks[this.prop];return a&&a.get?a.get(this):Kb.propHooks._default.get(this)},run:function(a){var b,c=Kb.propHooks[this.prop];return this.pos=b=this.options.duration?n.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):Kb.propHooks._default.set(this),this}},Kb.prototype.init.prototype=Kb.prototype,Kb.propHooks={_default:{get:function(a){var b;return null==a.elem[a.prop]||a.elem.style&&null!=a.elem.style[a.prop]?(b=n.css(a.elem,a.prop,""),b&&"auto"!==b?b:0):a.elem[a.prop]},set:function(a){n.fx.step[a.prop]?n.fx.step[a.prop](a):a.elem.style&&(null!=a.elem.style[n.cssProps[a.prop]]||n.cssHooks[a.prop])?n.style(a.elem,a.prop,a.now+a.unit):a.elem[a.prop]=a.now}}},Kb.propHooks.scrollTop=Kb.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},n.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2}},n.fx=Kb.prototype.init,n.fx.step={};var Lb,Mb,Nb=/^(?:toggle|show|hide)$/,Ob=new RegExp("^(?:([+-])=|)("+Q+")([a-z%]*)$","i"),Pb=/queueHooks$/,Qb=[Vb],Rb={"*":[function(a,b){var c=this.createTween(a,b),d=c.cur(),e=Ob.exec(b),f=e&&e[3]||(n.cssNumber[a]?"":"px"),g=(n.cssNumber[a]||"px"!==f&&+d)&&Ob.exec(n.css(c.elem,a)),h=1,i=20;if(g&&g[3]!==f){f=f||g[3],e=e||[],g=+d||1;do h=h||".5",g/=h,n.style(c.elem,a,g+f);while(h!==(h=c.cur()/d)&&1!==h&&--i)}return e&&(g=c.start=+g||+d||0,c.unit=f,c.end=e[1]?g+(e[1]+1)*e[2]:+e[2]),c}]};function Sb(){return setTimeout(function(){Lb=void 0}),Lb=n.now()}function Tb(a,b){var c,d=0,e={height:a};for(b=b?1:0;4>d;d+=2-b)c=R[d],e["margin"+c]=e["padding"+c]=a;return b&&(e.opacity=e.width=a),e}function Ub(a,b,c){for(var d,e=(Rb[b]||[]).concat(Rb["*"]),f=0,g=e.length;g>f;f++)if(d=e[f].call(c,b,a))return d}function Vb(a,b,c){var d,e,f,g,h,i,j,k,l=this,m={},o=a.style,p=a.nodeType&&S(a),q=L.get(a,"fxshow");c.queue||(h=n._queueHooks(a,"fx"),null==h.unqueued&&(h.unqueued=0,i=h.empty.fire,h.empty.fire=function(){h.unqueued||i()}),h.unqueued++,l.always(function(){l.always(function(){h.unqueued--,n.queue(a,"fx").length||h.empty.fire()})})),1===a.nodeType&&("height"in b||"width"in b)&&(c.overflow=[o.overflow,o.overflowX,o.overflowY],j=n.css(a,"display"),k="none"===j?L.get(a,"olddisplay")||tb(a.nodeName):j,"inline"===k&&"none"===n.css(a,"float")&&(o.display="inline-block")),c.overflow&&(o.overflow="hidden",l.always(function(){o.overflow=c.overflow[0],o.overflowX=c.overflow[1],o.overflowY=c.overflow[2]}));for(d in b)if(e=b[d],Nb.exec(e)){if(delete b[d],f=f||"toggle"===e,e===(p?"hide":"show")){if("show"!==e||!q||void 0===q[d])continue;p=!0}m[d]=q&&q[d]||n.style(a,d)}else j=void 0;if(n.isEmptyObject(m))"inline"===("none"===j?tb(a.nodeName):j)&&(o.display=j);else{q?"hidden"in q&&(p=q.hidden):q=L.access(a,"fxshow",{}),f&&(q.hidden=!p),p?n(a).show():l.done(function(){n(a).hide()}),l.done(function(){var b;L.remove(a,"fxshow");for(b in m)n.style(a,b,m[b])});for(d in m)g=Ub(p?q[d]:0,d,l),d in q||(q[d]=g.start,p&&(g.end=g.start,g.start="width"===d||"height"===d?1:0))}}function Wb(a,b){var c,d,e,f,g;for(c in a)if(d=n.camelCase(c),e=b[d],f=a[c],n.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete 
a[c]),g=n.cssHooks[d],g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}function Xb(a,b,c){var d,e,f=0,g=Qb.length,h=n.Deferred().always(function(){delete i.elem}),i=function(){if(e)return!1;for(var b=Lb||Sb(),c=Math.max(0,j.startTime+j.duration-b),d=c/j.duration||0,f=1-d,g=0,i=j.tweens.length;i>g;g++)j.tweens[g].run(f);return h.notifyWith(a,[j,f,c]),1>f&&i?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:n.extend({},b),opts:n.extend(!0,{specialEasing:{}},c),originalProperties:b,originalOptions:c,startTime:Lb||Sb(),duration:c.duration,tweens:[],createTween:function(b,c){var d=n.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(d),d},stop:function(b){var c=0,d=b?j.tweens.length:0;if(e)return this;for(e=!0;d>c;c++)j.tweens[c].run(1);return b?h.resolveWith(a,[j,b]):h.rejectWith(a,[j,b]),this}}),k=j.props;for(Wb(k,j.opts.specialEasing);g>f;f++)if(d=Qb[f].call(j,a,k,j.opts))return d;return n.map(k,Ub,j),n.isFunction(j.opts.start)&&j.opts.start.call(a,j),n.fx.timer(n.extend(i,{elem:a,anim:j,queue:j.opts.queue})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}n.Animation=n.extend(Xb,{tweener:function(a,b){n.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");for(var c,d=0,e=a.length;e>d;d++)c=a[d],Rb[c]=Rb[c]||[],Rb[c].unshift(b)},prefilter:function(a,b){b?Qb.unshift(a):Qb.push(a)}}),n.speed=function(a,b,c){var d=a&&"object"==typeof a?n.extend({},a):{complete:c||!c&&b||n.isFunction(a)&&a,duration:a,easing:c&&b||b&&!n.isFunction(b)&&b};return d.duration=n.fx.off?0:"number"==typeof d.duration?d.duration:d.duration in n.fx.speeds?n.fx.speeds[d.duration]:n.fx.speeds._default,(null==d.queue||d.queue===!0)&&(d.queue="fx"),d.old=d.complete,d.complete=function(){n.isFunction(d.old)&&d.old.call(this),d.queue&&n.dequeue(this,d.queue)},d},n.fn.extend({fadeTo:function(a,b,c,d){return this.filter(S).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=n.isEmptyObject(a),f=n.speed(b,c,d),g=function(){var b=Xb(this,n.extend({},a),f);(e||L.get(this,"finish"))&&b.stop(!0)};return g.finish=g,e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,b,c){var d=function(a){var b=a.stop;delete a.stop,b(c)};return"string"!=typeof a&&(c=b,b=a,a=void 0),b&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,e=null!=a&&a+"queueHooks",f=n.timers,g=L.get(this);if(e)g[e]&&g[e].stop&&d(g[e]);else for(e in g)g[e]&&g[e].stop&&Pb.test(e)&&d(g[e]);for(e=f.length;e--;)f[e].elem!==this||null!=a&&f[e].queue!==a||(f[e].anim.stop(c),b=!1,f.splice(e,1));(b||!c)&&n.dequeue(this,a)})},finish:function(a){return a!==!1&&(a=a||"fx"),this.each(function(){var b,c=L.get(this),d=c[a+"queue"],e=c[a+"queueHooks"],f=n.timers,g=d?d.length:0;for(c.finish=!0,n.queue(this,a,[]),e&&e.stop&&e.stop.call(this,!0),b=f.length;b--;)f[b].elem===this&&f[b].queue===a&&(f[b].anim.stop(!0),f.splice(b,1));for(b=0;g>b;b++)d[b]&&d[b].finish&&d[b].finish.call(this);delete c.finish})}}),n.each(["toggle","show","hide"],function(a,b){var c=n.fn[b];n.fn[b]=function(a,d,e){return null==a||"boolean"==typeof a?c.apply(this,arguments):this.animate(Tb(b,!0),a,d,e)}}),n.each({slideDown:Tb("show"),slideUp:Tb("hide"),slideToggle:Tb("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){n.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),n.timers=[],n.fx.tick=function(){var 
a,b=0,c=n.timers;for(Lb=n.now();b<c.length;b++)a=c[b],a()||c[b]!==a||c.splice(b--,1);c.length||n.fx.stop(),Lb=void 0},n.fx.timer=function(a){n.timers.push(a),a()?n.fx.start():n.timers.pop()},n.fx.interval=13,n.fx.start=function(){Mb||(Mb=setInterval(n.fx.tick,n.fx.interval))},n.fx.stop=function(){clearInterval(Mb),Mb=null},n.fx.speeds={slow:600,fast:200,_default:400},n.fn.delay=function(a,b){return a=n.fx?n.fx.speeds[a]||a:a,b=b||"fx",this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},function(){var a=l.createElement("input"),b=l.createElement("select"),c=b.appendChild(l.createElement("option"));a.type="checkbox",k.checkOn=""!==a.value,k.optSelected=c.selected,b.disabled=!0,k.optDisabled=!c.disabled,a=l.createElement("input"),a.value="t",a.type="radio",k.radioValue="t"===a.value}();var Yb,Zb,$b=n.expr.attrHandle;n.fn.extend({attr:function(a,b){return J(this,n.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){n.removeAttr(this,a)})}}),n.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(a&&3!==f&&8!==f&&2!==f)return typeof a.getAttribute===U?n.prop(a,b,c):(1===f&&n.isXMLDoc(a)||(b=b.toLowerCase(),d=n.attrHooks[b]||(n.expr.match.bool.test(b)?Zb:Yb)),void 0===c?d&&"get"in d&&null!==(e=d.get(a,b))?e:(e=n.find.attr(a,b),null==e?void 0:e):null!==c?d&&"set"in d&&void 0!==(e=d.set(a,c,b))?e:(a.setAttribute(b,c+""),c):void n.removeAttr(a,b))
+},removeAttr:function(a,b){var c,d,e=0,f=b&&b.match(E);if(f&&1===a.nodeType)while(c=f[e++])d=n.propFix[c]||c,n.expr.match.bool.test(c)&&(a[d]=!1),a.removeAttribute(c)},attrHooks:{type:{set:function(a,b){if(!k.radioValue&&"radio"===b&&n.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}}}),Zb={set:function(a,b,c){return b===!1?n.removeAttr(a,c):a.setAttribute(c,c),c}},n.each(n.expr.match.bool.source.match(/\w+/g),function(a,b){var c=$b[b]||n.find.attr;$b[b]=function(a,b,d){var e,f;return d||(f=$b[b],$b[b]=e,e=null!=c(a,b,d)?b.toLowerCase():null,$b[b]=f),e}});var _b=/^(?:input|select|textarea|button)$/i;n.fn.extend({prop:function(a,b){return J(this,n.prop,a,b,arguments.length>1)},removeProp:function(a){return this.each(function(){delete this[n.propFix[a]||a]})}}),n.extend({propFix:{"for":"htmlFor","class":"className"},prop:function(a,b,c){var d,e,f,g=a.nodeType;if(a&&3!==g&&8!==g&&2!==g)return f=1!==g||!n.isXMLDoc(a),f&&(b=n.propFix[b]||b,e=n.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){return a.hasAttribute("tabindex")||_b.test(a.nodeName)||a.href?a.tabIndex:-1}}}}),k.optSelected||(n.propHooks.selected={get:function(a){var b=a.parentNode;return b&&b.parentNode&&b.parentNode.selectedIndex,null}}),n.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){n.propFix[this.toLowerCase()]=this});var ac=/[\t\r\n\f]/g;n.fn.extend({addClass:function(a){var b,c,d,e,f,g,h="string"==typeof a&&a,i=0,j=this.length;if(n.isFunction(a))return this.each(function(b){n(this).addClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(E)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(ac," "):" ")){f=0;while(e=b[f++])d.indexOf(" "+e+" ")<0&&(d+=e+" ");g=n.trim(d),c.className!==g&&(c.className=g)}return this},removeClass:function(a){var b,c,d,e,f,g,h=0===arguments.length||"string"==typeof a&&a,i=0,j=this.length;if(n.isFunction(a))return this.each(function(b){n(this).removeClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(E)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(ac," "):"")){f=0;while(e=b[f++])while(d.indexOf(" "+e+" ")>=0)d=d.replace(" "+e+" "," ");g=a?n.trim(d):"",c.className!==g&&(c.className=g)}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof b&&"string"===c?b?this.addClass(a):this.removeClass(a):this.each(n.isFunction(a)?function(c){n(this).toggleClass(a.call(this,c,this.className,b),b)}:function(){if("string"===c){var b,d=0,e=n(this),f=a.match(E)||[];while(b=f[d++])e.hasClass(b)?e.removeClass(b):e.addClass(b)}else(c===U||"boolean"===c)&&(this.className&&L.set(this,"__className__",this.className),this.className=this.className||a===!1?"":L.get(this,"__className__")||"")})},hasClass:function(a){for(var b=" "+a+" ",c=0,d=this.length;d>c;c++)if(1===this[c].nodeType&&(" "+this[c].className+" ").replace(ac," ").indexOf(b)>=0)return!0;return!1}});var bc=/\r/g;n.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=n.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,n(this).val()):a,null==e?e="":"number"==typeof e?e+="":n.isArray(e)&&(e=n.map(e,function(a){return null==a?"":a+""})),b=n.valHooks[this.type]||n.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 
0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=n.valHooks[e.type]||n.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(bc,""):null==c?"":c)}}}),n.extend({valHooks:{option:{get:function(a){var b=n.find.attr(a,"value");return null!=b?b:n.trim(n.text(a))}},select:{get:function(a){for(var b,c,d=a.options,e=a.selectedIndex,f="select-one"===a.type||0>e,g=f?null:[],h=f?e+1:d.length,i=0>e?h:f?e:0;h>i;i++)if(c=d[i],!(!c.selected&&i!==e||(k.optDisabled?c.disabled:null!==c.getAttribute("disabled"))||c.parentNode.disabled&&n.nodeName(c.parentNode,"optgroup"))){if(b=n(c).val(),f)return b;g.push(b)}return g},set:function(a,b){var c,d,e=a.options,f=n.makeArray(b),g=e.length;while(g--)d=e[g],(d.selected=n.inArray(d.value,f)>=0)&&(c=!0);return c||(a.selectedIndex=-1),f}}}}),n.each(["radio","checkbox"],function(){n.valHooks[this]={set:function(a,b){return n.isArray(b)?a.checked=n.inArray(n(a).val(),b)>=0:void 0}},k.checkOn||(n.valHooks[this].get=function(a){return null===a.getAttribute("value")?"on":a.value})}),n.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){n.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),n.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return 1===arguments.length?this.off(a,"**"):this.off(b,a||"**",c)}});var cc=n.now(),dc=/\?/;n.parseJSON=function(a){return JSON.parse(a+"")},n.parseXML=function(a){var b,c;if(!a||"string"!=typeof a)return null;try{c=new DOMParser,b=c.parseFromString(a,"text/xml")}catch(d){b=void 0}return(!b||b.getElementsByTagName("parsererror").length)&&n.error("Invalid XML: "+a),b};var ec=/#.*$/,fc=/([?&])_=[^&]*/,gc=/^(.*?):[ \t]*([^\r\n]*)$/gm,hc=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,ic=/^(?:GET|HEAD)$/,jc=/^\/\//,kc=/^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/,lc={},mc={},nc="*/".concat("*"),oc=a.location.href,pc=kc.exec(oc.toLowerCase())||[];function qc(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var d,e=0,f=b.toLowerCase().match(E)||[];if(n.isFunction(c))while(d=f[e++])"+"===d[0]?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function rc(a,b,c,d){var e={},f=a===mc;function g(h){var i;return e[h]=!0,n.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||f||e[j]?f?!(i=j):void 0:(b.dataTypes.unshift(j),g(j),!1)}),i}return g(b.dataTypes[0])||!e["*"]&&g("*")}function sc(a,b){var c,d,e=n.ajaxSettings.flatOptions||{};for(c in b)void 0!==b[c]&&((e[c]?a:d||(d={}))[c]=b[c]);return d&&n.extend(!0,a,d),a}function tc(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[0])i.shift(),void 0===d&&(d=a.mimeType||b.getResponseHeader("Content-Type"));if(d)for(e in h)if(h[e]&&h[e].test(d)){i.unshift(e);break}if(i[0]in c)f=i[0];else{for(e in c){if(!i[0]||a.converters[e+" "+i[0]]){f=e;break}g||(g=e)}f=f||g}return f?(f!==i[0]&&i.unshift(f),c[f]):void 0}function uc(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in 
a.converters)j[g.toLowerCase()]=a.converters[g];f=k.shift();while(f)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}n.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:oc,type:"GET",isLocal:hc.test(pc[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":nc,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":n.parseJSON,"text xml":n.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return b?sc(sc(a,n.ajaxSettings),b):sc(n.ajaxSettings,a)},ajaxPrefilter:qc(lc),ajaxTransport:qc(mc),ajax:function(a,b){"object"==typeof a&&(b=a,a=void 0),b=b||{};var c,d,e,f,g,h,i,j,k=n.ajaxSetup({},b),l=k.context||k,m=k.context&&(l.nodeType||l.jquery)?n(l):n.event,o=n.Deferred(),p=n.Callbacks("once memory"),q=k.statusCode||{},r={},s={},t=0,u="canceled",v={readyState:0,getResponseHeader:function(a){var b;if(2===t){if(!f){f={};while(b=gc.exec(e))f[b[1].toLowerCase()]=b[2]}b=f[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return 2===t?e:null},setRequestHeader:function(a,b){var c=a.toLowerCase();return t||(a=s[c]=s[c]||a,r[a]=b),this},overrideMimeType:function(a){return t||(k.mimeType=a),this},statusCode:function(a){var b;if(a)if(2>t)for(b in a)q[b]=[q[b],a[b]];else v.always(a[v.status]);return this},abort:function(a){var b=a||u;return c&&c.abort(b),x(0,b),this}};if(o.promise(v).complete=p.add,v.success=v.done,v.error=v.fail,k.url=((a||k.url||oc)+"").replace(ec,"").replace(jc,pc[1]+"//"),k.type=b.method||b.type||k.method||k.type,k.dataTypes=n.trim(k.dataType||"*").toLowerCase().match(E)||[""],null==k.crossDomain&&(h=kc.exec(k.url.toLowerCase()),k.crossDomain=!(!h||h[1]===pc[1]&&h[2]===pc[2]&&(h[3]||("http:"===h[1]?"80":"443"))===(pc[3]||("http:"===pc[1]?"80":"443")))),k.data&&k.processData&&"string"!=typeof k.data&&(k.data=n.param(k.data,k.traditional)),rc(lc,k,b,v),2===t)return v;i=n.event&&k.global,i&&0===n.active++&&n.event.trigger("ajaxStart"),k.type=k.type.toUpperCase(),k.hasContent=!ic.test(k.type),d=k.url,k.hasContent||(k.data&&(d=k.url+=(dc.test(d)?"&":"?")+k.data,delete k.data),k.cache===!1&&(k.url=fc.test(d)?d.replace(fc,"$1_="+cc++):d+(dc.test(d)?"&":"?")+"_="+cc++)),k.ifModified&&(n.lastModified[d]&&v.setRequestHeader("If-Modified-Since",n.lastModified[d]),n.etag[d]&&v.setRequestHeader("If-None-Match",n.etag[d])),(k.data&&k.hasContent&&k.contentType!==!1||b.contentType)&&v.setRequestHeader("Content-Type",k.contentType),v.setRequestHeader("Accept",k.dataTypes[0]&&k.accepts[k.dataTypes[0]]?k.accepts[k.dataTypes[0]]+("*"!==k.dataTypes[0]?", "+nc+"; q=0.01":""):k.accepts["*"]);for(j in k.headers)v.setRequestHeader(j,k.headers[j]);if(k.beforeSend&&(k.beforeSend.call(l,v,k)===!1||2===t))return v.abort();u="abort";for(j 
in{success:1,error:1,complete:1})v[j](k[j]);if(c=rc(mc,k,b,v)){v.readyState=1,i&&m.trigger("ajaxSend",[v,k]),k.async&&k.timeout>0&&(g=setTimeout(function(){v.abort("timeout")},k.timeout));try{t=1,c.send(r,x)}catch(w){if(!(2>t))throw w;x(-1,w)}}else x(-1,"No Transport");function x(a,b,f,h){var j,r,s,u,w,x=b;2!==t&&(t=2,g&&clearTimeout(g),c=void 0,e=h||"",v.readyState=a>0?4:0,j=a>=200&&300>a||304===a,f&&(u=tc(k,v,f)),u=uc(k,u,v,j),j?(k.ifModified&&(w=v.getResponseHeader("Last-Modified"),w&&(n.lastModified[d]=w),w=v.getResponseHeader("etag"),w&&(n.etag[d]=w)),204===a||"HEAD"===k.type?x="nocontent":304===a?x="notmodified":(x=u.state,r=u.data,s=u.error,j=!s)):(s=x,(a||!x)&&(x="error",0>a&&(a=0))),v.status=a,v.statusText=(b||x)+"",j?o.resolveWith(l,[r,x,v]):o.rejectWith(l,[v,x,s]),v.statusCode(q),q=void 0,i&&m.trigger(j?"ajaxSuccess":"ajaxError",[v,k,j?r:s]),p.fireWith(l,[v,x]),i&&(m.trigger("ajaxComplete",[v,k]),--n.active||n.event.trigger("ajaxStop")))}return v},getJSON:function(a,b,c){return n.get(a,b,c,"json")},getScript:function(a,b){return n.get(a,void 0,b,"script")}}),n.each(["get","post"],function(a,b){n[b]=function(a,c,d,e){return n.isFunction(c)&&(e=e||d,d=c,c=void 0),n.ajax({url:a,type:b,dataType:e,data:c,success:d})}}),n._evalUrl=function(a){return n.ajax({url:a,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})},n.fn.extend({wrapAll:function(a){var b;return n.isFunction(a)?this.each(function(b){n(this).wrapAll(a.call(this,b))}):(this[0]&&(b=n(a,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstElementChild)a=a.firstElementChild;return a}).append(this)),this)},wrapInner:function(a){return this.each(n.isFunction(a)?function(b){n(this).wrapInner(a.call(this,b))}:function(){var b=n(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=n.isFunction(a);return this.each(function(c){n(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){n.nodeName(this,"body")||n(this).replaceWith(this.childNodes)}).end()}}),n.expr.filters.hidden=function(a){return a.offsetWidth<=0&&a.offsetHeight<=0},n.expr.filters.visible=function(a){return!n.expr.filters.hidden(a)};var vc=/%20/g,wc=/\[\]$/,xc=/\r?\n/g,yc=/^(?:submit|button|image|reset|file)$/i,zc=/^(?:input|select|textarea|keygen)/i;function Ac(a,b,c,d){var e;if(n.isArray(b))n.each(b,function(b,e){c||wc.test(a)?d(a,e):Ac(a+"["+("object"==typeof e?b:"")+"]",e,c,d)});else if(c||"object"!==n.type(b))d(a,b);else for(e in b)Ac(a+"["+e+"]",b[e],c,d)}n.param=function(a,b){var c,d=[],e=function(a,b){b=n.isFunction(b)?b():null==b?"":b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};if(void 0===b&&(b=n.ajaxSettings&&n.ajaxSettings.traditional),n.isArray(a)||a.jquery&&!n.isPlainObject(a))n.each(a,function(){e(this.name,this.value)});else for(c in a)Ac(c,a[c],b,e);return d.join("&").replace(vc,"+")},n.fn.extend({serialize:function(){return n.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=n.prop(this,"elements");return a?n.makeArray(a):this}).filter(function(){var a=this.type;return this.name&&!n(this).is(":disabled")&&zc.test(this.nodeName)&&!yc.test(a)&&(this.checked||!T.test(a))}).map(function(a,b){var c=n(this).val();return null==c?null:n.isArray(c)?n.map(c,function(a){return{name:b.name,value:a.replace(xc,"\r\n")}}):{name:b.name,value:c.replace(xc,"\r\n")}}).get()}}),n.ajaxSettings.xhr=function(){try{return new XMLHttpRequest}catch(a){}};var 
Bc=0,Cc={},Dc={0:200,1223:204},Ec=n.ajaxSettings.xhr();a.attachEvent&&a.attachEvent("onunload",function(){for(var a in Cc)Cc[a]()}),k.cors=!!Ec&&"withCredentials"in Ec,k.ajax=Ec=!!Ec,n.ajaxTransport(function(a){var b;return k.cors||Ec&&!a.crossDomain?{send:function(c,d){var e,f=a.xhr(),g=++Bc;if(f.open(a.type,a.url,a.async,a.username,a.password),a.xhrFields)for(e in a.xhrFields)f[e]=a.xhrFields[e];a.mimeType&&f.overrideMimeType&&f.overrideMimeType(a.mimeType),a.crossDomain||c["X-Requested-With"]||(c["X-Requested-With"]="XMLHttpRequest");for(e in c)f.setRequestHeader(e,c[e]);b=function(a){return function(){b&&(delete Cc[g],b=f.onload=f.onerror=null,"abort"===a?f.abort():"error"===a?d(f.status,f.statusText):d(Dc[f.status]||f.status,f.statusText,"string"==typeof f.responseText?{text:f.responseText}:void 0,f.getAllResponseHeaders()))}},f.onload=b(),f.onerror=b("error"),b=Cc[g]=b("abort");try{f.send(a.hasContent&&a.data||null)}catch(h){if(b)throw h}},abort:function(){b&&b()}}:void 0}),n.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/(?:java|ecma)script/},converters:{"text script":function(a){return n.globalEval(a),a}}}),n.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET")}),n.ajaxTransport("script",function(a){if(a.crossDomain){var b,c;return{send:function(d,e){b=n("<script>").prop({async:!0,charset:a.scriptCharset,src:a.url}).on("load error",c=function(a){b.remove(),c=null,a&&e("error"===a.type?404:200,a.type)}),l.head.appendChild(b[0])},abort:function(){c&&c()}}}});var Fc=[],Gc=/(=)\?(?=&|$)|\?\?/;n.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=Fc.pop()||n.expando+"_"+cc++;return this[a]=!0,a}}),n.ajaxPrefilter("json jsonp",function(b,c,d){var e,f,g,h=b.jsonp!==!1&&(Gc.test(b.url)?"url":"string"==typeof b.data&&!(b.contentType||"").indexOf("application/x-www-form-urlencoded")&&Gc.test(b.data)&&"data");return h||"jsonp"===b.dataTypes[0]?(e=b.jsonpCallback=n.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,h?b[h]=b[h].replace(Gc,"$1"+e):b.jsonp!==!1&&(b.url+=(dc.test(b.url)?"&":"?")+b.jsonp+"="+e),b.converters["script json"]=function(){return g||n.error(e+" was not called"),g[0]},b.dataTypes[0]="json",f=a[e],a[e]=function(){g=arguments},d.always(function(){a[e]=f,b[e]&&(b.jsonpCallback=c.jsonpCallback,Fc.push(e)),g&&n.isFunction(f)&&f(g[0]),g=f=void 0}),"script"):void 0}),n.parseHTML=function(a,b,c){if(!a||"string"!=typeof a)return null;"boolean"==typeof b&&(c=b,b=!1),b=b||l;var d=v.exec(a),e=!c&&[];return d?[b.createElement(d[1])]:(d=n.buildFragment([a],b,e),e&&e.length&&n(e).remove(),n.merge([],d.childNodes))};var Hc=n.fn.load;n.fn.load=function(a,b,c){if("string"!=typeof a&&Hc)return Hc.apply(this,arguments);var d,e,f,g=this,h=a.indexOf(" ");return h>=0&&(d=n.trim(a.slice(h)),a=a.slice(0,h)),n.isFunction(b)?(c=b,b=void 0):b&&"object"==typeof b&&(e="POST"),g.length>0&&n.ajax({url:a,type:e,dataType:"html",data:b}).done(function(a){f=arguments,g.html(d?n("<div>").append(n.parseHTML(a)).find(d):a)}).complete(c&&function(a,b){g.each(c,f||[a.responseText,b,a])}),this},n.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(a,b){n.fn[b]=function(a){return this.on(b,a)}}),n.expr.filters.animated=function(a){return n.grep(n.timers,function(b){return a===b.elem}).length};var Ic=a.document.documentElement;function Jc(a){return 
n.isWindow(a)?a:9===a.nodeType&&a.defaultView}n.offset={setOffset:function(a,b,c){var d,e,f,g,h,i,j,k=n.css(a,"position"),l=n(a),m={};"static"===k&&(a.style.position="relative"),h=l.offset(),f=n.css(a,"top"),i=n.css(a,"left"),j=("absolute"===k||"fixed"===k)&&(f+i).indexOf("auto")>-1,j?(d=l.position(),g=d.top,e=d.left):(g=parseFloat(f)||0,e=parseFloat(i)||0),n.isFunction(b)&&(b=b.call(a,c,h)),null!=b.top&&(m.top=b.top-h.top+g),null!=b.left&&(m.left=b.left-h.left+e),"using"in b?b.using.call(a,m):l.css(m)}},n.fn.extend({offset:function(a){if(arguments.length)return void 0===a?this:this.each(function(b){n.offset.setOffset(this,a,b)});var b,c,d=this[0],e={top:0,left:0},f=d&&d.ownerDocument;if(f)return b=f.documentElement,n.contains(b,d)?(typeof d.getBoundingClientRect!==U&&(e=d.getBoundingClientRect()),c=Jc(f),{top:e.top+c.pageYOffset-b.clientTop,left:e.left+c.pageXOffset-b.clientLeft}):e},position:function(){if(this[0]){var a,b,c=this[0],d={top:0,left:0};return"fixed"===n.css(c,"position")?b=c.getBoundingClientRect():(a=this.offsetParent(),b=this.offset(),n.nodeName(a[0],"html")||(d=a.offset()),d.top+=n.css(a[0],"borderTopWidth",!0),d.left+=n.css(a[0],"borderLeftWidth",!0)),{top:b.top-d.top-n.css(c,"marginTop",!0),left:b.left-d.left-n.css(c,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||Ic;while(a&&!n.nodeName(a,"html")&&"static"===n.css(a,"position"))a=a.offsetParent;return a||Ic})}}),n.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(b,c){var d="pageYOffset"===c;n.fn[b]=function(e){return J(this,function(b,e,f){var g=Jc(b);return void 0===f?g?g[c]:b[e]:void(g?g.scrollTo(d?a.pageXOffset:f,d?f:a.pageYOffset):b[e]=f)},b,e,arguments.length,null)}}),n.each(["top","left"],function(a,b){n.cssHooks[b]=yb(k.pixelPosition,function(a,c){return c?(c=xb(a,b),vb.test(c)?n(a).position()[b]+"px":c):void 0})}),n.each({Height:"height",Width:"width"},function(a,b){n.each({padding:"inner"+a,content:b,"":"outer"+a},function(c,d){n.fn[d]=function(d,e){var f=arguments.length&&(c||"boolean"!=typeof d),g=c||(d===!0||e===!0?"margin":"border");return J(this,function(b,c,d){var e;return n.isWindow(b)?b.document.documentElement["client"+a]:9===b.nodeType?(e=b.documentElement,Math.max(b.body["scroll"+a],e["scroll"+a],b.body["offset"+a],e["offset"+a],e["client"+a])):void 0===d?n.css(b,c,g):n.style(b,c,d,g)},b,f?d:void 0,f,null)}})}),n.fn.size=function(){return this.length},n.fn.andSelf=n.fn.addBack,"function"==typeof define&&define.amd&&define("jquery",[],function(){return n});var Kc=a.jQuery,Lc=a.$;return n.noConflict=function(b){return a.$===n&&(a.$=Lc),b&&a.jQuery===n&&(a.jQuery=Kc),n},typeof b===U&&(a.jQuery=a.$=n),n});
diff --git a/static/client/register/js/recaptcha_ajax.js b/static/client/register/js/recaptcha_ajax.js
new file mode 100644
index 0000000000..d0e71e5b88
--- /dev/null
+++ b/static/client/register/js/recaptcha_ajax.js
@@ -0,0 +1,195 @@
+(function(){var h,k=this,l=function(a){return void 0!==a},ba=function(){},n=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&
+!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},p=function(a){return"array"==n(a)},ca=function(a){var b=n(a);return"array"==b||"object"==b&&"number"==typeof a.length},q=function(a){return"string"==typeof a},r=function(a){return"function"==n(a)},da=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},ea=function(a,b,c){return a.call.apply(a.bind,arguments)},fa=function(a,b,c){if(!a)throw Error();
+if(2<arguments.length){var d=Array.prototype.slice.call(arguments,2);return function(){var c=Array.prototype.slice.call(arguments);Array.prototype.unshift.apply(c,d);return a.apply(b,c)}}return function(){return a.apply(b,arguments)}},s=function(a,b,c){s=Function.prototype.bind&&-1!=Function.prototype.bind.toString().indexOf("native code")?ea:fa;return s.apply(null,arguments)},ga=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);
+return a.apply(this,b)}},ha=Date.now||function(){return+new Date},ia=null,t=function(a,b){var c=a.split("."),d=k;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)!c.length&&l(b)?d[e]=b:d=d[e]?d[e]:d[e]={}},u=function(a,b){function c(){}c.prototype=b.prototype;a.superClass_=b.prototype;a.prototype=new c;a.base=function(a,c,g){return b.prototype[c].apply(a,Array.prototype.slice.call(arguments,2))}};
+Function.prototype.bind=Function.prototype.bind||function(a,b){if(1<arguments.length){var c=Array.prototype.slice.call(arguments,1);c.unshift(this,a);return s.apply(null,c)}return s(this,a)};var v={};t("RecaptchaTemplates",v);v.VertHtml='<table id="recaptcha_table" class="recaptchatable" > <tr> <td colspan="6" class=\'recaptcha_r1_c1\'></td> </tr> <tr> <td class=\'recaptcha_r2_c1\'></td> <td colspan="4" class=\'recaptcha_image_cell\'><center><div id="recaptcha_image"></div></center></td> <td class=\'recaptcha_r2_c2\'></td> </tr> <tr> <td rowspan="6" class=\'recaptcha_r3_c1\'></td> <td colspan="4" class=\'recaptcha_r3_c2\'></td> <td rowspan="6" class=\'recaptcha_r3_c3\'></td> </tr> <tr> <td rowspan="3" class=\'recaptcha_r4_c1\' height="49"> <div class="recaptcha_input_area"> <input name="recaptcha_response_field" id="recaptcha_response_field" type="text" autocorrect="off" autocapitalize="off" placeholder="" /> <span id="recaptcha_privacy" class="recaptcha_only_if_privacy"></span> </div> </td> <td rowspan="4" class=\'recaptcha_r4_c2\'></td> <td><a id=\'recaptcha_reload_btn\'><img id=\'recaptcha_reload\' width="25" height="17" /></a></td> <td rowspan="4" class=\'recaptcha_r4_c4\'></td> </tr> <tr> <td><a id=\'recaptcha_switch_audio_btn\' class="recaptcha_only_if_image"><img id=\'recaptcha_switch_audio\' width="25" height="16" alt="" /></a><a id=\'recaptcha_switch_img_btn\' class="recaptcha_only_if_audio"><img id=\'recaptcha_switch_img\' width="25" height="16" alt=""/></a></td> </tr> <tr> <td><a id=\'recaptcha_whatsthis_btn\'><img id=\'recaptcha_whatsthis\' width="25" height="16" /></a></td> </tr> <tr> <td class=\'recaptcha_r7_c1\'></td> <td class=\'recaptcha_r8_c1\'></td> </tr> </table> ';v.CleanCss=".recaptchatable td img{display:block}.recaptchatable .recaptcha_image_cell center img{height:57px}.recaptchatable .recaptcha_image_cell center{height:57px}.recaptchatable .recaptcha_image_cell{background-color:white;height:57px;padding:7px!important}.recaptchatable,#recaptcha_area tr,#recaptcha_area td,#recaptcha_area th{margin:0!important;border:0!important;border-collapse:collapse!important;vertical-align:middle!important}.recaptchatable *{margin:0;padding:0;border:0;color:black;position:static;top:auto;left:auto;right:auto;bottom:auto}.recaptchatable #recaptcha_image{position:relative;margin:auto;border:1px solid #dfdfdf!important}.recaptchatable #recaptcha_image #recaptcha_challenge_image{display:block}.recaptchatable #recaptcha_image #recaptcha_ad_image{display:block;position:absolute;top:0}.recaptchatable a img{border:0}.recaptchatable a,.recaptchatable a:hover{cursor:pointer;outline:none;border:0!important;padding:0!important;text-decoration:none;color:blue;background:none!important;font-weight:normal}.recaptcha_input_area{position:relative!important;background:none!important}.recaptchatable label.recaptcha_input_area_text{border:1px solid #dfdfdf!important;margin:0!important;padding:0!important;position:static!important;top:auto!important;left:auto!important;right:auto!important;bottom:auto!important}.recaptcha_theme_red label.recaptcha_input_area_text,.recaptcha_theme_white label.recaptcha_input_area_text{color:black!important}.recaptcha_theme_blackglass label.recaptcha_input_area_text{color:white!important}.recaptchatable #recaptcha_response_field{font-size:11pt}.recaptcha_theme_blackglass #recaptcha_response_field,.recaptcha_theme_white #recaptcha_response_field{border:1px solid gray}.recaptcha_theme_red #recaptcha_response_field{border:1px solid 
#cca940}.recaptcha_audio_cant_hear_link{font-size:7pt;color:black}.recaptchatable{line-height:1em;border:1px solid #dfdfdf!important}.recaptcha_error_text{color:red}.recaptcha_only_if_privacy{float:right;text-align:right;margin-right:7px}#recaptcha-ad-choices{position:absolute;height:15px;top:0;right:0}#recaptcha-ad-choices img{height:15px}.recaptcha-ad-choices-collapsed{width:30px;height:15px;display:block}.recaptcha-ad-choices-expanded{width:75px;height:15px;display:none}#recaptcha-ad-choices:hover .recaptcha-ad-choices-collapsed{display:none}#recaptcha-ad-choices:hover .recaptcha-ad-choices-expanded{display:block}";v.CleanHtml='<table id="recaptcha_table" class="recaptchatable"> <tr height="73"> <td class=\'recaptcha_image_cell\' width="302"><center><div id="recaptcha_image"></div></center></td> <td style="padding: 10px 7px 7px 7px;"> <a id=\'recaptcha_reload_btn\'><img id=\'recaptcha_reload\' width="25" height="18" alt="" /></a> <a id=\'recaptcha_switch_audio_btn\' class="recaptcha_only_if_image"><img id=\'recaptcha_switch_audio\' width="25" height="15" alt="" /></a><a id=\'recaptcha_switch_img_btn\' class="recaptcha_only_if_audio"><img id=\'recaptcha_switch_img\' width="25" height="15" alt=""/></a> <a id=\'recaptcha_whatsthis_btn\'><img id=\'recaptcha_whatsthis\' width="25" height="16" /></a> </td> <td style="padding: 18px 7px 18px 7px;"> <img id=\'recaptcha_logo\' alt="" width="71" height="36" /> </td> </tr> <tr> <td style="padding-left: 7px;"> <div class="recaptcha_input_area" style="padding-top: 2px; padding-bottom: 7px;"> <input style="border: 1px solid #3c3c3c; width: 302px;" name="recaptcha_response_field" id="recaptcha_response_field" type="text" /> </div> </td> <td colspan=2><span id="recaptcha_privacy" class="recaptcha_only_if_privacy"></span></td> </tr> </table> ';v.VertCss=".recaptchatable td img{display:block}.recaptchatable .recaptcha_r1_c1{background:url('IMGROOT/sprite.png') 0 -63px no-repeat;width:318px;height:9px}.recaptchatable .recaptcha_r2_c1{background:url('IMGROOT/sprite.png') -18px 0 no-repeat;width:9px;height:57px}.recaptchatable .recaptcha_r2_c2{background:url('IMGROOT/sprite.png') -27px 0 no-repeat;width:9px;height:57px}.recaptchatable .recaptcha_r3_c1{background:url('IMGROOT/sprite.png') 0 0 no-repeat;width:9px;height:63px}.recaptchatable .recaptcha_r3_c2{background:url('IMGROOT/sprite.png') -18px -57px no-repeat;width:300px;height:6px}.recaptchatable .recaptcha_r3_c3{background:url('IMGROOT/sprite.png') -9px 0 no-repeat;width:9px;height:63px}.recaptchatable .recaptcha_r4_c1{background:url('IMGROOT/sprite.png') -43px 0 no-repeat;width:171px;height:49px}.recaptchatable .recaptcha_r4_c2{background:url('IMGROOT/sprite.png') -36px 0 no-repeat;width:7px;height:57px}.recaptchatable .recaptcha_r4_c4{background:url('IMGROOT/sprite.png') -214px 0 no-repeat;width:97px;height:57px}.recaptchatable .recaptcha_r7_c1{background:url('IMGROOT/sprite.png') -43px -49px no-repeat;width:171px;height:8px}.recaptchatable .recaptcha_r8_c1{background:url('IMGROOT/sprite.png') -43px -49px no-repeat;width:25px;height:8px}.recaptchatable .recaptcha_image_cell center img{height:57px}.recaptchatable .recaptcha_image_cell center{height:57px}.recaptchatable .recaptcha_image_cell{background-color:white;height:57px}#recaptcha_area,#recaptcha_table{width:318px!important}.recaptchatable,#recaptcha_area tr,#recaptcha_area td,#recaptcha_area th{margin:0!important;border:0!important;padding:0!important;border-collapse:collapse!important;vertical-align:middle!important}.recaptchatable 
*{margin:0;padding:0;border:0;font-family:helvetica,sans-serif;font-size:8pt;color:black;position:static;top:auto;left:auto;right:auto;bottom:auto}.recaptchatable #recaptcha_image{position:relative;margin:auto}.recaptchatable #recaptcha_image #recaptcha_challenge_image{display:block}.recaptchatable #recaptcha_image #recaptcha_ad_image{display:block;position:absolute;top:0}.recaptchatable img{border:0!important;margin:0!important;padding:0!important}.recaptchatable a,.recaptchatable a:hover{cursor:pointer;outline:none;border:0!important;padding:0!important;text-decoration:none;color:blue;background:none!important;font-weight:normal}.recaptcha_input_area{position:relative!important;width:153px!important;height:45px!important;margin-left:7px!important;margin-right:7px!important;background:none!important}.recaptchatable label.recaptcha_input_area_text{margin:0!important;padding:0!important;position:static!important;top:auto!important;left:auto!important;right:auto!important;bottom:auto!important;background:none!important;height:auto!important;width:auto!important}.recaptcha_theme_red label.recaptcha_input_area_text,.recaptcha_theme_white label.recaptcha_input_area_text{color:black!important}.recaptcha_theme_blackglass label.recaptcha_input_area_text{color:white!important}.recaptchatable #recaptcha_response_field{width:153px!important;position:relative!important;bottom:7px!important;padding:0!important;margin:15px 0 0 0!important;font-size:10pt}.recaptcha_theme_blackglass #recaptcha_response_field,.recaptcha_theme_white #recaptcha_response_field{border:1px solid gray}.recaptcha_theme_red #recaptcha_response_field{border:1px solid #cca940}.recaptcha_audio_cant_hear_link{font-size:7pt;color:black}.recaptchatable{line-height:1!important}#recaptcha_instructions_error{color:red!important}.recaptcha_only_if_privacy{float:right;text-align:right}#recaptcha-ad-choices{position:absolute;height:15px;top:0;right:0}#recaptcha-ad-choices img{height:15px}.recaptcha-ad-choices-collapsed{width:30px;height:15px;display:block}.recaptcha-ad-choices-expanded{width:75px;height:15px;display:none}#recaptcha-ad-choices:hover .recaptcha-ad-choices-collapsed{display:none}#recaptcha-ad-choices:hover .recaptcha-ad-choices-expanded{display:block}";var w={visual_challenge:"Get a visual challenge",audio_challenge:"Get an audio challenge",refresh_btn:"Get a new challenge",instructions_visual:"Type the text:",instructions_audio:"Type what you hear:",help_btn:"Help",play_again:"Play sound again",cant_hear_this:"Download sound as MP3",incorrect_try_again:"Incorrect. Try again.",image_alt_text:"reCAPTCHA challenge image",privacy_and_terms:"Privacy & Terms"},ja={visual_challenge:"\u0627\u0644\u062d\u0635\u0648\u0644 \u0639\u0644\u0649 \u062a\u062d\u062f\u064d \u0645\u0631\u0626\u064a",
+audio_challenge:"\u0627\u0644\u062d\u0635\u0648\u0644 \u0639\u0644\u0649 \u062a\u062d\u062f\u064d \u0635\u0648\u062a\u064a",refresh_btn:"\u0627\u0644\u062d\u0635\u0648\u0644 \u0639\u0644\u0649 \u062a\u062d\u062f\u064d \u062c\u062f\u064a\u062f",instructions_visual:"\u064a\u0631\u062c\u0649 \u0643\u062a\u0627\u0628\u0629 \u0627\u0644\u0646\u0635:",instructions_audio:"\u0627\u0643\u062a\u0628 \u0645\u0627 \u062a\u0633\u0645\u0639\u0647:",help_btn:"\u0645\u0633\u0627\u0639\u062f\u0629",play_again:"\u062a\u0634\u063a\u064a\u0644 \u0627\u0644\u0635\u0648\u062a \u0645\u0631\u0629 \u0623\u062e\u0631\u0649",
+cant_hear_this:"\u062a\u0646\u0632\u064a\u0644 \u0627\u0644\u0635\u0648\u062a \u0628\u062a\u0646\u0633\u064a\u0642 MP3",incorrect_try_again:"\u063a\u064a\u0631 \u0635\u062d\u064a\u062d. \u0623\u0639\u062f \u0627\u0644\u0645\u062d\u0627\u0648\u0644\u0629.",image_alt_text:"\u0635\u0648\u0631\u0629 \u0627\u0644\u062a\u062d\u062f\u064a \u0645\u0646 reCAPTCHA",privacy_and_terms:"\u0627\u0644\u062e\u0635\u0648\u0635\u064a\u0629 \u0648\u0627\u0644\u0628\u0646\u0648\u062f"},ka={visual_challenge:"Obtener una pista visual",
+audio_challenge:"Obtener una pista sonora",refresh_btn:"Obtener una pista nueva",instructions_visual:"Introduzca el texto:",instructions_audio:"Escribe lo que oigas:",help_btn:"Ayuda",play_again:"Volver a reproducir el sonido",cant_hear_this:"Descargar el sonido en MP3",incorrect_try_again:"Incorrecto. Vu\u00e9lvelo a intentar.",image_alt_text:"Pista de imagen reCAPTCHA",privacy_and_terms:"Privacidad y condiciones"},la={visual_challenge:"Kumuha ng pagsubok na visual",audio_challenge:"Kumuha ng pagsubok na audio",
+refresh_btn:"Kumuha ng bagong pagsubok",instructions_visual:"I-type ang teksto:",instructions_audio:"I-type ang iyong narinig",help_btn:"Tulong",play_again:"I-play muli ang tunog",cant_hear_this:"I-download ang tunog bilang MP3",incorrect_try_again:"Hindi wasto. Muling subukan.",image_alt_text:"larawang panghamon ng reCAPTCHA",privacy_and_terms:"Privacy at Mga Tuntunin"},ma={visual_challenge:"Test visuel",audio_challenge:"Test audio",refresh_btn:"Nouveau test",instructions_visual:"Saisissez le texte\u00a0:",
+instructions_audio:"Qu'entendez-vous ?",help_btn:"Aide",play_again:"R\u00e9\u00e9couter",cant_hear_this:"T\u00e9l\u00e9charger l'audio au format MP3",incorrect_try_again:"Incorrect. Veuillez r\u00e9essayer.",image_alt_text:"Image reCAPTCHA",privacy_and_terms:"Confidentialit\u00e9 et conditions d'utilisation"},na={visual_challenge:"Dapatkan kata pengujian berbentuk visual",audio_challenge:"Dapatkan kata pengujian berbentuk audio",refresh_btn:"Dapatkan kata pengujian baru",instructions_visual:"Ketik teks:",
+instructions_audio:"Ketik yang Anda dengar:",help_btn:"Bantuan",play_again:"Putar suara sekali lagi",cant_hear_this:"Unduh suara sebagai MP3",incorrect_try_again:"Salah. Coba lagi.",image_alt_text:"Gambar tantangan reCAPTCHA",privacy_and_terms:"Privasi & Persyaratan"},oa={visual_challenge:"\u05e7\u05d1\u05dc \u05d0\u05ea\u05d2\u05e8 \u05d7\u05d6\u05d5\u05ea\u05d9",audio_challenge:"\u05e7\u05d1\u05dc \u05d0\u05ea\u05d2\u05e8 \u05e9\u05de\u05e2",refresh_btn:"\u05e7\u05d1\u05dc \u05d0\u05ea\u05d2\u05e8 \u05d7\u05d3\u05e9",
+instructions_visual:"\u05d4\u05e7\u05dc\u05d3 \u05d0\u05ea \u05d4\u05d8\u05e7\u05e1\u05d8:",instructions_audio:"\u05d4\u05e7\u05dc\u05d3 \u05d0\u05ea \u05de\u05d4 \u05e9\u05d0\u05ea\u05d4 \u05e9\u05d5\u05de\u05e2:",help_btn:"\u05e2\u05d6\u05e8\u05d4",play_again:"\u05d4\u05e4\u05e2\u05dc \u05e9\u05d5\u05d1 \u05d0\u05ea \u05d4\u05e9\u05de\u05e2",cant_hear_this:"\u05d4\u05d5\u05e8\u05d3 \u05e9\u05de\u05e2 \u05db-3MP",incorrect_try_again:"\u05e9\u05d2\u05d5\u05d9. \u05e0\u05e1\u05d4 \u05e9\u05d5\u05d1.",
+image_alt_text:"\u05ea\u05de\u05d5\u05e0\u05ea \u05d0\u05ea\u05d2\u05e8 \u05e9\u05dc reCAPTCHA",privacy_and_terms:"\u05e4\u05e8\u05d8\u05d9\u05d5\u05ea \u05d5\u05ea\u05e0\u05d0\u05d9\u05dd"},pa={visual_challenge:"Obter um desafio visual",audio_challenge:"Obter um desafio de \u00e1udio",refresh_btn:"Obter um novo desafio",instructions_visual:"Digite o texto:",instructions_audio:"Digite o que voc\u00ea ouve:",help_btn:"Ajuda",play_again:"Reproduzir som novamente",cant_hear_this:"Fazer download do som no formato MP3",
+incorrect_try_again:"Incorreto. Tente novamente.",image_alt_text:"Imagem de desafio reCAPTCHA",privacy_and_terms:"Privacidade e Termos"},qa={visual_challenge:"Ob\u0163ine\u0163i un cod captcha vizual",audio_challenge:"Ob\u0163ine\u0163i un cod captcha audio",refresh_btn:"Ob\u0163ine\u0163i un nou cod captcha",instructions_visual:"Introduce\u021bi textul:",instructions_audio:"Introduce\u0163i ceea ce auzi\u0163i:",help_btn:"Ajutor",play_again:"Reda\u0163i sunetul din nou",cant_hear_this:"Desc\u0103rca\u0163i fi\u015fierul audio ca MP3",
+incorrect_try_again:"Incorect. \u00cencerca\u0163i din nou.",image_alt_text:"Imagine de verificare reCAPTCHA",privacy_and_terms:"Confiden\u0163ialitate \u015fi termeni"},ra={visual_challenge:"\u6536\u5230\u4e00\u4e2a\u89c6\u9891\u9080\u8bf7",audio_challenge:"\u6362\u4e00\u7ec4\u97f3\u9891\u9a8c\u8bc1\u7801",refresh_btn:"\u6362\u4e00\u7ec4\u9a8c\u8bc1\u7801",instructions_visual:"\u8f93\u5165\u6587\u5b57\uff1a",instructions_audio:"\u8bf7\u952e\u5165\u60a8\u542c\u5230\u7684\u5185\u5bb9\uff1a",help_btn:"\u5e2e\u52a9",
+play_again:"\u91cd\u65b0\u64ad\u653e",cant_hear_this:"\u4ee5 MP3 \u683c\u5f0f\u4e0b\u8f7d\u58f0\u97f3",incorrect_try_again:"\u4e0d\u6b63\u786e\uff0c\u8bf7\u91cd\u8bd5\u3002",image_alt_text:"reCAPTCHA \u9a8c\u8bc1\u56fe\u7247",privacy_and_terms:"\u9690\u79c1\u6743\u548c\u4f7f\u7528\u6761\u6b3e"},sa={en:w,af:{visual_challenge:"Kry 'n visuele verifi\u00ebring",audio_challenge:"Kry 'n klankverifi\u00ebring",refresh_btn:"Kry 'n nuwe verifi\u00ebring",instructions_visual:"",instructions_audio:"Tik wat jy hoor:",
+help_btn:"Hulp",play_again:"Speel geluid weer",cant_hear_this:"Laai die klank af as MP3",incorrect_try_again:"Verkeerd. Probeer weer.",image_alt_text:"reCAPTCHA-uitdagingprent",privacy_and_terms:"Privaatheid en bepalings"},am:{visual_challenge:"\u12e8\u12a5\u12ed\u1273 \u1270\u130b\u1323\u121a \u12a0\u130d\u129d",audio_challenge:"\u120c\u120b \u12a0\u12f2\u1235 \u12e8\u12f5\u121d\u133d \u1325\u12eb\u1244 \u12ed\u1245\u1228\u1265",refresh_btn:"\u120c\u120b \u12a0\u12f2\u1235 \u1325\u12eb\u1244 \u12ed\u1245\u1228\u1265",
+instructions_visual:"",instructions_audio:"\u12e8\u121d\u1275\u1230\u121b\u12cd\u1295 \u1270\u12ed\u1265\u1361-",help_btn:"\u12a5\u1308\u12db",play_again:"\u12f5\u121d\u1339\u1295 \u12a5\u1295\u12f0\u1308\u1293 \u12a0\u132b\u12cd\u1275",cant_hear_this:"\u12f5\u121d\u1339\u1295 \u1260MP3 \u1245\u122d\u133d \u12a0\u12cd\u122d\u12f5",incorrect_try_again:"\u1275\u12ad\u12ad\u120d \u12a0\u12ed\u12f0\u1208\u121d\u1362 \u12a5\u1295\u12f0\u1308\u1293 \u121e\u12ad\u122d\u1362",image_alt_text:"reCAPTCHA \u121d\u1235\u120d \u130d\u1320\u121d",
+privacy_and_terms:"\u130d\u120b\u12ca\u1290\u1275 \u12a5\u1293 \u12cd\u120d"},ar:ja,"ar-EG":ja,bg:{visual_challenge:"\u041f\u043e\u043b\u0443\u0447\u0430\u0432\u0430\u043d\u0435 \u043d\u0430 \u0432\u0438\u0437\u0443\u0430\u043b\u043d\u0430 \u043f\u0440\u043e\u0432\u0435\u0440\u043a\u0430",audio_challenge:"\u0417\u0430\u0440\u0435\u0436\u0434\u0430\u043d\u0435 \u043d\u0430 \u0430\u0443\u0434\u0438\u043e\u0442\u0435\u0441\u0442",refresh_btn:"\u0417\u0430\u0440\u0435\u0436\u0434\u0430\u043d\u0435 \u043d\u0430 \u043d\u043e\u0432 \u0442\u0435\u0441\u0442",
+instructions_visual:"\u0412\u044a\u0432\u0435\u0434\u0435\u0442\u0435 \u0442\u0435\u043a\u0441\u0442\u0430:",instructions_audio:"\u0412\u044a\u0432\u0435\u0434\u0435\u0442\u0435 \u0447\u0443\u0442\u043e\u0442\u043e:",help_btn:"\u041f\u043e\u043c\u043e\u0449",play_again:"\u041f\u043e\u0432\u0442\u043e\u0440\u043d\u043e \u043f\u0443\u0441\u043a\u0430\u043d\u0435 \u043d\u0430 \u0437\u0432\u0443\u043a\u0430",cant_hear_this:"\u0418\u0437\u0442\u0435\u0433\u043b\u044f\u043d\u0435 \u043d\u0430 \u0437\u0432\u0443\u043a\u0430 \u0432\u044a\u0432 \u0444\u043e\u0440\u043c\u0430\u0442 MP3",
+incorrect_try_again:"\u041d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u043d\u043e. \u041e\u043f\u0438\u0442\u0430\u0439\u0442\u0435 \u043e\u0442\u043d\u043e\u0432\u043e.",image_alt_text:"\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435 \u043d\u0430 \u043f\u0440\u043e\u0432\u0435\u0440\u043a\u0430\u0442\u0430 \u0441 reCAPTCHA",privacy_and_terms:"\u041f\u043e\u0432\u0435\u0440\u0438\u0442\u0435\u043b\u043d\u043e\u0441\u0442 \u0438 \u041e\u0431\u0449\u0438 \u0443\u0441\u043b\u043e\u0432\u0438\u044f"},
+bn:{visual_challenge:"\u098f\u0995\u099f\u09bf \u09a6\u09c3\u09b6\u09cd\u09af\u09ae\u09be\u09a8 \u09aa\u09cd\u09b0\u09a4\u09bf\u09a6\u09cd\u09ac\u09a8\u09cd\u09a6\u09cd\u09ac\u09bf\u09a4\u09be \u09aa\u09be\u09a8",audio_challenge:"\u098f\u0995\u099f\u09bf \u0985\u09a1\u09bf\u0993 \u09aa\u09cd\u09b0\u09a4\u09bf\u09a6\u09cd\u09ac\u09a8\u09cd\u09a6\u09cd\u09ac\u09bf\u09a4\u09be  \u09aa\u09be\u09a8",refresh_btn:"\u098f\u0995\u099f\u09bf \u09a8\u09a4\u09c1\u09a8 \u09aa\u09cd\u09b0\u09a4\u09bf\u09a6\u09cd\u09ac\u09a8\u09cd\u09a6\u09cd\u09ac\u09bf\u09a4\u09be  \u09aa\u09be\u09a8",
+instructions_visual:"",instructions_audio:"\u0986\u09aa\u09a8\u09bf \u09af\u09be \u09b6\u09c1\u09a8\u099b\u09c7\u09a8 \u09a4\u09be \u09b2\u09bf\u0996\u09c1\u09a8:",help_btn:"\u09b8\u09b9\u09be\u09df\u09a4\u09be",play_again:"\u0986\u09ac\u09be\u09b0 \u09b8\u09be\u0989\u09a8\u09cd\u09a1 \u09aa\u09cd\u09b2\u09c7 \u0995\u09b0\u09c1\u09a8",cant_hear_this:"MP3 \u09b0\u09c2\u09aa\u09c7 \u09b6\u09ac\u09cd\u09a6 \u09a1\u09be\u0989\u09a8\u09b2\u09cb\u09a1 \u0995\u09b0\u09c1\u09a8",incorrect_try_again:"\u09ac\u09c7\u09a0\u09bf\u0995\u09f7 \u0986\u09ac\u09be\u09b0 \u099a\u09c7\u09b7\u09cd\u099f\u09be \u0995\u09b0\u09c1\u09a8\u09f7",
+image_alt_text:"reCAPTCHA \u099a\u09cd\u09af\u09be\u09b2\u09c7\u099e\u09cd\u099c \u099a\u09bf\u09a4\u09cd\u09b0",privacy_and_terms:"\u0997\u09cb\u09aa\u09a8\u09c0\u09af\u09bc\u09a4\u09be \u0993 \u09b6\u09b0\u09cd\u09a4\u09be\u09ac\u09b2\u09c0"},ca:{visual_challenge:"Obt\u00e9n un repte visual",audio_challenge:"Obteniu una pista sonora",refresh_btn:"Obteniu una pista nova",instructions_visual:"Escriviu el text:",instructions_audio:"Escriviu el que escolteu:",help_btn:"Ajuda",play_again:"Torna a reproduir el so",
+cant_hear_this:"Baixa el so com a MP3",incorrect_try_again:"No \u00e9s correcte. Torna-ho a provar.",image_alt_text:"Imatge del repte de reCAPTCHA",privacy_and_terms:"Privadesa i condicions"},cs:{visual_challenge:"Zobrazit vizu\u00e1ln\u00ed podobu v\u00fdrazu",audio_challenge:"P\u0159ehr\u00e1t zvukovou podobu v\u00fdrazu",refresh_btn:"Zobrazit nov\u00fd v\u00fdraz",instructions_visual:"Zadejte text:",instructions_audio:"Napi\u0161te, co jste sly\u0161eli:",help_btn:"N\u00e1pov\u011bda",play_again:"Znovu p\u0159ehr\u00e1t zvuk",
+cant_hear_this:"St\u00e1hnout zvuk ve form\u00e1tu MP3",incorrect_try_again:"\u0160patn\u011b. Zkuste to znovu.",image_alt_text:"Obr\u00e1zek reCAPTCHA",privacy_and_terms:"Ochrana soukrom\u00ed a smluvn\u00ed podm\u00ednky"},da:{visual_challenge:"Hent en visuel udfordring",audio_challenge:"Hent en lydudfordring",refresh_btn:"Hent en ny udfordring",instructions_visual:"Indtast teksten:",instructions_audio:"Indtast det, du h\u00f8rer:",help_btn:"Hj\u00e6lp",play_again:"Afspil lyden igen",cant_hear_this:"Download lyd som MP3",
+incorrect_try_again:"Forkert. Pr\u00f8v igen.",image_alt_text:"reCAPTCHA-udfordringsbillede",privacy_and_terms:"Privatliv og vilk\u00e5r"},de:{visual_challenge:"Captcha abrufen",audio_challenge:"Audio-Captcha abrufen",refresh_btn:"Neues Captcha abrufen",instructions_visual:"Geben Sie den angezeigten Text ein:",instructions_audio:"Geben Sie das Geh\u00f6rte ein:",help_btn:"Hilfe",play_again:"Wort erneut abspielen",cant_hear_this:"Wort als MP3 herunterladen",incorrect_try_again:"Falsch. Bitte versuchen Sie es erneut.",
+image_alt_text:"reCAPTCHA-Bild",privacy_and_terms:"Datenschutzerkl\u00e4rung & Nutzungsbedingungen"},el:{visual_challenge:"\u039f\u03c0\u03c4\u03b9\u03ba\u03ae \u03c0\u03c1\u03cc\u03ba\u03bb\u03b7\u03c3\u03b7",audio_challenge:"\u0397\u03c7\u03b7\u03c4\u03b9\u03ba\u03ae \u03c0\u03c1\u03cc\u03ba\u03bb\u03b7\u03c3\u03b7",refresh_btn:"\u039d\u03ad\u03b1 \u03c0\u03c1\u03cc\u03ba\u03bb\u03b7\u03c3\u03b7",instructions_visual:"\u03a0\u03bb\u03b7\u03ba\u03c4\u03c1\u03bf\u03bb\u03bf\u03b3\u03ae\u03c3\u03c4\u03b5 \u03c4\u03bf \u03ba\u03b5\u03af\u03bc\u03b5\u03bd\u03bf:",
+instructions_audio:"\u03a0\u03bb\u03b7\u03ba\u03c4\u03c1\u03bf\u03bb\u03bf\u03b3\u03ae\u03c3\u03c4\u03b5 \u03cc\u03c4\u03b9 \u03b1\u03ba\u03bf\u03cd\u03c4\u03b5:",help_btn:"\u0392\u03bf\u03ae\u03b8\u03b5\u03b9\u03b1",play_again:"\u0391\u03bd\u03b1\u03c0\u03b1\u03c1\u03b1\u03b3\u03c9\u03b3\u03ae \u03ae\u03c7\u03bf\u03c5 \u03be\u03b1\u03bd\u03ac",cant_hear_this:"\u039b\u03ae\u03c8\u03b7 \u03ae\u03c7\u03bf\u03c5 \u03c9\u03c2 \u039c\u03a13",incorrect_try_again:"\u039b\u03ac\u03b8\u03bf\u03c2. \u0394\u03bf\u03ba\u03b9\u03bc\u03ac\u03c3\u03c4\u03b5 \u03be\u03b1\u03bd\u03ac.",
+image_alt_text:"\u0395\u03b9\u03ba\u03cc\u03bd\u03b1 \u03c0\u03c1\u03cc\u03ba\u03bb\u03b7\u03c3\u03b7\u03c2 reCAPTCHA",privacy_and_terms:"\u0391\u03c0\u03cc\u03c1\u03c1\u03b7\u03c4\u03bf \u03ba\u03b1\u03b9 \u03cc\u03c1\u03bf\u03b9"},"en-GB":w,"en-US":w,es:ka,"es-419":{visual_challenge:"Enfrentar un desaf\u00edo visual",audio_challenge:"Enfrentar un desaf\u00edo de audio",refresh_btn:"Enfrentar un nuevo desaf\u00edo",instructions_visual:"Escriba el texto:",instructions_audio:"Escribe lo que escuchas:",
+help_btn:"Ayuda",play_again:"Reproducir sonido de nuevo",cant_hear_this:"Descargar sonido en formato MP3",incorrect_try_again:"Incorrecto. Vuelve a intentarlo.",image_alt_text:"Imagen del desaf\u00edo de la reCAPTCHA",privacy_and_terms:"Privacidad y condiciones"},"es-ES":ka,et:{visual_challenge:"Kuva kuvap\u00f5hine robotil\u00f5ks",audio_challenge:"Kuva helip\u00f5hine robotil\u00f5ks",refresh_btn:"Kuva uus robotil\u00f5ks",instructions_visual:"Tippige tekst:",instructions_audio:"Tippige, mida kuulete.",
+help_btn:"Abi",play_again:"Esita heli uuesti",cant_hear_this:"Laadi heli alla MP3-vormingus",incorrect_try_again:"Vale. Proovige uuesti.",image_alt_text:"reCAPTCHA robotil\u00f5ksu kujutis",privacy_and_terms:"Privaatsus ja tingimused"},eu:{visual_challenge:"Eskuratu ikusizko erronka",audio_challenge:"Eskuratu audio-erronka",refresh_btn:"Eskuratu erronka berria",instructions_visual:"",instructions_audio:"Idatzi entzuten duzuna:",help_btn:"Laguntza",play_again:"Erreproduzitu soinua berriro",cant_hear_this:"Deskargatu soinua MP3 gisa",
+incorrect_try_again:"Ez da zuzena. Saiatu berriro.",image_alt_text:"reCAPTCHA erronkaren irudia",privacy_and_terms:"Pribatutasuna eta baldintzak"},fa:{visual_challenge:"\u062f\u0631\u06cc\u0627\u0641\u062a \u06cc\u06a9 \u0645\u0639\u0645\u0627\u06cc \u062f\u06cc\u062f\u0627\u0631\u06cc",audio_challenge:"\u062f\u0631\u06cc\u0627\u0641\u062a \u06cc\u06a9 \u0645\u0639\u0645\u0627\u06cc \u0635\u0648\u062a\u06cc",refresh_btn:"\u062f\u0631\u06cc\u0627\u0641\u062a \u06cc\u06a9 \u0645\u0639\u0645\u0627\u06cc \u062c\u062f\u06cc\u062f",
+instructions_visual:"",instructions_audio:"\u0622\u0646\u0686\u0647 \u0631\u0627 \u06a9\u0647 \u0645\u06cc\u200c\u0634\u0646\u0648\u06cc\u062f \u062a\u0627\u06cc\u067e \u06a9\u0646\u06cc\u062f:",help_btn:"\u0631\u0627\u0647\u0646\u0645\u0627\u06cc\u06cc",play_again:"\u067e\u062e\u0634 \u0645\u062c\u062f\u062f \u0635\u062f\u0627",cant_hear_this:"\u062f\u0627\u0646\u0644\u0648\u062f \u0635\u062f\u0627 \u0628\u0647 \u0635\u0648\u0631\u062a MP3",incorrect_try_again:"\u0646\u0627\u062f\u0631\u0633\u062a. \u062f\u0648\u0628\u0627\u0631\u0647 \u0627\u0645\u062a\u062d\u0627\u0646 \u06a9\u0646\u06cc\u062f.",
+image_alt_text:"\u062a\u0635\u0648\u06cc\u0631 \u0686\u0627\u0644\u0634\u06cc reCAPTCHA",privacy_and_terms:"\u062d\u0631\u06cc\u0645 \u062e\u0635\u0648\u0635\u06cc \u0648 \u0634\u0631\u0627\u06cc\u0637"},fi:{visual_challenge:"Kuvavahvistus",audio_challenge:"\u00c4\u00e4nivahvistus",refresh_btn:"Uusi kuva",instructions_visual:"Kirjoita teksti:",instructions_audio:"Kirjoita kuulemasi:",help_btn:"Ohje",play_again:"Toista \u00e4\u00e4ni uudelleen",cant_hear_this:"Lataa \u00e4\u00e4ni MP3-tiedostona",
+incorrect_try_again:"V\u00e4\u00e4rin. Yrit\u00e4 uudelleen.",image_alt_text:"reCAPTCHA-kuva",privacy_and_terms:"Tietosuoja ja k\u00e4ytt\u00f6ehdot"},fil:la,fr:ma,"fr-CA":{visual_challenge:"Obtenir un test visuel",audio_challenge:"Obtenir un test audio",refresh_btn:"Obtenir un nouveau test",instructions_visual:"Saisissez le texte\u00a0:",instructions_audio:"Tapez ce que vous entendez\u00a0:",help_btn:"Aide",play_again:"Jouer le son de nouveau",cant_hear_this:"T\u00e9l\u00e9charger le son en format MP3",
+incorrect_try_again:"Erreur, essayez \u00e0 nouveau",image_alt_text:"Image reCAPTCHA",privacy_and_terms:"Confidentialit\u00e9 et conditions d'utilisation"},"fr-FR":ma,gl:{visual_challenge:"Obter unha proba visual",audio_challenge:"Obter unha proba de audio",refresh_btn:"Obter unha proba nova",instructions_visual:"",instructions_audio:"Escribe o que escoitas:",help_btn:"Axuda",play_again:"Reproducir o son de novo",cant_hear_this:"Descargar son como MP3",incorrect_try_again:"Incorrecto. T\u00e9ntao de novo.",
+image_alt_text:"Imaxe de proba de reCAPTCHA",privacy_and_terms:"Privacidade e condici\u00f3ns"},gu:{visual_challenge:"\u0a8f\u0a95 \u0aa6\u0ac3\u0ab6\u0acd\u0aaf\u0abe\u0aa4\u0acd\u0aae\u0a95 \u0aaa\u0aa1\u0a95\u0abe\u0ab0 \u0aae\u0ac7\u0ab3\u0ab5\u0acb",audio_challenge:"\u0a8f\u0a95 \u0a91\u0aa1\u0abf\u0a93 \u0aaa\u0aa1\u0a95\u0abe\u0ab0 \u0aae\u0ac7\u0ab3\u0ab5\u0acb",refresh_btn:"\u0a8f\u0a95 \u0aa8\u0ab5\u0acb \u0aaa\u0aa1\u0a95\u0abe\u0ab0 \u0aae\u0ac7\u0ab3\u0ab5\u0acb",instructions_visual:"",
+instructions_audio:"\u0aa4\u0aae\u0ac7 \u0a9c\u0ac7 \u0ab8\u0abe\u0a82\u0aad\u0ab3\u0acb \u0a9b\u0acb \u0aa4\u0ac7 \u0ab2\u0a96\u0acb:",help_btn:"\u0ab8\u0ab9\u0abe\u0aaf",play_again:"\u0aa7\u0acd\u0ab5\u0aa8\u0abf \u0aab\u0ab0\u0ac0\u0aa5\u0ac0 \u0a9a\u0ab2\u0abe\u0ab5\u0acb",cant_hear_this:"MP3 \u0aa4\u0ab0\u0ac0\u0a95\u0ac7 \u0aa7\u0acd\u0ab5\u0aa8\u0abf\u0aa8\u0ac7 \u0aa1\u0abe\u0a89\u0aa8\u0ab2\u0acb\u0aa1 \u0a95\u0ab0\u0acb",incorrect_try_again:"\u0a96\u0acb\u0a9f\u0ac1\u0a82. \u0aab\u0ab0\u0ac0 \u0aaa\u0acd\u0ab0\u0aaf\u0abe\u0ab8 \u0a95\u0ab0\u0acb.",
+image_alt_text:"reCAPTCHA \u0aaa\u0aa1\u0a95\u0abe\u0ab0 \u0a9b\u0aac\u0ac0",privacy_and_terms:"\u0a97\u0acb\u0aaa\u0aa8\u0ac0\u0aaf\u0aa4\u0abe \u0a85\u0aa8\u0ac7 \u0ab6\u0ab0\u0aa4\u0acb"},hi:{visual_challenge:"\u0915\u094b\u0908 \u0935\u093f\u091c\u0941\u0905\u0932 \u091a\u0941\u0928\u094c\u0924\u0940 \u0932\u0947\u0902",audio_challenge:"\u0915\u094b\u0908 \u0911\u0921\u093f\u092f\u094b \u091a\u0941\u0928\u094c\u0924\u0940 \u0932\u0947\u0902",refresh_btn:"\u0915\u094b\u0908 \u0928\u0908 \u091a\u0941\u0928\u094c\u0924\u0940 \u0932\u0947\u0902",
+instructions_visual:"\u091f\u0947\u0915\u094d\u0938\u094d\u091f \u091f\u093e\u0907\u092a \u0915\u0930\u0947\u0902:",instructions_audio:"\u091c\u094b \u0906\u092a \u0938\u0941\u0928 \u0930\u0939\u0947 \u0939\u0948\u0902 \u0909\u0938\u0947 \u0932\u093f\u0916\u0947\u0902:",help_btn:"\u0938\u0939\u093e\u092f\u0924\u093e",play_again:"\u0927\u094d\u200d\u0935\u0928\u093f \u092a\u0941\u0928: \u091a\u0932\u093e\u090f\u0902",cant_hear_this:"\u0927\u094d\u200d\u0935\u0928\u093f \u0915\u094b MP3 \u0915\u0947 \u0930\u0942\u092a \u092e\u0947\u0902 \u0921\u093e\u0909\u0928\u0932\u094b\u0921 \u0915\u0930\u0947\u0902",
+incorrect_try_again:"\u0917\u0932\u0924. \u092a\u0941\u0928: \u092a\u094d\u0930\u092f\u093e\u0938 \u0915\u0930\u0947\u0902.",image_alt_text:"reCAPTCHA \u091a\u0941\u0928\u094c\u0924\u0940 \u091a\u093f\u0924\u094d\u0930",privacy_and_terms:"\u0917\u094b\u092a\u0928\u0940\u092f\u0924\u093e \u0914\u0930 \u0936\u0930\u094d\u0924\u0947\u0902"},hr:{visual_challenge:"Dohvati vizualni upit",audio_challenge:"Dohvati zvu\u010dni upit",refresh_btn:"Dohvati novi upit",instructions_visual:"Unesite tekst:",instructions_audio:"Upi\u0161ite \u0161to \u010dujete:",
+help_btn:"Pomo\u0107",play_again:"Ponovi zvuk",cant_hear_this:"Preuzmi zvuk u MP3 formatu",incorrect_try_again:"Nije to\u010dno. Poku\u0161ajte ponovno.",image_alt_text:"Slikovni izazov reCAPTCHA",privacy_and_terms:"Privatnost i odredbe"},hu:{visual_challenge:"Vizu\u00e1lis kih\u00edv\u00e1s k\u00e9r\u00e9se",audio_challenge:"Hangkih\u00edv\u00e1s k\u00e9r\u00e9se",refresh_btn:"\u00daj kih\u00edv\u00e1s k\u00e9r\u00e9se",instructions_visual:"\u00cdrja be a sz\u00f6veget:",instructions_audio:"\u00cdrja le, amit hall:",
+help_btn:"S\u00fag\u00f3",play_again:"Hang ism\u00e9telt lej\u00e1tsz\u00e1sa",cant_hear_this:"Hang let\u00f6lt\u00e9se MP3 form\u00e1tumban",incorrect_try_again:"Hib\u00e1s. Pr\u00f3b\u00e1lkozzon \u00fajra.",image_alt_text:"reCAPTCHA ellen\u0151rz\u0151 k\u00e9p",privacy_and_terms:"Adatv\u00e9delem \u00e9s Szerz\u0151d\u00e9si Felt\u00e9telek"},hy:{visual_challenge:"\u054d\u057f\u0561\u0576\u0561\u056c \u057f\u0565\u057d\u0578\u0572\u0561\u056f\u0561\u0576 \u056d\u0576\u0564\u056b\u0580",audio_challenge:"\u054d\u057f\u0561\u0576\u0561\u056c \u0571\u0561\u0575\u0576\u0561\u0575\u056b\u0576 \u056d\u0576\u0564\u056b\u0580",
+refresh_btn:"\u054d\u057f\u0561\u0576\u0561\u056c \u0576\u0578\u0580 \u056d\u0576\u0564\u056b\u0580",instructions_visual:"\u0544\u0578\u0582\u057f\u0584\u0561\u0563\u0580\u0565\u0584 \u057f\u0565\u0584\u057d\u057f\u0568\u055d",instructions_audio:"\u0544\u0578\u0582\u057f\u0584\u0561\u0563\u0580\u0565\u0584 \u0561\u0575\u0576, \u056b\u0576\u0579 \u056c\u057d\u0578\u0582\u0574 \u0565\u0584\u055d",help_btn:"\u0555\u0563\u0576\u0578\u0582\u0569\u0575\u0578\u0582\u0576",play_again:"\u0546\u057e\u0561\u0563\u0561\u0580\u056f\u0565\u056c \u0571\u0561\u0575\u0576\u0568 \u056f\u0580\u056f\u056b\u0576",
+cant_hear_this:"\u0532\u0565\u057c\u0576\u0565\u056c \u0571\u0561\u0575\u0576\u0568 \u0578\u0580\u057a\u0565\u057d MP3",incorrect_try_again:"\u054d\u056d\u0561\u056c \u0567: \u0553\u0578\u0580\u0571\u0565\u0584 \u056f\u0580\u056f\u056b\u0576:",image_alt_text:"reCAPTCHA \u057a\u0561\u057f\u056f\u0565\u0580\u0578\u057e \u056d\u0576\u0564\u056b\u0580",privacy_and_terms:"\u0533\u0561\u0572\u057f\u0576\u056b\u0578\u0582\u0569\u0575\u0561\u0576 & \u057a\u0561\u0575\u0574\u0561\u0576\u0576\u0565\u0580"},
+id:na,is:{visual_challenge:"F\u00e1 a\u00f0gangspr\u00f3f sem mynd",audio_challenge:"F\u00e1 a\u00f0gangspr\u00f3f sem hlj\u00f3\u00f0skr\u00e1",refresh_btn:"F\u00e1 n\u00fdtt a\u00f0gangspr\u00f3f",instructions_visual:"",instructions_audio:"Sl\u00e1\u00f0u inn \u00fea\u00f0 sem \u00fe\u00fa heyrir:",help_btn:"Hj\u00e1lp",play_again:"Spila hlj\u00f3\u00f0 aftur",cant_hear_this:"S\u00e6kja hlj\u00f3\u00f0 sem MP3",incorrect_try_again:"Rangt. Reyndu aftur.",image_alt_text:"mynd reCAPTCHA a\u00f0gangspr\u00f3fs",
+privacy_and_terms:"Pers\u00f3nuvernd og skilm\u00e1lar"},it:{visual_challenge:"Verifica visiva",audio_challenge:"Verifica audio",refresh_btn:"Nuova verifica",instructions_visual:"Digita il testo:",instructions_audio:"Digita ci\u00f2 che senti:",help_btn:"Guida",play_again:"Riproduci di nuovo audio",cant_hear_this:"Scarica audio in MP3",incorrect_try_again:"Sbagliato. Riprova.",image_alt_text:"Immagine di verifica reCAPTCHA",privacy_and_terms:"Privacy e Termini"},iw:oa,ja:{visual_challenge:"\u753b\u50cf\u3067\u78ba\u8a8d\u3057\u307e\u3059",
+audio_challenge:"\u97f3\u58f0\u3067\u78ba\u8a8d\u3057\u307e\u3059",refresh_btn:"\u5225\u306e\u5358\u8a9e\u3067\u3084\u308a\u76f4\u3057\u307e\u3059",instructions_visual:"\u30c6\u30ad\u30b9\u30c8\u3092\u5165\u529b:",instructions_audio:"\u805e\u3053\u3048\u305f\u5358\u8a9e\u3092\u5165\u529b\u3057\u307e\u3059:",help_btn:"\u30d8\u30eb\u30d7",play_again:"\u3082\u3046\u4e00\u5ea6\u805e\u304f",cant_hear_this:"MP3 \u3067\u97f3\u58f0\u3092\u30c0\u30a6\u30f3\u30ed\u30fc\u30c9",incorrect_try_again:"\u6b63\u3057\u304f\u3042\u308a\u307e\u305b\u3093\u3002\u3082\u3046\u4e00\u5ea6\u3084\u308a\u76f4\u3057\u3066\u304f\u3060\u3055\u3044\u3002",
+image_alt_text:"reCAPTCHA \u78ba\u8a8d\u7528\u753b\u50cf",privacy_and_terms:"\u30d7\u30e9\u30a4\u30d0\u30b7\u30fc\u3068\u5229\u7528\u898f\u7d04"},kn:{visual_challenge:"\u0ca6\u0cc3\u0cb6\u0ccd\u0caf \u0cb8\u0cb5\u0cbe\u0cb2\u0cca\u0c82\u0ca6\u0ca8\u0ccd\u0ca8\u0cc1 \u0cb8\u0ccd\u0cb5\u0cc0\u0c95\u0cb0\u0cbf\u0cb8\u0cbf",audio_challenge:"\u0c86\u0ca1\u0cbf\u0caf\u0ccb \u0cb8\u0cb5\u0cbe\u0cb2\u0cca\u0c82\u0ca6\u0ca8\u0ccd\u0ca8\u0cc1 \u0cb8\u0ccd\u0cb5\u0cc0\u0c95\u0cb0\u0cbf\u0cb8\u0cbf",refresh_btn:"\u0cb9\u0cca\u0cb8 \u0cb8\u0cb5\u0cbe\u0cb2\u0cca\u0c82\u0ca6\u0ca8\u0ccd\u0ca8\u0cc1 \u0caa\u0ca1\u0cc6\u0caf\u0cbf\u0cb0\u0cbf",
+instructions_visual:"",instructions_audio:"\u0ca8\u0cbf\u0cae\u0c97\u0cc6 \u0c95\u0cc7\u0cb3\u0cbf\u0cb8\u0cc1\u0cb5\u0cc1\u0ca6\u0ca8\u0ccd\u0ca8\u0cc1 \u0c9f\u0cc8\u0caa\u0ccd\u200c \u0cae\u0cbe\u0ca1\u0cbf:",help_btn:"\u0cb8\u0cb9\u0cbe\u0caf",play_again:"\u0ca7\u0ccd\u0cb5\u0ca8\u0cbf\u0caf\u0ca8\u0ccd\u0ca8\u0cc1 \u0cae\u0ca4\u0ccd\u0ca4\u0cc6 \u0caa\u0ccd\u0cb2\u0cc7 \u0cae\u0cbe\u0ca1\u0cbf",cant_hear_this:"\u0ca7\u0ccd\u0cb5\u0ca8\u0cbf\u0caf\u0ca8\u0ccd\u0ca8\u0cc1 MP3 \u0cb0\u0cc2\u0caa\u0ca6\u0cb2\u0ccd\u0cb2\u0cbf \u0ca1\u0ccc\u0ca8\u0ccd\u200c\u0cb2\u0ccb\u0ca1\u0ccd \u0cae\u0cbe\u0ca1\u0cbf",
+incorrect_try_again:"\u0ca4\u0caa\u0ccd\u0caa\u0cbe\u0c97\u0cbf\u0ca6\u0cc6. \u0cae\u0ca4\u0ccd\u0ca4\u0cca\u0cae\u0ccd\u0cae\u0cc6 \u0caa\u0ccd\u0cb0\u0caf\u0ca4\u0ccd\u0ca8\u0cbf\u0cb8\u0cbf.",image_alt_text:"reCAPTCHA \u0cb8\u0cb5\u0cbe\u0cb2\u0cc1 \u0c9a\u0cbf\u0ca4\u0ccd\u0cb0",privacy_and_terms:"\u0c97\u0ccc\u0caa\u0ccd\u0caf\u0ca4\u0cc6 \u0cae\u0ca4\u0ccd\u0ca4\u0cc1 \u0ca8\u0cbf\u0caf\u0cae\u0c97\u0cb3\u0cc1"},ko:{visual_challenge:"\uadf8\ub9bc\uc73c\ub85c \ubcf4\uc548\ubb38\uc790 \ubc1b\uae30",
+audio_challenge:"\uc74c\uc131\uc73c\ub85c \ubcf4\uc548\ubb38\uc790 \ubc1b\uae30",refresh_btn:"\ubcf4\uc548\ubb38\uc790 \uc0c8\ub85c \ubc1b\uae30",instructions_visual:"\ud14d\uc2a4\ud2b8 \uc785\ub825:",instructions_audio:"\uc74c\uc131 \ubcf4\uc548\ubb38\uc790 \uc785\ub825:",help_btn:"\ub3c4\uc6c0\ub9d0",play_again:"\uc74c\uc131 \ub2e4\uc2dc \ub4e3\uae30",cant_hear_this:"\uc74c\uc131\uc744 MP3\ub85c \ub2e4\uc6b4\ub85c\ub4dc",incorrect_try_again:"\ud2c0\ub838\uc2b5\ub2c8\ub2e4. \ub2e4\uc2dc \uc2dc\ub3c4\ud574 \uc8fc\uc138\uc694.",
+image_alt_text:"reCAPTCHA \ubcf4\uc548\ubb38\uc790 \uc774\ubbf8\uc9c0",privacy_and_terms:"\uac1c\uc778\uc815\ubcf4 \ubcf4\ud638 \ubc0f \uc57d\uad00"},ln:ma,lt:{visual_challenge:"Gauti vaizdin\u012f atpa\u017einimo test\u0105",audio_challenge:"Gauti garso atpa\u017einimo test\u0105",refresh_btn:"Gauti nauj\u0105 atpa\u017einimo test\u0105",instructions_visual:"\u012eveskite tekst\u0105:",instructions_audio:"\u012eveskite tai, k\u0105 girdite:",help_btn:"Pagalba",play_again:"Dar kart\u0105 paleisti gars\u0105",
+cant_hear_this:"Atsisi\u0173sti gars\u0105 kaip MP3",incorrect_try_again:"Neteisingai. Bandykite dar kart\u0105.",image_alt_text:"Testo \u201ereCAPTCHA\u201c vaizdas",privacy_and_terms:"Privatumas ir s\u0105lygos"},lv:{visual_challenge:"Sa\u0146emt vizu\u0101lu izaicin\u0101jumu",audio_challenge:"Sa\u0146emt audio izaicin\u0101jumu",refresh_btn:"Sa\u0146emt jaunu izaicin\u0101jumu",instructions_visual:"Ievadiet tekstu:",instructions_audio:"Ierakstiet dzirdamo:",help_btn:"Pal\u012bdz\u012bba",play_again:"V\u0113lreiz atska\u0146ot ska\u0146u",
+cant_hear_this:"Lejupiel\u0101d\u0113t ska\u0146u MP3\u00a0form\u0101t\u0101",incorrect_try_again:"Nepareizi. M\u0113\u0123iniet v\u0113lreiz.",image_alt_text:"reCAPTCHA izaicin\u0101juma att\u0113ls",privacy_and_terms:"Konfidencialit\u0101te un noteikumi"},ml:{visual_challenge:"\u0d12\u0d30\u0d41 \u0d26\u0d43\u0d36\u0d4d\u0d2f \u0d1a\u0d32\u0d1e\u0d4d\u0d1a\u0d4d \u0d28\u0d47\u0d1f\u0d41\u0d15",audio_challenge:"\u0d12\u0d30\u0d41 \u0d13\u0d21\u0d3f\u0d2f\u0d4b \u0d1a\u0d32\u0d1e\u0d4d\u0d1a\u0d4d \u0d28\u0d47\u0d1f\u0d41\u0d15",
+refresh_btn:"\u0d12\u0d30\u0d41 \u0d2a\u0d41\u0d24\u0d3f\u0d2f \u0d1a\u0d32\u0d1e\u0d4d\u0d1a\u0d4d \u0d28\u0d47\u0d1f\u0d41\u0d15",instructions_visual:"",instructions_audio:"\u0d15\u0d47\u0d7e\u0d15\u0d4d\u0d15\u0d41\u0d28\u0d4d\u0d28\u0d24\u0d4d \u0d1f\u0d48\u0d2a\u0d4d\u0d2a\u0d4d \u0d1a\u0d46\u0d2f\u0d4d\u0d2f\u0d42:",help_btn:"\u0d38\u0d39\u0d3e\u0d2f\u0d02",play_again:"\u0d36\u0d2c\u0d4d\u200c\u0d26\u0d02 \u0d35\u0d40\u0d23\u0d4d\u0d1f\u0d41\u0d02 \u0d2a\u0d4d\u0d32\u0d47 \u0d1a\u0d46\u0d2f\u0d4d\u0d2f\u0d41\u0d15",
+cant_hear_this:"\u0d36\u0d2c\u0d4d\u200c\u0d26\u0d02 MP3 \u0d06\u0d2f\u0d3f \u0d21\u0d57\u0d7a\u0d32\u0d4b\u0d21\u0d4d \u0d1a\u0d46\u0d2f\u0d4d\u0d2f\u0d41\u0d15",incorrect_try_again:"\u0d24\u0d46\u0d31\u0d4d\u0d31\u0d3e\u0d23\u0d4d. \u0d35\u0d40\u0d23\u0d4d\u0d1f\u0d41\u0d02 \u0d36\u0d4d\u0d30\u0d2e\u0d3f\u0d15\u0d4d\u0d15\u0d41\u0d15.",image_alt_text:"reCAPTCHA \u0d1a\u0d32\u0d1e\u0d4d\u0d1a\u0d4d \u0d07\u0d2e\u0d47\u0d1c\u0d4d",privacy_and_terms:"\u0d38\u0d4d\u0d35\u0d15\u0d3e\u0d30\u0d4d\u0d2f\u0d24\u0d2f\u0d41\u0d02 \u0d28\u0d3f\u0d2c\u0d28\u0d4d\u0d27\u0d28\u0d15\u0d33\u0d41\u0d02"},
+mr:{visual_challenge:"\u0926\u0943\u0936\u094d\u200d\u092f\u092e\u093e\u0928 \u0906\u0935\u094d\u0939\u093e\u0928 \u092a\u094d\u0930\u093e\u092a\u094d\u0924 \u0915\u0930\u093e",audio_challenge:"\u0911\u0921\u0940\u0913 \u0906\u0935\u094d\u0939\u093e\u0928 \u092a\u094d\u0930\u093e\u092a\u094d\u0924 \u0915\u0930\u093e",refresh_btn:"\u090f\u0915 \u0928\u0935\u0940\u0928 \u0906\u0935\u094d\u0939\u093e\u0928 \u092a\u094d\u0930\u093e\u092a\u094d\u0924 \u0915\u0930\u093e",instructions_visual:"",instructions_audio:"\u0906\u092a\u0932\u094d\u092f\u093e\u0932\u093e \u091c\u0947 \u0910\u0915\u0942 \u092f\u0947\u0908\u0932 \u0924\u0947 \u091f\u093e\u0907\u092a \u0915\u0930\u093e:",
+help_btn:"\u092e\u0926\u0924",play_again:"\u0927\u094d\u200d\u0935\u0928\u0940 \u092a\u0941\u0928\u094d\u0939\u093e \u092a\u094d\u200d\u0932\u0947 \u0915\u0930\u093e",cant_hear_this:"MP3 \u0930\u0941\u092a\u093e\u0924 \u0927\u094d\u200d\u0935\u0928\u0940 \u0921\u093e\u0909\u0928\u0932\u094b\u0921 \u0915\u0930\u093e",incorrect_try_again:"\u0905\u092f\u094b\u0917\u094d\u200d\u092f. \u092a\u0941\u0928\u094d\u200d\u0939\u093e \u092a\u094d\u0930\u092f\u0924\u094d\u200d\u0928 \u0915\u0930\u093e.",image_alt_text:"reCAPTCHA \u0906\u0935\u094d\u200d\u0939\u093e\u0928 \u092a\u094d\u0930\u0924\u093f\u092e\u093e",
+privacy_and_terms:"\u0917\u094b\u092a\u0928\u0940\u092f\u0924\u093e \u0906\u0923\u093f \u0905\u091f\u0940"},ms:{visual_challenge:"Dapatkan cabaran visual",audio_challenge:"Dapatkan cabaran audio",refresh_btn:"Dapatkan cabaran baru",instructions_visual:"Taipkan teksnya:",instructions_audio:"Taip apa yang didengari:",help_btn:"Bantuan",play_again:"Mainkan bunyi sekali lagi",cant_hear_this:"Muat turun bunyi sebagai MP3",incorrect_try_again:"Tidak betul. Cuba lagi.",image_alt_text:"Imej cabaran reCAPTCHA",
+privacy_and_terms:"Privasi & Syarat"},nl:{visual_challenge:"Een visuele uitdaging proberen",audio_challenge:"Een audio-uitdaging proberen",refresh_btn:"Een nieuwe uitdaging proberen",instructions_visual:"Typ de tekst:",instructions_audio:"Typ wat u hoort:",help_btn:"Help",play_again:"Geluid opnieuw afspelen",cant_hear_this:"Geluid downloaden als MP3",incorrect_try_again:"Onjuist. Probeer het opnieuw.",image_alt_text:"reCAPTCHA-uitdagingsafbeelding",privacy_and_terms:"Privacy en voorwaarden"},no:{visual_challenge:"F\u00e5 en bildeutfordring",
+audio_challenge:"F\u00e5 en lydutfordring",refresh_btn:"F\u00e5 en ny utfordring",instructions_visual:"Skriv inn teksten:",instructions_audio:"Skriv inn det du h\u00f8rer:",help_btn:"Hjelp",play_again:"Spill av lyd p\u00e5 nytt",cant_hear_this:"Last ned lyd som MP3",incorrect_try_again:"Feil. Pr\u00f8v p\u00e5 nytt.",image_alt_text:"reCAPTCHA-utfordringsbilde",privacy_and_terms:"Personvern og vilk\u00e5r"},pl:{visual_challenge:"Poka\u017c podpowied\u017a wizualn\u0105",audio_challenge:"Odtw\u00f3rz podpowied\u017a d\u017awi\u0119kow\u0105",
+refresh_btn:"Nowa podpowied\u017a",instructions_visual:"Przepisz tekst:",instructions_audio:"Wpisz us\u0142yszane s\u0142owa:",help_btn:"Pomoc",play_again:"Odtw\u00f3rz d\u017awi\u0119k ponownie",cant_hear_this:"Pobierz d\u017awi\u0119k jako plik MP3",incorrect_try_again:"Nieprawid\u0142owo. Spr\u00f3buj ponownie.",image_alt_text:"Zadanie obrazkowe reCAPTCHA",privacy_and_terms:"Prywatno\u015b\u0107 i warunki"},pt:pa,"pt-BR":pa,"pt-PT":{visual_challenge:"Obter um desafio visual",audio_challenge:"Obter um desafio de \u00e1udio",
+refresh_btn:"Obter um novo desafio",instructions_visual:"Introduza o texto:",instructions_audio:"Escreva o que ouvir:",help_btn:"Ajuda",play_again:"Reproduzir som novamente",cant_hear_this:"Transferir som como MP3",incorrect_try_again:"Incorreto. Tente novamente.",image_alt_text:"Imagem de teste reCAPTCHA",privacy_and_terms:"Privacidade e Termos de Utiliza\u00e7\u00e3o"},ro:qa,ru:{visual_challenge:"\u0412\u0438\u0437\u0443\u0430\u043b\u044c\u043d\u0430\u044f \u043f\u0440\u043e\u0432\u0435\u0440\u043a\u0430",
+audio_challenge:"\u0417\u0432\u0443\u043a\u043e\u0432\u0430\u044f \u043f\u0440\u043e\u0432\u0435\u0440\u043a\u0430",refresh_btn:"\u041e\u0431\u043d\u043e\u0432\u0438\u0442\u044c",instructions_visual:"\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0442\u0435\u043a\u0441\u0442:",instructions_audio:"\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0442\u043e, \u0447\u0442\u043e \u0441\u043b\u044b\u0448\u0438\u0442\u0435:",help_btn:"\u0421\u043f\u0440\u0430\u0432\u043a\u0430",play_again:"\u041f\u0440\u043e\u0441\u043b\u0443\u0448\u0430\u0442\u044c \u0435\u0449\u0435 \u0440\u0430\u0437",
+cant_hear_this:"\u0417\u0430\u0433\u0440\u0443\u0437\u0438\u0442\u044c MP3-\u0444\u0430\u0439\u043b",incorrect_try_again:"\u041d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u043e. \u041f\u043e\u0432\u0442\u043e\u0440\u0438\u0442\u0435 \u043f\u043e\u043f\u044b\u0442\u043a\u0443.",image_alt_text:"\u041f\u0440\u043e\u0432\u0435\u0440\u043a\u0430 \u043f\u043e \u0441\u043b\u043e\u0432\u0443 reCAPTCHA",privacy_and_terms:"\u041f\u0440\u0430\u0432\u0438\u043b\u0430 \u0438 \u043f\u0440\u0438\u043d\u0446\u0438\u043f\u044b"},
+sk:{visual_challenge:"Zobrazi\u0165 vizu\u00e1lnu podobu",audio_challenge:"Prehra\u0165 zvukov\u00fa podobu",refresh_btn:"Zobrazi\u0165 nov\u00fd v\u00fdraz",instructions_visual:"Zadajte text:",instructions_audio:"Zadajte, \u010do po\u010dujete:",help_btn:"Pomocn\u00edk",play_again:"Znova prehra\u0165 zvuk",cant_hear_this:"Prevzia\u0165 zvuk v podobe s\u00faboru MP3",incorrect_try_again:"Nespr\u00e1vne. Sk\u00faste to znova.",image_alt_text:"Obr\u00e1zok zadania reCAPTCHA",privacy_and_terms:"Ochrana osobn\u00fdch \u00fadajov a Zmluvn\u00e9 podmienky"},
+sl:{visual_challenge:"Vizualni preskus",audio_challenge:"Zvo\u010dni preskus",refresh_btn:"Nov preskus",instructions_visual:"Vnesite besedilo:",instructions_audio:"Natipkajte, kaj sli\u0161ite:",help_btn:"Pomo\u010d",play_again:"Znova predvajaj zvok",cant_hear_this:"Prenesi zvok kot MP3",incorrect_try_again:"Napa\u010dno. Poskusite znova.",image_alt_text:"Slika izziva reCAPTCHA",privacy_and_terms:"Zasebnost in pogoji"},sr:{visual_challenge:"\u041f\u0440\u0438\u043c\u0438\u0442\u0435 \u0432\u0438\u0437\u0443\u0435\u043b\u043d\u0438 \u0443\u043f\u0438\u0442",
+audio_challenge:"\u041f\u0440\u0438\u043c\u0438\u0442\u0435 \u0430\u0443\u0434\u0438\u043e \u0443\u043f\u0438\u0442",refresh_btn:"\u041f\u0440\u0438\u043c\u0438\u0442\u0435 \u043d\u043e\u0432\u0438 \u0443\u043f\u0438\u0442",instructions_visual:"\u0423\u043d\u0435\u0441\u0438\u0442\u0435 \u0442\u0435\u043a\u0441\u0442:",instructions_audio:"\u041e\u0442\u043a\u0443\u0446\u0430\u0458\u0442\u0435 \u043e\u043d\u043e \u0448\u0442\u043e \u0447\u0443\u0458\u0435\u0442\u0435:",help_btn:"\u041f\u043e\u043c\u043e\u045b",
+play_again:"\u041f\u043e\u043d\u043e\u0432\u043e \u043f\u0443\u0441\u0442\u0438 \u0437\u0432\u0443\u043a",cant_hear_this:"\u041f\u0440\u0435\u0443\u0437\u043c\u0438 \u0437\u0432\u0443\u043a \u043a\u0430\u043e MP3 \u0441\u043d\u0438\u043c\u0430\u043a",incorrect_try_again:"\u041d\u0435\u0442\u0430\u0447\u043d\u043e. \u041f\u043e\u043a\u0443\u0448\u0430\u0458\u0442\u0435 \u043f\u043e\u043d\u043e\u0432\u043e.",image_alt_text:"\u0421\u043b\u0438\u043a\u0430 reCAPTCHA \u043f\u0440\u043e\u0432\u0435\u0440\u0435",
+privacy_and_terms:"\u041f\u0440\u0438\u0432\u0430\u0442\u043d\u043e\u0441\u0442 \u0438 \u0443\u0441\u043b\u043e\u0432\u0438"},sv:{visual_challenge:"H\u00e4mta captcha i bildformat",audio_challenge:"H\u00e4mta captcha i ljudformat",refresh_btn:"H\u00e4mta ny captcha",instructions_visual:"Skriv texten:",instructions_audio:"Skriv det du h\u00f6r:",help_btn:"Hj\u00e4lp",play_again:"Spela upp ljudet igen",cant_hear_this:"H\u00e4mta ljud som MP3",incorrect_try_again:"Fel. F\u00f6rs\u00f6k igen.",image_alt_text:"reCAPTCHA-bild",
+privacy_and_terms:"Sekretess och villkor"},sw:{visual_challenge:"Pata herufi za kusoma",audio_challenge:"Pata herufi za kusikiliza",refresh_btn:"Pata herufi mpya",instructions_visual:"",instructions_audio:"Charaza unachosikia:",help_btn:"Usaidizi",play_again:"Cheza sauti tena",cant_hear_this:"Pakua sauti kama MP3",incorrect_try_again:"Sio sahihi. Jaribu tena.",image_alt_text:"picha ya changamoto ya reCAPTCHA",privacy_and_terms:"Faragha & Masharti"},ta:{visual_challenge:"\u0baa\u0bbe\u0bb0\u0bcd\u0bb5\u0bc8 \u0b9a\u0bc7\u0bb2\u0b9e\u0bcd\u0b9a\u0bc8\u0baa\u0bcd \u0baa\u0bc6\u0bb1\u0bc1\u0b95",
+audio_challenge:"\u0b86\u0b9f\u0bbf\u0baf\u0bcb \u0b9a\u0bc7\u0bb2\u0b9e\u0bcd\u0b9a\u0bc8\u0baa\u0bcd \u0baa\u0bc6\u0bb1\u0bc1\u0b95",refresh_btn:"\u0baa\u0bc1\u0ba4\u0bbf\u0baf \u0b9a\u0bc7\u0bb2\u0b9e\u0bcd\u0b9a\u0bc8\u0baa\u0bcd \u0baa\u0bc6\u0bb1\u0bc1\u0b95",instructions_visual:"",instructions_audio:"\u0b95\u0bc7\u0b9f\u0bcd\u0baa\u0ba4\u0bc8 \u0b9f\u0bc8\u0baa\u0bcd \u0b9a\u0bc6\u0baf\u0bcd\u0b95:",help_btn:"\u0b89\u0ba4\u0bb5\u0bbf",play_again:"\u0b92\u0bb2\u0bbf\u0baf\u0bc8 \u0bae\u0bc0\u0ba3\u0bcd\u0b9f\u0bc1\u0bae\u0bcd \u0b87\u0baf\u0b95\u0bcd\u0b95\u0bc1",
+cant_hear_this:"\u0b92\u0bb2\u0bbf\u0baf\u0bc8 MP3 \u0b86\u0b95 \u0baa\u0ba4\u0bbf\u0bb5\u0bbf\u0bb1\u0b95\u0bcd\u0b95\u0bc1\u0b95",incorrect_try_again:"\u0ba4\u0bb5\u0bb1\u0bbe\u0ba9\u0ba4\u0bc1. \u0bae\u0bc0\u0ba3\u0bcd\u0b9f\u0bc1\u0bae\u0bcd \u0bae\u0bc1\u0baf\u0bb2\u0bb5\u0bc1\u0bae\u0bcd.",image_alt_text:"reCAPTCHA \u0b9a\u0bc7\u0bb2\u0b9e\u0bcd\u0b9a\u0bcd \u0baa\u0b9f\u0bae\u0bcd",privacy_and_terms:"\u0ba4\u0ba9\u0bbf\u0baf\u0bc1\u0bb0\u0bbf\u0bae\u0bc8 & \u0bb5\u0bbf\u0ba4\u0bbf\u0bae\u0bc1\u0bb1\u0bc8\u0b95\u0bb3\u0bcd"},
+te:{visual_challenge:"\u0c12\u0c15 \u0c26\u0c43\u0c36\u0c4d\u0c2f\u0c2e\u0c3e\u0c28 \u0c38\u0c35\u0c3e\u0c32\u0c41\u0c28\u0c41 \u0c38\u0c4d\u0c35\u0c40\u0c15\u0c30\u0c3f\u0c02\u0c1a\u0c02\u0c21\u0c3f",audio_challenge:"\u0c12\u0c15 \u0c06\u0c21\u0c3f\u0c2f\u0c4b \u0c38\u0c35\u0c3e\u0c32\u0c41\u0c28\u0c41 \u0c38\u0c4d\u0c35\u0c40\u0c15\u0c30\u0c3f\u0c02\u0c1a\u0c02\u0c21\u0c3f",refresh_btn:"\u0c15\u0c4d\u0c30\u0c4a\u0c24\u0c4d\u0c24 \u0c38\u0c35\u0c3e\u0c32\u0c41\u0c28\u0c41 \u0c38\u0c4d\u0c35\u0c40\u0c15\u0c30\u0c3f\u0c02\u0c1a\u0c02\u0c21\u0c3f",
+instructions_visual:"",instructions_audio:"\u0c2e\u0c40\u0c30\u0c41 \u0c35\u0c3f\u0c28\u0c4d\u0c28\u0c26\u0c3f \u0c1f\u0c48\u0c2a\u0c4d \u0c1a\u0c47\u0c2f\u0c02\u0c21\u0c3f:",help_btn:"\u0c38\u0c39\u0c3e\u0c2f\u0c02",play_again:"\u0c27\u0c4d\u0c35\u0c28\u0c3f\u0c28\u0c3f \u0c2e\u0c33\u0c4d\u0c32\u0c40 \u0c2a\u0c4d\u0c32\u0c47 \u0c1a\u0c47\u0c2f\u0c3f",cant_hear_this:"\u0c27\u0c4d\u0c35\u0c28\u0c3f\u0c28\u0c3f MP3 \u0c35\u0c32\u0c46 \u0c21\u0c4c\u0c28\u0c4d\u200c\u0c32\u0c4b\u0c21\u0c4d \u0c1a\u0c47\u0c2f\u0c3f",
+incorrect_try_again:"\u0c24\u0c2a\u0c4d\u0c2a\u0c41. \u0c2e\u0c33\u0c4d\u0c32\u0c40 \u0c2a\u0c4d\u0c30\u0c2f\u0c24\u0c4d\u0c28\u0c3f\u0c02\u0c1a\u0c02\u0c21\u0c3f.",image_alt_text:"reCAPTCHA \u0c38\u0c35\u0c3e\u0c32\u0c41 \u0c1a\u0c3f\u0c24\u0c4d\u0c30\u0c02",privacy_and_terms:"\u0c17\u0c4b\u0c2a\u0c4d\u0c2f\u0c24 & \u0c28\u0c3f\u0c2c\u0c02\u0c27\u0c28\u0c32\u0c41"},th:{visual_challenge:"\u0e23\u0e31\u0e1a\u0e04\u0e27\u0e32\u0e21\u0e17\u0e49\u0e32\u0e17\u0e32\u0e22\u0e14\u0e49\u0e32\u0e19\u0e20\u0e32\u0e1e",
+audio_challenge:"\u0e23\u0e31\u0e1a\u0e04\u0e27\u0e32\u0e21\u0e17\u0e49\u0e32\u0e17\u0e32\u0e22\u0e14\u0e49\u0e32\u0e19\u0e40\u0e2a\u0e35\u0e22\u0e07",refresh_btn:"\u0e23\u0e31\u0e1a\u0e04\u0e27\u0e32\u0e21\u0e17\u0e49\u0e32\u0e17\u0e32\u0e22\u0e43\u0e2b\u0e21\u0e48",instructions_visual:"\u0e1e\u0e34\u0e21\u0e1e\u0e4c\u0e02\u0e49\u0e2d\u0e04\u0e27\u0e32\u0e21\u0e19\u0e35\u0e49:",instructions_audio:"\u0e1e\u0e34\u0e21\u0e1e\u0e4c\u0e2a\u0e34\u0e48\u0e07\u0e17\u0e35\u0e48\u0e04\u0e38\u0e13\u0e44\u0e14\u0e49\u0e22\u0e34\u0e19:",
+help_btn:"\u0e04\u0e27\u0e32\u0e21\u0e0a\u0e48\u0e27\u0e22\u0e40\u0e2b\u0e25\u0e37\u0e2d",play_again:"\u0e40\u0e25\u0e48\u0e19\u0e40\u0e2a\u0e35\u0e22\u0e07\u0e2d\u0e35\u0e01\u0e04\u0e23\u0e31\u0e49\u0e07",cant_hear_this:"\u0e14\u0e32\u0e27\u0e42\u0e2b\u0e25\u0e14\u0e40\u0e2a\u0e35\u0e22\u0e07\u0e40\u0e1b\u0e47\u0e19 MP3",incorrect_try_again:"\u0e44\u0e21\u0e48\u0e16\u0e39\u0e01\u0e15\u0e49\u0e2d\u0e07 \u0e25\u0e2d\u0e07\u0e2d\u0e35\u0e01\u0e04\u0e23\u0e31\u0e49\u0e07",image_alt_text:"\u0e23\u0e2b\u0e31\u0e2a\u0e20\u0e32\u0e1e reCAPTCHA",
+privacy_and_terms:"\u0e19\u0e42\u0e22\u0e1a\u0e32\u0e22\u0e2a\u0e48\u0e27\u0e19\u0e1a\u0e38\u0e04\u0e04\u0e25\u0e41\u0e25\u0e30\u0e02\u0e49\u0e2d\u0e01\u0e33\u0e2b\u0e19\u0e14"},tr:{visual_challenge:"G\u00f6rsel sorgu al",audio_challenge:"Sesli sorgu al",refresh_btn:"Yeniden y\u00fckle",instructions_visual:"Metni yaz\u0131n:",instructions_audio:"Duydu\u011funuzu yaz\u0131n:",help_btn:"Yard\u0131m",play_again:"Sesi tekrar \u00e7al",cant_hear_this:"Sesi MP3 olarak indir",incorrect_try_again:"Yanl\u0131\u015f. Tekrar deneyin.",
+image_alt_text:"reCAPTCHA sorusu resmi",privacy_and_terms:"Gizlilik ve \u015eartlar"},uk:{visual_challenge:"\u041e\u0442\u0440\u0438\u043c\u0430\u0442\u0438 \u0432\u0456\u0437\u0443\u0430\u043b\u044c\u043d\u0438\u0439 \u0442\u0435\u043a\u0441\u0442",audio_challenge:"\u041e\u0442\u0440\u0438\u043c\u0430\u0442\u0438 \u0430\u0443\u0434\u0456\u043e\u0437\u0430\u043f\u0438\u0441",refresh_btn:"\u041e\u043d\u043e\u0432\u0438\u0442\u0438 \u0442\u0435\u043a\u0441\u0442",instructions_visual:"\u0412\u0432\u0435\u0434\u0456\u0442\u044c \u0442\u0435\u043a\u0441\u0442:",
+instructions_audio:"\u0412\u0432\u0435\u0434\u0456\u0442\u044c \u043f\u043e\u0447\u0443\u0442\u0435:",help_btn:"\u0414\u043e\u0432\u0456\u0434\u043a\u0430",play_again:"\u0412\u0456\u0434\u0442\u0432\u043e\u0440\u0438\u0442\u0438 \u0437\u0430\u043f\u0438\u0441 \u0449\u0435 \u0440\u0430\u0437",cant_hear_this:"\u0417\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0438\u0442\u0438 \u0437\u0430\u043f\u0438\u0441 \u044f\u043a MP3",incorrect_try_again:"\u041d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u043e. \u0421\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435 \u0449\u0435 \u0440\u0430\u0437.",
+image_alt_text:"\u0417\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u043d\u044f \u0437\u0430\u0432\u0434\u0430\u043d\u043d\u044f reCAPTCHA",privacy_and_terms:"\u041a\u043e\u043d\u0444\u0456\u0434\u0435\u043d\u0446\u0456\u0439\u043d\u0456\u0441\u0442\u044c \u0456 \u0443\u043c\u043e\u0432\u0438"},ur:{visual_challenge:"\u0627\u06cc\u06a9 \u0645\u0631\u0626\u06cc \u0686\u06cc\u0644\u0646\u062c \u062d\u0627\u0635\u0644 \u06a9\u0631\u06cc\u06ba",audio_challenge:"\u0627\u06cc\u06a9 \u0622\u0688\u06cc\u0648 \u0686\u06cc\u0644\u0646\u062c \u062d\u0627\u0635\u0644 \u06a9\u0631\u06cc\u06ba",
+refresh_btn:"\u0627\u06cc\u06a9 \u0646\u06cc\u0627 \u0686\u06cc\u0644\u0646\u062c \u062d\u0627\u0635\u0644 \u06a9\u0631\u06cc\u06ba",instructions_visual:"",instructions_audio:"\u062c\u0648 \u0633\u0646\u0627\u0626\u06cc \u062f\u06cc\u062a\u0627 \u06c1\u06d2 \u0648\u06c1 \u0679\u0627\u0626\u067e \u06a9\u0631\u06cc\u06ba:",help_btn:"\u0645\u062f\u062f",play_again:"\u0622\u0648\u0627\u0632 \u062f\u0648\u0628\u0627\u0631\u06c1 \u0686\u0644\u0627\u0626\u06cc\u06ba",cant_hear_this:"\u0622\u0648\u0627\u0632 \u06a9\u0648 MP3 \u06a9\u06d2 \u0628\u0637\u0648\u0631 \u0688\u0627\u0624\u0646 \u0644\u0648\u0688 \u06a9\u0631\u06cc\u06ba",
+incorrect_try_again:"\u063a\u0644\u0637\u06d4 \u062f\u0648\u0628\u0627\u0631\u06c1 \u06a9\u0648\u0634\u0634 \u06a9\u0631\u06cc\u06ba\u06d4",image_alt_text:"reCAPTCHA \u0686\u06cc\u0644\u0646\u062c \u0648\u0627\u0644\u06cc \u0634\u0628\u06cc\u06c1",privacy_and_terms:"\u0631\u0627\u0632\u062f\u0627\u0631\u06cc \u0648 \u0634\u0631\u0627\u0626\u0637"},vi:{visual_challenge:"Nh\u1eadn th\u1eed th\u00e1ch h\u00ecnh \u1ea3nh",audio_challenge:"Nh\u1eadn th\u1eed th\u00e1ch \u00e2m thanh",refresh_btn:"Nh\u1eadn th\u1eed th\u00e1ch m\u1edbi",
+instructions_visual:"Nh\u1eadp v\u0103n b\u1ea3n:",instructions_audio:"Nh\u1eadp n\u1ed9i dung b\u1ea1n nghe th\u1ea5y:",help_btn:"Tr\u1ee3 gi\u00fap",play_again:"Ph\u00e1t l\u1ea1i \u00e2m thanh",cant_hear_this:"T\u1ea3i \u00e2m thanh xu\u1ed1ng d\u01b0\u1edbi d\u1ea1ng MP3",incorrect_try_again:"Kh\u00f4ng ch\u00ednh x\u00e1c. H\u00e3y th\u1eed l\u1ea1i.",image_alt_text:"H\u00ecnh x\u00e1c th\u1ef1c reCAPTCHA",privacy_and_terms:"B\u1ea3o m\u1eadt v\u00e0 \u0111i\u1ec1u kho\u1ea3n"},"zh-CN":ra,"zh-HK":{visual_challenge:"\u56de\u7b54\u5716\u50cf\u9a57\u8b49\u554f\u984c",
+audio_challenge:"\u53d6\u5f97\u8a9e\u97f3\u9a57\u8b49\u554f\u984c",refresh_btn:"\u63db\u4e00\u500b\u9a57\u8b49\u554f\u984c",instructions_visual:"\u8f38\u5165\u6587\u5b57\uff1a",instructions_audio:"\u9375\u5165\u60a8\u6240\u807d\u5230\u7684\uff1a",help_btn:"\u8aaa\u660e",play_again:"\u518d\u6b21\u64ad\u653e\u8072\u97f3",cant_hear_this:"\u5c07\u8072\u97f3\u4e0b\u8f09\u70ba MP3",incorrect_try_again:"\u4e0d\u6b63\u78ba\uff0c\u518d\u8a66\u4e00\u6b21\u3002",image_alt_text:"reCAPTCHA \u9a57\u8b49\u6587\u5b57\u5716\u7247",
+privacy_and_terms:"\u79c1\u96b1\u6b0a\u8207\u689d\u6b3e"},"zh-TW":{visual_challenge:"\u53d6\u5f97\u5716\u7247\u9a57\u8b49\u554f\u984c",audio_challenge:"\u53d6\u5f97\u8a9e\u97f3\u9a57\u8b49\u554f\u984c",refresh_btn:"\u53d6\u5f97\u65b0\u7684\u9a57\u8b49\u554f\u984c",instructions_visual:"\u8acb\u8f38\u5165\u5716\u7247\u4e2d\u7684\u6587\u5b57\uff1a",instructions_audio:"\u8acb\u8f38\u5165\u8a9e\u97f3\u5167\u5bb9\uff1a",help_btn:"\u8aaa\u660e",play_again:"\u518d\u6b21\u64ad\u653e",cant_hear_this:"\u4ee5 MP3 \u683c\u5f0f\u4e0b\u8f09\u8072\u97f3",
+incorrect_try_again:"\u9a57\u8b49\u78bc\u6709\u8aa4\uff0c\u8acb\u518d\u8a66\u4e00\u6b21\u3002",image_alt_text:"reCAPTCHA \u9a57\u8b49\u6587\u5b57\u5716\u7247",privacy_and_terms:"\u96b1\u79c1\u6b0a\u8207\u689d\u6b3e"},zu:{visual_challenge:"Thola inselelo ebonakalayo",audio_challenge:"Thola inselelo yokulalelwayo",refresh_btn:"Thola inselelo entsha",instructions_visual:"",instructions_audio:"Bhala okuzwayo:",help_btn:"Usizo",play_again:"Phinda udlale okulalelwayo futhi",cant_hear_this:"Layisha umsindo njenge-MP3",
+incorrect_try_again:"Akulungile. Zama futhi.",image_alt_text:"umfanekiso oyinselelo we-reCAPTCHA",privacy_and_terms:"Okwangasese kanye nemigomo"},tl:la,he:oa,"in":na,mo:qa,zh:ra};var x=function(a){if(Error.captureStackTrace)Error.captureStackTrace(this,x);else{var b=Error().stack;b&&(this.stack=b)}a&&(this.message=String(a))};u(x,Error);x.prototype.name="CustomError";var ta;var ua=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},va=String.prototype.trim?function(a){return a.trim()}:function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},Da=function(a){if(!wa.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(xa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(ya,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(za,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(Aa,"&quot;"));-1!=a.indexOf("'")&&
+(a=a.replace(Ba,"&#39;"));-1!=a.indexOf("\x00")&&(a=a.replace(Ca,"&#0;"));return a},xa=/&/g,ya=/</g,za=/>/g,Aa=/"/g,Ba=/'/g,Ca=/\x00/g,wa=/[\x00&<>"']/,Ea=function(a,b){return a<b?-1:a>b?1:0},Fa=function(a){return String(a).replace(/\-([a-z])/g,function(a,c){return c.toUpperCase()})},Ga=function(a){var b=q(void 0)?"undefined".replace(/([-()\[\]{}+?*.$\^|,:#<!\\])/g,"\\$1").replace(/\x08/g,"\\x08"):"\\s";return a.replace(new RegExp("(^"+(b?"|["+b+"]+":"")+")([a-z])","g"),function(a,b,e){return b+e.toUpperCase()})};var Ha=function(a,b){b.unshift(a);x.call(this,ua.apply(null,b));b.shift()};u(Ha,x);Ha.prototype.name="AssertionError";
+var Ia=function(a,b,c,d){var e="Assertion failed";if(c)var e=e+(": "+c),g=d;else a&&(e+=": "+a,g=b);throw new Ha(""+e,g||[]);},y=function(a,b,c){a||Ia("",null,b,Array.prototype.slice.call(arguments,2))},Ja=function(a,b){throw new Ha("Failure"+(a?": "+a:""),Array.prototype.slice.call(arguments,1));},Ka=function(a,b,c){q(a)||Ia("Expected string but got %s: %s.",[n(a),a],b,Array.prototype.slice.call(arguments,2));return a},La=function(a,b,c){r(a)||Ia("Expected function but got %s: %s.",[n(a),a],b,Array.prototype.slice.call(arguments,
+2))};var z=Array.prototype,Ma=z.indexOf?function(a,b,c){y(null!=a.length);return z.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(q(a))return q(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},Na=z.forEach?function(a,b,c){y(null!=a.length);z.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=q(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},Oa=z.map?function(a,b,c){y(null!=a.length);return z.map.call(a,b,c)}:
+function(a,b,c){for(var d=a.length,e=Array(d),g=q(a)?a.split(""):a,f=0;f<d;f++)f in g&&(e[f]=b.call(c,g[f],f,a));return e},Pa=z.some?function(a,b,c){y(null!=a.length);return z.some.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=q(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&b.call(c,e[g],g,a))return!0;return!1},Qa=function(a,b){var c=Ma(a,b),d;if(d=0<=c)y(null!=a.length),z.splice.call(a,c,1);return d},Ra=function(a){var b=a.length;if(0<b){for(var c=Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},
+Sa=function(a,b,c){y(null!=a.length);return 2>=arguments.length?z.slice.call(a,b):z.slice.call(a,b,c)};var Ta=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Ua=function(a){var b=[],c=0,d;for(d in a)b[c++]=d;return b},Va=function(a){for(var b in a)return!1;return!0},Xa=function(){var a=Wa()?k.google_ad:null,b={},c;for(c in a)b[c]=a[c];return b},Ya="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Za=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Ya.length;g++)c=Ya[g],Object.prototype.hasOwnProperty.call(d,
+c)&&(a[c]=d[c])}},$a=function(a){var b=arguments.length;if(1==b&&p(arguments[0]))return $a.apply(null,arguments[0]);for(var c={},d=0;d<b;d++)c[arguments[d]]=!0;return c};var A;t:{var ab=k.navigator;if(ab){var bb=ab.userAgent;if(bb){A=bb;break t}}A=""}var B=function(a){return-1!=A.indexOf(a)};var cb=B("Opera")||B("OPR"),C=B("Trident")||B("MSIE"),D=B("Gecko")&&-1==A.toLowerCase().indexOf("webkit")&&!(B("Trident")||B("MSIE")),E=-1!=A.toLowerCase().indexOf("webkit"),db=function(){var a=k.document;return a?a.documentMode:void 0},eb=function(){var a="",b;if(cb&&k.opera)return a=k.opera.version,r(a)?a():a;D?b=/rv\:([^\);]+)(\)|;)/:C?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:E&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(A))?a[1]:"");return C&&(b=db(),b>parseFloat(a))?String(b):a}(),fb={},F=function(a){var b;
+if(!(b=fb[a])){b=0;for(var c=va(String(eb)).split("."),d=va(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var f=c[g]||"",m=d[g]||"",$=RegExp("(\\d*)(\\D*)","g"),K=RegExp("(\\d*)(\\D*)","g");do{var G=$.exec(f)||["","",""],aa=K.exec(m)||["","",""];if(0==G[0].length&&0==aa[0].length)break;b=Ea(0==G[1].length?0:parseInt(G[1],10),0==aa[1].length?0:parseInt(aa[1],10))||Ea(0==G[2].length,0==aa[2].length)||Ea(G[2],aa[2])}while(0==b)}b=fb[a]=0<=b}return b},gb=k.document,hb=gb&&C?db()||
+("CSS1Compat"==gb.compatMode?parseInt(eb,10):5):void 0;var ib=function(a){if(8192>a.length)return String.fromCharCode.apply(null,a);for(var b="",c=0;c<a.length;c+=8192)var d=Sa(a,c,c+8192),b=b+String.fromCharCode.apply(null,d);return b},jb=function(a){return Oa(a,function(a){a=a.toString(16);return 1<a.length?a:"0"+a}).join("")};var kb=null,lb=null,mb=function(a){if(!kb){kb={};lb={};for(var b=0;65>b;b++)kb[b]="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=".charAt(b),lb[kb[b]]=b,62<=b&&(lb["ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.".charAt(b)]=b)}for(var b=lb,c=[],d=0;d<a.length;){var e=b[a.charAt(d++)],g=d<a.length?b[a.charAt(d)]:0;++d;var f=d<a.length?b[a.charAt(d)]:64;++d;var m=d<a.length?b[a.charAt(d)]:64;++d;if(null==e||null==g||null==f||null==m)throw Error();c.push(e<<2|g>>
+4);64!=f&&(c.push(g<<4&240|f>>2),64!=m&&c.push(f<<6&192|m))}return c};var H=function(){this.disposed_=this.disposed_;this.onDisposeCallbacks_=this.onDisposeCallbacks_};H.prototype.disposed_=!1;H.prototype.dispose=function(){this.disposed_||(this.disposed_=!0,this.disposeInternal())};var nb=function(a,b){a.onDisposeCallbacks_||(a.onDisposeCallbacks_=[]);a.onDisposeCallbacks_.push(l(void 0)?s(b,void 0):b)};H.prototype.disposeInternal=function(){if(this.onDisposeCallbacks_)for(;this.onDisposeCallbacks_.length;)this.onDisposeCallbacks_.shift()()};
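+// [Editorial sketch -- not part of the original recaptcha_ajax.js.] The
+// minified kb/lb/mb block ending above is Closure Library's base64 decoder:
+// it lazily builds char->value maps for both the standard and the URL-safe
+// alphabet ("+/=" vs "-_."), then folds each group of four input characters
+// into up to three bytes, treating 64 ('=') as padding. (The H that follows
+// is the standard Closure Disposable: dispose() runs once and drains any
+// onDispose callbacks.) A readable sketch of the same decode idea, using
+// editorial names (decodeB64Sketch is not part of this file):
+function decodeB64Sketch(s) {
+  var ALPHA = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
+  var map = {};
+  for (var i = 0; i < ALPHA.length; i++) map[ALPHA.charAt(i)] = i;
+  var out = [];
+  for (var j = 0; j < s.length; j += 4) {
+    var e = map[s.charAt(j)], g = map[s.charAt(j + 1)];
+    var f = j + 2 < s.length ? map[s.charAt(j + 2)] : 64;
+    var m = j + 3 < s.length ? map[s.charAt(j + 3)] : 64;
+    out.push(e << 2 | g >> 4);                       // byte 1
+    if (f !== 64) out.push(g << 4 & 240 | f >> 2);   // byte 2, unless padded
+    if (m !== 64) out.push(f << 6 & 192 | m);        // byte 3, unless padded
+  }
+  return out; // array of byte values, as in the original mb()
+}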
+var ob=function(a){a&&"function"==typeof a.dispose&&a.dispose()};var pb=!C||C&&9<=hb;!D&&!C||C&&C&&9<=hb||D&&F("1.9.1");C&&F("9");var sb=function(a){return a?new qb(rb(a)):ta||(ta=new qb)},tb=function(a,b){return q(b)?a.getElementById(b):b},vb=function(a,b){Ta(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in ub?a.setAttribute(ub[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},ub={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",
+type:"type",usemap:"useMap",valign:"vAlign",width:"width"},xb=function(a,b,c){return wb(document,arguments)},wb=function(a,b){var c=b[0],d=b[1];if(!pb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',Da(d.name),'"');if(d.type){c.push(' type="',Da(d.type),'"');var e={};Za(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(q(d)?c.className=d:p(d)?c.className=d.join(" "):vb(c,d));2<b.length&&yb(a,c,b);return c},yb=function(a,b,c){function d(c){c&&b.appendChild(q(c)?a.createTextNode(c):
+c)}for(var e=2;e<c.length;e++){var g=c[e];!ca(g)||da(g)&&0<g.nodeType?d(g):Na(zb(g)?Ra(g):g,d)}},Ab=function(a){for(var b;b=a.firstChild;)a.removeChild(b)},Bb=function(a){a&&a.parentNode&&a.parentNode.removeChild(a)},rb=function(a){y(a,"Node cannot be null or undefined.");return 9==a.nodeType?a:a.ownerDocument||a.document},zb=function(a){if(a&&"number"==typeof a.length){if(da(a))return"function"==typeof a.item||"string"==typeof a.item;if(r(a))return"function"==typeof a.item}return!1},qb=function(a){this.document_=
+a||k.document||document};h=qb.prototype;h.getDomHelper=sb;h.getElement=function(a){return tb(this.document_,a)};h.$=qb.prototype.getElement;h.createDom=function(a,b,c){return wb(this.document_,arguments)};h.createElement=function(a){return this.document_.createElement(a)};h.createTextNode=function(a){return this.document_.createTextNode(String(a))};h.appendChild=function(a,b){a.appendChild(b)};var Cb=function(a){k.setTimeout(function(){throw a;},0)},Db,Eb=function(){var a=k.MessageChannel;"undefined"===typeof a&&"undefined"!==typeof window&&window.postMessage&&window.addEventListener&&(a=function(){var a=document.createElement("iframe");a.style.display="none";a.src="";document.documentElement.appendChild(a);var b=a.contentWindow,a=b.document;a.open();a.write("");a.close();var c="callImmediate"+Math.random(),d="file:"==b.location.protocol?"*":b.location.protocol+"//"+b.location.host,a=s(function(a){if(("*"==
+d||a.origin==d)&&a.data==c)this.port1.onmessage()},this);b.addEventListener("message",a,!1);this.port1={};this.port2={postMessage:function(){b.postMessage(c,d)}}});if("undefined"!==typeof a&&!B("Trident")&&!B("MSIE")){var b=new a,c={},d=c;b.port1.onmessage=function(){if(l(c.next)){c=c.next;var a=c.cb;c.cb=null;a()}};return function(a){d.next={cb:a};d=d.next;b.port2.postMessage(0)}}return"undefined"!==typeof document&&"onreadystatechange"in document.createElement("script")?function(a){var b=document.createElement("script");
+b.onreadystatechange=function(){b.onreadystatechange=null;b.parentNode.removeChild(b);b=null;a();a=null};document.documentElement.appendChild(b)}:function(a){k.setTimeout(a,0)}};var Kb=function(a,b){Fb||Gb();Hb||(Fb(),Hb=!0);Ib.push(new Jb(a,b))},Fb,Gb=function(){if(k.Promise&&k.Promise.resolve){var a=k.Promise.resolve();Fb=function(){a.then(Lb)}}else Fb=function(){var a=Lb;!r(k.setImmediate)||k.Window&&k.Window.prototype.setImmediate==k.setImmediate?(Db||(Db=Eb()),Db(a)):k.setImmediate(a)}},Hb=!1,Ib=[],Lb=function(){for(;Ib.length;){var a=Ib;Ib=[];for(var b=0;b<a.length;b++){var c=a[b];try{c.fn.call(c.scope)}catch(d){Cb(d)}}}Hb=!1},Jb=function(a,b){this.fn=a;this.scope=
+b};var Mb=function(a){a.prototype.then=a.prototype.then;a.prototype.$goog_Thenable=!0},Nb=function(a){if(!a)return!1;try{return!!a.$goog_Thenable}catch(b){return!1}};var L=function(a,b){this.state_=0;this.result_=void 0;this.callbackEntries_=this.parent_=null;this.hadUnhandledRejection_=this.executing_=!1;try{var c=this;a.call(b,function(a){I(c,2,a)},function(a){if(!(a instanceof J))try{if(a instanceof Error)throw a;throw Error("Promise rejected.");}catch(b){}I(c,3,a)})}catch(d){I(this,3,d)}};
+L.prototype.then=function(a,b,c){null!=a&&La(a,"opt_onFulfilled should be a function.");null!=b&&La(b,"opt_onRejected should be a function. Did you pass opt_context as the second argument instead of the third?");return Ob(this,r(a)?a:null,r(b)?b:null,c)};Mb(L);L.prototype.cancel=function(a){0==this.state_&&Kb(function(){var b=new J(a);Pb(this,b)},this)};
+var Pb=function(a,b){if(0==a.state_)if(a.parent_){var c=a.parent_;if(c.callbackEntries_){for(var d=0,e=-1,g=0,f;f=c.callbackEntries_[g];g++)if(f=f.child)if(d++,f==a&&(e=g),0<=e&&1<d)break;0<=e&&(0==c.state_&&1==d?Pb(c,b):(d=c.callbackEntries_.splice(e,1)[0],d.child&&Qb(c),d.onRejected(b)))}}else I(a,3,b)},Sb=function(a,b){a.callbackEntries_&&a.callbackEntries_.length||2!=a.state_&&3!=a.state_||Rb(a);a.callbackEntries_||(a.callbackEntries_=[]);a.callbackEntries_.push(b)},Ob=function(a,b,c,d){var e=
+{child:null,onFulfilled:null,onRejected:null};e.child=new L(function(a,f){e.onFulfilled=b?function(c){try{var e=b.call(d,c);a(e)}catch(K){f(K)}}:a;e.onRejected=c?function(b){try{var e=c.call(d,b);!l(e)&&b instanceof J?f(b):a(e)}catch(K){f(K)}}:f});e.child.parent_=a;Sb(a,e);return e.child};L.prototype.unblockAndFulfill_=function(a){y(1==this.state_);this.state_=0;I(this,2,a)};L.prototype.unblockAndReject_=function(a){y(1==this.state_);this.state_=0;I(this,3,a)};
+var I=function(a,b,c){if(0==a.state_){if(a==c)b=3,c=new TypeError("Promise cannot resolve to itself");else{if(Nb(c)){a.state_=1;c.then(a.unblockAndFulfill_,a.unblockAndReject_,a);return}if(da(c))try{var d=c.then;if(r(d)){Tb(a,c,d);return}}catch(e){b=3,c=e}}a.result_=c;a.state_=b;Rb(a);3!=b||c instanceof J||Ub(a,c)}},Tb=function(a,b,c){a.state_=1;var d=!1,e=function(b){d||(d=!0,a.unblockAndFulfill_(b))},g=function(b){d||(d=!0,a.unblockAndReject_(b))};try{c.call(b,e,g)}catch(f){g(f)}},Rb=function(a){a.executing_||
+(a.executing_=!0,Kb(a.executeCallbacks_,a))};L.prototype.executeCallbacks_=function(){for(;this.callbackEntries_&&this.callbackEntries_.length;){var a=this.callbackEntries_;this.callbackEntries_=[];for(var b=0;b<a.length;b++){var c=a[b],d=this.result_;if(2==this.state_)c.onFulfilled(d);else c.child&&Qb(this),c.onRejected(d)}}this.executing_=!1};
+var Qb=function(a){for(;a&&a.hadUnhandledRejection_;a=a.parent_)a.hadUnhandledRejection_=!1},Ub=function(a,b){a.hadUnhandledRejection_=!0;Kb(function(){a.hadUnhandledRejection_&&Vb.call(null,b)})},Vb=Cb,J=function(a){x.call(this,a)};u(J,x);J.prototype.name="cancel";/*
+ Portions of this code are from MochiKit, received by
+ The Closure Authors under the MIT license. All other code is Copyright
+ 2005-2009 The Closure Authors. All Rights Reserved.
+*/
+var M=function(a,b){this.sequence_=[];this.onCancelFunction_=a;this.defaultScope_=b||null;this.hadError_=this.fired_=!1;this.result_=void 0;this.silentlyCanceled_=this.blocking_=this.blocked_=!1;this.unhandledErrorId_=0;this.parent_=null;this.branches_=0};
+M.prototype.cancel=function(a){if(this.fired_)this.result_ instanceof M&&this.result_.cancel();else{if(this.parent_){var b=this.parent_;delete this.parent_;a?b.cancel(a):(b.branches_--,0>=b.branches_&&b.cancel())}this.onCancelFunction_?this.onCancelFunction_.call(this.defaultScope_,this):this.silentlyCanceled_=!0;this.fired_||Wb(this,new Xb)}};M.prototype.continue_=function(a,b){this.blocked_=!1;Yb(this,a,b)};
+var Yb=function(a,b,c){a.fired_=!0;a.result_=c;a.hadError_=!b;Zb(a)},ac=function(a){if(a.fired_){if(!a.silentlyCanceled_)throw new $b;a.silentlyCanceled_=!1}};M.prototype.callback=function(a){ac(this);bc(a);Yb(this,!0,a)};var Wb=function(a,b){ac(a);bc(b);Yb(a,!1,b)},bc=function(a){y(!(a instanceof M),"An execution sequence may not be initiated with a blocking Deferred.")},cc=function(a,b,c,d){y(!a.blocking_,"Blocking Deferreds can not be re-used");a.sequence_.push([b,c,d]);a.fired_&&Zb(a)};
+M.prototype.then=function(a,b,c){var d,e,g=new L(function(a,b){d=a;e=b});cc(this,d,function(a){a instanceof Xb?g.cancel():e(a)});return g.then(a,b,c)};Mb(M);
+var dc=function(a){return Pa(a.sequence_,function(a){return r(a[1])})},Zb=function(a){if(a.unhandledErrorId_&&a.fired_&&dc(a)){var b=a.unhandledErrorId_,c=ec[b];c&&(k.clearTimeout(c.id_),delete ec[b]);a.unhandledErrorId_=0}a.parent_&&(a.parent_.branches_--,delete a.parent_);for(var b=a.result_,d=c=!1;a.sequence_.length&&!a.blocked_;){var e=a.sequence_.shift(),g=e[0],f=e[1],e=e[2];if(g=a.hadError_?f:g)try{var m=g.call(e||a.defaultScope_,b);l(m)&&(a.hadError_=a.hadError_&&(m==b||m instanceof Error),
+a.result_=b=m);Nb(b)&&(d=!0,a.blocked_=!0)}catch($){b=$,a.hadError_=!0,dc(a)||(c=!0)}}a.result_=b;d&&(m=s(a.continue_,a,!0),d=s(a.continue_,a,!1),b instanceof M?(cc(b,m,d),b.blocking_=!0):b.then(m,d));c&&(b=new fc(b),ec[b.id_]=b,a.unhandledErrorId_=b.id_)},$b=function(){x.call(this)};u($b,x);$b.prototype.message="Deferred has already fired";$b.prototype.name="AlreadyCalledError";var Xb=function(){x.call(this)};u(Xb,x);Xb.prototype.message="Deferred was canceled";Xb.prototype.name="CanceledError";
+var fc=function(a){this.id_=k.setTimeout(s(this.throwError,this),0);this.error_=a};fc.prototype.throwError=function(){y(ec[this.id_],"Cannot throw an error that is not scheduled.");delete ec[this.id_];throw this.error_;};var ec={};var kc=function(a){var b={},c=b.document||document,d=document.createElement("SCRIPT"),e={script_:d,timeout_:void 0},g=new M(gc,e),f=null,m=null!=b.timeout?b.timeout:5E3;0<m&&(f=window.setTimeout(function(){hc(d,!0);Wb(g,new ic(1,"Timeout reached for loading script "+a))},m),e.timeout_=f);d.onload=d.onreadystatechange=function(){d.readyState&&"loaded"!=d.readyState&&"complete"!=d.readyState||(hc(d,b.cleanupWhenDone||!1,f),g.callback(null))};d.onerror=function(){hc(d,!0,f);Wb(g,new ic(0,"Error while loading script "+
+a))};vb(d,{type:"text/javascript",charset:"UTF-8",src:a});jc(c).appendChild(d);return g},jc=function(a){var b=a.getElementsByTagName("HEAD");return b&&0!=b.length?b[0]:a.documentElement},gc=function(){if(this&&this.script_){var a=this.script_;a&&"SCRIPT"==a.tagName&&hc(a,!0,this.timeout_)}},hc=function(a,b,c){null!=c&&k.clearTimeout(c);a.onload=ba;a.onerror=ba;a.onreadystatechange=ba;b&&window.setTimeout(function(){Bb(a)},0)},ic=function(a,b){var c="Jsloader error (code #"+a+")";b&&(c+=": "+b);x.call(this,
+c);this.code=a};u(ic,x);var lc=function(a){lc[" "](a);return a};lc[" "]=ba;var mc=!C||C&&9<=hb,nc=C&&!F("9");!E||F("528");D&&F("1.9b")||C&&F("8")||cb&&F("9.5")||E&&F("528");D&&!F("8")||C&&F("9");var N=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.propagationStopped_=!1;this.returnValue_=!0};N.prototype.disposeInternal=function(){};N.prototype.dispose=function(){};N.prototype.preventDefault=function(){this.defaultPrevented=!0;this.returnValue_=!1};var O=function(a,b){N.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.event_=this.state=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(D){var e;t:{try{lc(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
+c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=E||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=E||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=
+a.metaKey;this.state=a.state;this.event_=a;a.defaultPrevented&&this.preventDefault()}};u(O,N);O.prototype.preventDefault=function(){O.superClass_.preventDefault.call(this);var a=this.event_;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,nc)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};O.prototype.disposeInternal=function(){};var oc="closure_listenable_"+(1E6*Math.random()|0),pc=0;var qc=function(a,b,c,d,e){this.listener=a;this.proxy=null;this.src=b;this.type=c;this.capture=!!d;this.handler=e;this.key=++pc;this.removed=this.callOnce=!1},rc=function(a){a.removed=!0;a.listener=null;a.proxy=null;a.src=null;a.handler=null};var P=function(a){this.src=a;this.listeners={};this.typeCount_=0};P.prototype.add=function(a,b,c,d,e){var g=a.toString();a=this.listeners[g];a||(a=this.listeners[g]=[],this.typeCount_++);var f=sc(a,b,d,e);-1<f?(b=a[f],c||(b.callOnce=!1)):(b=new qc(b,this.src,g,!!d,e),b.callOnce=c,a.push(b));return b};
+P.prototype.remove=function(a,b,c,d){a=a.toString();if(!(a in this.listeners))return!1;var e=this.listeners[a];b=sc(e,b,c,d);return-1<b?(rc(e[b]),y(null!=e.length),z.splice.call(e,b,1),0==e.length&&(delete this.listeners[a],this.typeCount_--),!0):!1};var tc=function(a,b){var c=b.type;if(!(c in a.listeners))return!1;var d=Qa(a.listeners[c],b);d&&(rc(b),0==a.listeners[c].length&&(delete a.listeners[c],a.typeCount_--));return d};
+P.prototype.removeAll=function(a){a=a&&a.toString();var b=0,c;for(c in this.listeners)if(!a||c==a){for(var d=this.listeners[c],e=0;e<d.length;e++)++b,rc(d[e]);delete this.listeners[c];this.typeCount_--}return b};P.prototype.getListener=function(a,b,c,d){a=this.listeners[a.toString()];var e=-1;a&&(e=sc(a,b,c,d));return-1<e?a[e]:null};var sc=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.removed&&g.listener==b&&g.capture==!!c&&g.handler==d)return e}return-1};var uc="closure_lm_"+(1E6*Math.random()|0),vc={},wc=0,xc=function(a,b,c,d,e){if(p(b)){for(var g=0;g<b.length;g++)xc(a,b[g],c,d,e);return null}c=yc(c);if(a&&a[oc])a=a.listen(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,f=zc(a);f||(a[uc]=f=new P(a));c=f.add(b,c,!1,d,e);c.proxy||(d=Ac(),c.proxy=d,d.src=a,d.listener=c,a.addEventListener?a.addEventListener(b.toString(),d,g):a.attachEvent(Bc(b.toString()),d),wc++);a=c}return a},Ac=function(){var a=Cc,b=mc?function(c){return a.call(b.src,
+b.listener,c)}:function(c){c=a.call(b.src,b.listener,c);if(!c)return c};return b},Dc=function(a,b,c,d,e){if(p(b))for(var g=0;g<b.length;g++)Dc(a,b[g],c,d,e);else c=yc(c),a&&a[oc]?a.unlisten(b,c,d,e):a&&(a=zc(a))&&(b=a.getListener(b,c,!!d,e))&&Ec(b)},Ec=function(a){if("number"==typeof a||!a||a.removed)return!1;var b=a.src;if(b&&b[oc])return tc(b.eventTargetListeners_,a);var c=a.type,d=a.proxy;b.removeEventListener?b.removeEventListener(c,d,a.capture):b.detachEvent&&b.detachEvent(Bc(c),d);wc--;(c=zc(b))?
+(tc(c,a),0==c.typeCount_&&(c.src=null,b[uc]=null)):rc(a);return!0},Bc=function(a){return a in vc?vc[a]:vc[a]="on"+a},Gc=function(a,b,c,d){var e=1;if(a=zc(a))if(b=a.listeners[b.toString()])for(b=b.concat(),a=0;a<b.length;a++){var g=b[a];g&&g.capture==c&&!g.removed&&(e&=!1!==Fc(g,d))}return Boolean(e)},Fc=function(a,b){var c=a.listener,d=a.handler||a.src;a.callOnce&&Ec(a);return c.call(d,b)},Cc=function(a,b){if(a.removed)return!0;if(!mc){var c;if(!(c=b))t:{c=["window","event"];for(var d=k,e;e=c.shift();)if(null!=
+d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new O(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==e.keyCode)try{e.keyCode=-1;break t}catch(f){g=!0}if(g||void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,m=e.length-1;!c.propagationStopped_&&0<=m;m--)c.currentTarget=e[m],d&=Gc(e[m],g,!0,c);for(m=0;!c.propagationStopped_&&m<e.length;m++)c.currentTarget=e[m],d&=Gc(e[m],g,!1,c)}return d}return Fc(a,new O(b,this))},zc=function(a){a=
+a[uc];return a instanceof P?a:null},Hc="__closure_events_fn_"+(1E9*Math.random()>>>0),yc=function(a){y(a,"Listener can not be null.");if(r(a))return a;y(a.handleEvent,"An object listener must have handleEvent method.");a[Hc]||(a[Hc]=function(b){return a.handleEvent(b)});return a[Hc]};var Q=function(a){H.call(this);this.handler_=a;this.keys_={}};u(Q,H);var Ic=[];h=Q.prototype;h.listen=function(a,b,c,d){p(b)||(b&&(Ic[0]=b.toString()),b=Ic);for(var e=0;e<b.length;e++){var g=xc(a,b[e],c||this.handleEvent,d||!1,this.handler_||this);if(!g)break;this.keys_[g.key]=g}return this};
+h.unlisten=function(a,b,c,d,e){if(p(b))for(var g=0;g<b.length;g++)this.unlisten(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.handler_||this,c=yc(c),d=!!d,b=a&&a[oc]?a.getListener(b,c,d,e):a?(a=zc(a))?a.getListener(b,c,d,e):null:null,b&&(Ec(b),delete this.keys_[b.key]);return this};h.removeAll=function(){Ta(this.keys_,Ec);this.keys_={}};h.disposeInternal=function(){Q.superClass_.disposeInternal.call(this);this.removeAll()};
+h.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var R=function(){H.call(this);this.eventTargetListeners_=new P(this);this.actualEventTarget_=this;this.parentEventTarget_=null};u(R,H);R.prototype[oc]=!0;h=R.prototype;h.setParentEventTarget=function(a){this.parentEventTarget_=a};h.addEventListener=function(a,b,c,d){xc(this,a,b,c,d)};h.removeEventListener=function(a,b,c,d){Dc(this,a,b,c,d)};
+h.dispatchEvent=function(a){Jc(this);var b,c=this.parentEventTarget_;if(c){b=[];for(var d=1;c;c=c.parentEventTarget_)b.push(c),y(1E3>++d,"infinite loop")}c=this.actualEventTarget_;d=a.type||a;if(q(a))a=new N(a,c);else if(a instanceof N)a.target=a.target||c;else{var e=a;a=new N(d,c);Za(a,e)}var e=!0,g;if(b)for(var f=b.length-1;!a.propagationStopped_&&0<=f;f--)g=a.currentTarget=b[f],e=Kc(g,d,!0,a)&&e;a.propagationStopped_||(g=a.currentTarget=c,e=Kc(g,d,!0,a)&&e,a.propagationStopped_||(e=Kc(g,d,!1,a)&&
+e));if(b)for(f=0;!a.propagationStopped_&&f<b.length;f++)g=a.currentTarget=b[f],e=Kc(g,d,!1,a)&&e;return e};h.disposeInternal=function(){R.superClass_.disposeInternal.call(this);this.eventTargetListeners_&&this.eventTargetListeners_.removeAll(void 0);this.parentEventTarget_=null};h.listen=function(a,b,c,d){Jc(this);return this.eventTargetListeners_.add(String(a),b,!1,c,d)};h.unlisten=function(a,b,c,d){return this.eventTargetListeners_.remove(String(a),b,c,d)};
+var Kc=function(a,b,c,d){b=a.eventTargetListeners_.listeners[String(b)];if(!b)return!0;b=b.concat();for(var e=!0,g=0;g<b.length;++g){var f=b[g];if(f&&!f.removed&&f.capture==c){var m=f.listener,$=f.handler||f.src;f.callOnce&&tc(a.eventTargetListeners_,f);e=!1!==m.call($,d)&&e}}return e&&0!=d.returnValue_};R.prototype.getListener=function(a,b,c,d){return this.eventTargetListeners_.getListener(String(a),b,c,d)};var Jc=function(a){y(a.eventTargetListeners_,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var S=function(a){R.call(this);this.imageIdToRequestMap_={};this.imageIdToImageMap_={};this.handler_=new Q(this);this.parent_=a};u(S,R);var Lc=[C&&!F("11")?"readystatechange":"load","abort","error"],Mc=function(a,b,c){(c=q(c)?c:c.src)&&(a.imageIdToRequestMap_[b]={src:c,corsRequestType:l(void 0)?void 0:null})};
+S.prototype.start=function(){var a=this.imageIdToRequestMap_;Na(Ua(a),function(b){var c=a[b];if(c&&(delete a[b],!this.disposed_)){var d;d=this.parent_?sb(this.parent_).createDom("img"):new Image;c.corsRequestType&&(d.crossOrigin=c.corsRequestType);this.handler_.listen(d,Lc,this.onNetworkEvent_);this.imageIdToImageMap_[b]=d;d.id=b;d.src=c.src}},this)};
+S.prototype.onNetworkEvent_=function(a){var b=a.currentTarget;if(b){if("readystatechange"==a.type)if("complete"==b.readyState)a.type="load";else return;"undefined"==typeof b.naturalWidth&&("load"==a.type?(b.naturalWidth=b.width,b.naturalHeight=b.height):(b.naturalWidth=0,b.naturalHeight=0));this.dispatchEvent({type:a.type,target:b});!this.disposed_&&(a=b.id,delete this.imageIdToRequestMap_[a],b=this.imageIdToImageMap_[a])&&(delete this.imageIdToImageMap_[a],this.handler_.unlisten(b,Lc,this.onNetworkEvent_),
+Va(this.imageIdToImageMap_)&&Va(this.imageIdToRequestMap_)&&this.dispatchEvent("complete"))}};S.prototype.disposeInternal=function(){delete this.imageIdToRequestMap_;delete this.imageIdToImageMap_;ob(this.handler_);S.superClass_.disposeInternal.call(this)};var T=function(){};T.getInstance=function(){return T.instance_?T.instance_:T.instance_=new T};T.prototype.nextId_=0;var U=function(a){R.call(this);this.dom_=a||sb();this.id_=null;this.inDocument_=!1;this.element_=null;this.googUiComponentHandler_=void 0;this.childIndex_=this.children_=this.parent_=null;this.wasDecorated_=!1};u(U,R);h=U.prototype;h.idGenerator_=T.getInstance();h.getElement=function(){return this.element_};h.setParentEventTarget=function(a){if(this.parent_&&this.parent_!=a)throw Error("Method not supported");U.superClass_.setParentEventTarget.call(this,a)};h.getDomHelper=function(){return this.dom_};
+h.createDom=function(){this.element_=this.dom_.createElement("div")};
+var Oc=function(a,b){if(a.inDocument_)throw Error("Component already rendered");a.element_||a.createDom();b?b.insertBefore(a.element_,null):a.dom_.document_.body.appendChild(a.element_);a.parent_&&!a.parent_.inDocument_||Nc(a)},Nc=function(a){a.inDocument_=!0;Pc(a,function(a){!a.inDocument_&&a.getElement()&&Nc(a)})},Qc=function(a){Pc(a,function(a){a.inDocument_&&Qc(a)});a.googUiComponentHandler_&&a.googUiComponentHandler_.removeAll();a.inDocument_=!1};
+U.prototype.disposeInternal=function(){this.inDocument_&&Qc(this);this.googUiComponentHandler_&&(this.googUiComponentHandler_.dispose(),delete this.googUiComponentHandler_);Pc(this,function(a){a.dispose()});!this.wasDecorated_&&this.element_&&Bb(this.element_);this.parent_=this.element_=this.childIndex_=this.children_=null;U.superClass_.disposeInternal.call(this)};var Pc=function(a,b){a.children_&&Na(a.children_,b,void 0)};
+U.prototype.removeChild=function(a,b){if(a){var c=q(a)?a:a.id_||(a.id_=":"+(a.idGenerator_.nextId_++).toString(36)),d;this.childIndex_&&c?(d=this.childIndex_,d=(c in d?d[c]:void 0)||null):d=null;a=d;if(c&&a){d=this.childIndex_;c in d&&delete d[c];Qa(this.children_,a);b&&(Qc(a),a.element_&&Bb(a.element_));c=a;if(null==c)throw Error("Unable to set parent component");c.parent_=null;U.superClass_.setParentEventTarget.call(c,null)}}if(!a)throw Error("Child is not in parent component");return a};var V=function(a,b,c){U.call(this,c);this.captchaImage_=a;this.adImage_=b&&300==b.naturalWidth&&57==b.naturalHeight?b:null};u(V,U);V.prototype.createDom=function(){V.superClass_.createDom.call(this);var a=this.getElement();this.captchaImage_.alt=W.image_alt_text;this.getDomHelper().appendChild(a,this.captchaImage_);this.adImage_&&(this.adImage_.alt=W.image_alt_text,this.getDomHelper().appendChild(a,this.adImage_),this.adImage_&&Rc(this.adImage_)&&(a.innerHTML+='<div id="recaptcha-ad-choices"><div class="recaptcha-ad-choices-collapsed"><img height="15" width="30" alt="AdChoices" border="0" src="//www.gstatic.com/recaptcha/api/img/adicon.png"/></div><div class="recaptcha-ad-choices-expanded"><a href="https://support.google.com/adsense/troubleshooter/1631343" target="_blank"><img height="15" width="75" alt="AdChoices" border="0" src="//www.gstatic.com/recaptcha/api/img/adchoices.png"/></a></div></div>'))};
+var Rc=function(a){var b=Sc(a,"visibility");a=Sc(a,"display");return"hidden"!=b&&"none"!=a},Sc=function(a,b){var c;t:{c=rb(a);if(c.defaultView&&c.defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(a,null))){c=c[b]||c.getPropertyValue(b)||"";break t}c=""}if(!(c=c||(a.currentStyle?a.currentStyle[b]:null))&&(c=a.style[Fa(b)],"undefined"===typeof c)){c=a.style;var d;t:if(d=Fa(b),void 0===a.style[d]){var e=(E?"Webkit":D?"Moz":C?"ms":cb?"O":null)+Ga(d);if(void 0!==a.style[e]){d=e;break t}}c=
+c[d]||""}return c};V.prototype.disposeInternal=function(){delete this.captchaImage_;delete this.adImage_;V.superClass_.disposeInternal.call(this)};var Tc=function(a,b,c){H.call(this);this.listener_=a;this.interval_=b||0;this.handler_=c;this.callback_=s(this.doAction_,this)};u(Tc,H);h=Tc.prototype;h.id_=0;h.disposeInternal=function(){Tc.superClass_.disposeInternal.call(this);this.stop();delete this.listener_;delete this.handler_};
+h.start=function(a){this.stop();var b=this.callback_;a=l(a)?a:this.interval_;if(!r(b))if(b&&"function"==typeof b.handleEvent)b=s(b.handleEvent,b);else throw Error("Invalid listener argument");this.id_=2147483647<a?-1:k.setTimeout(b,a||0)};h.stop=function(){this.isActive()&&k.clearTimeout(this.id_);this.id_=0};h.isActive=function(){return 0!=this.id_};h.doAction_=function(){this.id_=0;this.listener_&&this.listener_.call(this.handler_)};var Uc=function(a,b){H.call(this);this.listener_=a;this.handler_=b;this.delay_=new Tc(s(this.onTick_,this),0,this)};u(Uc,H);h=Uc.prototype;h.interval_=0;h.runUntil_=0;h.disposeInternal=function(){this.delay_.dispose();delete this.listener_;delete this.handler_;Uc.superClass_.disposeInternal.call(this)};h.start=function(a,b){this.stop();var c=b||0;this.interval_=Math.max(a||0,0);this.runUntil_=0>c?-1:ha()+c;this.delay_.start(0>c?this.interval_:Math.min(this.interval_,c))};h.stop=function(){this.delay_.stop()};
+h.isActive=function(){return this.delay_.isActive()};h.onSuccess=function(){};h.onFailure=function(){};h.onTick_=function(){if(this.listener_.call(this.handler_))this.onSuccess();else if(0>this.runUntil_)this.delay_.start(this.interval_);else{var a=this.runUntil_-ha();if(0>=a)this.onFailure();else this.delay_.start(Math.min(this.interval_,a))}};$a("area base br col command embed hr img input keygen link meta param source track wbr".split(" "));$a("action","cite","data","formaction","href","manifest","poster","src");$a("link","script","style");var Vc={sanitizedContentKindHtml:!0},Wc={sanitizedContentKindText:!0},Xc=function(){throw Error("Do not instantiate directly");};Xc.prototype.contentDir=null;Xc.prototype.toString=function(){return this.content};var bd=function(a){var b=Yc;y(b,"Soy template may not be null.");var c=sb().createElement("DIV");a=Zc(b(a||$c,void 0,void 0));b=a.match(ad);y(!b,"This template starts with a %s, which cannot be a child of a <div>, as required by soy internals. Consider using goog.soy.renderElement instead.\nTemplate output: %s",b&&b[0],a);c.innerHTML=a;return 1==c.childNodes.length&&(a=c.firstChild,1==a.nodeType)?a:c},Zc=function(a){if(!da(a))return String(a);if(a instanceof Xc){if(a.contentKind===Vc)return Ka(a.content);
+if(a.contentKind===Wc)return Da(a.content)}Ja("Soy template output is unsafe for use as HTML: "+a);return"zSoyz"},ad=/^<(body|caption|col|colgroup|head|html|tr|td|tbody|thead|tfoot)>/i,$c={};C&&F(8);var cd=function(){Xc.call(this)};u(cd,Xc);cd.prototype.contentKind=Vc;var dd=function(a){function b(a){this.content=a}b.prototype=a.prototype;return function(a,d){var e=new b(String(a));void 0!==d&&(e.contentDir=d);return e}}(cd);(function(a){function b(a){this.content=a}b.prototype=a.prototype;return function(a,d){var e=String(a);if(!e)return"";e=new b(e);void 0!==d&&(e.contentDir=d);return e}})(cd);
+var ed={"\x00":"\\x00","\b":"\\x08","\t":"\\t","\n":"\\n","\x0B":"\\x0b","\f":"\\f","\r":"\\r",'"':"\\x22",$:"\\x24","&":"\\x26","'":"\\x27","(":"\\x28",")":"\\x29","*":"\\x2a","+":"\\x2b",",":"\\x2c","-":"\\x2d",".":"\\x2e","/":"\\/",":":"\\x3a","<":"\\x3c","=":"\\x3d",">":"\\x3e","?":"\\x3f","[":"\\x5b","\\":"\\\\","]":"\\x5d","^":"\\x5e","{":"\\x7b","|":"\\x7c","}":"\\x7d","\u0085":"\\x85","\u2028":"\\u2028","\u2029":"\\u2029"},fd=function(a){return ed[a]},gd=/[\x00\x08-\x0d\x22\x26\x27\/\x3c-\x3e\\\x85\u2028\u2029]/g;var Yc=function(a){return dd('<script type="text/javascript">var challenge = \''+String(a.challenge).replace(gd,fd)+"'; var publisherId = '"+String(a.publisherId).replace(gd,fd)+"';"+("ca-mongoogle"==a.publisherId?'google_page_url = "3pcerttesting.com/dab/recaptcha.html";':"")+"\n    google_ad_client = publisherId;\n    google_ad_type = 'html';\n    google_ad_output = 'js';\n    google_image_size = '300x57';\n    google_captcha_token = challenge;\n    google_ad_request_done = function(ad) {\n      window.parent.recaptcha.ads.adutils.googleAdRequestDone(ad);\n    };\n    \x3c/script><script type=\"text/javascript\" src=\"//pagead2.googlesyndication.com/pagead/show_ads.js\">\x3c/script>")};
+Yc.soyTemplateName="recaptcha.soy.ads.iframeAdsLoader.main";var Wa=function(){var a=k.google_ad;return!!(a&&a.token&&a.imageAdUrl&&a.hashedAnswer&&a.salt&&a.delayedImpressionUrl&&a.engagementUrl)},hd=function(){k.google_ad&&(k.google_ad=null)},id=function(a){a=a||document.body;var b=k.google_ad;b&&b.searchUpliftUrl&&(b=xb("iframe",{src:'data:text/html;charset=utf-8,<body><img src="https://'+b.searchUpliftUrl+'"></img></body>',style:"display:none"}),a.appendChild(b))},jd=0,kd=function(a){var b=new S;Mc(b,"recaptcha-url-"+jd++,a);b.start()},ld=function(a,b){var c=
+RecaptchaState.publisher_id;hd();var d=xb("iframe",{id:"recaptcha-loader-"+jd++,style:"display: none"});document.body.appendChild(d);var e=d.contentWindow?d.contentWindow.document:d.contentDocument;e.open("text/html","replace");e.write(bd({challenge:a,publisherId:c}).innerHTML);e.close();c=new Uc(function(){return!!k.google_ad});c.onSuccess=function(){Bb(d);b()};c.onFailure=function(){Bb(d);b()};c.start(50,2E3)};t("recaptcha.ads.adutils.googleAdRequestDone",function(a){k.google_ad=a});var md=function(){this.blockSize=-1};var nd=function(){this.blockSize=-1;this.blockSize=64;this.chain_=Array(4);this.block_=Array(this.blockSize);this.totalLength_=this.blockLength_=0;this.reset()};u(nd,md);nd.prototype.reset=function(){this.chain_[0]=1732584193;this.chain_[1]=4023233417;this.chain_[2]=2562383102;this.chain_[3]=271733878;this.totalLength_=this.blockLength_=0};
+var od=function(a,b,c){c||(c=0);var d=Array(16);if(q(b))for(var e=0;16>e;++e)d[e]=b.charCodeAt(c++)|b.charCodeAt(c++)<<8|b.charCodeAt(c++)<<16|b.charCodeAt(c++)<<24;else for(e=0;16>e;++e)d[e]=b[c++]|b[c++]<<8|b[c++]<<16|b[c++]<<24;b=a.chain_[0];c=a.chain_[1];var e=a.chain_[2],g=a.chain_[3],f=0,f=b+(g^c&(e^g))+d[0]+3614090360&4294967295;b=c+(f<<7&4294967295|f>>>25);f=g+(e^b&(c^e))+d[1]+3905402710&4294967295;g=b+(f<<12&4294967295|f>>>20);f=e+(c^g&(b^c))+d[2]+606105819&4294967295;e=g+(f<<17&4294967295|
+f>>>15);f=c+(b^e&(g^b))+d[3]+3250441966&4294967295;c=e+(f<<22&4294967295|f>>>10);f=b+(g^c&(e^g))+d[4]+4118548399&4294967295;b=c+(f<<7&4294967295|f>>>25);f=g+(e^b&(c^e))+d[5]+1200080426&4294967295;g=b+(f<<12&4294967295|f>>>20);f=e+(c^g&(b^c))+d[6]+2821735955&4294967295;e=g+(f<<17&4294967295|f>>>15);f=c+(b^e&(g^b))+d[7]+4249261313&4294967295;c=e+(f<<22&4294967295|f>>>10);f=b+(g^c&(e^g))+d[8]+1770035416&4294967295;b=c+(f<<7&4294967295|f>>>25);f=g+(e^b&(c^e))+d[9]+2336552879&4294967295;g=b+(f<<12&4294967295|
+f>>>20);f=e+(c^g&(b^c))+d[10]+4294925233&4294967295;e=g+(f<<17&4294967295|f>>>15);f=c+(b^e&(g^b))+d[11]+2304563134&4294967295;c=e+(f<<22&4294967295|f>>>10);f=b+(g^c&(e^g))+d[12]+1804603682&4294967295;b=c+(f<<7&4294967295|f>>>25);f=g+(e^b&(c^e))+d[13]+4254626195&4294967295;g=b+(f<<12&4294967295|f>>>20);f=e+(c^g&(b^c))+d[14]+2792965006&4294967295;e=g+(f<<17&4294967295|f>>>15);f=c+(b^e&(g^b))+d[15]+1236535329&4294967295;c=e+(f<<22&4294967295|f>>>10);f=b+(e^g&(c^e))+d[1]+4129170786&4294967295;b=c+(f<<
+5&4294967295|f>>>27);f=g+(c^e&(b^c))+d[6]+3225465664&4294967295;g=b+(f<<9&4294967295|f>>>23);f=e+(b^c&(g^b))+d[11]+643717713&4294967295;e=g+(f<<14&4294967295|f>>>18);f=c+(g^b&(e^g))+d[0]+3921069994&4294967295;c=e+(f<<20&4294967295|f>>>12);f=b+(e^g&(c^e))+d[5]+3593408605&4294967295;b=c+(f<<5&4294967295|f>>>27);f=g+(c^e&(b^c))+d[10]+38016083&4294967295;g=b+(f<<9&4294967295|f>>>23);f=e+(b^c&(g^b))+d[15]+3634488961&4294967295;e=g+(f<<14&4294967295|f>>>18);f=c+(g^b&(e^g))+d[4]+3889429448&4294967295;c=
+e+(f<<20&4294967295|f>>>12);f=b+(e^g&(c^e))+d[9]+568446438&4294967295;b=c+(f<<5&4294967295|f>>>27);f=g+(c^e&(b^c))+d[14]+3275163606&4294967295;g=b+(f<<9&4294967295|f>>>23);f=e+(b^c&(g^b))+d[3]+4107603335&4294967295;e=g+(f<<14&4294967295|f>>>18);f=c+(g^b&(e^g))+d[8]+1163531501&4294967295;c=e+(f<<20&4294967295|f>>>12);f=b+(e^g&(c^e))+d[13]+2850285829&4294967295;b=c+(f<<5&4294967295|f>>>27);f=g+(c^e&(b^c))+d[2]+4243563512&4294967295;g=b+(f<<9&4294967295|f>>>23);f=e+(b^c&(g^b))+d[7]+1735328473&4294967295;
+e=g+(f<<14&4294967295|f>>>18);f=c+(g^b&(e^g))+d[12]+2368359562&4294967295;c=e+(f<<20&4294967295|f>>>12);f=b+(c^e^g)+d[5]+4294588738&4294967295;b=c+(f<<4&4294967295|f>>>28);f=g+(b^c^e)+d[8]+2272392833&4294967295;g=b+(f<<11&4294967295|f>>>21);f=e+(g^b^c)+d[11]+1839030562&4294967295;e=g+(f<<16&4294967295|f>>>16);f=c+(e^g^b)+d[14]+4259657740&4294967295;c=e+(f<<23&4294967295|f>>>9);f=b+(c^e^g)+d[1]+2763975236&4294967295;b=c+(f<<4&4294967295|f>>>28);f=g+(b^c^e)+d[4]+1272893353&4294967295;g=b+(f<<11&4294967295|
+f>>>21);f=e+(g^b^c)+d[7]+4139469664&4294967295;e=g+(f<<16&4294967295|f>>>16);f=c+(e^g^b)+d[10]+3200236656&4294967295;c=e+(f<<23&4294967295|f>>>9);f=b+(c^e^g)+d[13]+681279174&4294967295;b=c+(f<<4&4294967295|f>>>28);f=g+(b^c^e)+d[0]+3936430074&4294967295;g=b+(f<<11&4294967295|f>>>21);f=e+(g^b^c)+d[3]+3572445317&4294967295;e=g+(f<<16&4294967295|f>>>16);f=c+(e^g^b)+d[6]+76029189&4294967295;c=e+(f<<23&4294967295|f>>>9);f=b+(c^e^g)+d[9]+3654602809&4294967295;b=c+(f<<4&4294967295|f>>>28);f=g+(b^c^e)+d[12]+
+3873151461&4294967295;g=b+(f<<11&4294967295|f>>>21);f=e+(g^b^c)+d[15]+530742520&4294967295;e=g+(f<<16&4294967295|f>>>16);f=c+(e^g^b)+d[2]+3299628645&4294967295;c=e+(f<<23&4294967295|f>>>9);f=b+(e^(c|~g))+d[0]+4096336452&4294967295;b=c+(f<<6&4294967295|f>>>26);f=g+(c^(b|~e))+d[7]+1126891415&4294967295;g=b+(f<<10&4294967295|f>>>22);f=e+(b^(g|~c))+d[14]+2878612391&4294967295;e=g+(f<<15&4294967295|f>>>17);f=c+(g^(e|~b))+d[5]+4237533241&4294967295;c=e+(f<<21&4294967295|f>>>11);f=b+(e^(c|~g))+d[12]+1700485571&
+4294967295;b=c+(f<<6&4294967295|f>>>26);f=g+(c^(b|~e))+d[3]+2399980690&4294967295;g=b+(f<<10&4294967295|f>>>22);f=e+(b^(g|~c))+d[10]+4293915773&4294967295;e=g+(f<<15&4294967295|f>>>17);f=c+(g^(e|~b))+d[1]+2240044497&4294967295;c=e+(f<<21&4294967295|f>>>11);f=b+(e^(c|~g))+d[8]+1873313359&4294967295;b=c+(f<<6&4294967295|f>>>26);f=g+(c^(b|~e))+d[15]+4264355552&4294967295;g=b+(f<<10&4294967295|f>>>22);f=e+(b^(g|~c))+d[6]+2734768916&4294967295;e=g+(f<<15&4294967295|f>>>17);f=c+(g^(e|~b))+d[13]+1309151649&
+4294967295;c=e+(f<<21&4294967295|f>>>11);f=b+(e^(c|~g))+d[4]+4149444226&4294967295;b=c+(f<<6&4294967295|f>>>26);f=g+(c^(b|~e))+d[11]+3174756917&4294967295;g=b+(f<<10&4294967295|f>>>22);f=e+(b^(g|~c))+d[2]+718787259&4294967295;e=g+(f<<15&4294967295|f>>>17);f=c+(g^(e|~b))+d[9]+3951481745&4294967295;a.chain_[0]=a.chain_[0]+b&4294967295;a.chain_[1]=a.chain_[1]+(e+(f<<21&4294967295|f>>>11))&4294967295;a.chain_[2]=a.chain_[2]+e&4294967295;a.chain_[3]=a.chain_[3]+g&4294967295};
+nd.prototype.update=function(a,b){l(b)||(b=a.length);for(var c=b-this.blockSize,d=this.block_,e=this.blockLength_,g=0;g<b;){if(0==e)for(;g<=c;)od(this,a,g),g+=this.blockSize;if(q(a))for(;g<b;){if(d[e++]=a.charCodeAt(g++),e==this.blockSize){od(this,d);e=0;break}}else for(;g<b;)if(d[e++]=a[g++],e==this.blockSize){od(this,d);e=0;break}}this.blockLength_=e;this.totalLength_+=b};var X=function(){Q.call(this);this.callback_=this.element_=null;this.md5_=new nd};u(X,Q);var pd=function(a,b,c,d,e){a.unwatch();a.element_=b;a.callback_=e;a.listen(b,"keyup",s(a.onChanged_,a,c,d))};X.prototype.unwatch=function(){this.element_&&this.callback_&&(this.removeAll(),this.callback_=this.element_=null)};
+X.prototype.onChanged_=function(a,b){var c;c=(c=this.element_.value)?c.replace(/[\s\xa0]+/g,"").toLowerCase():"";this.md5_.reset();this.md5_.update(c+"."+b);c=this.md5_;var d=Array((56>c.blockLength_?c.blockSize:2*c.blockSize)-c.blockLength_);d[0]=128;for(var e=1;e<d.length-8;++e)d[e]=0;for(var g=8*c.totalLength_,e=d.length-8;e<d.length;++e)d[e]=g&255,g/=256;c.update(d);d=Array(16);for(e=g=0;4>e;++e)for(var f=0;32>f;f+=8)d[g++]=c.chain_[e]>>>f&255;jb(d).toLowerCase()==a.toLowerCase()&&this.callback_()};
+X.prototype.disposeInternal=function(){this.element_=null;X.superClass_.disposeInternal.call(this)};var rd=function(a,b,c){this.adObject_=a;this.captchaImageUrl_=b;this.opt_successCallback_=c||null;qd(this)};u(rd,H);var qd=function(a){var b=new S;nb(a,ga(ob,b));Mc(b,"recaptcha_challenge_image",a.captchaImageUrl_);Mc(b,"recaptcha_ad_image",a.adObject_.imageAdUrl);var c={};xc(b,"load",s(function(a,b){a[b.target.id]=b.target},a,c));xc(b,"complete",s(a.handleImagesLoaded_,a,c));b.start()};
+rd.prototype.handleImagesLoaded_=function(a){a=new V(a.recaptcha_challenge_image,a.recaptcha_ad_image);nb(this,ga(ob,a));var b=tb(document,"recaptcha_image");Ab(b);Oc(a,b);a.adImage_&&Rc(a.adImage_)&&(kd(this.adObject_.delayedImpressionUrl),a=new X,nb(this,ga(ob,a)),pd(a,tb(document,"recaptcha_response_field"),this.adObject_.hashedAnswer,this.adObject_.salt,s(function(a,b){a.unwatch();kd(b)},this,a,this.adObject_.engagementUrl)),this.opt_successCallback_&&this.opt_successCallback_("04"+this.adObject_.token))};var W=w;t("RecaptchaStr",W);var Y=k.RecaptchaOptions;t("RecaptchaOptions",Y);var sd={tabindex:0,theme:"red",callback:null,lang:null,custom_theme_widget:null,custom_translations:null};t("RecaptchaDefaultOptions",sd);
+var Z={widget:null,timer_id:-1,style_set:!1,theme:null,type:"image",ajax_verify_cb:null,th1:null,th2:null,th3:null,element:"",ad_captcha_plugin:null,reload_timeout:-1,force_reload:!1,$:function(a){return"string"==typeof a?document.getElementById(a):a},attachEvent:function(a,b,c){a&&a.addEventListener?a.addEventListener(b,c,!1):a&&a.attachEvent&&a.attachEvent("on"+b,c)},create:function(a,b,c){Z.destroy();b&&(Z.widget=Z.$(b),Z.element=b);Z._init_options(c);Z._call_challenge(a)},destroy:function(){var a=
+Z.$("recaptcha_challenge_field");a&&a.parentNode.removeChild(a);-1!=Z.timer_id&&clearInterval(Z.timer_id);Z.timer_id=-1;if(a=Z.$("recaptcha_image"))a.innerHTML="";Z.update_widget();Z.widget&&("custom"!=Z.theme?Z.widget.innerHTML="":Z.widget.style.display="none",Z.widget=null)},focus_response_field:function(){var a=Z.$("recaptcha_response_field");a&&a.focus()},get_challenge:function(){return"undefined"==typeof RecaptchaState?null:RecaptchaState.challenge},get_response:function(){var a=Z.$("recaptcha_response_field");
+return a?a.value:null},ajax_verify:function(a){Z.ajax_verify_cb=a;a=Z.get_challenge()||"";var b=Z.get_response()||"";a=Z._get_api_server()+"/ajaxverify?c="+encodeURIComponent(a)+"&response="+encodeURIComponent(b);Z._add_script(a)},_ajax_verify_callback:function(a){Z.ajax_verify_cb(a)},_get_overridable_url:function(a){var b=window.location.protocol;if("undefined"!=typeof _RecaptchaOverrideApiServer)a=_RecaptchaOverrideApiServer;else if("undefined"!=typeof RecaptchaState&&"string"==typeof RecaptchaState.server&&
+0<RecaptchaState.server.length)return RecaptchaState.server.replace(/\/+$/,"");return b+"//"+a},_get_api_server:function(){return Z._get_overridable_url("www.google.com/recaptcha/api")},_get_static_url_root:function(){return Z._get_overridable_url("www.gstatic.com/recaptcha/api")},_call_challenge:function(a){a=Z._get_api_server()+"/challenge?k="+a+"&ajax=1&cachestop="+Math.random();Z.getLang_()&&(a+="&lang="+Z.getLang_());"undefined"!=typeof Y.extra_challenge_params&&(a+="&"+Y.extra_challenge_params);
+Z._add_script(a)},_add_script:function(a){var b=document.createElement("script");b.type="text/javascript";b.src=a;Z._get_script_area().appendChild(b)},_get_script_area:function(){var a=document.getElementsByTagName("head");return a=!a||1>a.length?document.body:a[0]},_hash_merge:function(a){for(var b={},c=0;c<a.length;c++)for(var d in a[c])b[d]=a[c][d];return b},_init_options:function(a){Y=Z._hash_merge([sd,a||{}])},challenge_callback_internal:function(){Z.update_widget();Z._reset_timer();W=Z._hash_merge([w,
+sa[Z.getLang_()]||{},Y.custom_translations||{}]);window.addEventListener&&window.addEventListener("unload",function(){Z.destroy()},!1);Z._is_ie()&&window.attachEvent&&window.attachEvent("onbeforeunload",function(){});if(0<navigator.userAgent.indexOf("KHTML")){var a=document.createElement("iframe");a.src="about:blank";a.style.height="0px";a.style.width="0px";a.style.visibility="hidden";a.style.border="none";a.appendChild(document.createTextNode("This frame prevents back/forward cache problems in Safari."));
+document.body.appendChild(a)}Z._finish_widget()},_add_css:function(a){if(-1!=navigator.appVersion.indexOf("MSIE 5"))document.write('<style type="text/css">'+a+"</style>");else{var b=document.createElement("style");b.type="text/css";b.styleSheet?b.styleSheet.cssText=a:b.appendChild(document.createTextNode(a));Z._get_script_area().appendChild(b)}},_set_style:function(a){Z.style_set||(Z.style_set=!0,Z._add_css(a+"\n\n.recaptcha_is_showing_audio .recaptcha_only_if_image,.recaptcha_isnot_showing_audio .recaptcha_only_if_audio,.recaptcha_had_incorrect_sol .recaptcha_only_if_no_incorrect_sol,.recaptcha_nothad_incorrect_sol .recaptcha_only_if_incorrect_sol{display:none !important}"))},
+_init_builtin_theme:function(){var a=Z.$,b=Z._get_static_url_root(),c=v.VertCss,d=v.VertHtml,e=b+"/img/"+Z.theme,g="gif",b=Z.theme;"clean"==b&&(c=v.CleanCss,d=v.CleanHtml,g="png");c=c.replace(/IMGROOT/g,e);Z._set_style(c);Z.update_widget();Z.widget.innerHTML='<div id="recaptcha_area">'+d+"</div>";c=Z.getLang_();a("recaptcha_privacy")&&null!=c&&"en"==c.substring(0,2).toLowerCase()&&null!=W.privacy_and_terms&&0<W.privacy_and_terms.length&&(c=document.createElement("a"),c.href="http://www.google.com/intl/en/policies/",
+c.target="_blank",c.innerHTML=W.privacy_and_terms,a("recaptcha_privacy").appendChild(c));c=function(b,c,d,K){var G=a(b);G.src=e+"/"+c+"."+g;c=W[d];G.alt=c;b=a(b+"_btn");b.title=c;Z.attachEvent(b,"click",K)};c("recaptcha_reload","refresh","refresh_btn",function(){Z.reload_internal("r")});c("recaptcha_switch_audio","audio","audio_challenge",function(){Z.switch_type("audio")});c("recaptcha_switch_img","text","visual_challenge",function(){Z.switch_type("image")});c("recaptcha_whatsthis","help","help_btn",
+Z.showhelp);"clean"==b&&(a("recaptcha_logo").src=e+"/logo."+g);a("recaptcha_table").className="recaptchatable recaptcha_theme_"+Z.theme;b=function(b,c){var d=a(b);d&&(RecaptchaState.rtl&&"span"==d.tagName.toLowerCase()&&(d.dir="rtl"),d.appendChild(document.createTextNode(W[c])))};b("recaptcha_instructions_image","instructions_visual");b("recaptcha_instructions_audio","instructions_audio");b("recaptcha_instructions_error","incorrect_try_again");a("recaptcha_instructions_image")||a("recaptcha_instructions_audio")||
+(b="audio"==Z.type?W.instructions_audio:W.instructions_visual,b=b.replace(/:$/,""),a("recaptcha_response_field").setAttribute("placeholder",b))},_finish_widget:function(){var a=Z.$,b=Y,c=b.theme;c in{blackglass:1,clean:1,custom:1,red:1,white:1}||(c="red");Z.theme||(Z.theme=c);"custom"!=Z.theme?Z._init_builtin_theme():Z._set_style("");c=document.createElement("span");c.id="recaptcha_challenge_field_holder";c.style.display="none";a("recaptcha_response_field").parentNode.insertBefore(c,a("recaptcha_response_field"));
+a("recaptcha_response_field").setAttribute("autocomplete","off");a("recaptcha_image").style.width="300px";a("recaptcha_image").style.height="57px";a("recaptcha_challenge_field_holder").innerHTML='<input type="hidden" name="recaptcha_challenge_field" id="recaptcha_challenge_field" value=""/>';Z.th_init();Z.should_focus=!1;Z.th3||Z.force_reload?(Z._set_challenge(RecaptchaState.challenge,"image",!0),setTimeout(function(){Z.reload_internal("i")},100)):Z._set_challenge(RecaptchaState.challenge,"image",
+!1);Z.updateTabIndexes_();Z.update_widget();Z.widget&&(Z.widget.style.display="");b.callback&&b.callback()},updateTabIndexes_:function(){var a=Z.$,b=Y;b.tabindex&&(b=b.tabindex,a("recaptcha_response_field").tabIndex=b++,"audio"==Z.type&&a("recaptcha_audio_play_again")&&(a("recaptcha_audio_play_again").tabIndex=b++,a("recaptcha_audio_download"),a("recaptcha_audio_download").tabIndex=b++),"custom"!=Z.theme&&(a("recaptcha_reload_btn").tabIndex=b++,a("recaptcha_switch_audio_btn").tabIndex=b++,a("recaptcha_switch_img_btn").tabIndex=
+b++,a("recaptcha_whatsthis_btn").tabIndex=b,a("recaptcha_privacy").tabIndex=b++))},switch_type:function(a){if(!((new Date).getTime()<Z.reload_timeout)&&(Z.type=a,Z.reload_internal("audio"==Z.type?"a":"v"),"custom"!=Z.theme)){a=Z.$;var b="audio"==Z.type?W.instructions_audio:W.instructions_visual,b=b.replace(/:$/,"");a("recaptcha_response_field").setAttribute("placeholder",b)}},reload:function(){Z.reload_internal("r")},reload_internal:function(a){var b=Y,c=RecaptchaState,d=(new Date).getTime();d<Z.reload_timeout||
+(Z.reload_timeout=d+1E3,"undefined"==typeof a&&(a="r"),d=Z._get_api_server()+"/reload?c="+c.challenge+"&k="+c.site+"&reason="+a+"&type="+Z.type,Z.getLang_()&&(d+="&lang="+Z.getLang_()),"undefined"!=typeof b.extra_challenge_params&&(d+="&"+b.extra_challenge_params),Z.th_callback_invoke(),Z.th1&&(d+="&th="+Z.th1,Z.th1=""),"audio"==Z.type&&(d=b.audio_beta_12_08?d+"&audio_beta_12_08=1":d+"&new_audio_default=1"),Z.should_focus="t"!=a&&"i"!=a,Z._add_script(d),ob(Z.ad_captcha_plugin),c.publisher_id=null)},
+th_callback_invoke:function(){if(Z.th3)try{var a=Z.th3.exec();a&&1600>a.length&&(Z.th1=a)}catch(b){Z.th1=""}},finish_reload:function(a,b,c,d){RecaptchaState.payload_url=c;RecaptchaState.is_incorrect=!1;RecaptchaState.publisher_id=d;Z._set_challenge(a,b,!1);Z.updateTabIndexes_()},_set_challenge:function(a,b,c){"image"==b&&RecaptchaState.publisher_id?ld(a,function(){Z._set_challenge_internal(a,b,c)}):Z._set_challenge_internal(a,b,c)},_set_challenge_internal:function(a,b,c){var d=Z.$,e=RecaptchaState;
+e.challenge=a;Z.type=b;d("recaptcha_challenge_field").value=e.challenge;c||("audio"==b?(d("recaptcha_image").innerHTML=Z.getAudioCaptchaHtml(),Z._loop_playback()):"image"==b&&(a=e.payload_url,a||(a=Z._get_api_server()+"/image?c="+e.challenge,Z.th_callback_invoke(),Z.th1&&(a+="&th="+Z.th1,Z.th1="")),id(d("recaptcha_widget_div")),Wa()?Z.ad_captcha_plugin=new rd(Xa(),a,function(a){RecaptchaState.challenge=a;d("recaptcha_challenge_field").value=a}):d("recaptcha_image").innerHTML='<img id="recaptcha_challenge_image" alt="'+
+W.image_alt_text+'" height="57" width="300" src="'+a+'" />',hd()));Z._css_toggle("recaptcha_had_incorrect_sol","recaptcha_nothad_incorrect_sol",e.is_incorrect);Z._css_toggle("recaptcha_is_showing_audio","recaptcha_isnot_showing_audio","audio"==b);Z._clear_input();Z.should_focus&&Z.focus_response_field();Z._reset_timer()},_reset_timer:function(){clearInterval(Z.timer_id);var a=Math.max(1E3*(RecaptchaState.timeout-60),6E4);Z.timer_id=setInterval(function(){Z.reload_internal("t")},a);return a},showhelp:function(){window.open(Z._get_help_link(),
+"recaptcha_popup","width=460,height=580,location=no,menubar=no,status=no,toolbar=no,scrollbars=yes,resizable=yes")},_clear_input:function(){Z.$("recaptcha_response_field").value=""},_displayerror:function(a){var b=Z.$;b("recaptcha_image").innerHTML="";b("recaptcha_image").appendChild(document.createTextNode(a))},reloaderror:function(a){Z._displayerror(a)},_is_ie:function(){return 0<navigator.userAgent.indexOf("MSIE")&&!window.opera},_css_toggle:function(a,b,c){Z.update_widget();var d=Z.widget;d||
+(d=document.body);var e=d.className,e=e.replace(new RegExp("(^|\\s+)"+a+"(\\s+|$)")," "),e=e.replace(new RegExp("(^|\\s+)"+b+"(\\s+|$)")," ");d.className=e+(" "+(c?a:b))},_get_help_link:function(){var a="https://support.google.com/recaptcha/";Z.getLang_()&&(a+="?hl="+Z.getLang_());return a},playAgain:function(){Z.$("recaptcha_image").innerHTML=Z.getAudioCaptchaHtml();Z._loop_playback()},_loop_playback:function(){var a=Z.$("recaptcha_audio_play_again");a&&Z.attachEvent(a,"click",function(){Z.playAgain();
+return!1})},getAudioCaptchaHtml:function(){var a=RecaptchaState.payload_url;a||(a=Z._get_api_server()+"/audio.mp3?c="+RecaptchaState.challenge,Z.th_callback_invoke(),Z.th1&&(a+="&th="+Z.th1,Z.th1=""));var b=Z._get_api_server()+"/swf/audiocaptcha.swf?v2",b=Z._is_ie()?'<object classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" id="audiocaptcha" width="0" height="0" codebase="https://fpdownload.macromedia.com/get/flashplayer/current/swflash.cab"><param name="movie" value="'+b+'" /><param name="quality" value="high" /><param name="bgcolor" value="#869ca7" /><param name="allowScriptAccess" value="always" /></object><br/>':
+'<embed src="'+b+'" quality="high" bgcolor="#869ca7" width="0" height="0" name="audiocaptcha" align="middle" play="true" loop="false" quality="high" allowScriptAccess="always" type="application/x-shockwave-flash" pluginspage="http://www.adobe.com/go/getflashplayer" /></embed>',c="";Z.checkFlashVer()&&(c="<br/>"+Z.getSpan_('<a id="recaptcha_audio_play_again" class="recaptcha_audio_cant_hear_link">'+W.play_again+"</a>"));c+="<br/>"+Z.getSpan_('<a id="recaptcha_audio_download" class="recaptcha_audio_cant_hear_link" target="_blank" href="'+
+a+'">'+W.cant_hear_this+"</a>");return b+c},getSpan_:function(a){return"<span"+(RecaptchaState&&RecaptchaState.rtl?' dir="rtl"':"")+">"+a+"</span>"},gethttpwavurl:function(){if("audio"!=Z.type)return"";var a=RecaptchaState.payload_url;a||(a=Z._get_api_server()+"/image?c="+RecaptchaState.challenge,Z.th_callback_invoke(),Z.th1&&(a+="&th="+Z.th1,Z.th1=""));return a},checkFlashVer:function(){var a=-1!=navigator.appVersion.indexOf("MSIE"),b=-1!=navigator.appVersion.toLowerCase().indexOf("win"),c=-1!=navigator.userAgent.indexOf("Opera"),
+d=-1;if(null!=navigator.plugins&&0<navigator.plugins.length){if(navigator.plugins["Shockwave Flash 2.0"]||navigator.plugins["Shockwave Flash"])d=navigator.plugins["Shockwave Flash"+(navigator.plugins["Shockwave Flash 2.0"]?" 2.0":"")].description.split(" ")[2].split(".")[0]}else if(a&&b&&!c)try{d=(new ActiveXObject("ShockwaveFlash.ShockwaveFlash.7")).GetVariable("$version").split(" ")[1].split(",")[0]}catch(e){}return 9<=d},getLang_:function(){return Y.lang?Y.lang:"undefined"!=typeof RecaptchaState&&
+RecaptchaState.lang?RecaptchaState.lang:null},challenge_callback:function(){Z.force_reload=!!RecaptchaState.force_reload;if(RecaptchaState.t3){var a=RecaptchaState.t1?ib(mb(RecaptchaState.t1)):"",b=RecaptchaState.t2?ib(mb(RecaptchaState.t2)):"",c=RecaptchaState.t3?ib(mb(RecaptchaState.t3)):"";Z.th2=c;if(a)b=kc(a),cc(b,Z.challenge_callback_internal,null,void 0),cc(b,null,Z.challenge_callback_internal,void 0);else{if(k.execScript)k.execScript(b,"JavaScript");else if(k.eval)null==ia&&(k.eval("var _et_ = 1;"),
+"undefined"!=typeof k._et_?(delete k._et_,ia=!0):ia=!1),ia?k.eval(b):(a=k.document,c=a.createElement("script"),c.type="text/javascript",c.defer=!1,c.appendChild(a.createTextNode(b)),a.body.appendChild(c),a.body.removeChild(c));else throw Error("goog.globalEval not available");Z.challenge_callback_internal()}}else Z.challenge_callback_internal()},th_init:function(){try{k.thintinel&&k.thintinel.th&&(Z.th3=new k.thintinel.th(Z.th2),Z.th2="")}catch(a){}},update_widget:function(){Z.element&&(Z.widget=
+Z.$(Z.element))}};t("Recaptcha",Z);})()
diff --git a/static/client/register/js/register.js b/static/client/register/js/register.js
new file mode 100644
index 0000000000..b62763a293
--- /dev/null
+++ b/static/client/register/js/register.js
@@ -0,0 +1,117 @@
+window.matrixRegistration = {
+    endpoint: location.origin + "/_matrix/client/api/v1/register"
+};
+
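+// Ask the server which login flows it supports; the reCAPTCHA widget is
+// only rendered if the server advertises an m.login.recaptcha flow.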
+var setupCaptcha = function() {
+    if (!window.matrixRegistrationConfig) {
+        return;
+    }
+    $.get(matrixRegistration.endpoint, function(response) {
+        var serverExpectsCaptcha = false;
+        for (var i=0; i<response.flows.length; i++) {
+            var flow = response.flows[i];
+            if ("m.login.recaptcha" === flow.type) {
+                serverExpectsCaptcha = true;
+                break;
+            }
+        }
+        if (!serverExpectsCaptcha) {
+            console.log("This server does not require a captcha.");
+            return;
+        }
+        console.log("Setting up ReCaptcha for "+matrixRegistration.endpoint);
+        var public_key = window.matrixRegistrationConfig.recaptcha_public_key;
+        if (public_key === undefined) {
+            console.error("No public key defined for captcha!");
+            setFeedbackString("Misconfigured captcha for server. Contact server admin.");
+            return;
+        }
+        Recaptcha.create(public_key, "regcaptcha", {
+            theme: "red",
+            callback: Recaptcha.focus_response_field
+        });
+        window.matrixRegistration.isUsingRecaptcha = true;
+    }).error(errorFunc);
+
+};
+
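+// When a captcha is required, registration is a two-stage exchange: the
+// m.login.recaptcha stage returns a session ID, which is then quoted in
+// the m.login.password stage (see submitPassword below).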
+var submitCaptcha = function(user, pwd) {
+    var challengeToken = Recaptcha.get_challenge();
+    var captchaEntry = Recaptcha.get_response();
+    var data = {
+        type: "m.login.recaptcha",
+        challenge: challengeToken,
+        response: captchaEntry
+    };
+    console.log("Submitting captcha");
+    $.post(matrixRegistration.endpoint, JSON.stringify(data), function(response) {
+        console.log("Success -> "+JSON.stringify(response));
+        submitPassword(user, pwd, response.session);
+    }).error(function(err) {
+        Recaptcha.reload();
+        errorFunc(err);
+    });
+};
+
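+// Final stage: submit the username and password, quoting the captcha
+// session if one was issued.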
+var submitPassword = function(user, pwd, session) {
+    console.log("Registering...");
+    var data = {
+        type: "m.login.password",
+        user: user,
+        password: pwd,
+        session: session
+    };
+    $.post(matrixRegistration.endpoint, JSON.stringify(data), function(response) {
+        matrixRegistration.onRegistered(
+            response.home_server, response.user_id, response.access_token
+        );
+    }).error(errorFunc);
+};
+
+var errorFunc = function(err) {
+    if (err.responseJSON && err.responseJSON.error) {
+        setFeedbackString(err.responseJSON.error + " (" + err.responseJSON.errcode + ")");
+    }
+    else {
+        setFeedbackString("Request failed: " + err.status);
+    }
+};
+
+var setFeedbackString = function(text) {
+    $("#feedback").text(text);
+};
+
+matrixRegistration.onLoad = function() {
+    setupCaptcha();
+};
+
+matrixRegistration.signUp = function() {
+    var user = $("#desired_user_id").val();
+    if (user.length == 0) {
+        setFeedbackString("Must specify a username.");
+        return;
+    }
+    var pwd1 = $("#pwd1").val();
+    var pwd2 = $("#pwd2").val();
+    if (pwd1.length < 6) {
+        setFeedbackString("Password: min. 6 characters.");
+        return;
+    }
+    if (pwd1 != pwd2) {
+        setFeedbackString("Passwords do not match.");
+        return;
+    }
+    if (window.matrixRegistration.isUsingRecaptcha) {
+        submitCaptcha(user, pwd1);
+    }
+    else {
+        submitPassword(user, pwd1);
+    }
+};
+
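+// The embedding page is expected to replace this callback to continue
+// after a successful registration, e.g.:
+//   matrixRegistration.onRegistered = function(hsUrl, userId, accessToken) {
+//       // persist the credentials, then enter the app
+//   };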
+matrixRegistration.onRegistered = function(hs_url, user_id, access_token) {
+    // placeholder: replaced by the embedding page
+    console.log("onRegistered - This function should be replaced to proceed.");
+};
diff --git a/static/client/register/register_config.sample.js b/static/client/register/register_config.sample.js
new file mode 100644
index 0000000000..c7ea180dee
--- /dev/null
+++ b/static/client/register/register_config.sample.js
@@ -0,0 +1,3 @@
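+// Sample config: set this to the public (site) key issued for your domain
+// by reCAPTCHA; register.js reads it from
+// window.matrixRegistrationConfig.recaptcha_public_key.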
+window.matrixRegistrationConfig = {
+  recaptcha_public_key: "YOUR_PUBLIC_KEY"
+};
diff --git a/static/client/register/style.css b/static/client/register/style.css
new file mode 100644
index 0000000000..a3398852b9
--- /dev/null
+++ b/static/client/register/style.css
@@ -0,0 +1,56 @@
+html {
+    height: 100%;
+}
+
+body {
+    height: 100%;
+    font-family: "Myriad Pro", "Myriad", Helvetica, Arial, sans-serif;
+    font-size: 12pt;
+    margin: 0px;
+}
+
+h1 {
+    font-size: 20pt;
+}
+
+a:link    { color: #666; }
+a:visited { color: #666; }
+a:hover   { color: #000; }
+a:active  { color: #000; }
+
+input {
+    width: 100%;
+}
+
+textarea, input {
+    font-family: inherit;
+    font-size: inherit;
+}
+
+.smallPrint {
+    color: #888;
+    font-size: 9pt !important;
+    font-style: italic !important;
+}
+
+#recaptcha_area {
+    margin: auto;
+}
+
+#registrationForm {
+    text-align: left;
+    padding: 1em;
+    margin-bottom: 40px;
+    display: inline-block;
+
+    -webkit-border-radius: 10px;
+    -moz-border-radius: 10px;
+    border-radius: 10px;
+
+    -webkit-box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
+    -moz-box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
+    box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
+
+    background-color: #f8f8f8;
+    border: 1px #ccc solid;
+}
\ No newline at end of file
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 895a0766d2..f46a6df1fb 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.6.1b"
+__version__ = "0.8.0"
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index e31482cfaa..b176db8ce1 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -21,6 +21,7 @@ from synapse.api.constants import EventTypes, Membership, JoinRules
 from synapse.api.errors import AuthError, StoreError, Codes, SynapseError
 from synapse.util.logutils import log_function
 from synapse.util.async import run_on_reactor
+from synapse.types import UserID, ClientInfo
 
 import logging
 
@@ -88,12 +89,19 @@ class Auth(object):
             raise
 
     @defer.inlineCallbacks
-    def check_joined_room(self, room_id, user_id):
-        member = yield self.state.get_current_state(
-            room_id=room_id,
-            event_type=EventTypes.Member,
-            state_key=user_id
-        )
+    def check_joined_room(self, room_id, user_id, current_state=None):
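+        """Check that the given user is a member of the given room.
+
+        If the caller already holds the room's current state, it can be
+        passed in as a map of (event_type, state_key) -> event to avoid
+        refetching it; otherwise the current state is queried.
+        """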
+        if current_state:
+            member = current_state.get(
+                (EventTypes.Member, user_id),
+                None
+            )
+        else:
+            member = yield self.state.get_current_state(
+                room_id=room_id,
+                event_type=EventTypes.Member,
+                state_key=user_id
+            )
+
         self._check_joined_room(member, user_id, room_id)
         defer.returnValue(member)
 
@@ -101,10 +109,10 @@ class Auth(object):
     def check_host_in_room(self, room_id, host):
         curr_state = yield self.state.get_current_state(room_id)
 
-        for event in curr_state:
+        for event in curr_state.values():
             if event.type == EventTypes.Member:
                 try:
-                    if self.hs.parse_userid(event.state_key).domain != host:
+                    if UserID.from_string(event.state_key).domain != host:
                         continue
                 except:
                     logger.warn("state_key not user_id: %s", event.state_key)
@@ -289,15 +297,47 @@ class Auth(object):
         Args:
             request - An HTTP request with an access_token query parameter.
         Returns:
-            UserID : User ID object of the user making the request
+            tuple of (UserID, ClientInfo):
+                the User ID object of the user making the request, and
+                the ClientInfo (device ID and token ID) of the client
+                instance the user is using
         Raises:
             AuthError if no user by that token exists or the token is invalid.
         """
         # Can optionally look elsewhere in the request (e.g. headers)
         try:
             access_token = request.args["access_token"][0]
+
+            # Check for application service tokens with a user_id override
+            try:
+                app_service = yield self.store.get_app_service_by_token(
+                    access_token
+                )
+                if not app_service:
+                    raise KeyError
+
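+                # By default the AS acts as its registered sender; it may
+                # masquerade as another user via the user_id query
+                # parameter, provided that user lies in its namespace.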
+                user_id = app_service.sender
+                if "user_id" in request.args:
+                    user_id = request.args["user_id"][0]
+                    if not app_service.is_interested_in_user(user_id):
+                        raise AuthError(
+                            403,
+                            "Application service cannot masquerade as this user."
+                        )
+
+                if not user_id:
+                    raise KeyError
+
+                defer.returnValue(
+                    (UserID.from_string(user_id), ClientInfo("", ""))
+                )
+                return
+            except KeyError:
+                pass  # normal users won't have this query parameter set
+
             user_info = yield self.get_user_by_token(access_token)
             user = user_info["user"]
+            device_id = user_info["device_id"]
+            token_id = user_info["token_id"]
 
             ip_addr = self.hs.get_ip_from_request(request)
             user_agent = request.requestHeaders.getRawHeaders(
@@ -313,7 +353,7 @@ class Auth(object):
                     user_agent=user_agent
                 )
 
-            defer.returnValue(user)
+            defer.returnValue((user, ClientInfo(device_id, token_id)))
         except KeyError:
             raise AuthError(403, "Missing access token.")
 
@@ -332,12 +372,12 @@ class Auth(object):
         try:
             ret = yield self.store.get_user_by_token(token=token)
             if not ret:
-                raise StoreError()
-
+                raise StoreError(400, "Unknown token")
             user_info = {
                 "admin": bool(ret.get("admin", False)),
                 "device_id": ret.get("device_id"),
-                "user": self.hs.parse_userid(ret.get("name")),
+                "user": UserID.from_string(ret.get("name")),
+                "token_id": ret.get("token_id", None),
             }
 
             defer.returnValue(user_info)
@@ -345,6 +385,18 @@ class Auth(object):
             raise AuthError(403, "Unrecognised access token.",
                             errcode=Codes.UNKNOWN_TOKEN)
 
+    @defer.inlineCallbacks
+    def get_appservice_by_req(self, request):
+        try:
+            token = request.args["access_token"][0]
+            service = yield self.store.get_app_service_by_token(token)
+            if not service:
+                raise AuthError(403, "Unrecognised access token.",
+                                errcode=Codes.UNKNOWN_TOKEN)
+            defer.returnValue(service)
+        except KeyError:
+            raise AuthError(403, "Missing access token.")
+
     def is_server_admin(self, user):
         return self.store.is_server_admin(user)
 
@@ -352,26 +404,40 @@ class Auth(object):
     def add_auth_events(self, builder, context):
         yield run_on_reactor()
 
-        if builder.type == EventTypes.Create:
-            builder.auth_events = []
-            return
+        auth_ids = self.compute_auth_events(builder, context.current_state)
+
+        auth_events_entries = yield self.store.add_event_hashes(
+            auth_ids
+        )
+
+        builder.auth_events = auth_events_entries
+
+        context.auth_events = {
+            k: v
+            for k, v in context.current_state.items()
+            if v.event_id in auth_ids
+        }
+
+    def compute_auth_events(self, event, current_state):
+        if event.type == EventTypes.Create:
+            return []
 
         auth_ids = []
 
         key = (EventTypes.PowerLevels, "", )
-        power_level_event = context.current_state.get(key)
+        power_level_event = current_state.get(key)
 
         if power_level_event:
             auth_ids.append(power_level_event.event_id)
 
         key = (EventTypes.JoinRules, "", )
-        join_rule_event = context.current_state.get(key)
+        join_rule_event = current_state.get(key)
 
-        key = (EventTypes.Member, builder.user_id, )
-        member_event = context.current_state.get(key)
+        key = (EventTypes.Member, event.user_id, )
+        member_event = current_state.get(key)
 
         key = (EventTypes.Create, "", )
-        create_event = context.current_state.get(key)
+        create_event = current_state.get(key)
         if create_event:
             auth_ids.append(create_event.event_id)
 
@@ -381,8 +447,8 @@ class Auth(object):
         else:
             is_public = False
 
-        if builder.type == EventTypes.Member:
-            e_type = builder.content["membership"]
+        if event.type == EventTypes.Member:
+            e_type = event.content["membership"]
             if e_type in [Membership.JOIN, Membership.INVITE]:
                 if join_rule_event:
                     auth_ids.append(join_rule_event.event_id)
@@ -397,17 +463,7 @@ class Auth(object):
             if member_event.content["membership"] == Membership.JOIN:
                 auth_ids.append(member_event.event_id)
 
-        auth_events_entries = yield self.store.add_event_hashes(
-            auth_ids
-        )
-
-        builder.auth_events = auth_events_entries
-
-        context.auth_events = {
-            k: v
-            for k, v in context.current_state.items()
-            if v.event_id in auth_ids
-        }
+        return auth_ids
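A hedged sketch of the pure helper factored out here: given a `current_state` map it returns only event IDs, leaving hashing and context bookkeeping to `add_auth_events`:

    auth_ids = auth.compute_auth_events(builder, context.current_state)
    # for an ordinary m.room.message this typically collects the event_ids of
    # m.room.create, m.room.power_levels and the sender's join m.room.member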
 
     @log_function
     def _can_send_event(self, event, auth_events):
@@ -461,7 +517,7 @@ class Auth(object):
                             "You are not allowed to set others state"
                         )
                     else:
-                        sender_domain = self.hs.parse_userid(
+                        sender_domain = UserID.from_string(
                             event.user_id
                         ).domain
 
@@ -496,7 +552,7 @@ class Auth(object):
         # Validate users
         for k, v in user_list.items():
             try:
-                self.hs.parse_userid(k)
+                UserID.from_string(k)
             except:
                 raise SynapseError(400, "Not a valid user_id: %s" % (k,))
 
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 7ee6dcc46e..420f963d91 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -59,6 +59,7 @@ class LoginType(object):
     EMAIL_URL = u"m.login.email.url"
     EMAIL_IDENTITY = u"m.login.email.identity"
     RECAPTCHA = u"m.login.recaptcha"
+    APPLICATION_SERVICE = u"m.login.application_service"
 
 
 class EventTypes(object):
@@ -74,3 +75,9 @@ class EventTypes(object):
     Message = "m.room.message"
     Topic = "m.room.topic"
     Name = "m.room.name"
+
+
+class RejectedReason(object):
+    AUTH_ERROR = "auth_error"
+    REPLACED = "replaced"
+    NOT_ANCESTOR = "not_ancestor"
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index 2b049debf3..eddd889778 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -21,6 +21,7 @@ logger = logging.getLogger(__name__)
 
 
 class Codes(object):
+    UNRECOGNIZED = "M_UNRECOGNIZED"
     UNAUTHORIZED = "M_UNAUTHORIZED"
     FORBIDDEN = "M_FORBIDDEN"
     BAD_JSON = "M_BAD_JSON"
@@ -34,10 +35,12 @@ class Codes(object):
     LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
     CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
     CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
-    TOO_LARGE = "M_TOO_LARGE"
+    MISSING_PARAM = "M_MISSING_PARAM"
+    TOO_LARGE = "M_TOO_LARGE"
+    EXCLUSIVE = "M_EXCLUSIVE"
 
 
-class CodeMessageException(Exception):
+class CodeMessageException(RuntimeError):
     """An exception with integer code and message string attributes."""
 
     def __init__(self, code, msg):
@@ -81,6 +84,35 @@ class RegistrationError(SynapseError):
     pass
 
 
+class UnrecognizedRequestError(SynapseError):
+    """An error indicating we don't understand the request you're trying to make"""
+    def __init__(self, *args, **kwargs):
+        if "errcode" not in kwargs:
+            kwargs["errcode"] = Codes.UNRECOGNIZED
+        message = None
+        if len(args) == 0:
+            message = "Unrecognized request"
+        else:
+            message = args[0]
+        super(UnrecognizedRequestError, self).__init__(
+            400,
+            message,
+            **kwargs
+        )
+
+
+class NotFoundError(SynapseError):
+    """An error indicating we can't find the thing you asked for"""
+    def __init__(self, *args, **kwargs):
+        if "errcode" not in kwargs:
+            kwargs["errcode"] = Codes.NOT_FOUND
+        super(NotFoundError, self).__init__(
+            404,
+            "Not found",
+            **kwargs
+        )
+
+
 class AuthError(SynapseError):
     """An error raised when there was a problem authorising an event."""
 
@@ -196,3 +228,9 @@ class FederationError(RuntimeError):
             "affected": self.affected,
             "source": self.source if self.source else self.affected,
         }
+
+
+class HttpResponseException(CodeMessageException):
+    def __init__(self, code, msg, response):
+        self.response = response
+        super(HttpResponseException, self).__init__(code, msg)
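Usage sketch for the new error classes; each defaults its errcode unless the caller overrides it:

    raise UnrecognizedRequestError()   # -> 400, errcode M_UNRECOGNIZED
    raise UnrecognizedRequestError("No handler for that path")
    raise NotFoundError()              # -> 404 "Not found", errcode M_NOT_FOUND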
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
new file mode 100644
index 0000000000..4d570b74f8
--- /dev/null
+++ b/synapse/api/filtering.py
@@ -0,0 +1,229 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from synapse.api.errors import SynapseError
+from synapse.types import UserID, RoomID
+
+
+class Filtering(object):
+
+    def __init__(self, hs):
+        super(Filtering, self).__init__()
+        self.store = hs.get_datastore()
+
+    def get_user_filter(self, user_localpart, filter_id):
+        result = self.store.get_user_filter(user_localpart, filter_id)
+        result.addCallback(Filter)
+        return result
+
+    def add_user_filter(self, user_localpart, user_filter):
+        self._check_valid_filter(user_filter)
+        return self.store.add_user_filter(user_localpart, user_filter)
+
+    # TODO(paul): surely we should probably add a delete_user_filter or
+    #   replace_user_filter at some point? There's no REST API specified for
+    #   them however
+
+    def _check_valid_filter(self, user_filter_json):
+        """Check if the provided filter is valid.
+
+        This inspects all definitions contained within the filter.
+
+        Args:
+            user_filter_json(dict): The filter
+        Raises:
+            SynapseError: If the filter is not valid.
+        """
+        # NB: Filters are the complete json blobs. "Definitions" are an
+        # individual top-level key e.g. public_user_data. Filters are made of
+        # many definitions.
+
+        top_level_definitions = [
+            "public_user_data", "private_user_data", "server_data"
+        ]
+
+        room_level_definitions = [
+            "state", "events", "ephemeral"
+        ]
+
+        for key in top_level_definitions:
+            if key in user_filter_json:
+                self._check_definition(user_filter_json[key])
+
+        if "room" in user_filter_json:
+            for key in room_level_definitions:
+                if key in user_filter_json["room"]:
+                    self._check_definition(user_filter_json["room"][key])
+
+    def _check_definition(self, definition):
+        """Check if the provided definition is valid.
+
+        This inspects not only the types but also the values to make sure they
+        make sense.
+
+        Args:
+            definition(dict): The filter definition
+        Raises:
+            SynapseError: If there was a problem with this definition.
+        """
+        # NB: Filters are the complete json blobs. "Definitions" are an
+        # individual top-level key e.g. public_user_data. Filters are made of
+        # many definitions.
+        if type(definition) != dict:
+            raise SynapseError(
+                400, "Expected JSON object, not %s" % (definition,)
+            )
+
+        # check rooms are valid room IDs
+        room_id_keys = ["rooms", "not_rooms"]
+        for key in room_id_keys:
+            if key in definition:
+                if type(definition[key]) != list:
+                    raise SynapseError(400, "Expected %s to be a list." % key)
+                for room_id in definition[key]:
+                    RoomID.from_string(room_id)
+
+        # check senders are valid user IDs
+        user_id_keys = ["senders", "not_senders"]
+        for key in user_id_keys:
+            if key in definition:
+                if type(definition[key]) != list:
+                    raise SynapseError(400, "Expected %s to be a list." % key)
+                for user_id in definition[key]:
+                    UserID.from_string(user_id)
+
+        # TODO: We don't limit event type values but we probably should...
+        # check types are valid event types
+        event_keys = ["types", "not_types"]
+        for key in event_keys:
+            if key in definition:
+                if type(definition[key]) != list:
+                    raise SynapseError(400, "Expected %s to be a list." % key)
+                for event_type in definition[key]:
+                    if not isinstance(event_type, basestring):
+                        raise SynapseError(400, "Event type should be a string")
+
+        if "format" in definition:
+            event_format = definition["format"]
+            if event_format not in ["federation", "events"]:
+                raise SynapseError(400, "Invalid format: %s" % (event_format,))
+
+        if "select" in definition:
+            event_select_list = definition["select"]
+            for select_key in event_select_list:
+                if select_key not in ["event_id", "origin_server_ts",
+                                      "thread_id", "content", "content.body"]:
+                    raise SynapseError(400, "Bad select: %s" % (select_key,))
+
+        if ("bundle_updates" in definition and
+                type(definition["bundle_updates"]) != bool):
+            raise SynapseError(400, "Bad bundle_updates: expected bool.")
+
+
+class Filter(object):
+    def __init__(self, filter_json):
+        self.filter_json = filter_json
+
+    def filter_public_user_data(self, events):
+        return self._filter_on_key(events, ["public_user_data"])
+
+    def filter_private_user_data(self, events):
+        return self._filter_on_key(events, ["private_user_data"])
+
+    def filter_room_state(self, events):
+        return self._filter_on_key(events, ["room", "state"])
+
+    def filter_room_events(self, events):
+        return self._filter_on_key(events, ["room", "events"])
+
+    def filter_room_ephemeral(self, events):
+        return self._filter_on_key(events, ["room", "ephemeral"])
+
+    def _filter_on_key(self, events, keys):
+        filter_json = self.filter_json
+        if not filter_json:
+            return events
+
+        try:
+            # extract the right definition from the filter
+            definition = filter_json
+            for key in keys:
+                definition = definition[key]
+            return self._filter_with_definition(events, definition)
+        except KeyError:
+            # return all events if definition isn't specified.
+            return events
+
+    def _filter_with_definition(self, events, definition):
+        return [e for e in events if self._passes_definition(definition, e)]
+
+    def _passes_definition(self, definition, event):
+        """Check if the event passes through the given definition.
+
+        Args:
+            definition(dict): The definition to check against.
+            event(Event): The event to check.
+        Returns:
+            True if the event passes through the filter.
+        """
+        # Algorithm notes:
+        # For each key in the definition, check the event meets the criteria:
+        #   * For types: Literal match or prefix match (if ends with wildcard)
+        #   * For senders/rooms: Literal match only
+        #   * "not_" checks take presedence (e.g. if "m.*" is in both 'types'
+        #     and 'not_types' then it is treated as only being in 'not_types')
+
+        # room checks
+        if hasattr(event, "room_id"):
+            room_id = event.room_id
+            allow_rooms = definition.get("rooms", None)
+            reject_rooms = definition.get("not_rooms", None)
+            if reject_rooms and room_id in reject_rooms:
+                return False
+            if allow_rooms and room_id not in allow_rooms:
+                return False
+
+        # sender checks
+        if hasattr(event, "sender"):
+            # Should we be including event.state_key for some event types?
+            sender = event.sender
+            allow_senders = definition.get("senders", None)
+            reject_senders = definition.get("not_senders", None)
+            if reject_senders and sender in reject_senders:
+                return False
+            if allow_senders and sender not in allow_senders:
+                return False
+
+        # type checks
+        if "not_types" in definition:
+            for def_type in definition["not_types"]:
+                if self._event_matches_type(event, def_type):
+                    return False
+        if "types" in definition:
+            included = False
+            for def_type in definition["types"]:
+                if self._event_matches_type(event, def_type):
+                    included = True
+                    break
+            if not included:
+                return False
+
+        return True
+
+    def _event_matches_type(self, event, def_type):
+        if def_type.endswith("*"):
+            type_prefix = def_type[:-1]
+            return event.type.startswith(type_prefix)
+        else:
+            return event.type == def_type
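A usage sketch for the new classes (filter contents invented); note the trailing `*` prefix match in types, and that the `not_` lists win over their allow counterparts:

    filter_json = {
        "room": {
            "events": {
                "types": ["m.room.*"],           # prefix match
                "not_types": ["m.room.topic"],   # takes precedence
                "not_senders": ["@spam:example.com"],
            }
        }
    }
    user_filter = Filter(filter_json)
    visible = user_filter.filter_room_events(events)  # `events` assumed given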
diff --git a/synapse/api/urls.py b/synapse/api/urls.py
index a299392049..3d43674625 100644
--- a/synapse/api/urls.py
+++ b/synapse/api/urls.py
@@ -16,8 +16,11 @@
 """Contains the URL paths to prefix various aspects of the server with. """
 
 CLIENT_PREFIX = "/_matrix/client/api/v1"
+CLIENT_V2_ALPHA_PREFIX = "/_matrix/client/v2_alpha"
 FEDERATION_PREFIX = "/_matrix/federation/v1"
+STATIC_PREFIX = "/_matrix/static"
 WEB_CLIENT_PREFIX = "/_matrix/client"
 CONTENT_REPO_PREFIX = "/_matrix/content"
 SERVER_KEY_PREFIX = "/_matrix/key/v1"
 MEDIA_PREFIX = "/_matrix/media/v1"
+APP_SERVICE_PREFIX = "/_matrix/appservice/v1"
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 9b23c58abe..dff08c8bc5 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -14,7 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse.storage import prepare_database, UpgradeDatabaseException
+import sys
+sys.dont_write_bytecode = True
+
+from synapse.storage import (
+    prepare_database, prepare_sqlite3_database, UpgradeDatabaseException,
+)
 
 from synapse.server import HomeServer
 
@@ -27,17 +32,21 @@ from twisted.web.resource import Resource
 from twisted.web.static import File
 from twisted.web.server import Site
 from synapse.http.server import JsonResource, RootRedirect
-from synapse.media.v0.content_repository import ContentRepoResource
-from synapse.media.v1.media_repository import MediaRepositoryResource
+from synapse.rest.appservice.v1 import AppServiceRestResource
+from synapse.rest.media.v0.content_repository import ContentRepoResource
+from synapse.rest.media.v1.media_repository import MediaRepositoryResource
 from synapse.http.server_key_resource import LocalKey
 from synapse.http.matrixfederationclient import MatrixFederationHttpClient
 from synapse.api.urls import (
     CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
-    SERVER_KEY_PREFIX, MEDIA_PREFIX
+    SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, APP_SERVICE_PREFIX,
+    STATIC_PREFIX
 )
 from synapse.config.homeserver import HomeServerConfig
 from synapse.crypto import context_factory
 from synapse.util.logcontext import LoggingContext
+from synapse.rest.client.v1 import ClientV1RestResource
+from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
 
 from daemonize import Daemonize
 import twisted.manhole.telnet
@@ -47,7 +56,8 @@ import synapse
 import logging
 import os
 import re
-import sys
+import resource
+import subprocess
 import sqlite3
 import syweb
 
@@ -60,16 +70,25 @@ class SynapseHomeServer(HomeServer):
         return MatrixFederationHttpClient(self)
 
     def build_resource_for_client(self):
-        return JsonResource()
+        return ClientV1RestResource(self)
+
+    def build_resource_for_client_v2_alpha(self):
+        return ClientV2AlphaRestResource(self)
 
     def build_resource_for_federation(self):
-        return JsonResource()
+        return JsonResource(self)
+
+    def build_resource_for_app_services(self):
+        return AppServiceRestResource(self)
 
     def build_resource_for_web_client(self):
         syweb_path = os.path.dirname(syweb.__file__)
         webclient_path = os.path.join(syweb_path, "webclient")
         return File(webclient_path)  # TODO configurable?
 
+    def build_resource_for_static_content(self):
+        return File("static")
+
     def build_resource_for_content_repo(self):
         return ContentRepoResource(
             self, self.upload_dir, self.auth, self.content_addr
@@ -86,7 +105,9 @@ class SynapseHomeServer(HomeServer):
             "sqlite3", self.get_db_name(),
             check_same_thread=False,
             cp_min=1,
-            cp_max=1
+            cp_max=1,
+            cp_openfun=prepare_database,  # Prepare the database for each conn
+                                          # so that :memory: sqlite works
         )
 
     def create_resource_tree(self, web_client, redirect_root_to_web_client):
@@ -105,11 +126,15 @@ class SynapseHomeServer(HomeServer):
         # [ ("/aaa/bbb/cc", Resource1), ("/aaa/dummy", Resource2) ]
         desired_tree = [
             (CLIENT_PREFIX, self.get_resource_for_client()),
+            (CLIENT_V2_ALPHA_PREFIX, self.get_resource_for_client_v2_alpha()),
             (FEDERATION_PREFIX, self.get_resource_for_federation()),
             (CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()),
             (SERVER_KEY_PREFIX, self.get_resource_for_server_key()),
             (MEDIA_PREFIX, self.get_resource_for_media_repository()),
+            (APP_SERVICE_PREFIX, self.get_resource_for_app_services()),
+            (STATIC_PREFIX, self.get_resource_for_static_content()),
         ]
+
         if web_client:
             logger.info("Adding the web client.")
             desired_tree.append((WEB_CLIENT_PREFIX,
@@ -125,11 +150,11 @@ class SynapseHomeServer(HomeServer):
         # instead, we'll store a copy of this mapping so we can actually add
         # extra resources to existing nodes. See self._resource_id for the key.
         resource_mappings = {}
-        for (full_path, resource) in desired_tree:
-            logger.info("Attaching %s to path %s", resource, full_path)
+        for full_path, res in desired_tree:
+            logger.info("Attaching %s to path %s", res, full_path)
             last_resource = self.root_resource
             for path_seg in full_path.split('/')[1:-1]:
-                if not path_seg in last_resource.listNames():
+                if path_seg not in last_resource.listNames():
                     # resource doesn't exist, so make a "dummy resource"
                     child_resource = Resource()
                     last_resource.putChild(path_seg, child_resource)
@@ -157,12 +182,12 @@ class SynapseHomeServer(HomeServer):
                                                      child_name)
                     child_resource = resource_mappings[child_res_id]
                     # steal the children
-                    resource.putChild(child_name, child_resource)
+                    res.putChild(child_name, child_resource)
 
             # finally, insert the desired resource in the right place
-            last_resource.putChild(last_path_seg, resource)
+            last_resource.putChild(last_path_seg, res)
             res_id = self._resource_id(last_resource, last_path_seg)
-            resource_mappings[res_id] = resource
+            resource_mappings[res_id] = res
 
         return self.root_resource
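A condensed sketch of the attach loop above for a single desired_tree entry, using twisted.web's `children` map (paths illustrative):

    from twisted.web.resource import Resource

    root = Resource()
    last = root
    for seg in "/_matrix/client/api/v1".split('/')[1:-1]:
        if seg not in last.listNames():
            last.putChild(seg, Resource())   # the "dummy resource"
        last = last.children[seg]
    last.putChild("v1", Resource())          # the real resource goes here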
 
@@ -194,6 +219,83 @@ class SynapseHomeServer(HomeServer):
             logger.info("Synapse now listening on port %d", unsecure_port)
 
 
+def get_version_string():
+    try:
+        null = open(os.devnull, 'w')
+        cwd = os.path.dirname(os.path.abspath(__file__))
+        try:
+            git_branch = subprocess.check_output(
+                ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
+                stderr=null,
+                cwd=cwd,
+            ).strip()
+            git_branch = "b=" + git_branch
+        except subprocess.CalledProcessError:
+            git_branch = ""
+
+        try:
+            git_tag = subprocess.check_output(
+                ['git', 'describe', '--exact-match'],
+                stderr=null,
+                cwd=cwd,
+            ).strip()
+            git_tag = "t=" + git_tag
+        except subprocess.CalledProcessError:
+            git_tag = ""
+
+        try:
+            git_commit = subprocess.check_output(
+                ['git', 'rev-parse', '--short', 'HEAD'],
+                stderr=null,
+                cwd=cwd,
+            ).strip()
+        except subprocess.CalledProcessError:
+            git_commit = ""
+
+        try:
+            dirty_string = "-this_is_a_dirty_checkout"
+            is_dirty = subprocess.check_output(
+                ['git', 'describe', '--dirty=' + dirty_string],
+                stderr=null,
+                cwd=cwd,
+            ).strip().endswith(dirty_string)
+
+            git_dirty = "dirty" if is_dirty else ""
+        except subprocess.CalledProcessError:
+            git_dirty = ""
+
+        if git_branch or git_tag or git_commit or git_dirty:
+            git_version = ",".join(
+                s for s in
+                (git_branch, git_tag, git_commit, git_dirty,)
+                if s
+            )
+
+            return (
+                "Synapse/%s (%s)" % (
+                    synapse.__version__, git_version,
+                )
+            ).encode("ascii")
+    except Exception as e:
+        logger.warn("Failed to check for git repository: %s", e)
+
+    return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
+
+
+def change_resource_limit(soft_file_no):
+    try:
+        soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
+
+        if not soft_file_no:
+            soft_file_no = hard
+
+        resource.setrlimit(resource.RLIMIT_NOFILE, (soft_file_no, hard))
+
+        logger.info("Set file limit to: %d", soft_file_no)
+    except (ValueError, resource.error) as e:
+        logger.warn("Failed to set file limit: %s", e)
+
+
 def setup(config_options, should_run=True):
     config = HomeServerConfig.load_config(
         "Synapse Homeserver",
@@ -205,8 +307,10 @@ def setup(config_options, should_run=True):
 
     check_requirements()
 
+    version_string = get_version_string()
+
     logger.info("Server hostname: %s", config.server_name)
-    logger.info("Server version: %s", synapse.__version__)
+    logger.info("Server version: %s", version_string)
 
     if re.search(":[0-9]+$", config.server_name):
         domain_with_port = config.server_name
@@ -223,10 +327,9 @@ def setup(config_options, should_run=True):
         tls_context_factory=tls_context_factory,
         config=config,
         content_addr=config.content_addr,
+        version_string=version_string,
     )
 
-    hs.register_servlets()
-
     hs.create_resource_tree(
         web_client=config.webclient,
         redirect_root_to_web_client=True,
@@ -238,6 +341,7 @@ def setup(config_options, should_run=True):
 
     try:
         with sqlite3.connect(db_name) as db_conn:
+            prepare_sqlite3_database(db_conn)
             prepare_database(db_conn)
     except UpgradeDatabaseException:
         sys.stderr.write(
@@ -249,14 +353,6 @@ def setup(config_options, should_run=True):
 
     logger.info("Database prepared in %s.", db_name)
 
-    db_pool = hs.get_db_pool()
-
-    if db_name == ":memory:":
-        # Memory databases will need to be setup each time they are opened.
-        reactor.callWhenRunning(
-            db_pool.runWithConnection, prepare_database
-        )
-
     if config.manhole:
         f = twisted.manhole.telnet.ShellFactory()
         f.username = "matrix"
@@ -267,17 +363,24 @@ def setup(config_options, should_run=True):
     bind_port = config.bind_port
     if config.no_tls:
         bind_port = None
+
     hs.start_listening(bind_port, config.unsecure_port)
 
+    hs.get_pusherpool().start()
+    hs.get_state_handler().start_caching()
+    hs.get_datastore().start_profiling()
+    hs.get_replication_layer().start_get_pdu_cache()
+
     if not should_run:
         return
 
     if config.daemonize:
         print config.pid_file
+
         daemon = Daemonize(
             app="synapse-homeserver",
             pid=config.pid_file,
-            action=run,
+            action=lambda: run(config),
             auto_close_fds=False,
             verbose=True,
             logger=logger,
@@ -285,7 +388,7 @@ def setup(config_options, should_run=True):
 
         daemon.start()
     else:
-        reactor.run()
+        run(config)
 
 
 class SynapseService(service.Service):
@@ -299,8 +402,10 @@ class SynapseService(service.Service):
         return self._port.stopListening()
 
 
-def run():
+def run(config):
     with LoggingContext("run"):
+        change_resource_limit(config.soft_file_limit)
+
         reactor.run()
 
 
diff --git a/synapse/app/synctl.py b/synapse/app/synctl.py
index 363c20f994..3a70a248dc 100755
--- a/synapse/app/synctl.py
+++ b/synapse/app/synctl.py
@@ -19,7 +19,7 @@ import os
 import subprocess
 import signal
 
-SYNAPSE = ["python", "-m", "synapse.app.homeserver"]
+SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]
 
 CONFIGFILE = "homeserver.yaml"
 PIDFILE = "homeserver.pid"
diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py
new file mode 100644
index 0000000000..a268a6bcc4
--- /dev/null
+++ b/synapse/appservice/__init__.py
@@ -0,0 +1,176 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from synapse.api.constants import EventTypes
+
+import logging
+import re
+
+logger = logging.getLogger(__name__)
+
+
+class ApplicationService(object):
+    """Defines an application service. This definition is mostly what is
+    provided to the /register AS API.
+
+    Provides methods to check if this service is "interested" in events.
+    """
+    NS_USERS = "users"
+    NS_ALIASES = "aliases"
+    NS_ROOMS = "rooms"
+    # The ordering here is important as it is used to map database values (which
+    # are stored as ints representing the position in this list) to namespace
+    # values.
+    NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS]
+
+    def __init__(self, token, url=None, namespaces=None, hs_token=None,
+                 sender=None, txn_id=None):
+        self.token = token
+        self.url = url
+        self.hs_token = hs_token
+        self.sender = sender
+        self.namespaces = self._check_namespaces(namespaces)
+        self.txn_id = txn_id
+
+    def _check_namespaces(self, namespaces):
+        # Sanity check that it is of the form:
+        # {
+        #   users: [ {regex: "[A-z]+.*", exclusive: true}, ...],
+        #   aliases: [ {regex: "[A-z]+.*", exclusive: true}, ...],
+        #   rooms: [ {regex: "[A-z]+.*", exclusive: true}, ...],
+        # }
+        if not namespaces:
+            return None
+
+        for ns in ApplicationService.NS_LIST:
+            if ns not in namespaces:
+                namespaces[ns] = []
+                continue
+
+            if type(namespaces[ns]) != list:
+                raise ValueError("Bad namespace value for '%s'" % ns)
+            for regex_obj in namespaces[ns]:
+                if not isinstance(regex_obj, dict):
+                    raise ValueError("Expected dict regex for ns '%s'" % ns)
+                if not isinstance(regex_obj.get("exclusive"), bool):
+                    raise ValueError(
+                        "Expected bool for 'exclusive' in ns '%s'" % ns
+                    )
+                if not isinstance(regex_obj.get("regex"), basestring):
+                    raise ValueError(
+                        "Expected string for 'regex' in ns '%s'" % ns
+                    )
+        return namespaces
+
+    def _matches_regex(self, test_string, namespace_key, return_obj=False):
+        if not isinstance(test_string, basestring):
+            logger.error(
+                "Expected a string to test regex against, but got %s",
+                test_string
+            )
+            return False
+
+        for regex_obj in self.namespaces[namespace_key]:
+            if re.match(regex_obj["regex"], test_string):
+                if return_obj:
+                    return regex_obj
+                return True
+        return False
+
+    def _is_exclusive(self, ns_key, test_string):
+        regex_obj = self._matches_regex(test_string, ns_key, return_obj=True)
+        if regex_obj:
+            return regex_obj["exclusive"]
+        return False
+
+    def _matches_user(self, event, member_list):
+        if (hasattr(event, "sender") and
+                self.is_interested_in_user(event.sender)):
+            return True
+        # also check m.room.member state key
+        if (hasattr(event, "type") and event.type == EventTypes.Member
+                and hasattr(event, "state_key")
+                and self.is_interested_in_user(event.state_key)):
+            return True
+        # check joined member events
+        for member in member_list:
+            if self.is_interested_in_user(member.state_key):
+                return True
+        return False
+
+    def _matches_room_id(self, event):
+        if hasattr(event, "room_id"):
+            return self.is_interested_in_room(event.room_id)
+        return False
+
+    def _matches_aliases(self, event, alias_list):
+        for alias in alias_list:
+            if self.is_interested_in_alias(alias):
+                return True
+        return False
+
+    def is_interested(self, event, restrict_to=None, aliases_for_event=None,
+                      member_list=None):
+        """Check if this service is interested in this event.
+
+        Args:
+            event(Event): The event to check.
+            restrict_to(str): The namespace to restrict regex tests to.
+            aliases_for_event(list): A list of all the known room aliases
+                for this event.
+            member_list(list): A list of all joined room members in this room.
+        Returns:
+            bool: True if this service would like to know about this event.
+        """
+        if aliases_for_event is None:
+            aliases_for_event = []
+        if member_list is None:
+            member_list = []
+
+        if restrict_to and restrict_to not in ApplicationService.NS_LIST:
+            # this is a programming error, so fail early and raise a general
+            # exception
+            raise Exception("Unexpected restrict_to value: %s". restrict_to)
+
+        if not restrict_to:
+            return (self._matches_user(event, member_list)
+                    or self._matches_aliases(event, aliases_for_event)
+                    or self._matches_room_id(event))
+        elif restrict_to == ApplicationService.NS_ALIASES:
+            return self._matches_aliases(event, aliases_for_event)
+        elif restrict_to == ApplicationService.NS_ROOMS:
+            return self._matches_room_id(event)
+        elif restrict_to == ApplicationService.NS_USERS:
+            return self._matches_user(event, member_list)
+
+    def is_interested_in_user(self, user_id):
+        return self._matches_regex(user_id, ApplicationService.NS_USERS)
+
+    def is_interested_in_alias(self, alias):
+        return self._matches_regex(alias, ApplicationService.NS_ALIASES)
+
+    def is_interested_in_room(self, room_id):
+        return self._matches_regex(room_id, ApplicationService.NS_ROOMS)
+
+    def is_exclusive_user(self, user_id):
+        return self._is_exclusive(ApplicationService.NS_USERS, user_id)
+
+    def is_exclusive_alias(self, alias):
+        return self._is_exclusive(ApplicationService.NS_ALIASES, alias)
+
+    def is_exclusive_room(self, room_id):
+        return self._is_exclusive(ApplicationService.NS_ROOMS, room_id)
+
+    def __str__(self):
+        return "ApplicationService: %s" % (self.__dict__,)
diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py
new file mode 100644
index 0000000000..c2179f8d55
--- /dev/null
+++ b/synapse/appservice/api.py
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from twisted.internet import defer
+
+from synapse.api.errors import CodeMessageException
+from synapse.http.client import SimpleHttpClient
+from synapse.events.utils import serialize_event
+
+import logging
+import urllib
+
+logger = logging.getLogger(__name__)
+
+
+class ApplicationServiceApi(SimpleHttpClient):
+    """This class manages HS -> AS communications, including querying and
+    pushing.
+    """
+
+    def __init__(self, hs):
+        super(ApplicationServiceApi, self).__init__(hs)
+        self.clock = hs.get_clock()
+
+    @defer.inlineCallbacks
+    def query_user(self, service, user_id):
+        uri = service.url + ("/users/%s" % urllib.quote(user_id))
+        response = None
+        try:
+            response = yield self.get_json(uri, {
+                "access_token": service.hs_token
+            })
+            if response is not None:  # just an empty json object
+                defer.returnValue(True)
+        except CodeMessageException as e:
+            if e.code == 404:
+                defer.returnValue(False)
+                return
+            logger.warning("query_user to %s received %s", uri, e.code)
+        except Exception as ex:
+            logger.warning("query_user to %s threw exception %s", uri, ex)
+        defer.returnValue(False)
+
+    @defer.inlineCallbacks
+    def query_alias(self, service, alias):
+        uri = service.url + ("/rooms/%s" % urllib.quote(alias))
+        response = None
+        try:
+            response = yield self.get_json(uri, {
+                "access_token": service.hs_token
+            })
+            if response is not None:  # just an empty json object
+                defer.returnValue(True)
+        except CodeMessageException as e:
+            logger.warning("query_alias to %s received %s", uri, e.code)
+            if e.code == 404:
+                defer.returnValue(False)
+                return
+        except Exception as ex:
+            logger.warning("query_alias to %s threw exception %s", uri, ex)
+        defer.returnValue(False)
+
+    @defer.inlineCallbacks
+    def push_bulk(self, service, events):
+        events = self._serialize(events)
+
+        uri = service.url + ("/transactions/%s" %
+                             urllib.quote(str(0)))  # TODO txn_ids
+        response = None
+        try:
+            response = yield self.put_json(
+                uri=uri,
+                json_body={
+                    "events": events
+                },
+                args={
+                    "access_token": service.hs_token
+                })
+            if response:  # just an empty json object
+                # TODO: Mark txn as sent successfully
+                defer.returnValue(True)
+        except CodeMessageException as e:
+            logger.warning("push_bulk to %s received %s", uri, e.code)
+        except Exception as ex:
+            logger.warning("push_bulk to %s threw exception %s", uri, ex)
+        defer.returnValue(False)
+
+    @defer.inlineCallbacks
+    def push(self, service, event):
+        response = yield self.push_bulk(service, [event])
+        defer.returnValue(response)
+
+    def _serialize(self, events):
+        time_now = self.clock.time_msec()
+        return [
+            serialize_event(e, time_now, as_client_event=True) for e in events
+        ]
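For reference, the requests this client issues against `service.url` (values illustrative; the transaction id is pinned to 0 pending the TODO above):

    GET  <service.url>/users/<user_id>?access_token=<hs_token>
    GET  <service.url>/rooms/<alias>?access_token=<hs_token>
    PUT  <service.url>/transactions/0?access_token=<hs_token>
         body: {"events": [...serialized client events...]}

The query endpoints treat any JSON reply as a yes and a 404 as a no.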
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index dfc115d8e8..87cdbf1d30 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -28,6 +28,16 @@ class Config(object):
         pass
 
     @staticmethod
+    def parse_size(string):
+        sizes = {"K": 1024, "M": 1024 * 1024}
+        size = 1
+        suffix = string[-1]
+        if suffix in sizes:
+            string = string[:-1]
+            size = sizes[suffix]
+        return int(string) * size
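So, for example:

    Config.parse_size("100K")   # 102400
    Config.parse_size("10M")    # 10485760
    Config.parse_size("500")    # 500 (no suffix)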
+
+    @staticmethod
     def abspath(file_path):
         return os.path.abspath(file_path) if file_path else file_path
 
@@ -50,8 +60,9 @@ class Config(object):
             )
         return cls.abspath(file_path)
 
-    @staticmethod
-    def ensure_directory(dir_path):
+    @classmethod
+    def ensure_directory(cls, dir_path):
+        dir_path = cls.abspath(dir_path)
         if not os.path.exists(dir_path):
             os.makedirs(dir_path)
         if not os.path.isdir(dir_path):
diff --git a/synapse/config/database.py b/synapse/config/database.py
index daa161c952..87efe54645 100644
--- a/synapse/config/database.py
+++ b/synapse/config/database.py
@@ -24,6 +24,7 @@ class DatabaseConfig(Config):
             self.database_path = ":memory:"
         else:
             self.database_path = self.abspath(args.database_path)
+        self.event_cache_size = self.parse_size(args.event_cache_size)
 
     @classmethod
     def add_arguments(cls, parser):
@@ -33,6 +34,10 @@ class DatabaseConfig(Config):
             "-d", "--database-path", default="homeserver.db",
             help="The database name."
         )
+        db_group.add_argument(
+            "--event-cache-size", default="100K",
+            help="Number of events to cache in memory."
+        )
 
     @classmethod
     def generate_config(cls, args, config_dir_path):
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index b0fe217459..c024535f52 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -22,11 +22,12 @@ from .repository import ContentRepositoryConfig
 from .captcha import CaptchaConfig
 from .email import EmailConfig
 from .voip import VoipConfig
+from .registration import RegistrationConfig
 
 
 class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
                        RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
-                       EmailConfig, VoipConfig):
+                       EmailConfig, VoipConfig, RegistrationConfig,):
     pass
 
 
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index f9568ebd21..63c8e36930 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -18,6 +18,7 @@ from synapse.util.logcontext import LoggingContextFilter
 from twisted.python.log import PythonLoggingObserver
 import logging
 import logging.config
+import yaml
 
 
 class LoggingConfig(Config):
@@ -79,7 +80,8 @@ class LoggingConfig(Config):
             logger.addHandler(handler)
             logger.info("Test")
         else:
-            logging.config.fileConfig(self.log_config)
+            with open(self.log_config, 'r') as f:
+                logging.config.dictConfig(yaml.load(f))
 
         observer = PythonLoggingObserver()
         observer.start()
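A minimal YAML config accepted by the new dictConfig path (handler and formatter names invented):

    version: 1
    formatters:
      precise:
        format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    handlers:
      console:
        class: logging.StreamHandler
        formatter: precise
    root:
      level: INFO
      handlers: [console]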
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 17c7e64ce7..862c07ef8c 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -22,6 +22,12 @@ class RatelimitConfig(Config):
         self.rc_messages_per_second = args.rc_messages_per_second
         self.rc_message_burst_count = args.rc_message_burst_count
 
+        self.federation_rc_window_size = args.federation_rc_window_size
+        self.federation_rc_sleep_limit = args.federation_rc_sleep_limit
+        self.federation_rc_sleep_delay = args.federation_rc_sleep_delay
+        self.federation_rc_reject_limit = args.federation_rc_reject_limit
+        self.federation_rc_concurrent = args.federation_rc_concurrent
+
     @classmethod
     def add_arguments(cls, parser):
         super(RatelimitConfig, cls).add_arguments(parser)
@@ -34,3 +40,33 @@ class RatelimitConfig(Config):
             "--rc-message-burst-count", type=float, default=10,
             help="number of message a client can send before being throttled"
         )
+
+        rc_group.add_argument(
+            "--federation-rc-window-size", type=int, default=10000,
+            help="The federation window size in milliseconds",
+        )
+
+        rc_group.add_argument(
+            "--federation-rc-sleep-limit", type=int, default=10,
+            help="The number of federation requests from a single server"
+                 " in a window before the server will delay processing the"
+                 " request.",
+        )
+
+        rc_group.add_argument(
+            "--federation-rc-sleep-delay", type=int, default=500,
+            help="The duration in milliseconds to delay processing events from"
+                 " remote servers by if they go over the sleep limit.",
+        )
+
+        rc_group.add_argument(
+            "--federation-rc-reject-limit", type=int, default=50,
+            help="The maximum number of concurrent federation requests allowed"
+                 " from a single server",
+        )
+
+        rc_group.add_argument(
+            "--federation-rc-concurrent", type=int, default=3,
+            help="The number of federation requests to concurrently process"
+                 " from a single server",
+        )
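With the defaults above: within each 10s window, requests from a single server are delayed by 500ms once it exceeds 10, rejected outright past 50, and at most 3 are processed concurrently. Illustrative flags (the rest of the command line omitted):

    python -m synapse.app.homeserver \
        --federation-rc-window-size 10000 \
        --federation-rc-sleep-limit 10 \
        --federation-rc-sleep-delay 500 \
        --federation-rc-reject-limit 50 \
        --federation-rc-concurrent 3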
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
new file mode 100644
index 0000000000..cca8ab5676
--- /dev/null
+++ b/synapse/config/registration.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import Config
+
+
+class RegistrationConfig(Config):
+
+    def __init__(self, args):
+        super(RegistrationConfig, self).__init__(args)
+        self.disable_registration = args.disable_registration
+
+    @classmethod
+    def add_arguments(cls, parser):
+        super(RegistrationConfig, cls).add_arguments(parser)
+        reg_group = parser.add_argument_group("registration")
+        reg_group.add_argument(
+            "--disable-registration",
+            action='store_true',
+            help="Disable registration of new users."
+        )
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 31e44cc857..b042d4eed9 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -30,7 +30,7 @@ class ServerConfig(Config):
         self.pid_file = self.abspath(args.pid_file)
         self.webclient = True
         self.manhole = args.manhole
-        self.no_tls = args.no_tls
+        self.soft_file_limit = args.soft_file_limit
 
         if not args.content_addr:
             host = args.server_name
@@ -75,8 +75,12 @@ class ServerConfig(Config):
         server_group.add_argument("--content-addr", default=None,
                                   help="The host and scheme to use for the "
                                   "content repository")
-        server_group.add_argument("--no-tls", action='store_true',
-                                  help="Don't bind to the https port.")
+        server_group.add_argument("--soft-file-limit", type=int, default=0,
+                                  help="Set the soft limit on the number of "
+                                       "file descriptors synapse can use. "
+                                       "Zero is used to indicate synapse "
+                                       "should set the soft limit to the hard"
+                                       "limit.")
 
     def read_signing_key(self, signing_key_path):
         signing_keys = self.read_file(signing_key_path, "signing_key")
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index 384b29e7ba..034f9a7bf0 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -28,9 +28,16 @@ class TlsConfig(Config):
         self.tls_certificate = self.read_tls_certificate(
             args.tls_certificate_path
         )
-        self.tls_private_key = self.read_tls_private_key(
-            args.tls_private_key_path
-        )
+
+        self.no_tls = args.no_tls
+
+        if self.no_tls:
+            self.tls_private_key = None
+        else:
+            self.tls_private_key = self.read_tls_private_key(
+                args.tls_private_key_path
+            )
+
         self.tls_dh_params_path = self.check_file(
             args.tls_dh_params_path, "tls_dh_params"
         )
@@ -45,6 +52,8 @@ class TlsConfig(Config):
                                help="PEM encoded private key for TLS")
         tls_group.add_argument("--tls-dh-params-path",
                                help="PEM dh parameters for ephemeral keys")
+        tls_group.add_argument("--no-tls", action='store_true',
+                               help="Don't bind to the https port.")
 
     def read_tls_certificate(self, cert_path):
         cert_pem = self.read_file(cert_path, "tls_certificate")
diff --git a/synapse/config/voip.py b/synapse/config/voip.py
index a2b822719f..65162d21b7 100644
--- a/synapse/config/voip.py
+++ b/synapse/config/voip.py
@@ -28,7 +28,7 @@ class VoipConfig(Config):
         super(VoipConfig, cls).add_arguments(parser)
         group = parser.add_argument_group("voip")
         group.add_argument(
-            "--turn-uris", type=str, default=None,
+            "--turn-uris", type=str, default=None, action='append',
             help="The public URIs of the TURN server to give to clients"
         )
         group.add_argument(
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 24d4abf3e9..2f8618a0df 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -38,7 +38,10 @@ class ServerContextFactory(ssl.ContextFactory):
             logger.exception("Failed to enable eliptic curve for TLS")
         context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
         context.use_certificate(config.tls_certificate)
-        context.use_privatekey(config.tls_private_key)
+
+        if not config.no_tls:
+            context.use_privatekey(config.tls_private_key)
+
         context.load_tmp_dh(config.tls_dh_params_path)
         context.set_cipher_list("!ADH:HIGH+kEDH:!AECDH:HIGH+kEECDH")
 
diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py
index 9c910fa3fc..74008347c3 100644
--- a/synapse/crypto/keyclient.py
+++ b/synapse/crypto/keyclient.py
@@ -19,7 +19,7 @@ from twisted.internet.protocol import Factory
 from twisted.internet import defer, reactor
 from synapse.http.endpoint import matrix_federation_endpoint
 from synapse.util.logcontext import PreserveLoggingContext
-import json
+import simplejson as json
 import logging
 
 
@@ -61,9 +61,11 @@ class SynapseKeyClientProtocol(HTTPClient):
 
     def __init__(self):
         self.remote_key = defer.Deferred()
+        self.host = None
 
     def connectionMade(self):
-        logger.debug("Connected to %s", self.transport.getHost())
+        self.host = self.transport.getHost()
+        logger.debug("Connected to %s", self.host)
         self.sendCommand(b"GET", b"/_matrix/key/v1/")
         self.endHeaders()
         self.timer = reactor.callLater(
@@ -73,7 +75,7 @@ class SynapseKeyClientProtocol(HTTPClient):
 
     def handleStatus(self, version, status, message):
         if status != b"200":
-            #logger.info("Non-200 response from %s: %s %s",
+            # logger.info("Non-200 response from %s: %s %s",
             #            self.transport.getHost(), status, message)
             self.transport.abortConnection()
 
@@ -81,7 +83,7 @@ class SynapseKeyClientProtocol(HTTPClient):
         try:
             json_response = json.loads(response_body_bytes)
         except ValueError:
-            #logger.info("Invalid JSON response from %s",
+            # logger.info("Invalid JSON response from %s",
             #            self.transport.getHost())
             self.transport.abortConnection()
             return
@@ -92,8 +94,7 @@ class SynapseKeyClientProtocol(HTTPClient):
         self.timer.cancel()
 
     def on_timeout(self):
-        logger.debug("Timeout waiting for response from %s",
-                     self.transport.getHost())
+        logger.debug("Timeout waiting for response from %s", self.host)
         self.remote_key.errback(IOError("Timeout waiting for response"))
         self.transport.abortConnection()
 
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 3fb99f7125..f4db7b8a05 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -22,6 +22,8 @@ from syutil.crypto.signing_key import (
 from syutil.base64util import decode_base64, encode_base64
 from synapse.api.errors import SynapseError, Codes
 
+from synapse.util.retryutils import get_retry_limiter
+
 from OpenSSL import crypto
 
 import logging
@@ -48,18 +50,27 @@ class Keyring(object):
             )
         try:
             verify_key = yield self.get_server_verify_key(server_name, key_ids)
-        except IOError:
+        except IOError as e:
+            logger.warn(
+                "Got IOError when downloading keys for %s: %s %s",
+                server_name, type(e).__name__, str(e.message),
+            )
             raise SynapseError(
                 502,
                 "Error downloading keys for %s" % (server_name,),
                 Codes.UNAUTHORIZED,
             )
-        except:
+        except Exception as e:
+            logger.warn(
+                "Got Exception when downloading keys for %s: %s %s",
+                server_name, type(e).__name__, str(e.message),
+            )
             raise SynapseError(
                 401,
                 "No key for %s with id %s" % (server_name, key_ids),
                 Codes.UNAUTHORIZED,
             )
+
         try:
             verify_signed_json(json_object, server_name, verify_key)
         except:
@@ -87,12 +98,18 @@ class Keyring(object):
             return
 
         # Try to fetch the key from the remote server.
-        # TODO(markjh): Ratelimit requests to a given server.
 
-        (response, tls_certificate) = yield fetch_server_key(
-            server_name, self.hs.tls_context_factory
+        limiter = yield get_retry_limiter(
+            server_name,
+            self.clock,
+            self.store,
         )
 
+        with limiter:
+            (response, tls_certificate) = yield fetch_server_key(
+                server_name, self.hs.tls_context_factory
+            )
+
         # Check the response.
 
         x509_certificate_bytes = crypto.dump_certificate(
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 4252e5ab5c..64e08223b0 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -13,12 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse.util.frozenutils import freeze, unfreeze
+from synapse.util.frozenutils import freeze
 
 
 class _EventInternalMetadata(object):
     def __init__(self, internal_metadata_dict):
-        self.__dict__ = internal_metadata_dict
+        self.__dict__ = dict(internal_metadata_dict)
 
     def get_dict(self):
         return dict(self.__dict__)
@@ -77,7 +77,7 @@ class EventBase(object):
         return self.content["membership"]
 
     def is_state(self):
-        return hasattr(self, "state_key")
+        return hasattr(self, "state_key") and self.state_key is not None
 
     def get_dict(self):
         d = dict(self._event_dict)
@@ -140,10 +140,6 @@ class FrozenEvent(EventBase):
 
         return e
 
-    def get_dict(self):
-        # We need to unfreeze what we return
-        return unfreeze(super(FrozenEvent, self).get_dict())
-
     def __str__(self):
         return self.__repr__()
 
diff --git a/synapse/events/builder.py b/synapse/events/builder.py
index a9b1b99a10..9d45bdb892 100644
--- a/synapse/events/builder.py
+++ b/synapse/events/builder.py
@@ -23,14 +23,15 @@ import copy
 
 
 class EventBuilder(EventBase):
-    def __init__(self, key_values={}):
+    def __init__(self, key_values={}, internal_metadata_dict={}):
         signatures = copy.deepcopy(key_values.pop("signatures", {}))
         unsigned = copy.deepcopy(key_values.pop("unsigned", {}))
 
         super(EventBuilder, self).__init__(
             key_values,
             signatures=signatures,
-            unsigned=unsigned
+            unsigned=unsigned,
+            internal_metadata_dict=internal_metadata_dict,
         )
 
     def build(self):
diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py
index 6bbba8d6ba..7e98bdef28 100644
--- a/synapse/events/snapshot.py
+++ b/synapse/events/snapshot.py
@@ -20,3 +20,4 @@ class EventContext(object):
         self.current_state = current_state
         self.auth_events = auth_events
         self.state_group = None
+        self.rejected = False
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index bcb5457278..1aa952150e 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -45,12 +45,14 @@ def prune_event(event):
         "membership",
     ]
 
+    event_dict = event.get_dict()
+
     new_content = {}
 
     def add_fields(*fields):
         for field in fields:
             if field in event.content:
-                new_content[field] = event.content[field]
+                new_content[field] = event_dict["content"][field]
 
     if event_type == EventTypes.Member:
         add_fields("membership")
@@ -75,7 +77,7 @@ def prune_event(event):
 
     allowed_fields = {
         k: v
-        for k, v in event.get_dict().items()
+        for k, v in event_dict.items()
         if k in allowed_keys
     }
 
@@ -86,56 +88,78 @@ def prune_event(event):
     if "age_ts" in event.unsigned:
         allowed_fields["unsigned"]["age_ts"] = event.unsigned["age_ts"]
 
-    return type(event)(allowed_fields)
+    return type(event)(
+        allowed_fields,
+        internal_metadata_dict=event.internal_metadata.get_dict()
+    )
+
+
+def format_event_raw(d):
+    return d
+
+
+def format_event_for_client_v1(d):
+    d["user_id"] = d.pop("sender", None)
+
+    move_keys = ("age", "redacted_because", "replaces_state", "prev_content")
+    for key in move_keys:
+        if key in d["unsigned"]:
+            d[key] = d["unsigned"][key]
+
+    drop_keys = (
+        "auth_events", "prev_events", "hashes", "signatures", "depth",
+        "unsigned", "origin", "prev_state"
+    )
+    for key in drop_keys:
+        d.pop(key, None)
+    return d
+
+
+def format_event_for_client_v2(d):
+    drop_keys = (
+        "auth_events", "prev_events", "hashes", "signatures", "depth",
+        "origin", "prev_state",
+    )
+    for key in drop_keys:
+        d.pop(key, None)
+    return d
+
+
+def format_event_for_client_v2_without_event_id(d):
+    d = format_event_for_client_v2(d)
+    d.pop("room_id", None)
+    d.pop("event_id", None)
+    return d
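+
+
+# An illustrative sketch of the v1 formatting above, using a hypothetical
+# event dict: "sender" is renamed to "user_id", selected keys are lifted out
+# of "unsigned", and federation-internal keys are dropped:
+#
+#     d = {"sender": "@a:hs", "unsigned": {"age": 100}, "hashes": {}}
+#     format_event_for_client_v1(d)  # -> {"user_id": "@a:hs", "age": 100}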
 
 
-def serialize_event(hs, e, client_event=True):
+def serialize_event(e, time_now_ms, as_client_event=True,
+                    event_format=format_event_for_client_v1,
+                    token_id=None):
     # FIXME(erikj): To handle the case of presence events and the like
     if not isinstance(e, EventBase):
         return e
 
+    time_now_ms = int(time_now_ms)
+
     # Should this strip out None's?
     d = {k: v for k, v in e.get_dict().items()}
 
-    if not client_event:
-        # set the age and keep all other keys
-        if "age_ts" in d["unsigned"]:
-            now = int(hs.get_clock().time_msec())
-            d["unsigned"]["age"] = now - d["unsigned"]["age_ts"]
-        return d
-
     if "age_ts" in d["unsigned"]:
-        now = int(hs.get_clock().time_msec())
-        d["age"] = now - d["unsigned"]["age_ts"]
+        d["unsigned"]["age"] = time_now_ms - d["unsigned"]["age_ts"]
         del d["unsigned"]["age_ts"]
 
-    d["user_id"] = d.pop("sender", None)
-
     if "redacted_because" in e.unsigned:
-        d["redacted_because"] = serialize_event(
-            hs, e.unsigned["redacted_because"]
+        d["unsigned"]["redacted_because"] = serialize_event(
+            e.unsigned["redacted_because"], time_now_ms
         )
 
-        del d["unsigned"]["redacted_because"]
+    if token_id is not None:
+        if token_id == getattr(e.internal_metadata, "token_id", None):
+            txn_id = getattr(e.internal_metadata, "txn_id", None)
+            if txn_id is not None:
+                d["unsigned"]["transaction_id"] = txn_id
 
-    if "redacted_by" in e.unsigned:
-        d["redacted_by"] = e.unsigned["redacted_by"]
-        del d["unsigned"]["redacted_by"]
-
-    if "replaces_state" in e.unsigned:
-        d["replaces_state"] = e.unsigned["replaces_state"]
-        del d["unsigned"]["replaces_state"]
-
-    if "prev_content" in e.unsigned:
-        d["prev_content"] = e.unsigned["prev_content"]
-        del d["unsigned"]["prev_content"]
-
-    del d["auth_events"]
-    del d["prev_events"]
-    del d["hashes"]
-    del d["signatures"]
-    d.pop("depth", None)
-    d.pop("unsigned", None)
-    d.pop("origin", None)
-
-    return d
+    if as_client_event:
+        return event_format(d)
+    else:
+        return d
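+
+
+# A usage sketch under stated assumptions (hypothetical event `ev`, clock and
+# token id): the caller picks the wire format per API version and passes the
+# token_id of the requesting access token so the sender sees their own
+# transaction_id:
+#
+#     d = serialize_event(
+#         ev, clock.time_msec(),
+#         as_client_event=True,
+#         event_format=format_event_for_client_v2,
+#         token_id=requester_token_id,
+#     )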
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
new file mode 100644
index 0000000000..21a763214b
--- /dev/null
+++ b/synapse/federation/federation_base.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from twisted.internet import defer
+
+from synapse.events.utils import prune_event
+
+from syutil.jsonutil import encode_canonical_json
+
+from synapse.crypto.event_signing import check_event_content_hash
+
+from synapse.api.errors import SynapseError
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class FederationBase(object):
+    @defer.inlineCallbacks
+    def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False):
+        """Takes a list of PDUs and checks the signatures and hashs of each
+        one. If a PDU fails its signature check then we check if we have it in
+        the database and if not then request if from the originating server of
+        that PDU.
+
+        If a PDU fails its content hash check then it is redacted.
+
+        The given list of PDUs are not modified, instead the function returns
+        a new list.
+
+        Args:
+            origin (str)
+            pdus (list)
+            outlier (bool)
+
+        Returns:
+            Deferred: A list of PDUs that have valid signatures and hashes.
+        """
+
+        signed_pdus = []
+
+        @defer.inlineCallbacks
+        def do(pdu):
+            try:
+                new_pdu = yield self._check_sigs_and_hash(pdu)
+                signed_pdus.append(new_pdu)
+            except SynapseError:
+                # FIXME: We should handle signature failures more gracefully.
+
+                # Check local db.
+                new_pdu = yield self.store.get_event(
+                    pdu.event_id,
+                    allow_rejected=True,
+                    allow_none=True,
+                )
+                if new_pdu:
+                    signed_pdus.append(new_pdu)
+                    return
+
+                # Check pdu.origin
+                if pdu.origin != origin:
+                    try:
+                        new_pdu = yield self.get_pdu(
+                            destinations=[pdu.origin],
+                            event_id=pdu.event_id,
+                            outlier=outlier,
+                        )
+
+                        if new_pdu:
+                            signed_pdus.append(new_pdu)
+                            return
+                    except:
+                        pass
+
+                logger.warn(
+                    "Failed to find copy of %s with valid signature",
+                    pdu.event_id,
+                )
+
+        yield defer.gatherResults(
+            [do(pdu) for pdu in pdus],
+            consumeErrors=True
+        )
+
+        defer.returnValue(signed_pdus)
+
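+    # A minimal caller sketch (hypothetical wiring): given PDUs received from
+    # `origin`, keep only those that verify, falling back first to the local
+    # store and then to each PDU's own origin server:
+    #
+    #     signed = yield self._check_sigs_and_hash_and_fetch(
+    #         origin, incoming_pdus, outlier=True,
+    #     )
+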
+    @defer.inlineCallbacks
+    def _check_sigs_and_hash(self, pdu):
+        """Throws a SynapseError if the PDU does not have the correct
+        signatures.
+
+        Returns:
+            FrozenEvent: Either the given event, or a redacted copy of it if
+            it failed the content hash check.
+        """
+        # Check signatures are correct.
+        redacted_event = prune_event(pdu)
+        redacted_pdu_json = redacted_event.get_pdu_json()
+
+        try:
+            yield self.keyring.verify_json_for_server(
+                pdu.origin, redacted_pdu_json
+            )
+        except SynapseError:
+            logger.warn(
+                "Signature check failed for %s redacted to %s",
+                encode_canonical_json(pdu.get_pdu_json()),
+                encode_canonical_json(redacted_pdu_json),
+            )
+            raise
+
+        if not check_event_content_hash(pdu):
+            logger.warn(
+                "Event content has been tampered, redacting %s, %s",
+                pdu.event_id, encode_canonical_json(pdu.get_dict())
+            )
+            defer.returnValue(redacted_event)
+
+        defer.returnValue(pdu)
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
new file mode 100644
index 0000000000..f131941f45
--- /dev/null
+++ b/synapse/federation/federation_client.py
@@ -0,0 +1,563 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from twisted.internet import defer
+
+from .federation_base import FederationBase
+from .units import Edu
+
+from synapse.api.errors import (
+    CodeMessageException, HttpResponseException, SynapseError,
+)
+from synapse.util.expiringcache import ExpiringCache
+from synapse.util.logutils import log_function
+from synapse.events import FrozenEvent
+
+from synapse.util.retryutils import get_retry_limiter, NotRetryingDestination
+
+import itertools
+import logging
+import random
+
+
+logger = logging.getLogger(__name__)
+
+
+class FederationClient(FederationBase):
+    def __init__(self):
+        self._get_pdu_cache = None
+
+    def start_get_pdu_cache(self):
+        self._get_pdu_cache = ExpiringCache(
+            cache_name="get_pdu_cache",
+            clock=self._clock,
+            max_len=1000,
+            expiry_ms=120*1000,
+            reset_expiry_on_get=False,
+        )
+
+        self._get_pdu_cache.start()
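+
+        # With the settings above a cached PDU lives for at most two minutes,
+        # and reset_expiry_on_get=False means reads do not extend that
+        # lifetime. get_pdu() below populates and consults it roughly as:
+        #
+        #     self._get_pdu_cache[event_id] = pdu
+        #     cached = self._get_pdu_cache.get(event_id)  # falsy on a miss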
+
+    @log_function
+    def send_pdu(self, pdu, destinations):
+        """Informs the replication layer about a new PDU generated within the
+        home server that should be transmitted to others.
+
+        TODO: Figure out when we should actually resolve the deferred.
+
+        Args:
+            pdu (Pdu): The new Pdu.
+
+        Returns:
+            Deferred: Completes when we have successfully processed the PDU
+            and replicated it to any interested remote home servers.
+        """
+        order = self._order
+        self._order += 1
+
+        logger.debug("[%s] transaction_layer.enqueue_pdu... ", pdu.event_id)
+
+        # TODO, add errback, etc.
+        self._transaction_queue.enqueue_pdu(pdu, destinations, order)
+
+        logger.debug(
+            "[%s] transaction_layer.enqueue_pdu... done",
+            pdu.event_id
+        )
+
+    @log_function
+    def send_edu(self, destination, edu_type, content):
+        edu = Edu(
+            origin=self.server_name,
+            destination=destination,
+            edu_type=edu_type,
+            content=content,
+        )
+
+        # TODO, add errback, etc.
+        self._transaction_queue.enqueue_edu(edu)
+        return defer.succeed(None)
+
+    @log_function
+    def send_failure(self, failure, destination):
+        self._transaction_queue.enqueue_failure(failure, destination)
+        return defer.succeed(None)
+
+    @log_function
+    def make_query(self, destination, query_type, args,
+                   retry_on_dns_fail=True):
+        """Sends a federation Query to a remote homeserver of the given type
+        and arguments.
+
+        Args:
+            destination (str): Domain name of the remote homeserver
+            query_type (str): Category of the query type; should match the
+                handler name used in register_query_handler().
+            args (dict): Mapping of strings to strings containing the details
+                of the query request.
+
+        Returns:
+            a Deferred which will eventually yield a JSON object from the
+            response
+        """
+        return self.transport_layer.make_query(
+            destination, query_type, args, retry_on_dns_fail=retry_on_dns_fail
+        )
+
+    @defer.inlineCallbacks
+    @log_function
+    def backfill(self, dest, context, limit, extremities):
+        """Requests some more historic PDUs for the given context from the
+        given destination server.
+
+        Args:
+            dest (str): The remote home server to ask.
+            context (str): The context to backfill.
+            limit (int): The maximum number of PDUs to return.
+            extremities (list): List of PDU ids and origins of the first PDUs
+                we have seen from the context.
+
+        Returns:
+            Deferred: Results in the received PDUs.
+        """
+        logger.debug("backfill extrem=%s", extremities)
+
+        # If there are no extremities then we've (probably) reached the start.
+        if not extremities:
+            return
+
+        transaction_data = yield self.transport_layer.backfill(
+            dest, context, extremities, limit)
+
+        logger.debug("backfill transaction_data=%s", repr(transaction_data))
+
+        pdus = [
+            self.event_from_pdu_json(p, outlier=False)
+            for p in transaction_data["pdus"]
+        ]
+
+        for i, pdu in enumerate(pdus):
+            pdus[i] = yield self._check_sigs_and_hash(pdu)
+
+            # FIXME: We should handle signature failures more gracefully.
+
+        defer.returnValue(pdus)
+
+    @defer.inlineCallbacks
+    @log_function
+    def get_pdu(self, destinations, event_id, outlier=False):
+        """Requests the PDU with given origin and ID from the remote home
+        servers.
+
+        Will attempt to get the PDU from each destination in the list until
+        one succeeds.
+
+        This will persist the PDU locally upon receipt.
+
+        Args:
+            destinations (list): Which home servers to query
+            event_id (str)
+            outlier (bool): Indicates whether the PDU is an `outlier`, i.e. if
+                it's from an arbitrary point in the context as opposed to part
+                of the current block of PDUs. Defaults to `False`
+
+        Returns:
+            Deferred: Results in the requested PDU.
+        """
+
+        # TODO: Rate limit the number of times we try and get the same event.
+
+        if self._get_pdu_cache:
+            e = self._get_pdu_cache.get(event_id)
+            if e:
+                defer.returnValue(e)
+
+        pdu = None
+        for destination in destinations:
+            try:
+                limiter = yield get_retry_limiter(
+                    destination,
+                    self._clock,
+                    self.store,
+                )
+
+                with limiter:
+                    transaction_data = yield self.transport_layer.get_event(
+                        destination, event_id
+                    )
+
+                    logger.debug("transaction_data %r", transaction_data)
+
+                    pdu_list = [
+                        self.event_from_pdu_json(p, outlier=outlier)
+                        for p in transaction_data["pdus"]
+                    ]
+
+                    if pdu_list:
+                        pdu = pdu_list[0]
+
+                        # Check signatures are correct.
+                        pdu = yield self._check_sigs_and_hash(pdu)
+
+                        break
+
+            except SynapseError as e:
+                logger.info(
+                    "Failed to get PDU %s from %s because %s",
+                    event_id, destination, e,
+                )
+                continue
+            except CodeMessageException as e:
+                if 400 <= e.code < 500:
+                    raise
+
+                logger.info(
+                    "Failed to get PDU %s from %s because %s",
+                    event_id, destination, e,
+                )
+                continue
+            except NotRetryingDestination as e:
+                logger.info(e.message)
+                continue
+            except Exception as e:
+                logger.info(
+                    "Failed to get PDU %s from %s because %s",
+                    event_id, destination, e,
+                )
+                continue
+
+        if self._get_pdu_cache is not None:
+            self._get_pdu_cache[event_id] = pdu
+
+        defer.returnValue(pdu)
+
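+    # e.g. (hypothetical): try each listed server in turn, subject to the
+    # per-destination retry limiter, returning the first copy that passes the
+    # signature and hash checks:
+    #
+    #     pdu = yield self.get_pdu(
+    #         destinations=["hs1.example", "hs2.example"],
+    #         event_id="$someevent:hs1.example",
+    #     )
+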
+    @defer.inlineCallbacks
+    @log_function
+    def get_state_for_room(self, destination, room_id, event_id):
+        """Requests all of the `current` state PDUs for a given room from
+        a remote home server.
+
+        Args:
+            destination (str): The remote homeserver to query for the state.
+            room_id (str): The id of the room we're interested in.
+            event_id (str): The id of the event we want the state at.
+
+        Returns:
+            Deferred: Results in a list of PDUs.
+        """
+
+        result = yield self.transport_layer.get_room_state(
+            destination, room_id, event_id=event_id,
+        )
+
+        pdus = [
+            self.event_from_pdu_json(p, outlier=True) for p in result["pdus"]
+        ]
+
+        auth_chain = [
+            self.event_from_pdu_json(p, outlier=True)
+            for p in result.get("auth_chain", [])
+        ]
+
+        signed_pdus = yield self._check_sigs_and_hash_and_fetch(
+            destination, pdus, outlier=True
+        )
+
+        signed_auth = yield self._check_sigs_and_hash_and_fetch(
+            destination, auth_chain, outlier=True
+        )
+
+        signed_auth.sort(key=lambda e: e.depth)
+
+        defer.returnValue((signed_pdus, signed_auth))
+
+    @defer.inlineCallbacks
+    @log_function
+    def get_event_auth(self, destination, room_id, event_id):
+        res = yield self.transport_layer.get_event_auth(
+            destination, room_id, event_id,
+        )
+
+        auth_chain = [
+            self.event_from_pdu_json(p, outlier=True)
+            for p in res["auth_chain"]
+        ]
+
+        signed_auth = yield self._check_sigs_and_hash_and_fetch(
+            destination, auth_chain, outlier=True
+        )
+
+        signed_auth.sort(key=lambda e: e.depth)
+
+        defer.returnValue(signed_auth)
+
+    @defer.inlineCallbacks
+    def make_join(self, destinations, room_id, user_id):
+        for destination in destinations:
+            try:
+                ret = yield self.transport_layer.make_join(
+                    destination, room_id, user_id
+                )
+
+                pdu_dict = ret["event"]
+
+                logger.debug("Got response to make_join: %s", pdu_dict)
+
+                defer.returnValue(
+                    (destination, self.event_from_pdu_json(pdu_dict))
+                )
+                break
+            except CodeMessageException:
+                raise
+            except Exception as e:
+                logger.warn(
+                    "Failed to make_join via %s: %s",
+                    destination, e.message
+                )
+
+        raise RuntimeError("Failed to send to any server.")
+
+    @defer.inlineCallbacks
+    def send_join(self, destinations, pdu):
+        for destination in destinations:
+            try:
+                time_now = self._clock.time_msec()
+                _, content = yield self.transport_layer.send_join(
+                    destination=destination,
+                    room_id=pdu.room_id,
+                    event_id=pdu.event_id,
+                    content=pdu.get_pdu_json(time_now),
+                )
+
+                logger.debug("Got content: %s", content)
+
+                state = [
+                    self.event_from_pdu_json(p, outlier=True)
+                    for p in content.get("state", [])
+                ]
+
+                auth_chain = [
+                    self.event_from_pdu_json(p, outlier=True)
+                    for p in content.get("auth_chain", [])
+                ]
+
+                signed_state = yield self._check_sigs_and_hash_and_fetch(
+                    destination, state, outlier=True
+                )
+
+                signed_auth = yield self._check_sigs_and_hash_and_fetch(
+                    destination, auth_chain, outlier=True
+                )
+
+                signed_auth.sort(key=lambda e: e.depth)
+
+                defer.returnValue({
+                    "state": signed_state,
+                    "auth_chain": signed_auth,
+                    "origin": destination,
+                })
+            except CodeMessageException:
+                raise
+            except Exception as e:
+                logger.warn(
+                    "Failed to send_join via %s: %s",
+                    destination, e.message
+                )
+
+        raise RuntimeError("Failed to send to any server.")
+
+    @defer.inlineCallbacks
+    def send_invite(self, destination, room_id, event_id, pdu):
+        time_now = self._clock.time_msec()
+        code, content = yield self.transport_layer.send_invite(
+            destination=destination,
+            room_id=room_id,
+            event_id=event_id,
+            content=pdu.get_pdu_json(time_now),
+        )
+
+        pdu_dict = content["event"]
+
+        logger.debug("Got response to send_invite: %s", pdu_dict)
+
+        pdu = self.event_from_pdu_json(pdu_dict)
+
+        # Check signatures are correct.
+        pdu = yield self._check_sigs_and_hash(pdu)
+
+        # FIXME: We should handle signature failures more gracefully.
+
+        defer.returnValue(pdu)
+
+    @defer.inlineCallbacks
+    def query_auth(self, destination, room_id, event_id, local_auth):
+        """
+        Args:
+            destination (str)
+            room_id (str)
+            event_id (str)
+            local_auth (list)
+        """
+        time_now = self._clock.time_msec()
+
+        send_content = {
+            "auth_chain": [e.get_pdu_json(time_now) for e in local_auth],
+        }
+
+        code, content = yield self.transport_layer.send_query_auth(
+            destination=destination,
+            room_id=room_id,
+            event_id=event_id,
+            content=send_content,
+        )
+
+        auth_chain = [
+            self.event_from_pdu_json(e)
+            for e in content["auth_chain"]
+        ]
+
+        signed_auth = yield self._check_sigs_and_hash_and_fetch(
+            destination, auth_chain, outlier=True
+        )
+
+        signed_auth.sort(key=lambda e: e.depth)
+
+        ret = {
+            "auth_chain": signed_auth,
+            "rejects": content.get("rejects", []),
+            "missing": content.get("missing", []),
+        }
+
+        defer.returnValue(ret)
+
+    @defer.inlineCallbacks
+    def get_missing_events(self, destination, room_id, earliest_events_ids,
+                           latest_events, limit, min_depth):
+        """Tries to fetch events we are missing. This is called when we receive
+        an event without having received all of its ancestors.
+
+        Args:
+            destination (str)
+            room_id (str)
+            earliest_events_ids (list): List of event ids. Effectively the
+                events we expected to receive, but haven't. `get_missing_events`
+                should only return events that come after these.
+            latest_events (list): List of events we have received that we don't
+                have all previous events for.
+            limit (int): Maximum number of events to return.
+            min_depth (int): Minimum depth of events to return.
+        """
+        try:
+            content = yield self.transport_layer.get_missing_events(
+                destination=destination,
+                room_id=room_id,
+                earliest_events=earliest_events_ids,
+                latest_events=[e.event_id for e in latest_events],
+                limit=limit,
+                min_depth=min_depth,
+            )
+
+            events = [
+                self.event_from_pdu_json(e)
+                for e in content.get("events", [])
+            ]
+
+            signed_events = yield self._check_sigs_and_hash_and_fetch(
+                destination, events, outlier=True
+            )
+
+            have_gotten_all_from_destination = True
+        except HttpResponseException as e:
+            if e.code != 400:
+                raise
+
+            # We are probably hitting an old server that doesn't support
+            # get_missing_events
+            signed_events = []
+            have_gotten_all_from_destination = False
+
+        if len(signed_events) >= limit:
+            defer.returnValue(signed_events)
+
+        servers = yield self.store.get_joined_hosts_for_room(room_id)
+
+        servers = set(servers)
+        servers.discard(self.server_name)
+
+        failed_to_fetch = set()
+
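+        # Walk the prev_events graph backwards from what we already have: any
+        # prev_event referenced by an event deeper than min_depth that we have
+        # neither seen nor already failed to fetch is requested from a
+        # shuffled list of the room's other servers, until we reach `limit` or
+        # run out of gaps.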
+        while len(signed_events) < limit:
+            # Are we missing any?
+
+            seen_events = set(earliest_events_ids)
+            seen_events.update(e.event_id for e in signed_events)
+
+            missing_events = {}
+            for e in itertools.chain(latest_events, signed_events):
+                if e.depth > min_depth:
+                    missing_events.update({
+                        e_id: e.depth for e_id, _ in e.prev_events
+                        if e_id not in seen_events
+                        and e_id not in failed_to_fetch
+                    })
+
+            if not missing_events:
+                break
+
+            have_seen = yield self.store.have_events(missing_events)
+
+            for k in have_seen:
+                missing_events.pop(k, None)
+
+            if not missing_events:
+                break
+
+            # Okay, we haven't gotten everything yet. Let's get them.
+            ordered_missing = sorted(missing_events.items(), key=lambda x: x[0])
+
+            if have_gotten_all_from_destination:
+                servers.discard(destination)
+
+            def random_server_list():
+                srvs = list(servers)
+                random.shuffle(srvs)
+                return srvs
+
+            deferreds = [
+                self.get_pdu(
+                    destinations=random_server_list(),
+                    event_id=e_id,
+                )
+                for e_id, depth in ordered_missing[:limit - len(signed_events)]
+            ]
+
+            res = yield defer.DeferredList(deferreds, consumeErrors=True)
+            for (result, val), (e_id, _) in zip(res, ordered_missing):
+                if result:
+                    signed_events.append(val)
+                else:
+                    failed_to_fetch.add(e_id)
+
+        defer.returnValue(signed_events)
+
+    def event_from_pdu_json(self, pdu_json, outlier=False):
+        event = FrozenEvent(
+            pdu_json
+        )
+
+        event.internal_metadata.outlier = outlier
+
+        return event
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
new file mode 100644
index 0000000000..9c7dcdba96
--- /dev/null
+++ b/synapse/federation/federation_server.py
@@ -0,0 +1,474 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from twisted.internet import defer
+
+from .federation_base import FederationBase
+from .units import Transaction, Edu
+
+from synapse.util.logutils import log_function
+from synapse.util.logcontext import PreserveLoggingContext
+from synapse.events import FrozenEvent
+
+from synapse.api.errors import FederationError, SynapseError
+
+from synapse.crypto.event_signing import compute_event_signature
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class FederationServer(FederationBase):
+    def set_handler(self, handler):
+        """Sets the handler that the replication layer will use to communicate
+        receipt of new PDUs from other home servers. The required methods are
+        documented on :py:class:`.ReplicationHandler`.
+        """
+        self.handler = handler
+
+    def register_edu_handler(self, edu_type, handler):
+        if edu_type in self.edu_handlers:
+            raise KeyError("Already have an EDU handler for %s" % (edu_type,))
+
+        self.edu_handlers[edu_type] = handler
+
+    def register_query_handler(self, query_type, handler):
+        """Sets the handler callable that will be used to handle an incoming
+        federation Query of the given type.
+
+        Args:
+            query_type (str): Category name of the query, which should match
+                the string used by make_query.
+            handler (callable): Invoked to handle incoming queries of this type
+
+        handler is invoked as:
+            result = handler(args)
+
+        where 'args' is a dict mapping strings to strings of the query
+          arguments. It should return a Deferred that will eventually yield an
+          object to encode as JSON.
+        """
+        if query_type in self.query_handlers:
+            raise KeyError(
+                "Already have a Query handler for %s" % (query_type,)
+            )
+
+        self.query_handlers[query_type] = handler
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_backfill_request(self, origin, room_id, versions, limit):
+        pdus = yield self.handler.on_backfill_request(
+            origin, room_id, versions, limit
+        )
+
+        defer.returnValue((200, self._transaction_from_pdus(pdus).get_dict()))
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_incoming_transaction(self, transaction_data):
+        transaction = Transaction(**transaction_data)
+
+        for p in transaction.pdus:
+            if "unsigned" in p:
+                unsigned = p["unsigned"]
+                if "age" in unsigned:
+                    p["age"] = unsigned["age"]
+            if "age" in p:
+                p["age_ts"] = int(self._clock.time_msec()) - int(p["age"])
+                del p["age"]
+
+        pdu_list = [
+            self.event_from_pdu_json(p) for p in transaction.pdus
+        ]
+
+        logger.debug("[%s] Got transaction", transaction.transaction_id)
+
+        response = yield self.transaction_actions.have_responded(transaction)
+
+        if response:
+            logger.debug(
+                "[%s] We've already responed to this request",
+                transaction.transaction_id
+            )
+            defer.returnValue(response)
+            return
+
+        logger.debug("[%s] Transaction is new", transaction.transaction_id)
+
+        with PreserveLoggingContext():
+            results = []
+
+            for pdu in pdu_list:
+                d = self._handle_new_pdu(transaction.origin, pdu)
+
+                try:
+                    yield d
+                    results.append({})
+                except FederationError as e:
+                    self.send_failure(e, transaction.origin)
+                    results.append({"error": str(e)})
+                except Exception as e:
+                    results.append({"error": str(e)})
+                    logger.exception("Failed to handle PDU")
+
+            if hasattr(transaction, "edus"):
+                for edu in [Edu(**x) for x in transaction.edus]:
+                    self.received_edu(
+                        transaction.origin,
+                        edu.edu_type,
+                        edu.content
+                    )
+
+            for failure in getattr(transaction, "pdu_failures", []):
+                logger.info("Got failure %r", failure)
+
+        logger.debug("Returning: %s", str(results))
+
+        response = {
+            "pdus": dict(zip(
+                (p.event_id for p in pdu_list), results
+            )),
+        }
+
+        yield self.transaction_actions.set_response(
+            transaction,
+            200, response
+        )
+        defer.returnValue((200, response))
+
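+    # The per-PDU results above yield a response of the form (illustrative):
+    #
+    #     {"pdus": {"$ev1:hs": {}, "$ev2:hs": {"error": "..."}}}
+    #
+    # which is also recorded via transaction_actions so that a retransmitted
+    # transaction gets the same reply without being reprocessed.
+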
+    def received_edu(self, origin, edu_type, content):
+        if edu_type in self.edu_handlers:
+            self.edu_handlers[edu_type](origin, content)
+        else:
+            logger.warn("Received EDU of type %s with no handler", edu_type)
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_context_state_request(self, origin, room_id, event_id):
+        if event_id:
+            pdus = yield self.handler.get_state_for_pdu(
+                origin, room_id, event_id,
+            )
+            auth_chain = yield self.store.get_auth_chain(
+                [pdu.event_id for pdu in pdus]
+            )
+
+            for event in auth_chain:
+                event.signatures.update(
+                    compute_event_signature(
+                        event,
+                        self.hs.hostname,
+                        self.hs.config.signing_key[0]
+                    )
+                )
+        else:
+            raise NotImplementedError("Specify an event")
+
+        defer.returnValue((200, {
+            "pdus": [pdu.get_pdu_json() for pdu in pdus],
+            "auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
+        }))
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_pdu_request(self, origin, event_id):
+        pdu = yield self._get_persisted_pdu(origin, event_id)
+
+        if pdu:
+            defer.returnValue(
+                (200, self._transaction_from_pdus([pdu]).get_dict())
+            )
+        else:
+            defer.returnValue((404, ""))
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_pull_request(self, origin, versions):
+        raise NotImplementedError("Pull transactions not implemented")
+
+    @defer.inlineCallbacks
+    def on_query_request(self, query_type, args):
+        if query_type in self.query_handlers:
+            response = yield self.query_handlers[query_type](args)
+            defer.returnValue((200, response))
+        else:
+            defer.returnValue(
+                (404, "No handler for Query type '%s'" % (query_type,))
+            )
+
+    @defer.inlineCallbacks
+    def on_make_join_request(self, room_id, user_id):
+        pdu = yield self.handler.on_make_join_request(room_id, user_id)
+        time_now = self._clock.time_msec()
+        defer.returnValue({"event": pdu.get_pdu_json(time_now)})
+
+    @defer.inlineCallbacks
+    def on_invite_request(self, origin, content):
+        pdu = self.event_from_pdu_json(content)
+        ret_pdu = yield self.handler.on_invite_request(origin, pdu)
+        time_now = self._clock.time_msec()
+        defer.returnValue((200, {"event": ret_pdu.get_pdu_json(time_now)}))
+
+    @defer.inlineCallbacks
+    def on_send_join_request(self, origin, content):
+        logger.debug("on_send_join_request: content: %s", content)
+        pdu = self.event_from_pdu_json(content)
+        logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures)
+        res_pdus = yield self.handler.on_send_join_request(origin, pdu)
+        time_now = self._clock.time_msec()
+        defer.returnValue((200, {
+            "state": [p.get_pdu_json(time_now) for p in res_pdus["state"]],
+            "auth_chain": [
+                p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]
+            ],
+        }))
+
+    @defer.inlineCallbacks
+    def on_event_auth(self, origin, room_id, event_id):
+        time_now = self._clock.time_msec()
+        auth_pdus = yield self.handler.on_event_auth(event_id)
+        defer.returnValue((200, {
+            "auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
+        }))
+
+    @defer.inlineCallbacks
+    def on_query_auth_request(self, origin, content, event_id):
+        """
+        Content is a dict with keys::
+            auth_chain (list): A list of events that give the auth chain.
+            missing (list): A list of event_ids indicating what the other
+              side (`origin`) thinks we're missing.
+            rejects (dict): A mapping from event_id to a 2-tuple of reason
+              string and a proof (or None) of why the event was rejected.
+              The keys of this dict give the list of events the `origin` has
+              rejected.
+
+        Args:
+            origin (str)
+            content (dict)
+            event_id (str)
+
+        Returns:
+            Deferred: Results in `dict` with the same format as `content`
+        """
+        auth_chain = [
+            self.event_from_pdu_json(e)
+            for e in content["auth_chain"]
+        ]
+
+        signed_auth = yield self._check_sigs_and_hash_and_fetch(
+            origin, auth_chain, outlier=True
+        )
+
+        ret = yield self.handler.on_query_auth(
+            origin,
+            event_id,
+            signed_auth,
+            content.get("rejects", []),
+            content.get("missing", []),
+        )
+
+        time_now = self._clock.time_msec()
+        send_content = {
+            "auth_chain": [
+                e.get_pdu_json(time_now)
+                for e in ret["auth_chain"]
+            ],
+            "rejects": ret.get("rejects", []),
+            "missing": ret.get("missing", []),
+        }
+
+        defer.returnValue(
+            (200, send_content)
+        )
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_get_missing_events(self, origin, room_id, earliest_events,
+                              latest_events, limit, min_depth):
+        missing_events = yield self.handler.on_get_missing_events(
+            origin, room_id, earliest_events, latest_events, limit, min_depth
+        )
+
+        time_now = self._clock.time_msec()
+
+        defer.returnValue({
+            "events": [ev.get_pdu_json(time_now) for ev in missing_events],
+        })
+
+    @log_function
+    def _get_persisted_pdu(self, origin, event_id, do_auth=True):
+        """ Get a PDU from the database with given origin and id.
+
+        Returns:
+            Deferred: Results in a `Pdu`.
+        """
+        return self.handler.get_persisted_pdu(
+            origin, event_id, do_auth=do_auth
+        )
+
+    def _transaction_from_pdus(self, pdu_list):
+        """Returns a new Transaction containing the given PDUs suitable for
+        transmission.
+        """
+        time_now = self._clock.time_msec()
+        pdus = [p.get_pdu_json(time_now) for p in pdu_list]
+        return Transaction(
+            origin=self.server_name,
+            pdus=pdus,
+            origin_server_ts=int(time_now),
+            destination=None,
+        )
+
+    @defer.inlineCallbacks
+    @log_function
+    def _handle_new_pdu(self, origin, pdu, get_missing=True):
+        # We reprocess pdus when we have seen them only as outliers
+        existing = yield self._get_persisted_pdu(
+            origin, pdu.event_id, do_auth=False
+        )
+
+        # FIXME: Currently we fetch an event again when we already have it
+        # if it has been marked as an outlier.
+
+        already_seen = (
+            existing and (
+                not existing.internal_metadata.is_outlier()
+                or pdu.internal_metadata.is_outlier()
+            )
+        )
+        if already_seen:
+            logger.debug("Already seen pdu %s", pdu.event_id)
+            return
+
+        # Check signature.
+        try:
+            pdu = yield self._check_sigs_and_hash(pdu)
+        except SynapseError as e:
+            raise FederationError(
+                "ERROR",
+                e.code,
+                e.msg,
+                affected=pdu.event_id,
+            )
+
+        state = None
+
+        auth_chain = []
+
+        have_seen = yield self.store.have_events(
+            [ev for ev, _ in pdu.prev_events]
+        )
+
+        fetch_state = False
+
+        # Get missing pdus if necessary.
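+        # If the new PDU references prev_events we have not yet seen, we first
+        # try to fill the gap with get_missing_events(); if gaps remain after
+        # that, we fall back to fetching the full room state at this event.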
+        if not pdu.internal_metadata.is_outlier():
+            # We only backfill backwards to the min depth.
+            min_depth = yield self.handler.get_min_depth_for_context(
+                pdu.room_id
+            )
+
+            logger.debug(
+                "_handle_new_pdu min_depth for %s: %d",
+                pdu.room_id, min_depth
+            )
+
+            prevs = {e_id for e_id, _ in pdu.prev_events}
+            seen = set(have_seen.keys())
+
+            if min_depth and pdu.depth < min_depth:
+                # This is so that we don't notify the user about this
+                # message, to work around the fact that some events will
+                # reference really really old events we really don't want to
+                # send to the clients.
+                pdu.internal_metadata.outlier = True
+            elif min_depth and pdu.depth > min_depth:
+                if get_missing and prevs - seen:
+                    latest_tuples = yield self.store.get_latest_events_in_room(
+                        pdu.room_id
+                    )
+
+                    # We add the prev events that we have seen to the latest
+                    # list to ensure the remote server doesn't give them to us
+                    latest = set(e_id for e_id, _, _ in latest_tuples)
+                    latest |= seen
+
+                    missing_events = yield self.get_missing_events(
+                        origin,
+                        pdu.room_id,
+                        earliest_events_ids=list(latest),
+                        latest_events=[pdu],
+                        limit=10,
+                        min_depth=min_depth,
+                    )
+
+                    # We want to sort these by depth so we process them and
+                    # tell clients about them in order.
+                    missing_events.sort(key=lambda x: x.depth)
+
+                    for e in missing_events:
+                        yield self._handle_new_pdu(
+                            origin,
+                            e,
+                            get_missing=False
+                        )
+
+                    have_seen = yield self.store.have_events(
+                        [ev for ev, _ in pdu.prev_events]
+                    )
+
+            prevs = {e_id for e_id, _ in pdu.prev_events}
+            seen = set(have_seen.keys())
+            if prevs - seen:
+                fetch_state = True
+
+        if fetch_state:
+            # We need to get the state at this event, since we haven't
+            # processed all the prev events.
+            logger.debug(
+                "_handle_new_pdu getting state for %s",
+                pdu.room_id
+            )
+            try:
+                state, auth_chain = yield self.get_state_for_room(
+                    origin, pdu.room_id, pdu.event_id,
+                )
+            except:
+                logger.warn("Failed to get state for event: %s", pdu.event_id)
+
+        yield self.handler.on_receive_pdu(
+            origin,
+            pdu,
+            backfilled=False,
+            state=state,
+            auth_chain=auth_chain,
+        )
+
+    def __str__(self):
+        return "<ReplicationLayer(%s)>" % self.server_name
+
+    def event_from_pdu_json(self, pdu_json, outlier=False):
+        event = FrozenEvent(
+            pdu_json
+        )
+
+        event.internal_metadata.outlier = outlier
+
+        return event
diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py
index 85c82a4623..76a9dcd777 100644
--- a/synapse/federation/persistence.py
+++ b/synapse/federation/persistence.py
@@ -23,7 +23,8 @@ from twisted.internet import defer
 
 from synapse.util.logutils import log_function
 
-import json
+from syutil.jsonutil import encode_canonical_json
+
 import logging
 
 
@@ -70,7 +71,7 @@ class TransactionActions(object):
             transaction.transaction_id,
             transaction.origin,
             code,
-            json.dumps(response)
+            encode_canonical_json(response)
         )
 
     @defer.inlineCallbacks
@@ -100,5 +101,5 @@ class TransactionActions(object):
             transaction.transaction_id,
             transaction.destination,
             response_code,
-            json.dumps(response_dict)
+            encode_canonical_json(response_dict)
         )
diff --git a/synapse/federation/replication.py b/synapse/federation/replication.py
index 6620532a60..54a0c7ad8e 100644
--- a/synapse/federation/replication.py
+++ b/synapse/federation/replication.py
@@ -17,23 +17,20 @@
 a given transport.
 """
 
-from twisted.internet import defer
+from .federation_client import FederationClient
+from .federation_server import FederationServer
 
-from .units import Transaction, Edu
+from .transaction_queue import TransactionQueue
 
 from .persistence import TransactionActions
 
-from synapse.util.logutils import log_function
-from synapse.util.logcontext import PreserveLoggingContext
-from synapse.events import FrozenEvent
-
 import logging
 
 
 logger = logging.getLogger(__name__)
 
 
-class ReplicationLayer(object):
+class ReplicationLayer(FederationClient, FederationServer):
     """This layer is responsible for replicating with remote home servers over
     the given transport. I.e., does the sending and receiving of PDUs to
     remote home servers.
@@ -54,898 +51,28 @@ class ReplicationLayer(object):
     def __init__(self, hs, transport_layer):
         self.server_name = hs.hostname
 
+        self.keyring = hs.get_keyring()
+
         self.transport_layer = transport_layer
         self.transport_layer.register_received_handler(self)
         self.transport_layer.register_request_handler(self)
 
-        self.store = hs.get_datastore()
-        # self.pdu_actions = PduActions(self.store)
-        self.transaction_actions = TransactionActions(self.store)
+        self.federation_client = self
 
-        self._transaction_queue = _TransactionQueue(
-            hs, self.transaction_actions, transport_layer
-        )
+        self.store = hs.get_datastore()
 
         self.handler = None
         self.edu_handlers = {}
         self.query_handlers = {}
 
-        self._order = 0
-
         self._clock = hs.get_clock()
 
-        self.event_builder_factory = hs.get_event_builder_factory()
-
-    def set_handler(self, handler):
-        """Sets the handler that the replication layer will use to communicate
-        receipt of new PDUs from other home servers. The required methods are
-        documented on :py:class:`.ReplicationHandler`.
-        """
-        self.handler = handler
-
-    def register_edu_handler(self, edu_type, handler):
-        if edu_type in self.edu_handlers:
-            raise KeyError("Already have an EDU handler for %s" % (edu_type,))
-
-        self.edu_handlers[edu_type] = handler
-
-    def register_query_handler(self, query_type, handler):
-        """Sets the handler callable that will be used to handle an incoming
-        federation Query of the given type.
-
-        Args:
-            query_type (str): Category name of the query, which should match
-                the string used by make_query.
-            handler (callable): Invoked to handle incoming queries of this type
-
-        handler is invoked as:
-            result = handler(args)
-
-        where 'args' is a dict mapping strings to strings of the query
-          arguments. It should return a Deferred that will eventually yield an
-          object to encode as JSON.
-        """
-        if query_type in self.query_handlers:
-            raise KeyError(
-                "Already have a Query handler for %s" % (query_type,)
-            )
-
-        self.query_handlers[query_type] = handler
-
-    @log_function
-    def send_pdu(self, pdu, destinations):
-        """Informs the replication layer about a new PDU generated within the
-        home server that should be transmitted to others.
-
-        TODO: Figure out when we should actually resolve the deferred.
-
-        Args:
-            pdu (Pdu): The new Pdu.
-
-        Returns:
-            Deferred: Completes when we have successfully processed the PDU
-            and replicated it to any interested remote home servers.
-        """
-        order = self._order
-        self._order += 1
-
-        logger.debug("[%s] transaction_layer.enqueue_pdu... ", pdu.event_id)
-
-        # TODO, add errback, etc.
-        self._transaction_queue.enqueue_pdu(pdu, destinations, order)
-
-        logger.debug(
-            "[%s] transaction_layer.enqueue_pdu... done",
-            pdu.event_id
-        )
-
-    @log_function
-    def send_edu(self, destination, edu_type, content):
-        edu = Edu(
-            origin=self.server_name,
-            destination=destination,
-            edu_type=edu_type,
-            content=content,
-        )
-
-        # TODO, add errback, etc.
-        self._transaction_queue.enqueue_edu(edu)
-        return defer.succeed(None)
-
-    @log_function
-    def send_failure(self, failure, destination):
-        self._transaction_queue.enqueue_failure(failure, destination)
-        return defer.succeed(None)
-
-    @log_function
-    def make_query(self, destination, query_type, args,
-                   retry_on_dns_fail=True):
-        """Sends a federation Query to a remote homeserver of the given type
-        and arguments.
-
-        Args:
-            destination (str): Domain name of the remote homeserver
-            query_type (str): Category of the query type; should match the
-                handler name used in register_query_handler().
-            args (dict): Mapping of strings to strings containing the details
-                of the query request.
-
-        Returns:
-            a Deferred which will eventually yield a JSON object from the
-            response
-        """
-        return self.transport_layer.make_query(
-            destination, query_type, args, retry_on_dns_fail=retry_on_dns_fail
-        )
-
-    @defer.inlineCallbacks
-    @log_function
-    def backfill(self, dest, context, limit, extremities):
-        """Requests some more historic PDUs for the given context from the
-        given destination server.
-
-        Args:
-            dest (str): The remote home server to ask.
-            context (str): The context to backfill.
-            limit (int): The maximum number of PDUs to return.
-            extremities (list): List of PDU id and origins of the first pdus
-                we have seen from the context
-
-        Returns:
-            Deferred: Results in the received PDUs.
-        """
-        logger.debug("backfill extrem=%s", extremities)
-
-        # If there are no extremeties then we've (probably) reached the start.
-        if not extremities:
-            return
-
-        transaction_data = yield self.transport_layer.backfill(
-            dest, context, extremities, limit)
-
-        logger.debug("backfill transaction_data=%s", repr(transaction_data))
-
-        transaction = Transaction(**transaction_data)
-
-        pdus = [
-            self.event_from_pdu_json(p, outlier=False)
-            for p in transaction.pdus
-        ]
-        for pdu in pdus:
-            yield self._handle_new_pdu(dest, pdu, backfilled=True)
-
-        defer.returnValue(pdus)
-
-    @defer.inlineCallbacks
-    @log_function
-    def get_pdu(self, destination, event_id, outlier=False):
-        """Requests the PDU with given origin and ID from the remote home
-        server.
-
-        This will persist the PDU locally upon receipt.
-
-        Args:
-            destination (str): Which home server to query
-            pdu_origin (str): The home server that originally sent the pdu.
-            event_id (str)
-            outlier (bool): Indicates whether the PDU is an `outlier`, i.e. if
-                it's from an arbitary point in the context as opposed to part
-                of the current block of PDUs. Defaults to `False`
-
-        Returns:
-            Deferred: Results in the requested PDU.
-        """
-
-        transaction_data = yield self.transport_layer.get_event(
-            destination, event_id
-        )
-
-        transaction = Transaction(**transaction_data)
-
-        pdu_list = [
-            self.event_from_pdu_json(p, outlier=outlier)
-            for p in transaction.pdus
-        ]
-
-        pdu = None
-        if pdu_list:
-            pdu = pdu_list[0]
-            yield self._handle_new_pdu(destination, pdu)
-
-        defer.returnValue(pdu)
-
-    @defer.inlineCallbacks
-    @log_function
-    def get_state_for_room(self, destination, room_id, event_id):
-        """Requests all of the `current` state PDUs for a given room from
-        a remote home server.
-
-        Args:
-            destination (str): The remote homeserver to query for the state.
-            room_id (str): The id of the room we're interested in.
-            event_id (str): The id of the event we want the state at.
-
-        Returns:
-            Deferred: Results in a list of PDUs.
-        """
-
-        result = yield self.transport_layer.get_room_state(
-            destination, room_id, event_id=event_id,
-        )
-
-        pdus = [
-            self.event_from_pdu_json(p, outlier=True) for p in result["pdus"]
-        ]
-
-        auth_chain = [
-            self.event_from_pdu_json(p, outlier=True)
-            for p in result.get("auth_chain", [])
-        ]
-
-        defer.returnValue((pdus, auth_chain))
-
-    @defer.inlineCallbacks
-    @log_function
-    def get_event_auth(self, destination, room_id, event_id):
-        res = yield self.transport_layer.get_event_auth(
-            destination, room_id, event_id,
-        )
-
-        auth_chain = [
-            self.event_from_pdu_json(p, outlier=True)
-            for p in res["auth_chain"]
-        ]
-
-        auth_chain.sort(key=lambda e: e.depth)
-
-        defer.returnValue(auth_chain)
-
-    @defer.inlineCallbacks
-    @log_function
-    def on_backfill_request(self, origin, room_id, versions, limit):
-        pdus = yield self.handler.on_backfill_request(
-            origin, room_id, versions, limit
-        )
-
-        defer.returnValue((200, self._transaction_from_pdus(pdus).get_dict()))
-
-    @defer.inlineCallbacks
-    @log_function
-    def on_incoming_transaction(self, transaction_data):
-        transaction = Transaction(**transaction_data)
-
-        for p in transaction.pdus:
-            if "unsigned" in p:
-                unsigned = p["unsigned"]
-                if "age" in unsigned:
-                    p["age"] = unsigned["age"]
-            if "age" in p:
-                p["age_ts"] = int(self._clock.time_msec()) - int(p["age"])
-                del p["age"]
-
-        pdu_list = [
-            self.event_from_pdu_json(p) for p in transaction.pdus
-        ]
-
-        logger.debug("[%s] Got transaction", transaction.transaction_id)
-
-        response = yield self.transaction_actions.have_responded(transaction)
-
-        if response:
-            logger.debug("[%s] We've already responed to this request",
-                         transaction.transaction_id)
-            defer.returnValue(response)
-            return
-
-        logger.debug("[%s] Transaction is new", transaction.transaction_id)
-
-        with PreserveLoggingContext():
-            dl = []
-            for pdu in pdu_list:
-                dl.append(self._handle_new_pdu(transaction.origin, pdu))
-
-            if hasattr(transaction, "edus"):
-                for edu in [Edu(**x) for x in transaction.edus]:
-                    self.received_edu(
-                        transaction.origin,
-                        edu.edu_type,
-                        edu.content
-                    )
-
-            results = yield defer.DeferredList(dl)
-
-        ret = []
-        for r in results:
-            if r[0]:
-                ret.append({})
-            else:
-                logger.exception(r[1])
-                ret.append({"error": str(r[1])})
-
-        logger.debug("Returning: %s", str(ret))
-
-        yield self.transaction_actions.set_response(
-            transaction,
-            200, response
-        )
-        defer.returnValue((200, response))
-
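For reference, the per-PDU result mapping assembled above keys an empty dict (success) or an error dict against each received event ID. An illustrative 200 response body (the event IDs are invented):

    response = {
        "pdus": {
            "$ok:remote.example": {},
            "$bad:remote.example": {"error": "signature check failed"},
        },
    }
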
-    def received_edu(self, origin, edu_type, content):
-        if edu_type in self.edu_handlers:
-            self.edu_handlers[edu_type](origin, content)
-        else:
-            logger.warn("Received EDU of type %s with no handler", edu_type)
-
-    @defer.inlineCallbacks
-    @log_function
-    def on_context_state_request(self, origin, room_id, event_id):
-        if event_id:
-            pdus = yield self.handler.get_state_for_pdu(
-                origin, room_id, event_id,
-            )
-            auth_chain = yield self.store.get_auth_chain(
-                [pdu.event_id for pdu in pdus]
-            )
-        else:
-            raise NotImplementedError("Specify an event")
-
-        defer.returnValue((200, {
-            "pdus": [pdu.get_pdu_json() for pdu in pdus],
-            "auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
-        }))
-
-    @defer.inlineCallbacks
-    @log_function
-    def on_pdu_request(self, origin, event_id):
-        pdu = yield self._get_persisted_pdu(origin, event_id)
-
-        if pdu:
-            defer.returnValue(
-                (200, self._transaction_from_pdus([pdu]).get_dict())
-            )
-        else:
-            defer.returnValue((404, ""))
-
-    @defer.inlineCallbacks
-    @log_function
-    def on_pull_request(self, origin, versions):
-        raise NotImplementedError("Pull transactions not implemented")
-
-    @defer.inlineCallbacks
-    def on_query_request(self, query_type, args):
-        if query_type in self.query_handlers:
-            response = yield self.query_handlers[query_type](args)
-            defer.returnValue((200, response))
-        else:
-            defer.returnValue(
-                (404, "No handler for Query type '%s'" % (query_type,))
-            )
-
-    @defer.inlineCallbacks
-    def on_make_join_request(self, room_id, user_id):
-        pdu = yield self.handler.on_make_join_request(room_id, user_id)
-        time_now = self._clock.time_msec()
-        defer.returnValue({"event": pdu.get_pdu_json(time_now)})
-
-    @defer.inlineCallbacks
-    def on_invite_request(self, origin, content):
-        pdu = self.event_from_pdu_json(content)
-        ret_pdu = yield self.handler.on_invite_request(origin, pdu)
-        time_now = self._clock.time_msec()
-        defer.returnValue((200, {"event": ret_pdu.get_pdu_json(time_now)}))
-
-    @defer.inlineCallbacks
-    def on_send_join_request(self, origin, content):
-        logger.debug("on_send_join_request: content: %s", content)
-        pdu = self.event_from_pdu_json(content)
-        logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures)
-        res_pdus = yield self.handler.on_send_join_request(origin, pdu)
-        time_now = self._clock.time_msec()
-        defer.returnValue((200, {
-            "state": [p.get_pdu_json(time_now) for p in res_pdus["state"]],
-            "auth_chain": [
-                p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]
-            ],
-        }))
-
-    @defer.inlineCallbacks
-    def on_event_auth(self, origin, room_id, event_id):
-        time_now = self._clock.time_msec()
-        auth_pdus = yield self.handler.on_event_auth(event_id)
-        defer.returnValue((200, {
-            "auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
-        }))
-
-    @defer.inlineCallbacks
-    def make_join(self, destination, room_id, user_id):
-        ret = yield self.transport_layer.make_join(
-            destination, room_id, user_id
-        )
-
-        pdu_dict = ret["event"]
-
-        logger.debug("Got response to make_join: %s", pdu_dict)
-
-        defer.returnValue(self.event_from_pdu_json(pdu_dict))
-
-    @defer.inlineCallbacks
-    def send_join(self, destination, pdu):
-        time_now = self._clock.time_msec()
-        _, content = yield self.transport_layer.send_join(
-            destination=destination,
-            room_id=pdu.room_id,
-            event_id=pdu.event_id,
-            content=pdu.get_pdu_json(time_now),
-        )
-
-        logger.debug("Got content: %s", content)
-
-        state = [
-            self.event_from_pdu_json(p, outlier=True)
-            for p in content.get("state", [])
-        ]
-
-        auth_chain = [
-            self.event_from_pdu_json(p, outlier=True)
-            for p in content.get("auth_chain", [])
-        ]
-
-        auth_chain.sort(key=lambda e: e.depth)
-
-        defer.returnValue({
-            "state": state,
-            "auth_chain": auth_chain,
-        })
-
-    @defer.inlineCallbacks
-    def send_invite(self, destination, room_id, event_id, pdu):
-        time_now = self._clock.time_msec()
-        code, content = yield self.transport_layer.send_invite(
-            destination=destination,
-            room_id=room_id,
-            event_id=event_id,
-            content=pdu.get_pdu_json(time_now),
-        )
-
-        pdu_dict = content["event"]
-
-        logger.debug("Got response to send_invite: %s", pdu_dict)
-
-        defer.returnValue(self.event_from_pdu_json(pdu_dict))
-
-    @log_function
-    def _get_persisted_pdu(self, origin, event_id, do_auth=True):
-        """ Get a PDU from the database with given origin and id.
-
-        Returns:
-            Deferred: Results in a `Pdu`.
-        """
-        return self.handler.get_persisted_pdu(
-            origin, event_id, do_auth=do_auth
-        )
-
-    def _transaction_from_pdus(self, pdu_list):
-        """Returns a new Transaction containing the given PDUs suitable for
-        transmission.
-        """
-        time_now = self._clock.time_msec()
-        pdus = [p.get_pdu_json(time_now) for p in pdu_list]
-        return Transaction(
-            origin=self.server_name,
-            pdus=pdus,
-            origin_server_ts=int(time_now),
-            destination=None,
-        )
-
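An illustrative shape for the Transaction built by _transaction_from_pdus() above (all values are invented):

    transaction = {
        "origin": "example.com",            # self.server_name
        "origin_server_ts": 1423000000000,  # time_now
        "destination": None,                # filled in per destination later
        "pdus": [
            {
                "event_id": "$abc123:example.com",
                "room_id": "!room:example.com",
                "type": "m.room.message",
                "depth": 12,
            },
        ],
    }
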
-    @defer.inlineCallbacks
-    @log_function
-    def _handle_new_pdu(self, origin, pdu, backfilled=False):
-        # We reprocess pdus when we have seen them only as outliers
-        existing = yield self._get_persisted_pdu(
-            origin, pdu.event_id, do_auth=False
-        )
-
-        already_seen = (
-            existing and (
-                not existing.internal_metadata.is_outlier()
-                or pdu.internal_metadata.is_outlier()
-            )
-        )
-        if already_seen:
-            logger.debug("Already seen pdu %s", pdu.event_id)
-            defer.returnValue({})
-            return
-
-        state = None
-
-        auth_chain = []
-
-        # We need to make sure we have all the auth events.
-        # for e_id, _ in pdu.auth_events:
-        #     exists = yield self._get_persisted_pdu(
-        #         origin,
-        #         e_id,
-        #         do_auth=False
-        #     )
-        #
-        #     if not exists:
-        #         try:
-        #             logger.debug(
-        #                 "_handle_new_pdu fetch missing auth event %s from %s",
-        #                 e_id,
-        #                 origin,
-        #             )
-        #
-        #             yield self.get_pdu(
-        #                 origin,
-        #                 event_id=e_id,
-        #                 outlier=True,
-        #             )
-        #
-        #             logger.debug("Processed pdu %s", e_id)
-        #         except:
-        #             logger.warn(
-        #                 "Failed to get auth event %s from %s",
-        #                 e_id,
-        #                 origin
-        #             )
-
-        # Get missing pdus if necessary.
-        if not pdu.internal_metadata.is_outlier():
-            # We only backfill backwards to the min depth.
-            min_depth = yield self.handler.get_min_depth_for_context(
-                pdu.room_id
-            )
-
-            logger.debug(
-                "_handle_new_pdu min_depth for %s: %d",
-                pdu.room_id, min_depth
-            )
-
-            if min_depth and pdu.depth > min_depth:
-                for event_id, hashes in pdu.prev_events:
-                    exists = yield self._get_persisted_pdu(
-                        origin,
-                        event_id,
-                        do_auth=False
-                    )
-
-                    if not exists:
-                        logger.debug(
-                            "_handle_new_pdu requesting pdu %s",
-                            event_id
-                        )
-
-                        try:
-                            yield self.get_pdu(
-                                origin,
-                                event_id=event_id,
-                            )
-                            logger.debug("Processed pdu %s", event_id)
-                        except:
-                            # TODO(erikj): Do some more intelligent retries.
-                            logger.exception("Failed to get PDU")
-            else:
-                # We need to get the state at this event, since we have reached
-                # a backward extremity edge.
-                logger.debug(
-                    "_handle_new_pdu getting state for %s",
-                    pdu.room_id
-                )
-                state, auth_chain = yield self.get_state_for_room(
-                    origin, pdu.room_id, pdu.event_id,
-                )
-
-        if not backfilled:
-            ret = yield self.handler.on_receive_pdu(
-                origin,
-                pdu,
-                backfilled=backfilled,
-                state=state,
-                auth_chain=auth_chain,
-            )
-        else:
-            ret = None
+        self.transaction_actions = TransactionActions(self.store)
+        self._transaction_queue = TransactionQueue(hs, transport_layer)
 
-        # yield self.pdu_actions.mark_as_processed(pdu)
+        self._order = 0
 
-        defer.returnValue(ret)
+        self.hs = hs
 
     def __str__(self):
         return "<ReplicationLayer(%s)>" % self.server_name
-
-    def event_from_pdu_json(self, pdu_json, outlier=False):
-        event = FrozenEvent(
-            pdu_json
-        )
-
-        event.internal_metadata.outlier = outlier
-
-        return event
-
-
-class _TransactionQueue(object):
-    """This class makes sure we only have one transaction in flight at
-    a time for a given destination.
-
-    It batches pending PDUs into single transactions.
-    """
-
-    def __init__(self, hs, transaction_actions, transport_layer):
-        self.server_name = hs.hostname
-        self.transaction_actions = transaction_actions
-        self.transport_layer = transport_layer
-
-        self._clock = hs.get_clock()
-        self.store = hs.get_datastore()
-
-        # Is a mapping from destinations -> deferreds. Used to keep track
-        # of which destinations have transactions in flight and when they are
-        # done
-        self.pending_transactions = {}
-
-        # Is a mapping from destination -> list of
-        # tuple(pending pdus, deferred, order)
-        self.pending_pdus_by_dest = {}
-        # destination -> list of tuple(edu, deferred)
-        self.pending_edus_by_dest = {}
-
-        # destination -> list of tuple(failure, deferred)
-        self.pending_failures_by_dest = {}
-
-        # HACK to get unique tx id
-        self._next_txn_id = int(self._clock.time_msec())
-
-    @defer.inlineCallbacks
-    @log_function
-    def enqueue_pdu(self, pdu, destinations, order):
-        # We loop through all destinations to see whether we already have
-        # a transaction in progress. If we do, stick it in the pending_pdus
-        # table and we'll get back to it later.
-
-        destinations = set(destinations)
-        destinations.discard(self.server_name)
-        destinations.discard("localhost")
-
-        logger.debug("Sending to: %s", str(destinations))
-
-        if not destinations:
-            return
-
-        deferreds = []
-
-        for destination in destinations:
-            deferred = defer.Deferred()
-            self.pending_pdus_by_dest.setdefault(destination, []).append(
-                (pdu, deferred, order)
-            )
-
-            def eb(failure):
-                if not deferred.called:
-                    deferred.errback(failure)
-                else:
-                    logger.warn("Failed to send pdu", failure)
-
-            with PreserveLoggingContext():
-                self._attempt_new_transaction(destination).addErrback(eb)
-
-            deferreds.append(deferred)
-
-        yield defer.DeferredList(deferreds)
-
-    # NO inlineCallbacks
-    def enqueue_edu(self, edu):
-        destination = edu.destination
-
-        if destination == self.server_name:
-            return
-
-        deferred = defer.Deferred()
-        self.pending_edus_by_dest.setdefault(destination, []).append(
-            (edu, deferred)
-        )
-
-        def eb(failure):
-            if not deferred.called:
-                deferred.errback(failure)
-            else:
-                logger.warn("Failed to send edu", failure)
-
-        with PreserveLoggingContext():
-            self._attempt_new_transaction(destination).addErrback(eb)
-
-        return deferred
-
-    @defer.inlineCallbacks
-    def enqueue_failure(self, failure, destination):
-        deferred = defer.Deferred()
-
-        self.pending_failures_by_dest.setdefault(
-            destination, []
-        ).append(
-            (failure, deferred)
-        )
-
-        yield deferred
-
-    @defer.inlineCallbacks
-    @log_function
-    def _attempt_new_transaction(self, destination):
-
-        (retry_last_ts, retry_interval) = (0, 0)
-        retry_timings = yield self.store.get_destination_retry_timings(
-            destination
-        )
-        if retry_timings:
-            (retry_last_ts, retry_interval) = (
-                retry_timings.retry_last_ts, retry_timings.retry_interval
-            )
-            if retry_last_ts + retry_interval > int(self._clock.time_msec()):
-                logger.info(
-                    "TX [%s] not ready for retry yet - "
-                    "dropping transaction for now",
-                    destination,
-                )
-                return
-            else:
-                logger.info("TX [%s] is ready for retry", destination)
-
-        logger.info("TX [%s] _attempt_new_transaction", destination)
-
-        if destination in self.pending_transactions:
-            # XXX: pending_transactions can get stuck on by a never-ending
-            # request at which point pending_pdus_by_dest just keeps growing.
-            # we need application-layer timeouts of some flavour for these
-            # requests
-            return
-
-        # list of (pending_pdu, deferred, order)
-        pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
-        pending_edus = self.pending_edus_by_dest.pop(destination, [])
-        pending_failures = self.pending_failures_by_dest.pop(destination, [])
-
-        if pending_pdus:
-            logger.info("TX [%s] len(pending_pdus_by_dest[dest]) = %d",
-                        destination, len(pending_pdus))
-
-        if not pending_pdus and not pending_edus and not pending_failures:
-            return
-
-        logger.debug(
-            "TX [%s] Attempting new transaction"
-            " (pdus: %d, edus: %d, failures: %d)",
-            destination,
-            len(pending_pdus),
-            len(pending_edus),
-            len(pending_failures)
-        )
-
-        # Sort based on the order field
-        pending_pdus.sort(key=lambda t: t[2])
-
-        pdus = [x[0] for x in pending_pdus]
-        edus = [x[0] for x in pending_edus]
-        failures = [x[0].get_dict() for x in pending_failures]
-        deferreds = [
-            x[1]
-            for x in pending_pdus + pending_edus + pending_failures
-        ]
-
-        try:
-            self.pending_transactions[destination] = 1
-
-            logger.debug("TX [%s] Persisting transaction...", destination)
-
-            transaction = Transaction.create_new(
-                origin_server_ts=int(self._clock.time_msec()),
-                transaction_id=str(self._next_txn_id),
-                origin=self.server_name,
-                destination=destination,
-                pdus=pdus,
-                edus=edus,
-                pdu_failures=failures,
-            )
-
-            self._next_txn_id += 1
-
-            yield self.transaction_actions.prepare_to_send(transaction)
-
-            logger.debug("TX [%s] Persisted transaction", destination)
-            logger.info(
-                "TX [%s] Sending transaction [%s]",
-                destination,
-                transaction.transaction_id,
-            )
-
-            # Actually send the transaction
-
-            # FIXME (erikj): This is a bit of a hack to make the Pdu age
-            # keys work
-            def json_data_cb():
-                data = transaction.get_dict()
-                now = int(self._clock.time_msec())
-                if "pdus" in data:
-                    for p in data["pdus"]:
-                        if "age_ts" in p:
-                            unsigned = p.setdefault("unsigned", {})
-                            unsigned["age"] = now - int(p["age_ts"])
-                            del p["age_ts"]
-                return data
-
-            code, response = yield self.transport_layer.send_transaction(
-                transaction, json_data_cb
-            )
-
-            logger.info("TX [%s] got %d response", destination, code)
-
-            logger.debug("TX [%s] Sent transaction", destination)
-            logger.debug("TX [%s] Marking as delivered...", destination)
-
-            yield self.transaction_actions.delivered(
-                transaction, code, response
-            )
-
-            logger.debug("TX [%s] Marked as delivered", destination)
-            logger.debug("TX [%s] Yielding to callbacks...", destination)
-
-            for deferred in deferreds:
-                if code == 200:
-                    if retry_last_ts:
-                        # this host is alive! reset retry schedule
-                        yield self.store.set_destination_retry_timings(
-                            destination, 0, 0
-                        )
-                    deferred.callback(None)
-                else:
-                    self.set_retrying(destination, retry_interval)
-                    deferred.errback(RuntimeError("Got status %d" % code))
-
-                # Ensures we don't continue until all callbacks on that
-                # deferred have fired
-                try:
-                    yield deferred
-                except:
-                    pass
-
-            logger.debug("TX [%s] Yielded to callbacks", destination)
-
-        except Exception as e:
-            # We capture this here as there is nothing that actually listens
-            # for this function's deferred to finish.
-            logger.warn(
-                "TX [%s] Problem in _attempt_transaction: %s",
-                destination,
-                e,
-            )
-
-            self.set_retrying(destination, retry_interval)
-
-            for deferred in deferreds:
-                if not deferred.called:
-                    deferred.errback(e)
-
-        finally:
-            # We want to be *very* sure we delete this after we stop processing
-            self.pending_transactions.pop(destination, None)
-
-            # Check to see if there is anything else to send.
-            self._attempt_new_transaction(destination)
-
-    @defer.inlineCallbacks
-    def set_retrying(self, destination, retry_interval):
-        # track that this destination is having problems and we should
-        # give it a chance to recover before trying it again
-
-        if retry_interval:
-            retry_interval *= 2
-            # plateau at hourly retries for now
-            if retry_interval >= 60 * 60 * 1000:
-                retry_interval = 60 * 60 * 1000
-        else:
-            retry_interval = 2000  # try again at first after 2 seconds
-
-        yield self.store.set_destination_retry_timings(
-            destination,
-            int(self._clock.time_msec()),
-            retry_interval
-        )
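The set_retrying() schedule above (and the get_retry_limiter/NotRetryingDestination pair that replaces it in the new TransactionQueue) amounts to exponential backoff with an hourly cap. A minimal restatement of that schedule, for illustration only:

    ONE_HOUR_MS = 60 * 60 * 1000

    def next_retry_interval(current_interval_ms):
        # First failure waits 2s; each further failure doubles the wait,
        # plateauing at hourly retries, as in set_retrying() above.
        if not current_interval_ms:
            return 2000
        return min(current_interval_ms * 2, ONE_HOUR_MS)

    # Successive failures yield 2s, 4s, 8s, ... and cap at 3600000 ms
    # (one hour) from roughly the twelfth consecutive failure onwards.
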
diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py
new file mode 100644
index 0000000000..741a4e7a1a
--- /dev/null
+++ b/synapse/federation/transaction_queue.py
@@ -0,0 +1,359 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from twisted.internet import defer
+
+from .persistence import TransactionActions
+from .units import Transaction
+
+from synapse.api.errors import HttpResponseException
+from synapse.util.logutils import log_function
+from synapse.util.logcontext import PreserveLoggingContext
+from synapse.util.retryutils import (
+    get_retry_limiter, NotRetryingDestination,
+)
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class TransactionQueue(object):
+    """This class makes sure we only have one transaction in flight at
+    a time for a given destination.
+
+    It batches pending PDUs into single transactions.
+    """
+
+    def __init__(self, hs, transport_layer):
+        self.server_name = hs.hostname
+
+        self.store = hs.get_datastore()
+        self.transaction_actions = TransactionActions(self.store)
+
+        self.transport_layer = transport_layer
+
+        self._clock = hs.get_clock()
+
+        # Is a mapping from destinations -> deferreds. Used to keep track
+        # of which destinations have transactions in flight and when they are
+        # done
+        self.pending_transactions = {}
+
+        # Is a mapping from destination -> list of
+        # tuple(pending pdus, deferred, order)
+        self.pending_pdus_by_dest = {}
+        # destination -> list of tuple(edu, deferred)
+        self.pending_edus_by_dest = {}
+
+        # destination -> list of tuple(failure, deferred)
+        self.pending_failures_by_dest = {}
+
+        # HACK to get unique tx id
+        self._next_txn_id = int(self._clock.time_msec())
+
+    def can_send_to(self, destination):
+        """Can we send messages to the given server?
+
+        We can't send messages to ourselves. If we are running on localhost
+        then we can only federate with other servers running on localhost.
+        Otherwise we only federate with servers on a public domain.
+
+        Args:
+            destination(str): The server we are possibly trying to send to.
+        Returns:
+            bool: True if we can send to the server.
+        """
+
+        if destination == self.server_name:
+            return False
+        if self.server_name.startswith("localhost"):
+            return destination.startswith("localhost")
+        else:
+            return not destination.startswith("localhost")
+
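A framework-free restatement of the can_send_to() policy above; the standalone helper and the example hostnames are illustrative, not part of the patch:

    def can_send_to(server_name, destination):
        # Never send to ourselves; localhost servers only federate with
        # other localhost servers, and public servers only with public ones.
        if destination == server_name:
            return False
        if server_name.startswith("localhost"):
            return destination.startswith("localhost")
        return not destination.startswith("localhost")

    assert not can_send_to("example.com", "example.com")     # ourselves
    assert can_send_to("localhost:8448", "localhost:8449")   # local dev pair
    assert not can_send_to("localhost:8448", "matrix.org")   # no mixing
    assert can_send_to("example.com", "matrix.org")          # public <-> public
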
+    @defer.inlineCallbacks
+    @log_function
+    def enqueue_pdu(self, pdu, destinations, order):
+        # We loop through all destinations to see whether we already have
+        # a transaction in progress. If we do, stick it in the pending_pdus
+        # table and we'll get back to it later.
+
+        destinations = set(
+            dest for dest in destinations if self.can_send_to(dest)
+        )
+
+        logger.debug("Sending to: %s", str(destinations))
+
+        if not destinations:
+            return
+
+        deferreds = []
+
+        for destination in destinations:
+            deferred = defer.Deferred()
+            self.pending_pdus_by_dest.setdefault(destination, []).append(
+                (pdu, deferred, order)
+            )
+
+            def chain(failure):
+                if not deferred.called:
+                    deferred.errback(failure)
+
+            def log_failure(failure):
+                logger.warn("Failed to send pdu", failure.value)
+
+            deferred.addErrback(log_failure)
+
+            with PreserveLoggingContext():
+                self._attempt_new_transaction(destination).addErrback(chain)
+
+            deferreds.append(deferred)
+
+        yield defer.DeferredList(deferreds, consumeErrors=True)
+
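The queueing discipline used by enqueue_pdu() and _attempt_new_transaction() below is "at most one transaction in flight per destination; batch everything that arrives meanwhile". A minimal synchronous sketch of that discipline, with no Twisted machinery (the class below is illustrative, not part of the patch):

    import collections

    class SingleFlightQueue(object):
        def __init__(self, send):
            self.send = send  # callable(destination, items)
            self.pending = collections.defaultdict(list)
            self.in_flight = set()

        def enqueue(self, destination, item, order):
            self.pending[destination].append((order, item))
            self._attempt(destination)

        def _attempt(self, destination):
            if destination in self.in_flight:
                return  # a send is already running for this destination
            batch = self.pending.pop(destination, [])
            if not batch:
                return
            batch.sort(key=lambda t: t[0])  # respect the order field
            self.in_flight.add(destination)
            try:
                self.send(destination, [item for _, item in batch])
            finally:
                self.in_flight.discard(destination)
                self._attempt(destination)  # drain anything queued meanwhile

    sent = []
    q = SingleFlightQueue(lambda dest, items: sent.append((dest, items)))
    q.enqueue("remote.example", "pdu1", order=1)
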
+    # NO inlineCallbacks
+    def enqueue_edu(self, edu):
+        destination = edu.destination
+
+        if not self.can_send_to(destination):
+            return
+
+        deferred = defer.Deferred()
+        self.pending_edus_by_dest.setdefault(destination, []).append(
+            (edu, deferred)
+        )
+
+        def chain(failure):
+            if not deferred.called:
+                deferred.errback(failure)
+
+        def log_failure(failure):
+            logger.warn("Failed to send pdu", failure.value)
+
+        deferred.addErrback(log_failure)
+
+        with PreserveLoggingContext():
+            self._attempt_new_transaction(destination).addErrback(chain)
+
+        return deferred
+
+    @defer.inlineCallbacks
+    def enqueue_failure(self, failure, destination):
+        if destination == self.server_name or destination == "localhost":
+            return
+
+        deferred = defer.Deferred()
+
+        if not self.can_send_to(destination):
+            return
+
+        self.pending_failures_by_dest.setdefault(
+            destination, []
+        ).append(
+            (failure, deferred)
+        )
+
+        def chain(f):
+            if not deferred.called:
+                deferred.errback(f)
+
+        def log_failure(f):
+            logger.warn("Failed to send pdu", f.value)
+
+        deferred.addErrback(log_failure)
+
+        with PreserveLoggingContext():
+            self._attempt_new_transaction(destination).addErrback(chain)
+
+        yield deferred
+
+    @defer.inlineCallbacks
+    @log_function
+    def _attempt_new_transaction(self, destination):
+        if destination in self.pending_transactions:
+            # XXX: pending_transactions can get stuck on by a never-ending
+            # request at which point pending_pdus_by_dest just keeps growing.
+            # we need application-layer timeouts of some flavour for these
+            # requests
+            logger.info(
+                "TX [%s] Transaction already in progress",
+                destination
+            )
+            return
+
+        logger.info("TX [%s] _attempt_new_transaction", destination)
+
+        # list of (pending_pdu, deferred, order)
+        pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
+        pending_edus = self.pending_edus_by_dest.pop(destination, [])
+        pending_failures = self.pending_failures_by_dest.pop(destination, [])
+
+        if pending_pdus:
+            logger.info("TX [%s] len(pending_pdus_by_dest[dest]) = %d",
+                        destination, len(pending_pdus))
+
+        if not pending_pdus and not pending_edus and not pending_failures:
+            logger.info("TX [%s] Nothing to send", destination)
+            return
+
+        # Sort based on the order field
+        pending_pdus.sort(key=lambda t: t[2])
+
+        pdus = [x[0] for x in pending_pdus]
+        edus = [x[0] for x in pending_edus]
+        failures = [x[0].get_dict() for x in pending_failures]
+        deferreds = [
+            x[1]
+            for x in pending_pdus + pending_edus + pending_failures
+        ]
+
+        try:
+            self.pending_transactions[destination] = 1
+
+            limiter = yield get_retry_limiter(
+                destination,
+                self._clock,
+                self.store,
+            )
+
+            logger.debug(
+                "TX [%s] Attempting new transaction"
+                " (pdus: %d, edus: %d, failures: %d)",
+                destination,
+                len(pending_pdus),
+                len(pending_edus),
+                len(pending_failures)
+            )
+
+            logger.debug("TX [%s] Persisting transaction...", destination)
+
+            transaction = Transaction.create_new(
+                origin_server_ts=int(self._clock.time_msec()),
+                transaction_id=str(self._next_txn_id),
+                origin=self.server_name,
+                destination=destination,
+                pdus=pdus,
+                edus=edus,
+                pdu_failures=failures,
+            )
+
+            self._next_txn_id += 1
+
+            yield self.transaction_actions.prepare_to_send(transaction)
+
+            logger.debug("TX [%s] Persisted transaction", destination)
+            logger.info(
+                "TX [%s] Sending transaction [%s]",
+                destination,
+                transaction.transaction_id,
+            )
+
+            with limiter:
+                # Actually send the transaction
+
+                # FIXME (erikj): This is a bit of a hack to make the Pdu age
+                # keys work
+                def json_data_cb():
+                    data = transaction.get_dict()
+                    now = int(self._clock.time_msec())
+                    if "pdus" in data:
+                        for p in data["pdus"]:
+                            if "age_ts" in p:
+                                unsigned = p.setdefault("unsigned", {})
+                                unsigned["age"] = now - int(p["age_ts"])
+                                del p["age_ts"]
+                    return data
+
+                try:
+                    response = yield self.transport_layer.send_transaction(
+                        transaction, json_data_cb
+                    )
+                    code = 200
+
+                    if response:
+                        for e_id, r in response.get("pdus", {}).items():
+                            if "error" in r:
+                                logger.warn(
+                                    "Transaction returned error for %s: %s",
+                                    e_id, r,
+                                )
+                except HttpResponseException as e:
+                    code = e.code
+                    response = e.response
+
+                logger.info("TX [%s] got %d response", destination, code)
+
+                logger.debug("TX [%s] Sent transaction", destination)
+                logger.debug("TX [%s] Marking as delivered...", destination)
+
+            yield self.transaction_actions.delivered(
+                transaction, code, response
+            )
+
+            logger.debug("TX [%s] Marked as delivered", destination)
+
+            logger.debug("TX [%s] Yielding to callbacks...", destination)
+
+            for deferred in deferreds:
+                if code == 200:
+                    deferred.callback(None)
+                else:
+                    deferred.errback(RuntimeError("Got status %d" % code))
+
+                # Ensures we don't continue until all callbacks on that
+                # deferred have fired
+                try:
+                    yield deferred
+                except:
+                    pass
+
+            logger.debug("TX [%s] Yielded to callbacks", destination)
+        except NotRetryingDestination:
+            logger.info(
+                "TX [%s] not ready for retry yet - "
+                "dropping transaction for now",
+                destination,
+            )
+        except RuntimeError as e:
+            # We capture this here as there is nothing that actually listens
+            # for this function's deferred to finish.
+            logger.warn(
+                "TX [%s] Problem in _attempt_transaction: %s",
+                destination,
+                e,
+            )
+        except Exception as e:
+            # We capture this here as there is nothing that actually listens
+            # for this function's deferred to finish.
+            logger.warn(
+                "TX [%s] Problem in _attempt_transaction: %s",
+                destination,
+                e,
+            )
+
+            for deferred in deferreds:
+                if not deferred.called:
+                    deferred.errback(e)
+
+        finally:
+            # We want to be *very* sure we delete this after we stop processing
+            self.pending_transactions.pop(destination, None)
+
+            # Check to see if there is anything else to send.
+            self._attempt_new_transaction(destination)
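The json_data_cb above undoes, at transmission time, the conversion applied when a transaction is received: inbound, a relative `age` (ms since the origin sent the PDU) is turned into an absolute local `age_ts`; outbound, `age_ts` becomes a freshly computed `age`. A minimal sketch of the round trip (the helper names are illustrative only):

    def on_receive_pdu_json(pdu, now_ms):
        # Inbound: "age" goes stale while the PDU sits in queues, so it
        # is converted to an absolute local timestamp on receipt.
        unsigned = pdu.get("unsigned", {})
        if "age" in unsigned:
            pdu["age_ts"] = now_ms - int(unsigned.pop("age"))
        return pdu

    def on_send_pdu_json(pdu, now_ms):
        # Outbound: the absolute timestamp is turned back into an age
        # relative to the moment of transmission, as json_data_cb does.
        if "age_ts" in pdu:
            unsigned = pdu.setdefault("unsigned", {})
            unsigned["age"] = now_ms - pdu.pop("age_ts")
        return pdu

    pdu = on_receive_pdu_json({"unsigned": {"age": 5000}}, now_ms=1000000)
    assert pdu["age_ts"] == 995000
    assert on_send_pdu_json(pdu, now_ms=1002000)["unsigned"]["age"] == 7000
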
diff --git a/synapse/federation/transport/__init__.py b/synapse/federation/transport/__init__.py
index 6800ac46c5..2a671b9aec 100644
--- a/synapse/federation/transport/__init__.py
+++ b/synapse/federation/transport/__init__.py
@@ -24,6 +24,8 @@ communicate over a different (albeit still reliable) protocol.
 from .server import TransportLayerServer
 from .client import TransportLayerClient
 
+from synapse.util.ratelimitutils import FederationRateLimiter
+
 
 class TransportLayer(TransportLayerServer, TransportLayerClient):
     """This is a basic implementation of the transport layer that translates
@@ -55,8 +57,18 @@ class TransportLayer(TransportLayerServer, TransportLayerClient):
                 send requests
         """
         self.keyring = homeserver.get_keyring()
+        self.clock = homeserver.get_clock()
         self.server_name = server_name
         self.server = server
         self.client = client
         self.request_handler = None
         self.received_handler = None
+
+        self.ratelimiter = FederationRateLimiter(
+            self.clock,
+            window_size=homeserver.config.federation_rc_window_size,
+            sleep_limit=homeserver.config.federation_rc_sleep_limit,
+            sleep_msec=homeserver.config.federation_rc_sleep_delay,
+            reject_limit=homeserver.config.federation_rc_reject_limit,
+            concurrent_requests=homeserver.config.federation_rc_concurrent,
+        )
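For reference, the federation_rc_* attributes read above come from the homeserver config; plausible values might look like the following (the numbers are illustrative, not the project's defaults):

    federation_rc_window_size = 10000  # ms window over which requests are counted
    federation_rc_sleep_limit = 10     # requests per window before delaying responses
    federation_rc_sleep_delay = 500    # ms delay applied once over the sleep limit
    federation_rc_reject_limit = 50    # requests per window before rejecting outright
    federation_rc_concurrent = 3       # max concurrently-handled requests per origin
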
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index e634a3a213..80d03012b7 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -19,7 +19,6 @@ from synapse.api.urls import FEDERATION_PREFIX as PREFIX
 from synapse.util.logutils import log_function
 
 import logging
-import json
 
 
 logger = logging.getLogger(__name__)
@@ -129,7 +128,7 @@ class TransportLayerClient(object):
         # generated by the json_data_callback.
         json_data = transaction.get_dict()
 
-        code, response = yield self.client.put_json(
+        response = yield self.client.put_json(
             transaction.destination,
             path=PREFIX + "/send/%s/" % transaction.transaction_id,
             data=json_data,
@@ -137,79 +136,105 @@ class TransportLayerClient(object):
         )
 
         logger.debug(
-            "send_data dest=%s, txid=%s, got response: %d",
-            transaction.destination, transaction.transaction_id, code
+            "send_data dest=%s, txid=%s, got response: 200",
+            transaction.destination, transaction.transaction_id,
         )
 
-        defer.returnValue((code, response))
+        defer.returnValue(response)
 
     @defer.inlineCallbacks
     @log_function
     def make_query(self, destination, query_type, args, retry_on_dns_fail):
         path = PREFIX + "/query/%s" % query_type
 
-        response = yield self.client.get_json(
+        content = yield self.client.get_json(
             destination=destination,
             path=path,
             args=args,
             retry_on_dns_fail=retry_on_dns_fail,
         )
 
-        defer.returnValue(response)
+        defer.returnValue(content)
 
     @defer.inlineCallbacks
     @log_function
     def make_join(self, destination, room_id, user_id, retry_on_dns_fail=True):
         path = PREFIX + "/make_join/%s/%s" % (room_id, user_id)
 
-        response = yield self.client.get_json(
+        content = yield self.client.get_json(
             destination=destination,
             path=path,
             retry_on_dns_fail=retry_on_dns_fail,
         )
 
-        defer.returnValue(response)
+        defer.returnValue(content)
 
     @defer.inlineCallbacks
     @log_function
     def send_join(self, destination, room_id, event_id, content):
         path = PREFIX + "/send_join/%s/%s" % (room_id, event_id)
 
-        code, content = yield self.client.put_json(
+        response = yield self.client.put_json(
             destination=destination,
             path=path,
             data=content,
         )
 
-        if not 200 <= code < 300:
-            raise RuntimeError("Got %d from send_join", code)
-
-        defer.returnValue(json.loads(content))
+        defer.returnValue(response)
 
     @defer.inlineCallbacks
     @log_function
     def send_invite(self, destination, room_id, event_id, content):
         path = PREFIX + "/invite/%s/%s" % (room_id, event_id)
 
-        code, content = yield self.client.put_json(
+        response = yield self.client.put_json(
             destination=destination,
             path=path,
             data=content,
         )
 
-        if not 200 <= code < 300:
-            raise RuntimeError("Got %d from send_invite", code)
-
-        defer.returnValue(json.loads(content))
+        defer.returnValue(response)
 
     @defer.inlineCallbacks
     @log_function
     def get_event_auth(self, destination, room_id, event_id):
         path = PREFIX + "/event_auth/%s/%s" % (room_id, event_id)
 
-        response = yield self.client.get_json(
+        content = yield self.client.get_json(
             destination=destination,
             path=path,
         )
 
-        defer.returnValue(response)
+        defer.returnValue(content)
+
+    @defer.inlineCallbacks
+    @log_function
+    def send_query_auth(self, destination, room_id, event_id, content):
+        path = PREFIX + "/query_auth/%s/%s" % (room_id, event_id)
+
+        content = yield self.client.post_json(
+            destination=destination,
+            path=path,
+            data=content,
+        )
+
+        defer.returnValue(content)
+
+    @defer.inlineCallbacks
+    @log_function
+    def get_missing_events(self, destination, room_id, earliest_events,
+                           latest_events, limit, min_depth):
+        path = PREFIX + "/get_missing_events/%s" % (room_id,)
+
+        content = yield self.client.post_json(
+            destination=destination,
+            path=path,
+            data={
+                "limit": int(limit),
+                "min_depth": int(min_depth),
+                "earliest_events": earliest_events,
+                "latest_events": latest_events,
+            }
+        )
+
+        defer.returnValue(content)
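An illustrative request body for the get_missing_events call above (the event IDs are invented):

    body = {
        "limit": 10,
        "min_depth": 0,
        "earliest_events": ["$oldest_known:remote.example"],
        "latest_events": ["$just_received:remote.example"],
    }
    # POSTed to <PREFIX>/get_missing_events/<room_id>, this asks the remote
    # server for up to `limit` events lying between the events we already
    # have and those we just received, ignoring anything below min_depth.
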
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index a380a6910b..ece6dbcf62 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -20,7 +20,7 @@ from synapse.api.errors import Codes, SynapseError
 from synapse.util.logutils import log_function
 
 import logging
-import json
+import simplejson as json
 import re
 
 
@@ -42,7 +42,7 @@ class TransportLayerServer(object):
         content = None
         origin = None
 
-        if request.method == "PUT":
+        if request.method in ["PUT", "POST"]:
             # TODO: Handle other method types? other content types?
             try:
                 content_bytes = request.content.read()
@@ -98,15 +98,23 @@ class TransportLayerServer(object):
         def new_handler(request, *args, **kwargs):
             try:
                 (origin, content) = yield self._authenticate_request(request)
-                response = yield handler(
-                    origin, content, request.args, *args, **kwargs
-                )
+                with self.ratelimiter.ratelimit(origin) as d:
+                    yield d
+                    response = yield handler(
+                        origin, content, request.args, *args, **kwargs
+                    )
             except:
                 logger.exception("_authenticate_request failed")
                 raise
             defer.returnValue(response)
         return new_handler
 
+    def rate_limit_origin(self, handler):
+        @defer.inlineCallbacks
+        def new_handler(origin, *args, **kwargs):
+            response = yield handler(origin, *args, **kwargs)
+            defer.returnValue(response)
+        return new_handler
+
     @log_function
     def register_received_handler(self, handler):
         """ Register a handler that will be fired when we receive data.
@@ -235,6 +243,28 @@ class TransportLayerServer(object):
             )
         )
 
+        self.server.register_path(
+            "POST",
+            re.compile("^" + PREFIX + "/query_auth/([^/]*)/([^/]*)$"),
+            self._with_authentication(
+                lambda origin, content, query, context, event_id:
+                self._on_query_auth_request(
+                    origin, content, event_id,
+                )
+            )
+        )
+
+        self.server.register_path(
+            "POST",
+            re.compile("^" + PREFIX + "/get_missing_events/([^/]*)/?$"),
+            self._with_authentication(
+                lambda origin, content, query, room_id:
+                self._get_missing_events(
+                    origin, content, room_id,
+                )
+            )
+        )
+
     @defer.inlineCallbacks
     @log_function
     def _on_send_request(self, origin, content, query, transaction_id):
@@ -325,3 +355,31 @@ class TransportLayerServer(object):
         )
 
         defer.returnValue((200, content))
+
+    @defer.inlineCallbacks
+    @log_function
+    def _on_query_auth_request(self, origin, content, event_id):
+        new_content = yield self.request_handler.on_query_auth_request(
+            origin, content, event_id
+        )
+
+        defer.returnValue((200, new_content))
+
+    @defer.inlineCallbacks
+    @log_function
+    def _get_missing_events(self, origin, content, room_id):
+        limit = int(content.get("limit", 10))
+        min_depth = int(content.get("min_depth", 0))
+        earliest_events = content.get("earliest_events", [])
+        latest_events = content.get("latest_events", [])
+
+        content = yield self.request_handler.on_get_missing_events(
+            origin,
+            room_id=room_id,
+            earliest_events=earliest_events,
+            latest_events=latest_events,
+            min_depth=min_depth,
+            limit=limit,
+        )
+
+        defer.returnValue((200, content))
diff --git a/synapse/handlers/__init__.py b/synapse/handlers/__init__.py
index fe071a4bc2..8d345bf936 100644
--- a/synapse/handlers/__init__.py
+++ b/synapse/handlers/__init__.py
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from synapse.appservice.api import ApplicationServiceApi
 from .register import RegistrationHandler
 from .room import (
     RoomCreationHandler, RoomMemberHandler, RoomListHandler
@@ -26,6 +27,8 @@ from .presence import PresenceHandler
 from .directory import DirectoryHandler
 from .typing import TypingNotificationHandler
 from .admin import AdminHandler
+from .appservice import ApplicationServicesHandler
+from .sync import SyncHandler
 
 
 class Handlers(object):
@@ -51,3 +54,7 @@ class Handlers(object):
         self.directory_handler = DirectoryHandler(hs)
         self.typing_notification_handler = TypingNotificationHandler(hs)
         self.admin_handler = AdminHandler(hs)
+        self.appservice_handler = ApplicationServicesHandler(
+            hs, ApplicationServiceApi(hs)
+        )
+        self.sync_handler = SyncHandler(hs)
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index f33d17a31e..1773fa20aa 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -19,6 +19,7 @@ from synapse.api.errors import LimitExceededError, SynapseError
 from synapse.util.async import run_on_reactor
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.api.constants import Membership, EventTypes
+from synapse.types import UserID
 
 import logging
 
@@ -113,7 +114,7 @@ class BaseHandler(object):
 
         if event.type == EventTypes.Member:
             if event.content["membership"] == Membership.INVITE:
-                invitee = self.hs.parse_userid(event.state_key)
+                invitee = UserID.from_string(event.state_key)
                 if not self.hs.is_mine(invitee):
                     # TODO: Can we add signature from remote server in a nicer
                     # way? If we have been invited by a remote server, we need
@@ -134,7 +135,7 @@ class BaseHandler(object):
                 if k[0] == EventTypes.Member:
                     if s.content["membership"] == Membership.JOIN:
                         destinations.add(
-                            self.hs.parse_userid(s.state_key).domain
+                            UserID.from_string(s.state_key).domain
                         )
             except SynapseError:
                 logger.warn(
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
new file mode 100644
index 0000000000..2c488a46f6
--- /dev/null
+++ b/synapse/handlers/appservice.py
@@ -0,0 +1,211 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.constants import EventTypes, Membership
+from synapse.api.errors import Codes, StoreError, SynapseError
+from synapse.appservice import ApplicationService
+from synapse.types import UserID
+import synapse.util.stringutils as stringutils
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+# NB: Purposefully not inheriting BaseHandler since that contains way too much
+# setup code which this handler does not need or use. This makes testing a lot
+# easier.
+class ApplicationServicesHandler(object):
+
+    def __init__(self, hs, appservice_api):
+        self.store = hs.get_datastore()
+        self.hs = hs
+        self.appservice_api = appservice_api
+
+    @defer.inlineCallbacks
+    def register(self, app_service):
+        logger.info("Register -> %s", app_service)
+        # check the token is recognised
+        try:
+            stored_service = yield self.store.get_app_service_by_token(
+                app_service.token
+            )
+            if not stored_service:
+                raise StoreError(404, "Application service not found")
+        except StoreError:
+            raise SynapseError(
+                403, "Unrecognised application services token. "
+                "Consult the home server admin.",
+                errcode=Codes.FORBIDDEN
+            )
+
+        app_service.hs_token = self._generate_hs_token()
+
+        # create a sender for this application service which is used when
+        # creating rooms, etc..
+        account = yield self.hs.get_handlers().registration_handler.register()
+        app_service.sender = account[0]
+
+        yield self.store.update_app_service(app_service)
+        defer.returnValue(app_service)
+
+    @defer.inlineCallbacks
+    def unregister(self, token):
+        logger.info("Unregister as_token=%s", token)
+        yield self.store.unregister_app_service(token)
+
+    @defer.inlineCallbacks
+    def notify_interested_services(self, event):
+        """Notifies (pushes) all application services interested in this event.
+
+        Pushing is done asynchronously, so this method won't block for any
+        prolonged length of time.
+
+        Args:
+            event(Event): The event to push out to interested services.
+        """
+        # Gather interested services
+        services = yield self._get_services_for_event(event)
+        if len(services) == 0:
+            return  # no services need notifying
+
+        # Do we know this user exists? If not, poke the user query API for
+        # all services which match that user regex. This needs to block as these
+        # user queries need to be made BEFORE pushing the event.
+        yield self._check_user_exists(event.sender)
+        if event.type == EventTypes.Member:
+            yield self._check_user_exists(event.state_key)
+
+        # Fork off pushes to these services - XXX First cut, best effort
+        for service in services:
+            self.appservice_api.push(service, event)
+
+    @defer.inlineCallbacks
+    def query_user_exists(self, user_id):
+        """Check if any application service knows this user_id exists.
+
+        Args:
+            user_id(str): The user to query if they exist on any AS.
+        Returns:
+            True if this user exists on at least one application service.
+        """
+        user_query_services = yield self._get_services_for_user(
+            user_id=user_id
+        )
+        for user_service in user_query_services:
+            is_known_user = yield self.appservice_api.query_user(
+                user_service, user_id
+            )
+            if is_known_user:
+                defer.returnValue(True)
+        defer.returnValue(False)
+
+    @defer.inlineCallbacks
+    def query_room_alias_exists(self, room_alias):
+        """Check if an application service knows this room alias exists.
+
+        Args:
+            room_alias(RoomAlias): The room alias to query.
+        Returns:
+            namedtuple: with keys "room_id" and "servers" or None if no
+            association can be found.
+        """
+        room_alias_str = room_alias.to_string()
+        alias_query_services = yield self._get_services_for_event(
+            event=None,
+            restrict_to=ApplicationService.NS_ALIASES,
+            alias_list=[room_alias_str]
+        )
+        for alias_service in alias_query_services:
+            is_known_alias = yield self.appservice_api.query_alias(
+                alias_service, room_alias_str
+            )
+            if is_known_alias:
+                # the alias exists now so don't query more ASes.
+                result = yield self.store.get_association_from_room_alias(
+                    room_alias
+                )
+                defer.returnValue(result)
+
+    @defer.inlineCallbacks
+    def _get_services_for_event(self, event, restrict_to="", alias_list=None):
+        """Retrieve a list of application services interested in this event.
+
+        Args:
+            event(Event): The event to check. Can be None if alias_list is not.
+            restrict_to(str): The namespace to restrict regex tests to.
+            alias_list: A list of aliases to get services for. If None, this
+            list is obtained from the database.
+        Returns:
+            list<ApplicationService>: A list of services interested in this
+            event based on the service regex.
+        """
+        member_list = None
+        if hasattr(event, "room_id"):
+            # We need to know the aliases associated with this event.room_id,
+            # if any.
+            if not alias_list:
+                alias_list = yield self.store.get_aliases_for_room(
+                    event.room_id
+                )
+            # We need to know the members associated with this event.room_id,
+            # if any.
+            member_list = yield self.store.get_room_members(
+                room_id=event.room_id,
+                membership=Membership.JOIN
+            )
+
+        services = yield self.store.get_app_services()
+        interested_list = [
+            s for s in services if (
+                s.is_interested(event, restrict_to, alias_list, member_list)
+            )
+        ]
+        defer.returnValue(interested_list)
+
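A toy restatement of the "interested service" test applied in _get_services_for_event() above: a service is interested if one of its namespace regexes matches the sender, a room alias, or a joined member. The namespaces dict below is a hypothetical stand-in for ApplicationService's internal state:

    import re

    def is_interested(namespaces, sender, alias_list, member_list):
        user_patterns = namespaces.get("users", [])
        alias_patterns = namespaces.get("aliases", [])
        if any(re.match(p, sender) for p in user_patterns):
            return True
        if any(re.match(p, a) for p in alias_patterns for a in alias_list):
            return True
        return any(re.match(p, m) for p in user_patterns for m in member_list)

    ns = {"users": [r"@irc_.*:example\.com"], "aliases": [r"#irc_.*:example\.com"]}
    assert is_interested(ns, "@irc_alice:example.com", [], [])
    assert not is_interested(ns, "@bob:example.com", ["#general:example.com"], [])
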
+    @defer.inlineCallbacks
+    def _get_services_for_user(self, user_id):
+        services = yield self.store.get_app_services()
+        interested_list = [
+            s for s in services if (
+                s.is_interested_in_user(user_id)
+            )
+        ]
+        defer.returnValue(interested_list)
+
+    @defer.inlineCallbacks
+    def _is_unknown_user(self, user_id):
+        user = UserID.from_string(user_id)
+        if not self.hs.is_mine(user):
+            # we don't know if they are unknown or not since it isn't one of our
+            # users. We can't poke ASes.
+            defer.returnValue(False)
+            return
+
+        user_info = yield self.store.get_user_by_id(user_id)
+        defer.returnValue(len(user_info) == 0)
+
+    @defer.inlineCallbacks
+    def _check_user_exists(self, user_id):
+        unknown_user = yield self._is_unknown_user(user_id)
+        if unknown_user:
+            exists = yield self.query_user_exists(user_id)
+            defer.returnValue(exists)
+        defer.returnValue(True)
+
+    def _generate_hs_token(self):
+        return stringutils.random_string(24)
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 91fceda2ac..f76febee8f 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -19,6 +19,7 @@ from ._base import BaseHandler
 
 from synapse.api.errors import SynapseError, Codes, CodeMessageException
 from synapse.api.constants import EventTypes
+from synapse.types import RoomAlias
 
 import logging
 
@@ -36,18 +37,15 @@ class DirectoryHandler(BaseHandler):
         )
 
     @defer.inlineCallbacks
-    def create_association(self, user_id, room_alias, room_id, servers=None):
-
-        # TODO(erikj): Do auth.
+    def _create_association(self, room_alias, room_id, servers=None):
+        # general association creation for both human users and app services
 
         if not self.hs.is_mine(room_alias):
             raise SynapseError(400, "Room alias must be local")
             # TODO(erikj): Change this.
 
         # TODO(erikj): Add transactions.
-
         # TODO(erikj): Check if there is a current association.
-
         if not servers:
             servers = yield self.store.get_joined_hosts_for_room(room_id)
 
@@ -61,22 +59,77 @@ class DirectoryHandler(BaseHandler):
         )
 
     @defer.inlineCallbacks
+    def create_association(self, user_id, room_alias, room_id, servers=None):
+        # association creation for human users
+        # TODO(erikj): Do user auth.
+
+        can_create = yield self.can_modify_alias(
+            room_alias,
+            user_id=user_id
+        )
+        if not can_create:
+            raise SynapseError(
+                400, "This alias is reserved by an application service.",
+                errcode=Codes.EXCLUSIVE
+            )
+        yield self._create_association(room_alias, room_id, servers)
+
+    @defer.inlineCallbacks
+    def create_appservice_association(self, service, room_alias, room_id,
+                                      servers=None):
+        if not service.is_interested_in_alias(room_alias.to_string()):
+            raise SynapseError(
+                400, "This application service has not reserved"
+                " this kind of alias.", errcode=Codes.EXCLUSIVE
+            )
+
+        # association creation for app services
+        yield self._create_association(room_alias, room_id, servers)
+
+    @defer.inlineCallbacks
     def delete_association(self, user_id, room_alias):
+        # association deletion for human users
+
         # TODO Check if server admin
 
+        can_delete = yield self.can_modify_alias(
+            room_alias,
+            user_id=user_id
+        )
+        if not can_delete:
+            raise SynapseError(
+                400, "This alias is reserved by an application service.",
+                errcode=Codes.EXCLUSIVE
+            )
+
+        yield self._delete_association(room_alias)
+
+    @defer.inlineCallbacks
+    def delete_appservice_association(self, service, room_alias):
+        if not service.is_interested_in_alias(room_alias.to_string()):
+            raise SynapseError(
+                400,
+                "This application service has not reserved this kind of alias",
+                errcode=Codes.EXCLUSIVE
+            )
+        yield self._delete_association(room_alias)
+
+    @defer.inlineCallbacks
+    def _delete_association(self, room_alias):
         if not self.hs.is_mine(room_alias):
             raise SynapseError(400, "Room alias must be local")
 
-        room_id = yield self.store.delete_room_alias(room_alias)
+        yield self.store.delete_room_alias(room_alias)
 
-        if room_id:
-            yield self._update_room_alias_events(user_id, room_id)
+        # TODO - Looks like _update_room_alias_events has never been
+        # implemented
+        # if room_id:
+        #    yield self._update_room_alias_events(user_id, room_id)
 
     @defer.inlineCallbacks
     def get_association(self, room_alias):
         room_id = None
         if self.hs.is_mine(room_alias):
-            result = yield self.store.get_association_from_room_alias(
+            result = yield self.get_association_from_room_alias(
                 room_alias
             )
 
@@ -107,12 +160,21 @@ class DirectoryHandler(BaseHandler):
         if not room_id:
             raise SynapseError(
                 404,
-                "Room alias %r not found" % (room_alias.to_string(),),
+                "Room alias %s not found" % (room_alias.to_string(),),
                 Codes.NOT_FOUND
             )
 
         extra_servers = yield self.store.get_joined_hosts_for_room(room_id)
-        servers = list(set(extra_servers) | set(servers))
+        servers = set(extra_servers) | set(servers)
+
+        # If this server is in the list of servers, return it first.
+        if self.server_name in servers:
+            servers = (
+                [self.server_name]
+                + [s for s in servers if s != self.server_name]
+            )
+        else:
+            servers = list(servers)
 
         defer.returnValue({
             "room_id": room_id,
@@ -122,13 +184,13 @@ class DirectoryHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def on_directory_query(self, args):
-        room_alias = self.hs.parse_roomalias(args["room_alias"])
+        room_alias = RoomAlias.from_string(args["room_alias"])
         if not self.hs.is_mine(room_alias):
             raise SynapseError(
                 400, "Room Alias is not hosted on this Home Server"
             )
 
-        result = yield self.store.get_association_from_room_alias(
+        result = yield self.get_association_from_room_alias(
             room_alias
         )
 
@@ -156,3 +218,37 @@ class DirectoryHandler(BaseHandler):
             "sender": user_id,
             "content": {"aliases": aliases},
         }, ratelimit=False)
+
+    @defer.inlineCallbacks
+    def get_association_from_room_alias(self, room_alias):
+        result = yield self.store.get_association_from_room_alias(
+            room_alias
+        )
+        if not result:
+            # Query AS to see if it exists
+            as_handler = self.hs.get_handlers().appservice_handler
+            result = yield as_handler.query_room_alias_exists(room_alias)
+        defer.returnValue(result)
+
+    @defer.inlineCallbacks
+    def can_modify_alias(self, alias, user_id=None):
+        # Any application service "interested" in an alias they are regexing on
+        # can modify the alias.
+        # Users can only modify the alias if ALL the interested services have
+        # non-exclusive locks on the alias (or there are no interested services)
+        services = yield self.store.get_app_services()
+        interested_services = [
+            s for s in services if s.is_interested_in_alias(alias.to_string())
+        ]
+
+        for service in interested_services:
+            if user_id == service.sender:
+                # this user IS the app service so they can do whatever they like
+                defer.returnValue(True)
+                return
+            elif service.is_exclusive_alias(alias.to_string()):
+                # another service has an exclusive lock on this alias.
+                defer.returnValue(False)
+                return
+        # either no interested services, or no service with an exclusive lock
+        defer.returnValue(True)
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 103bc67c42..d3297b7292 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -17,10 +17,13 @@ from twisted.internet import defer
 
 from synapse.util.logcontext import PreserveLoggingContext
 from synapse.util.logutils import log_function
+from synapse.types import UserID
+from synapse.events.utils import serialize_event
 
 from ._base import BaseHandler
 
 import logging
+import random
 
 
 logger = logging.getLogger(__name__)
@@ -47,38 +50,46 @@ class EventStreamHandler(BaseHandler):
     @defer.inlineCallbacks
     @log_function
     def get_stream(self, auth_user_id, pagin_config, timeout=0,
-                   as_client_event=True):
-        auth_user = self.hs.parse_userid(auth_user_id)
+                   as_client_event=True, affect_presence=True):
+        auth_user = UserID.from_string(auth_user_id)
 
         try:
-            if auth_user not in self._streams_per_user:
-                self._streams_per_user[auth_user] = 0
-                if auth_user in self._stop_timer_per_user:
-                    try:
-                        self.clock.cancel_call_later(
-                            self._stop_timer_per_user.pop(auth_user)
+            if affect_presence:
+                if auth_user not in self._streams_per_user:
+                    self._streams_per_user[auth_user] = 0
+                    if auth_user in self._stop_timer_per_user:
+                        try:
+                            self.clock.cancel_call_later(
+                                self._stop_timer_per_user.pop(auth_user)
+                            )
+                        except:
+                            logger.exception("Failed to cancel event timer")
+                    else:
+                        yield self.distributor.fire(
+                            "started_user_eventstream", auth_user
                         )
-                    except:
-                        logger.exception("Failed to cancel event timer")
-                else:
-                    yield self.distributor.fire(
-                        "started_user_eventstream", auth_user
-                    )
-            self._streams_per_user[auth_user] += 1
-
-            if pagin_config.from_token is None:
-                pagin_config.from_token = None
+                self._streams_per_user[auth_user] += 1
 
             rm_handler = self.hs.get_handlers().room_member_handler
             room_ids = yield rm_handler.get_rooms_for_user(auth_user)
 
+            if timeout:
+                # If they've set a timeout, enforce a minimum limit.
+                timeout = max(timeout, 500)
+
+                # Add some randomness to this value to try and mitigate against
+                # thundering herds on restart.
+                timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
+
             with PreserveLoggingContext():
                 events, tokens = yield self.notifier.get_events_for(
                     auth_user, room_ids, pagin_config, timeout
                 )
 
+            time_now = self.clock.time_msec()
+
             chunks = [
-                self.hs.serialize_event(e, as_client_event) for e in events
+                serialize_event(e, time_now, as_client_event) for e in events
             ]
 
             chunk = {
@@ -90,27 +101,28 @@ class EventStreamHandler(BaseHandler):
             defer.returnValue(chunk)
 
         finally:
-            self._streams_per_user[auth_user] -= 1
-            if not self._streams_per_user[auth_user]:
-                del self._streams_per_user[auth_user]
-
-                # 10 seconds of grace to allow the client to reconnect again
-                #   before we think they're gone
-                def _later():
-                    logger.debug(
-                        "_later stopped_user_eventstream %s", auth_user
-                    )
+            if affect_presence:
+                self._streams_per_user[auth_user] -= 1
+                if not self._streams_per_user[auth_user]:
+                    del self._streams_per_user[auth_user]
+
+                    # 10 seconds of grace to allow the client to reconnect again
+                    #   before we think they're gone
+                    def _later():
+                        logger.debug(
+                            "_later stopped_user_eventstream %s", auth_user
+                        )
 
-                    self._stop_timer_per_user.pop(auth_user, None)
+                        self._stop_timer_per_user.pop(auth_user, None)
 
-                    yield self.distributor.fire(
-                        "stopped_user_eventstream", auth_user
-                    )
+                        return self.distributor.fire(
+                            "stopped_user_eventstream", auth_user
+                        )
 
-                logger.debug("Scheduling _later: for %s", auth_user)
-                self._stop_timer_per_user[auth_user] = (
-                    self.clock.call_later(30, _later)
-                )
+                    logger.debug("Scheduling _later: for %s", auth_user)
+                    self._stop_timer_per_user[auth_user] = (
+                        self.clock.call_later(30, _later)
+                    )
 
 
 class EventHandler(BaseHandler):
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 81203bf1a3..ae4e9b316d 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -17,21 +17,21 @@
 
 from ._base import BaseHandler
 
-from synapse.events.utils import prune_event
 from synapse.api.errors import (
-    AuthError, FederationError, SynapseError, StoreError,
+    AuthError, FederationError, StoreError,
 )
-from synapse.api.constants import EventTypes, Membership
+from synapse.api.constants import EventTypes, Membership, RejectedReason
 from synapse.util.logutils import log_function
 from synapse.util.async import run_on_reactor
+from synapse.util.frozenutils import unfreeze
 from synapse.crypto.event_signing import (
-    compute_event_signature, check_event_content_hash,
-    add_hashes_and_signatures,
+    compute_event_signature, add_hashes_and_signatures,
 )
-from syutil.jsonutil import encode_canonical_json
+from synapse.types import UserID
 
 from twisted.internet import defer
 
+import itertools
 import logging
 
 
@@ -112,33 +112,6 @@ class FederationHandler(BaseHandler):
 
         logger.debug("Processing event: %s", event.event_id)
 
-        redacted_event = prune_event(event)
-
-        redacted_pdu_json = redacted_event.get_pdu_json()
-        try:
-            yield self.keyring.verify_json_for_server(
-                event.origin, redacted_pdu_json
-            )
-        except SynapseError as e:
-            logger.warn(
-                "Signature check failed for %s redacted to %s",
-                encode_canonical_json(pdu.get_pdu_json()),
-                encode_canonical_json(redacted_pdu_json),
-            )
-            raise FederationError(
-                "ERROR",
-                e.code,
-                e.msg,
-                affected=event.event_id,
-            )
-
-        if not check_event_content_hash(event):
-            logger.warn(
-                "Event content has been tampered, redacting %s, %s",
-                event.event_id, encode_canonical_json(event.get_dict())
-            )
-            event = redacted_event
-
         logger.debug("Event: %s", event)
 
         # FIXME (erikj): Awful hack to make the case where we are not currently
@@ -148,41 +121,38 @@ class FederationHandler(BaseHandler):
             event.room_id,
             self.server_name
         )
-        if not is_in_room and not event.internal_metadata.outlier:
+        if not is_in_room and not event.internal_metadata.is_outlier():
             logger.debug("Got event for room we're not in.")
+            current_state = state
 
-            replication = self.replication_layer
+        event_ids = set()
+        if state:
+            event_ids |= {e.event_id for e in state}
+        if auth_chain:
+            event_ids |= {e.event_id for e in auth_chain}
 
-            if not state:
-                state, auth_chain = yield replication.get_state_for_room(
-                    origin, context=event.room_id, event_id=event.event_id,
-                )
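+        # have_events maps each event_id we already have to its rejection
+        # reason (or None), so its keys are the events we have seen before.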
+        seen_ids = set(
+            (yield self.store.have_events(event_ids)).keys()
+        )
 
-            if not auth_chain:
-                auth_chain = yield replication.get_event_auth(
-                    origin,
-                    context=event.room_id,
-                    event_id=event.event_id,
-                )
+        if state and auth_chain is not None:
+            # If we have any state or auth_chain given to us by the replication
+            # layer, then we should handle them (if we haven't before.)
+            for e in itertools.chain(auth_chain, state):
+                if e.event_id in seen_ids:
+                    continue
 
-            for e in auth_chain:
                 e.internal_metadata.outlier = True
                 try:
-                    yield self._handle_new_event(e, fetch_auth_from=origin)
-                except:
-                    logger.exception(
-                        "Failed to handle auth event %s",
-                        e.event_id,
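+                    # Pull this event's auth events out of the chain we were
+                    # given, keyed by (type, state_key).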
+                    auth_ids = [e_id for e_id, _ in e.auth_events]
+                    auth = {
+                        (e.type, e.state_key): e for e in auth_chain
+                        if e.event_id in auth_ids
+                    }
+                    yield self._handle_new_event(
+                        origin, e, auth_events=auth
                     )
-
-            current_state = state
-
-        if state:
-            for e in state:
-                logging.info("A :) %r", e)
-                e.internal_metadata.outlier = True
-                try:
-                    yield self._handle_new_event(e)
+                    seen_ids.add(e.event_id)
                 except:
                     logger.exception(
                         "Failed to handle state event %s",
@@ -191,6 +161,7 @@ class FederationHandler(BaseHandler):
 
         try:
             yield self._handle_new_event(
+                origin,
                 event,
                 state=state,
                 backfilled=backfilled,
@@ -227,7 +198,7 @@ class FederationHandler(BaseHandler):
             extra_users = []
             if event.type == EventTypes.Member:
                 target_user_id = event.state_key
-                target_user = self.hs.parse_userid(target_user_id)
+                target_user = UserID.from_string(target_user_id)
                 extra_users.append(target_user)
 
             yield self.notifier.on_new_room_event(
@@ -236,7 +207,7 @@ class FederationHandler(BaseHandler):
 
         if event.type == EventTypes.Member:
             if event.membership == Membership.JOIN:
-                user = self.hs.parse_userid(event.state_key)
+                user = UserID.from_string(event.state_key)
                 yield self.distributor.fire(
                     "user_joined_room", user=user, room_id=event.room_id
                 )
@@ -305,7 +276,7 @@ class FederationHandler(BaseHandler):
 
     @log_function
     @defer.inlineCallbacks
-    def do_invite_join(self, target_host, room_id, joinee, content, snapshot):
+    def do_invite_join(self, target_hosts, room_id, joinee, content, snapshot):
         """ Attempts to join the `joinee` to the room `room_id` via the
-        server `target_host`.
+        servers in `target_hosts`.
 
@@ -319,8 +290,8 @@ class FederationHandler(BaseHandler):
         """
         logger.debug("Joining %s to %s", joinee, room_id)
 
-        pdu = yield self.replication_layer.make_join(
-            target_host,
+        origin, pdu = yield self.replication_layer.make_join(
+            target_hosts,
             room_id,
             joinee
         )
@@ -341,7 +312,7 @@ class FederationHandler(BaseHandler):
         self.room_queues[room_id] = []
 
         builder = self.event_builder_factory.new(
-            event.get_pdu_json()
+            unfreeze(event.get_pdu_json())
         )
 
         handled_events = set()
@@ -362,11 +333,20 @@ class FederationHandler(BaseHandler):
 
             new_event = builder.build()
 
+            # Try the host we successfully got a response to /make_join/
+            # request first.
+            try:
+                target_hosts.remove(origin)
+                target_hosts.insert(0, origin)
+            except ValueError:
+                pass
+
             ret = yield self.replication_layer.send_join(
-                target_host,
+                target_hosts,
                 new_event
             )
 
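+            # send_join may have been answered by a different host in the
+            # list; use whichever origin actually responded from here on.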
+            origin = ret["origin"]
             state = ret["state"]
             auth_chain = ret["auth_chain"]
             auth_chain.sort(key=lambda e: e.depth)
@@ -392,8 +372,19 @@ class FederationHandler(BaseHandler):
 
             for e in auth_chain:
                 e.internal_metadata.outlier = True
+
+                if e.event_id == event.event_id:
+                    continue
+
                 try:
-                    yield self._handle_new_event(e)
+                    auth_ids = [e_id for e_id, _ in e.auth_events]
+                    auth = {
+                        (e.type, e.state_key): e for e in auth_chain
+                        if e.event_id in auth_ids
+                    }
+                    yield self._handle_new_event(
+                        origin, e, auth_events=auth
+                    )
                 except:
                     logger.exception(
                         "Failed to handle auth event %s",
@@ -401,11 +392,18 @@ class FederationHandler(BaseHandler):
                     )
 
             for e in state:
-                # FIXME: Auth these.
+                if e.event_id == event.event_id:
+                    continue
+
                 e.internal_metadata.outlier = True
                 try:
+                    auth_ids = [e_id for e_id, _ in e.auth_events]
+                    auth = {
+                        (e.type, e.state_key): e for e in auth_chain
+                        if e.event_id in auth_ids
+                    }
                     yield self._handle_new_event(
-                        e, fetch_auth_from=target_host
+                        origin, e, auth_events=auth
                     )
                 except:
                     logger.exception(
@@ -413,10 +411,18 @@ class FederationHandler(BaseHandler):
                         e.event_id,
                     )
 
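+            # Resolve the join event's own auth events from the chain before
+            # handling it.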
+            auth_ids = [e_id for e_id, _ in event.auth_events]
+            auth_events = {
+                (e.type, e.state_key): e for e in auth_chain
+                if e.event_id in auth_ids
+            }
+
             yield self._handle_new_event(
+                origin,
                 new_event,
                 state=state,
                 current_state=state,
+                auth_events=auth_events,
             )
 
             yield self.notifier.on_new_room_event(
@@ -480,7 +486,7 @@ class FederationHandler(BaseHandler):
 
         event.internal_metadata.outlier = False
 
-        context = yield self._handle_new_event(event)
+        context = yield self._handle_new_event(origin, event)
 
         logger.debug(
             "on_send_join_request: After _handle_new_event: %s, sigs: %s",
@@ -491,7 +497,7 @@ class FederationHandler(BaseHandler):
         extra_users = []
         if event.type == EventTypes.Member:
             target_user_id = event.state_key
-            target_user = self.hs.parse_userid(target_user_id)
+            target_user = UserID.from_string(target_user_id)
             extra_users.append(target_user)
 
         yield self.notifier.on_new_room_event(
@@ -500,7 +506,7 @@ class FederationHandler(BaseHandler):
 
         if event.type == EventTypes.Member:
             if event.content["membership"] == Membership.JOIN:
-                user = self.hs.parse_userid(event.state_key)
+                user = UserID.from_string(event.state_key)
                 yield self.distributor.fire(
                     "user_joined_room", user=user, room_id=event.room_id
                 )
@@ -514,13 +520,15 @@ class FederationHandler(BaseHandler):
                 if k[0] == EventTypes.Member:
                     if s.content["membership"] == Membership.JOIN:
                         destinations.add(
-                            self.hs.parse_userid(s.state_key).domain
+                            UserID.from_string(s.state_key).domain
                         )
             except:
                 logger.warn(
                     "Failed to get destination from event %s", s.event_id
                 )
 
+        destinations.discard(origin)
+
         logger.debug(
             "on_send_join_request: Sending event: %s, signatures: %s",
             event.event_id,
@@ -565,7 +573,7 @@ class FederationHandler(BaseHandler):
             backfilled=False,
         )
 
-        target_user = self.hs.parse_userid(event.state_key)
+        target_user = UserID.from_string(event.state_key)
         yield self.notifier.on_new_room_event(
             event, extra_users=[target_user],
         )
@@ -573,12 +581,13 @@ class FederationHandler(BaseHandler):
         defer.returnValue(event)
 
     @defer.inlineCallbacks
-    def get_state_for_pdu(self, origin, room_id, event_id):
+    def get_state_for_pdu(self, origin, room_id, event_id, do_auth=True):
         yield run_on_reactor()
 
-        in_room = yield self.auth.check_host_in_room(room_id, origin)
-        if not in_room:
-            raise AuthError(403, "Host not in room.")
+        if do_auth:
+            in_room = yield self.auth.check_host_in_room(room_id, origin)
+            if not in_room:
+                raise AuthError(403, "Host not in room.")
 
         state_groups = yield self.store.get_state_groups(
             [event_id]
@@ -641,6 +650,7 @@ class FederationHandler(BaseHandler):
         event = yield self.store.get_event(
             event_id,
             allow_none=True,
+            allow_rejected=True,
         )
 
         if event:
@@ -681,11 +691,12 @@ class FederationHandler(BaseHandler):
             waiters.pop().callback(None)
 
     @defer.inlineCallbacks
-    def _handle_new_event(self, event, state=None, backfilled=False,
-                          current_state=None, fetch_auth_from=None):
+    @log_function
+    def _handle_new_event(self, origin, event, state=None, backfilled=False,
+                          current_state=None, auth_events=None):
 
         logger.debug(
-            "_handle_new_event: Before annotate: %s, sigs: %s",
+            "_handle_new_event: %s, sigs: %s",
             event.event_id, event.signatures,
         )
 
@@ -693,65 +704,46 @@ class FederationHandler(BaseHandler):
             event, old_state=state
         )
 
+        if not auth_events:
+            auth_events = context.auth_events
+
         logger.debug(
-            "_handle_new_event: Before auth fetch: %s, sigs: %s",
-            event.event_id, event.signatures,
+            "_handle_new_event: %s, auth_events: %s",
+            event.event_id, auth_events,
         )
 
         is_new_state = not event.internal_metadata.is_outlier()
 
-        known_ids = set(
-            [s.event_id for s in context.auth_events.values()]
-        )
-
-        for e_id, _ in event.auth_events:
-            if e_id not in known_ids:
-                e = yield self.store.get_event(e_id, allow_none=True)
-
-                if not e and fetch_auth_from is not None:
-                    # Grab the auth_chain over federation if we are missing
-                    # auth events.
-                    auth_chain = yield self.replication_layer.get_event_auth(
-                        fetch_auth_from, event.event_id, event.room_id
-                    )
-                    for auth_event in auth_chain:
-                        yield self._handle_new_event(auth_event)
-                    e = yield self.store.get_event(e_id, allow_none=True)
-
-                if not e:
-                    # TODO: Do some conflict res to make sure that we're
-                    # not the ones who are wrong.
-                    logger.info(
-                        "Rejecting %s as %s not in db or %s",
-                        event.event_id, e_id, known_ids,
-                    )
-                    # FIXME: How does raising AuthError work with federation?
-                    raise AuthError(403, "Cannot find auth event")
-
-                context.auth_events[(e.type, e.state_key)] = e
-
-        logger.debug(
-            "_handle_new_event: Before hack: %s, sigs: %s",
-            event.event_id, event.signatures,
-        )
-
+        # This is a hack to fix some old rooms where the initial join event
+        # didn't reference the create event in its auth events.
         if event.type == EventTypes.Member and not event.auth_events:
             if len(event.prev_events) == 1:
                 c = yield self.store.get_event(event.prev_events[0][0])
                 if c.type == EventTypes.Create:
-                    context.auth_events[(c.type, c.state_key)] = c
+                    auth_events[(c.type, c.state_key)] = c
 
-        logger.debug(
-            "_handle_new_event: Before auth check: %s, sigs: %s",
-            event.event_id, event.signatures,
-        )
+        try:
+            yield self.do_auth(
+                origin, event, context, auth_events=auth_events
+            )
+        except AuthError as e:
+            logger.warn(
+                "Rejecting %s because %s",
+                event.event_id, e.msg
+            )
 
-        self.auth.check(event, auth_events=context.auth_events)
+            context.rejected = RejectedReason.AUTH_ERROR
 
-        logger.debug(
-            "_handle_new_event: Before persist_event: %s, sigs: %s",
-            event.event_id, event.signatures,
-        )
+            # FIXME: Don't store as rejected with AUTH_ERROR if we haven't
+            # seen all the auth events.
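+            # Persist the event marked as rejected, without letting it
+            # become part of the room's current state, then re-raise.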
+            yield self.store.persist_event(
+                event,
+                context=context,
+                backfilled=backfilled,
+                is_new_state=False,
+                current_state=current_state,
+            )
+            raise
 
         yield self.store.persist_event(
             event,
@@ -761,9 +753,388 @@ class FederationHandler(BaseHandler):
             current_state=current_state,
         )
 
-        logger.debug(
-            "_handle_new_event: After persist_event: %s, sigs: %s",
-            event.event_id, event.signatures,
+        defer.returnValue(context)
+
+    @defer.inlineCallbacks
+    def on_query_auth(self, origin, event_id, remote_auth_chain, rejects,
+                      missing):
+        # Just go through and process each event in `remote_auth_chain`. We
+        # don't want to fall into the trap of `missing` being wrong.
+        for e in remote_auth_chain:
+            try:
+                yield self._handle_new_event(origin, e)
+            except AuthError:
+                pass
+
+        # Now get the current auth_chain for the event.
+        local_auth_chain = yield self.store.get_auth_chain([event_id])
+
+        # TODO: Check if we would now reject event_id. If so we need to tell
+        # everyone.
+
+        ret = yield self.construct_auth_difference(
+            local_auth_chain, remote_auth_chain
         )
 
-        defer.returnValue(context)
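+        # Sign the events we are returning so the requesting server can
+        # verify that this auth chain really came from us.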
+        for event in ret["auth_chain"]:
+            event.signatures.update(
+                compute_event_signature(
+                    event,
+                    self.hs.hostname,
+                    self.hs.config.signing_key[0]
+                )
+            )
+
+        logger.debug("on_query_auth returning: %s", ret)
+
+        defer.returnValue(ret)
+
+    @defer.inlineCallbacks
+    def on_get_missing_events(self, origin, room_id, earliest_events,
+                              latest_events, limit, min_depth):
+        in_room = yield self.auth.check_host_in_room(
+            room_id,
+            origin
+        )
+        if not in_room:
+            raise AuthError(403, "Host not in room.")
+
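+        # Clamp the client-supplied limit and depth to sane bounds.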
+        limit = min(limit, 20)
+        min_depth = max(min_depth, 0)
+
+        missing_events = yield self.store.get_missing_events(
+            room_id=room_id,
+            earliest_events=earliest_events,
+            latest_events=latest_events,
+            limit=limit,
+            min_depth=min_depth,
+        )
+
+        defer.returnValue(missing_events)
+
+    @defer.inlineCallbacks
+    @log_function
+    def do_auth(self, origin, event, context, auth_events):
+        # Check if we have all the auth events.
+        have_events = yield self.store.have_events(
+            [e_id for e_id, _ in event.auth_events]
+        )
+
+        event_auth_events = set(e_id for e_id, _ in event.auth_events)
+        seen_events = set(have_events.keys())
+
+        missing_auth = event_auth_events - seen_events
+
+        if missing_auth:
+            logger.info("Missing auth: %s", missing_auth)
+            # If we don't have all the auth events, we need to get them.
+            try:
+                remote_auth_chain = yield self.replication_layer.get_event_auth(
+                    origin, event.room_id, event.event_id
+                )
+
+                seen_remotes = yield self.store.have_events(
+                    [e.event_id for e in remote_auth_chain]
+                )
+
+                for e in remote_auth_chain:
+                    if e.event_id in seen_remotes.keys():
+                        continue
+
+                    if e.event_id == event.event_id:
+                        continue
+
+                    try:
+                        auth_ids = [e_id for e_id, _ in e.auth_events]
+                        auth = {
+                            (e.type, e.state_key): e for e in remote_auth_chain
+                            if e.event_id in auth_ids
+                        }
+                        e.internal_metadata.outlier = True
+
+                        logger.debug(
+                            "do_auth %s missing_auth: %s",
+                            event.event_id, e.event_id
+                        )
+                        yield self._handle_new_event(
+                            origin, e, auth_events=auth
+                        )
+
+                        if e.event_id in event_auth_events:
+                            auth_events[(e.type, e.state_key)] = e
+                    except AuthError:
+                        pass
+
+                have_events = yield self.store.have_events(
+                    [e_id for e_id, _ in event.auth_events]
+                )
+                seen_events = set(have_events.keys())
+            except:
+                # FIXME:
+                logger.exception("Failed to get auth chain")
+
+        # FIXME: Assumes we have and stored all the state for all the
+        # prev_events
+        current_state = set(e.event_id for e in auth_events.values())
+        different_auth = event_auth_events - current_state
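+        # different_auth: auth events cited by the remote event which are not
+        # in the auth set we are using for it.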
+
+        if different_auth and not event.internal_metadata.is_outlier():
+            # Do auth conflict res.
+            logger.info("Different auth: %s", different_auth)
+
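+            # Fetch the differing events that we have persisted and did not
+            # reject (have_events maps rejected events to a reason string).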
+            different_events = yield defer.gatherResults(
+                [
+                    self.store.get_event(
+                        d,
+                        allow_none=True,
+                        allow_rejected=False,
+                    )
+                    for d in different_auth
+                    if d in have_events and not have_events[d]
+                ],
+                consumeErrors=True
+            )
+
+            if different_events:
+                local_view = dict(auth_events)
+                remote_view = dict(auth_events)
+                remote_view.update({
+                    (d.type, d.state_key): d for d in different_events
+                })
+
+                new_state, prev_state = self.state_handler.resolve_events(
+                    [local_view.values(), remote_view.values()],
+                    event
+                )
+
+                auth_events.update(new_state)
+
+                current_state = set(e.event_id for e in auth_events.values())
+                different_auth = event_auth_events - current_state
+
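+                # The auth set changed, so update the context's state and
+                # drop its state_group: it no longer matches a stored group.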
+                context.current_state.update(auth_events)
+                context.state_group = None
+
+        if different_auth and not event.internal_metadata.is_outlier():
+            logger.info("Different auth after resolution: %s", different_auth)
+
+            # Only do auth resolution if we have something new to say.
+            # We can't prove an auth failure.
+            do_resolution = False
+
+            provable = [
+                RejectedReason.NOT_ANCESTOR,
+            ]
+
+            for e_id in different_auth:
+                if e_id in have_events:
+                    if have_events[e_id] in provable:
+                        do_resolution = True
+                        break
+
+            if do_resolution:
+                # 1. Get what we think is the auth chain.
+                auth_ids = self.auth.compute_auth_events(
+                    event, context.current_state
+                )
+                local_auth_chain = yield self.store.get_auth_chain(auth_ids)
+
+                try:
+                    # 2. Get remote difference.
+                    result = yield self.replication_layer.query_auth(
+                        origin,
+                        event.room_id,
+                        event.event_id,
+                        local_auth_chain,
+                    )
+
+                    seen_remotes = yield self.store.have_events(
+                        [e.event_id for e in result["auth_chain"]]
+                    )
+
+                    # 3. Process any remote auth chain events we haven't seen.
+                    for ev in result["auth_chain"]:
+                        if ev.event_id in seen_remotes.keys():
+                            continue
+
+                        if ev.event_id == event.event_id:
+                            continue
+
+                        try:
+                            auth_ids = [e_id for e_id, _ in ev.auth_events]
+                            auth = {
+                                (e.type, e.state_key): e
+                                for e in result["auth_chain"]
+                                if e.event_id in auth_ids
+                            }
+                            ev.internal_metadata.outlier = True
+
+                            logger.debug(
+                                "do_auth %s different_auth: %s",
+                                event.event_id, e.event_id
+                            )
+
+                            yield self._handle_new_event(
+                                origin, ev, auth_events=auth
+                            )
+
+                            if ev.event_id in event_auth_events:
+                                auth_events[(ev.type, ev.state_key)] = ev
+                        except AuthError:
+                            pass
+
+                except:
+                    # FIXME:
+                    logger.exception("Failed to query auth chain")
+
+                # 4. Look at rejects and their proofs.
+                # TODO.
+
+                context.current_state.update(auth_events)
+                context.state_group = None
+
+        self.auth.check(event, auth_events=auth_events)
+
+    @defer.inlineCallbacks
+    def construct_auth_difference(self, local_auth, remote_auth):
+        """ Given a local and remote auth chain, find the differences. This
+        assumes that we have already processed all events in remote_auth
+
+        Params:
+            local_auth (list)
+            remote_auth (list)
+
+        Returns:
+            dict
+        """
+
+        logger.debug("construct_auth_difference Start!")
+
+        # TODO: Make sure we are OK with local_auth or remote_auth having more
+        # auth events in them than strictly necessary.
+
+        def sort_fun(ev):
+            return ev.depth, ev.event_id
+
+        logger.debug("construct_auth_difference after sort_fun!")
+
+        # We find the differences by starting at the "bottom" of each list
+        # and iterating up both lists. The lists are ordered by depth and
+        # then event_id; we iterate up both lists until the event ids stop
+        # matching. Then we look at depth/event_id to see which side is
+        # missing that event, and iterate only up that list. Repeat.
+
+        remote_list = list(remote_auth)
+        remote_list.sort(key=sort_fun)
+
+        local_list = list(local_auth)
+        local_list.sort(key=sort_fun)
+
+        local_iter = iter(local_list)
+        remote_iter = iter(remote_list)
+
+        logger.debug("construct_auth_difference before get_next!")
+
+        def get_next(it, opt=None):
+            try:
+                return it.next()
+            except StopIteration:
+                return opt
+
+        current_local = get_next(local_iter)
+        current_remote = get_next(remote_iter)
+
+        logger.debug("construct_auth_difference before while")
+
+        missing_remotes = []
+        missing_locals = []
+        while current_local or current_remote:
+            if current_remote is None:
+                missing_locals.append(current_local)
+                current_local = get_next(local_iter)
+                continue
+
+            if current_local is None:
+                missing_remotes.append(current_remote)
+                current_remote = get_next(remote_iter)
+                continue
+
+            if current_local.event_id == current_remote.event_id:
+                current_local = get_next(local_iter)
+                current_remote = get_next(remote_iter)
+                continue
+
+            if current_local.depth < current_remote.depth:
+                missing_locals.append(current_local)
+                current_local = get_next(local_iter)
+                continue
+
+            if current_local.depth > current_remote.depth:
+                missing_remotes.append(current_remote)
+                current_remote = get_next(remote_iter)
+                continue
+
+            # They have the same depth, so we fall back to the event_id order
+            if current_local.event_id < current_remote.event_id:
+                missing_locals.append(current_local)
+                current_local = get_next(local_iter)
+                continue
+
+            if current_local.event_id > current_remote.event_id:
+                missing_remotes.append(current_remote)
+                current_remote = get_next(remote_iter)
+                continue
+
+        logger.debug("construct_auth_difference after while")
+
+        # Missing locals should be sent to the remote server.
+        # We should find out why we are missing remotes, as they will have
+        # been rejected.
+
+        # Remove events from missing_remotes if they are referencing a missing
+        # remote. We only care about the "root" rejected ones.
+        missing_remote_ids = [e.event_id for e in missing_remotes]
+        base_remote_rejected = list(missing_remotes)
+        for e in missing_remotes:
+            for e_id, _ in e.auth_events:
+                if e_id in missing_remote_ids:
+                    try:
+                        base_remote_rejected.remove(e)
+                    except ValueError:
+                        pass
+
+        reason_map = {}
+
+        for e in base_remote_rejected:
+            reason = yield self.store.get_rejection_reason(e.event_id)
+            if reason is None:
+                # TODO: e is not in the current state, so we should
+                # construct some proof of that.
+                continue
+
+            reason_map[e.event_id] = reason
+
+            if reason == RejectedReason.AUTH_ERROR:
+                pass
+            elif reason == RejectedReason.REPLACED:
+                # TODO: Get proof
+                pass
+            elif reason == RejectedReason.NOT_ANCESTOR:
+                # TODO: Get proof.
+                pass
+
+        logger.debug("construct_auth_difference returning")
+
+        defer.returnValue({
+            "auth_chain": local_auth,
+            "rejects": {
+                e.event_id: {
+                    "reason": reason_map[e.event_id],
+                    "proof": None,
+                }
+                for e in base_remote_rejected
+            },
+            "missing": [e.event_id for e in missing_locals],
+        })
diff --git a/synapse/handlers/login.py b/synapse/handlers/login.py
index d297d71c03..7447800460 100644
--- a/synapse/handlers/login.py
+++ b/synapse/handlers/login.py
@@ -16,12 +16,13 @@
 from twisted.internet import defer
 
 from ._base import BaseHandler
-from synapse.api.errors import LoginError, Codes
+from synapse.api.errors import LoginError, Codes, CodeMessageException
 from synapse.http.client import SimpleHttpClient
 from synapse.util.emailutils import EmailException
 import synapse.util.emailutils as emailutils
 
 import bcrypt
+import json
 import logging
 
 logger = logging.getLogger(__name__)
@@ -96,16 +97,20 @@ class LoginHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def _query_email(self, email):
-        httpCli = SimpleHttpClient(self.hs)
-        data = yield httpCli.get_json(
-            # TODO FIXME This should be configurable.
-            # XXX: ID servers need to use HTTPS
-            "http://%s%s" % (
-                "matrix.org:8090", "/_matrix/identity/api/v1/lookup"
-            ),
-            {
-                'medium': 'email',
-                'address': email
-            }
-        )
-        defer.returnValue(data)
+        http_client = SimpleHttpClient(self.hs)
+        try:
+            data = yield http_client.get_json(
+                # TODO FIXME This should be configurable.
+                # XXX: ID servers need to use HTTPS
+                "http://%s%s" % (
+                    "matrix.org:8090", "/_matrix/identity/api/v1/lookup"
+                ),
+                {
+                    'medium': 'email',
+                    'address': email
+                }
+            )
+            defer.returnValue(data)
+        except CodeMessageException as e:
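+            # The identity server returns a JSON body describing the error
+            # even on non-2xx responses; hand that back to the caller.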
+            data = json.loads(e.msg)
+            defer.returnValue(data)
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index f2a2f16933..7b9685be7f 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -16,10 +16,12 @@
 from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership
-from synapse.api.errors import RoomError
+from synapse.api.errors import RoomError, SynapseError
 from synapse.streams.config import PaginationConfig
+from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
 from synapse.util.logcontext import PreserveLoggingContext
+from synapse.types import UserID
 
 from ._base import BaseHandler
 
@@ -33,6 +35,7 @@ class MessageHandler(BaseHandler):
     def __init__(self, hs):
         super(MessageHandler, self).__init__(hs)
         self.hs = hs
+        self.state = hs.get_state_handler()
         self.clock = hs.get_clock()
         self.validator = EventValidator()
 
@@ -89,7 +92,7 @@ class MessageHandler(BaseHandler):
                 yield self.hs.get_event_sources().get_current_token()
             )
 
-        user = self.hs.parse_userid(user_id)
+        user = UserID.from_string(user_id)
 
         events, next_key = yield data_source.get_pagination_rows(
             user, pagin_config.get_source_config("room"), room_id
@@ -99,9 +102,11 @@ class MessageHandler(BaseHandler):
             "room_key", next_key
         )
 
+        time_now = self.clock.time_msec()
+
         chunk = {
             "chunk": [
-                self.hs.serialize_event(e, as_client_event) for e in events
+                serialize_event(e, time_now, as_client_event) for e in events
             ],
             "start": pagin_config.from_token.to_string(),
             "end": next_token.to_string(),
@@ -110,7 +115,8 @@ class MessageHandler(BaseHandler):
         defer.returnValue(chunk)
 
     @defer.inlineCallbacks
-    def create_and_send_event(self, event_dict, ratelimit=True):
+    def create_and_send_event(self, event_dict, ratelimit=True,
+                              client=None, txn_id=None):
         """ Given a dict from a client, create and handle a new event.
 
-        Creates an FrozenEvent object, filling out auth_events, prev_events,
+        Creates a FrozenEvent object, filling out auth_events, prev_events,
@@ -130,13 +136,13 @@ class MessageHandler(BaseHandler):
         if ratelimit:
             self.ratelimit(builder.user_id)
         # TODO(paul): Why does 'event' not have a 'user' object?
-        user = self.hs.parse_userid(builder.user_id)
+        user = UserID.from_string(builder.user_id)
         assert self.hs.is_mine(user), "User must be our own: %s" % (user,)
 
         if builder.type == EventTypes.Member:
             membership = builder.content.get("membership", None)
             if membership == Membership.JOIN:
-                joinee = self.hs.parse_userid(builder.state_key)
+                joinee = UserID.from_string(builder.state_key)
                 # If event doesn't include a display name, add one.
                 yield self.distributor.fire(
                     "collect_presencelike_data",
@@ -144,6 +150,15 @@ class MessageHandler(BaseHandler):
                     builder.content
                 )
 
+        if client is not None:
+            if client.token_id is not None:
+                builder.internal_metadata.token_id = client.token_id
+            if client.device_id is not None:
+                builder.internal_metadata.device_id = client.device_id
+
+        if txn_id is not None:
+            builder.internal_metadata.txn_id = txn_id
+
         event, context = yield self._create_new_client_event(
             builder=builder,
         )
@@ -210,7 +225,10 @@ class MessageHandler(BaseHandler):
 
         # TODO: This is duplicating logic from snapshot_all_rooms
         current_state = yield self.state_handler.get_current_state(room_id)
-        defer.returnValue([self.hs.serialize_event(c) for c in current_state])
+        now = self.clock.time_msec()
+        defer.returnValue(
+            [serialize_event(c, now) for c in current_state.values()]
+        )
 
     @defer.inlineCallbacks
     def snapshot_all_rooms(self, user_id=None, pagin_config=None,
@@ -237,7 +255,7 @@ class MessageHandler(BaseHandler):
             membership_list=[Membership.INVITE, Membership.JOIN]
         )
 
-        user = self.hs.parse_userid(user_id)
+        user = UserID.from_string(user_id)
 
         rooms_ret = []
 
@@ -282,10 +300,11 @@ class MessageHandler(BaseHandler):
 
                 start_token = now_token.copy_and_replace("room_key", token[0])
                 end_token = now_token.copy_and_replace("room_key", token[1])
+                time_now = self.clock.time_msec()
 
                 d["messages"] = {
                     "chunk": [
-                        self.hs.serialize_event(m, as_client_event)
+                        serialize_event(m, time_now, as_client_event)
                         for m in messages
                     ],
                     "start": start_token.to_string(),
@@ -296,7 +315,8 @@ class MessageHandler(BaseHandler):
                     event.room_id
                 )
                 d["state"] = [
-                    self.hs.serialize_event(c) for c in current_state
+                    serialize_event(c, time_now, as_client_event)
+                    for c in current_state.values()
                 ]
             except:
                 logger.exception("Failed to get snapshot")
@@ -312,20 +332,27 @@ class MessageHandler(BaseHandler):
     @defer.inlineCallbacks
     def room_initial_sync(self, user_id, room_id, pagin_config=None,
                           feedback=False):
-        yield self.auth.check_joined_room(room_id, user_id)
+        current_state = yield self.state.get_current_state(
+            room_id=room_id,
+        )
+
+        yield self.auth.check_joined_room(
+            room_id, user_id,
+            current_state=current_state
+        )
 
         # TODO(paul): I wish I was called with user objects not user_id
         #   strings...
-        auth_user = self.hs.parse_userid(user_id)
+        auth_user = UserID.from_string(user_id)
 
         # TODO: These concurrently
-        state_tuples = yield self.state_handler.get_current_state(room_id)
-        state = [self.hs.serialize_event(x) for x in state_tuples]
+        time_now = self.clock.time_msec()
+        state = [
+            serialize_event(x, time_now)
+            for x in current_state.values()
+        ]
 
-        member_event = (yield self.store.get_room_member(
-            user_id=user_id,
-            room_id=room_id
-        ))
+        member_event = current_state.get((EventTypes.Member, user_id,))
 
         now_token = yield self.hs.get_event_sources().get_current_token()
 
@@ -342,28 +369,34 @@ class MessageHandler(BaseHandler):
         start_token = now_token.copy_and_replace("room_key", token[0])
         end_token = now_token.copy_and_replace("room_key", token[1])
 
-        room_members = yield self.store.get_room_members(room_id)
+        room_members = [
+            m for m in current_state.values()
+            if m.type == EventTypes.Member
+            and m.content["membership"] == Membership.JOIN
+        ]
 
         presence_handler = self.hs.get_handlers().presence_handler
         presence = []
         for m in room_members:
             try:
                 member_presence = yield presence_handler.get_state(
-                    target_user=self.hs.parse_userid(m.user_id),
+                    target_user=UserID.from_string(m.user_id),
                     auth_user=auth_user,
                     as_event=True,
                 )
                 presence.append(member_presence)
-            except Exception:
+            except SynapseError:
                 logger.exception(
                     "Failed to get member presence of %r", m.user_id
                 )
 
+        time_now = self.clock.time_msec()
+
         defer.returnValue({
             "membership": member_event.membership,
             "room_id": room_id,
             "messages": {
-                "chunk": [self.hs.serialize_event(m) for m in messages],
+                "chunk": [serialize_event(m, time_now) for m in messages],
                 "start": start_token.to_string(),
                 "end": end_token.to_string(),
             },
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 8aeed99274..8ef248ecf2 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -20,6 +20,7 @@ from synapse.api.constants import PresenceState
 
 from synapse.util.logutils import log_function
 from synapse.util.logcontext import PreserveLoggingContext
+from synapse.types import UserID
 
 from ._base import BaseHandler
 
@@ -86,6 +87,10 @@ class PresenceHandler(BaseHandler):
             "changed_presencelike_data", self.changed_presencelike_data
         )
 
+        # outbound signal from the presence module to advertise when a user's
+        # presence has changed
+        distributor.declare("user_presence_changed")
+
         self.distributor = distributor
 
         self.federation = hs.get_replication_layer()
@@ -96,22 +101,22 @@ class PresenceHandler(BaseHandler):
         self.federation.register_edu_handler(
             "m.presence_invite",
             lambda origin, content: self.invite_presence(
-                observed_user=hs.parse_userid(content["observed_user"]),
-                observer_user=hs.parse_userid(content["observer_user"]),
+                observed_user=UserID.from_string(content["observed_user"]),
+                observer_user=UserID.from_string(content["observer_user"]),
             )
         )
         self.federation.register_edu_handler(
             "m.presence_accept",
             lambda origin, content: self.accept_presence(
-                observed_user=hs.parse_userid(content["observed_user"]),
-                observer_user=hs.parse_userid(content["observer_user"]),
+                observed_user=UserID.from_string(content["observed_user"]),
+                observer_user=UserID.from_string(content["observer_user"]),
             )
         )
         self.federation.register_edu_handler(
             "m.presence_deny",
             lambda origin, content: self.deny_presence(
-                observed_user=hs.parse_userid(content["observed_user"]),
-                observer_user=hs.parse_userid(content["observer_user"]),
+                observed_user=UserID.from_string(content["observed_user"]),
+                observer_user=UserID.from_string(content["observer_user"]),
             )
         )
 
@@ -418,7 +423,7 @@ class PresenceHandler(BaseHandler):
         )
 
         for p in presence:
-            observed_user = self.hs.parse_userid(p.pop("observed_user_id"))
+            observed_user = UserID.from_string(p.pop("observed_user_id"))
             p["observed_user"] = observed_user
             p.update(self._get_or_offline_usercache(observed_user).get_state())
             if "last_active" in p:
@@ -441,7 +446,7 @@ class PresenceHandler(BaseHandler):
                 user.localpart, accepted=True
             )
             target_users = set([
-                self.hs.parse_userid(x["observed_user_id"]) for x in presence
+                UserID.from_string(x["observed_user_id"]) for x in presence
             ])
 
             # Also include people in all my rooms
@@ -452,9 +457,9 @@ class PresenceHandler(BaseHandler):
         if state is None:
             state = yield self.store.get_presence_state(user.localpart)
         else:
-#            statuscache = self._get_or_make_usercache(user)
-#            self._user_cachemap_latest_serial += 1
-#            statuscache.update(state, self._user_cachemap_latest_serial)
+            # statuscache = self._get_or_make_usercache(user)
+            # self._user_cachemap_latest_serial += 1
+            # statuscache.update(state, self._user_cachemap_latest_serial)
             pass
 
         yield self.push_update_to_local_and_remote(
@@ -487,7 +492,7 @@ class PresenceHandler(BaseHandler):
                 user, domain, remoteusers
             ))
 
-        yield defer.DeferredList(deferreds)
+        yield defer.DeferredList(deferreds, consumeErrors=True)
 
     def _start_polling_local(self, user, target_user):
         target_localpart = target_user.localpart
@@ -543,7 +548,7 @@ class PresenceHandler(BaseHandler):
                 self._stop_polling_remote(user, domain, remoteusers)
             )
 
-        return defer.DeferredList(deferreds)
+        return defer.DeferredList(deferreds, consumeErrors=True)
 
     def _stop_polling_local(self, user, target_user):
         for localpart in self._local_pushmap.keys():
@@ -603,6 +608,7 @@ class PresenceHandler(BaseHandler):
             room_ids=room_ids,
             statuscache=statuscache,
         )
+        yield self.distributor.fire("user_presence_changed", user, statuscache)
 
     @defer.inlineCallbacks
     def _push_presence_remote(self, user, destination, state=None):
@@ -646,13 +652,15 @@ class PresenceHandler(BaseHandler):
         deferreds = []
 
         for push in content.get("push", []):
-            user = self.hs.parse_userid(push["user_id"])
+            user = UserID.from_string(push["user_id"])
 
             logger.debug("Incoming presence update from %s", user)
 
             observers = set(self._remote_recvmap.get(user, set()))
             if observers:
-                logger.debug(" | %d interested local observers %r", len(observers), observers)
+                logger.debug(
+                    " | %d interested local observers %r", len(observers), observers
+                )
 
             rm_handler = self.homeserver.get_handlers().room_member_handler
             room_ids = yield rm_handler.get_rooms_for_user(user)
@@ -694,14 +702,14 @@ class PresenceHandler(BaseHandler):
                 del self._user_cachemap[user]
 
         for poll in content.get("poll", []):
-            user = self.hs.parse_userid(poll)
+            user = UserID.from_string(poll)
 
             if not self.hs.is_mine(user):
                 continue
 
             # TODO(paul) permissions checks
 
-            if not user in self._remote_sendmap:
+            if user not in self._remote_sendmap:
                 self._remote_sendmap[user] = set()
 
             self._remote_sendmap[user].add(origin)
@@ -709,7 +717,7 @@ class PresenceHandler(BaseHandler):
             deferreds.append(self._push_presence_remote(user, origin))
 
         for unpoll in content.get("unpoll", []):
-            user = self.hs.parse_userid(unpoll)
+            user = UserID.from_string(unpoll)
 
             if not self.hs.is_mine(user):
                 continue
@@ -721,7 +729,7 @@ class PresenceHandler(BaseHandler):
                     del self._remote_sendmap[user]
 
         with PreserveLoggingContext():
-            yield defer.DeferredList(deferreds)
+            yield defer.DeferredList(deferreds, consumeErrors=True)
 
     @defer.inlineCallbacks
     def push_update_to_local_and_remote(self, observed_user, statuscache,
@@ -760,7 +768,7 @@ class PresenceHandler(BaseHandler):
                 )
             )
 
-        yield defer.DeferredList(deferreds)
+        yield defer.DeferredList(deferreds, consumeErrors=True)
 
         defer.returnValue((localusers, remote_domains))
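Several hunks above add consumeErrors=True to defer.DeferredList. Without it, a failing deferred in the list is left as an unhandled Failure that Twisted logs noisily at garbage-collection time; with it, each failure is captured in the (success, result) pairs the DeferredList fires with. A small usage sketch, where push_one stands in for any of the per-destination pushes in this handler:

    import logging

    from twisted.internet import defer

    logger = logging.getLogger(__name__)

    @defer.inlineCallbacks
    def push_to_all(destinations, push_one):
        deferreds = [push_one(d) for d in destinations]
        # consumeErrors=True: failures come back as (False, Failure) pairs
        # instead of escaping as unhandled errors.
        results = yield defer.DeferredList(deferreds, consumeErrors=True)
        for (succeeded, value), destination in zip(results, destinations):
            if not succeeded:
                logger.warn("Push to %s failed: %s",
                            destination, value.getErrorMessage())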
 
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 7777d3cc94..2ddf9d5378 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -18,6 +18,7 @@ from twisted.internet import defer
 from synapse.api.errors import SynapseError, AuthError, CodeMessageException
 from synapse.api.constants import EventTypes, Membership
 from synapse.util.logcontext import PreserveLoggingContext
+from synapse.types import UserID
 
 from ._base import BaseHandler
 
@@ -169,7 +170,7 @@ class ProfileHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def on_profile_query(self, args):
-        user = self.hs.parse_userid(args["user_id"])
+        user = UserID.from_string(args["user_id"])
         if not self.hs.is_mine(user):
             raise SynapseError(400, "User is not hosted on this Home Server")
 
@@ -211,10 +212,16 @@ class ProfileHandler(BaseHandler):
             )
 
             msg_handler = self.hs.get_handlers().message_handler
-            yield msg_handler.create_and_send_event({
-                "type": EventTypes.Member,
-                "room_id": j.room_id,
-                "state_key": user.to_string(),
-                "content": content,
-                "sender": user.to_string()
-            }, ratelimit=False)
+            try:
+                yield msg_handler.create_and_send_event({
+                    "type": EventTypes.Member,
+                    "room_id": j.room_id,
+                    "state_key": user.to_string(),
+                    "content": content,
+                    "sender": user.to_string()
+                }, ratelimit=False)
+            except Exception as e:
+                logger.warn(
+                    "Failed to update join event for room %s - %s",
+                    j.room_id, str(e)
+                )
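The profile.py change above makes room-by-room replication best effort: when a user updates their displayname or avatar, the handler re-sends their membership event into each joined room, and a failure in one room no longer aborts the rest. The shape of the pattern, with send_member_event as a stand-in for the message handler call:

    import logging

    from twisted.internet import defer

    logger = logging.getLogger(__name__)

    @defer.inlineCallbacks
    def replicate_profile_change(joins, send_member_event):
        for j in joins:
            try:
                # One bad room (e.g. a permissions error) should not stop
                # the profile update reaching the remaining rooms.
                yield send_member_event(j.room_id)
            except Exception as e:
                logger.warn("Failed to update join event for room %s - %s",
                            j.room_id, e)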
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 732652c228..cda4a8502a 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -18,7 +18,8 @@ from twisted.internet import defer
 
 from synapse.types import UserID
 from synapse.api.errors import (
-    SynapseError, RegistrationError, InvalidCaptchaError
+    AuthError, Codes, SynapseError, RegistrationError, InvalidCaptchaError,
+    CodeMessageException
 )
 from ._base import BaseHandler
 import synapse.util.stringutils as stringutils
@@ -28,6 +29,7 @@ from synapse.http.client import CaptchaServerHttpClient
 
 import base64
 import bcrypt
+import json
 import logging
 
 logger = logging.getLogger(__name__)
@@ -64,6 +66,8 @@ class RegistrationHandler(BaseHandler):
             user = UserID(localpart, self.hs.hostname)
             user_id = user.to_string()
 
+            yield self.check_user_id_is_valid(user_id)
+
             token = self._generate_token(user_id)
             yield self.store.register(
                 user_id=user_id,
@@ -82,6 +86,7 @@ class RegistrationHandler(BaseHandler):
                     localpart = self._generate_user_id()
                     user = UserID(localpart, self.hs.hostname)
                     user_id = user.to_string()
+                    yield self.check_user_id_is_valid(user_id)
 
                     token = self._generate_token(user_id)
                     yield self.store.register(
@@ -99,6 +104,47 @@ class RegistrationHandler(BaseHandler):
                         raise RegistrationError(
                             500, "Cannot generate user ID.")
 
+        # create a default avatar for the user
+        # XXX: ideally clients would explicitly specify one, but given they don't
+        # and we want consistent and pretty identicons for random users, we'll
+        # do it here.
+        try:
+            auth_user = UserID.from_string(user_id)
+            media_repository = self.hs.get_resource_for_media_repository()
+            identicon_resource = media_repository.getChildWithDefault("identicon", None)
+            upload_resource = media_repository.getChildWithDefault("upload", None)
+            identicon_bytes = identicon_resource.generate_identicon(user_id, 320, 320)
+            content_uri = yield upload_resource.create_content(
+                "image/png", None, identicon_bytes, len(identicon_bytes), auth_user
+            )
+            profile_handler = self.hs.get_handlers().profile_handler
+            profile_handler.set_avatar_url(
+                auth_user, auth_user, ("%s#auto" % (content_uri,))
+            )
+        except NotImplementedError:
+            pass  # make tests pass without messing around creating default avatars
+
+        defer.returnValue((user_id, token))
+
+    @defer.inlineCallbacks
+    def appservice_register(self, user_localpart, as_token):
+        user = UserID(user_localpart, self.hs.hostname)
+        user_id = user.to_string()
+        service = yield self.store.get_app_service_by_token(as_token)
+        if not service:
+            raise AuthError(403, "Invalid application service token.")
+        if not service.is_interested_in_user(user_id):
+            raise SynapseError(
+                400, "Invalid user localpart for this application service.",
+                errcode=Codes.EXCLUSIVE
+            )
+        token = self._generate_token(user_id)
+        yield self.store.register(
+            user_id=user_id,
+            token=token,
+            password_hash=""
+        )
+        self.distributor.fire("registered_user", user)
         defer.returnValue((user_id, token))
 
     @defer.inlineCallbacks
@@ -147,6 +193,21 @@ class RegistrationHandler(BaseHandler):
             # XXX: This should be a deferred list, shouldn't it?
             yield self._bind_threepid(c, user_id)
 
+    @defer.inlineCallbacks
+    def check_user_id_is_valid(self, user_id):
+        # valid user IDs must not clash with any user ID namespaces claimed by
+        # application services.
+        services = yield self.store.get_app_services()
+        interested_services = [
+            s for s in services if s.is_interested_in_user(user_id)
+        ]
+        for service in interested_services:
+            if service.is_exclusive_user(user_id):
+                raise SynapseError(
+                    400, "This user ID is reserved by an application service.",
+                    errcode=Codes.EXCLUSIVE
+                )
+
     def _generate_token(self, user_id):
         # urlsafe variant uses _ and - so use . as the separator and replace
         # all =s with .s so http clients don't quote =s when it is used as
@@ -161,21 +222,26 @@ class RegistrationHandler(BaseHandler):
     def _threepid_from_creds(self, creds):
         # TODO: get this from the homeserver rather than creating a new one for
         # each request
-        httpCli = SimpleHttpClient(self.hs)
+        http_client = SimpleHttpClient(self.hs)
         # XXX: make this configurable!
-        trustedIdServers = ['matrix.org:8090']
+        trustedIdServers = ['matrix.org:8090', 'matrix.org']
         if not creds['idServer'] in trustedIdServers:
             logger.warn('%s is not a trusted ID server: rejecting 3pid ' +
                         'credentials', creds['idServer'])
             defer.returnValue(None)
-        data = yield httpCli.get_json(
-            # XXX: This should be HTTPS
-            "http://%s%s" % (
-                creds['idServer'],
-                "/_matrix/identity/api/v1/3pid/getValidated3pid"
-            ),
-            {'sid': creds['sid'], 'clientSecret': creds['clientSecret']}
-        )
+
+        data = {}
+        try:
+            data = yield http_client.get_json(
+                # XXX: This should be HTTPS
+                "http://%s%s" % (
+                    creds['idServer'],
+                    "/_matrix/identity/api/v1/3pid/getValidated3pid"
+                ),
+                {'sid': creds['sid'], 'clientSecret': creds['clientSecret']}
+            )
+        except CodeMessageException as e:
+            data = json.loads(e.msg)
 
         if 'medium' in data:
             defer.returnValue(data)
@@ -185,19 +251,23 @@ class RegistrationHandler(BaseHandler):
     def _bind_threepid(self, creds, mxid):
         yield
         logger.debug("binding threepid")
-        httpCli = SimpleHttpClient(self.hs)
-        data = yield httpCli.post_urlencoded_get_json(
-            # XXX: Change when ID servers are all HTTPS
-            "http://%s%s" % (
-                creds['idServer'], "/_matrix/identity/api/v1/3pid/bind"
-            ),
-            {
-                'sid': creds['sid'],
-                'clientSecret': creds['clientSecret'],
-                'mxid': mxid,
-            }
-        )
-        logger.debug("bound threepid")
+        http_client = SimpleHttpClient(self.hs)
+        data = None
+        try:
+            data = yield http_client.post_urlencoded_get_json(
+                # XXX: Change when ID servers are all HTTPS
+                "http://%s%s" % (
+                    creds['idServer'], "/_matrix/identity/api/v1/3pid/bind"
+                ),
+                {
+                    'sid': creds['sid'],
+                    'clientSecret': creds['clientSecret'],
+                    'mxid': mxid,
+                }
+            )
+            logger.debug("bound threepid")
+        except CodeMessageException as e:
+            data = json.loads(e.msg)
         defer.returnValue(data)
 
     @defer.inlineCallbacks
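check_user_id_is_valid and appservice_register above split one question two ways: an application service may register any user ID it is merely "interested in", while ordinary registration must be refused for any user ID that some service claims exclusively. A toy model of the namespace check, assuming the regex-plus-exclusive-flag shape used by synapse.appservice (ToyAppService and its names are illustrative, not the real ApplicationService API):

    import re

    class ToyAppService(object):
        def __init__(self, user_namespaces):
            # e.g. [("@irc_.*:example\\.org", True)] -> (pattern, exclusive)
            self.user_namespaces = [
                (re.compile(regex), exclusive)
                for regex, exclusive in user_namespaces
            ]

        def is_interested_in_user(self, user_id):
            return any(p.match(user_id) for p, _ in self.user_namespaces)

        def is_exclusive_user(self, user_id):
            return any(
                exclusive and p.match(user_id)
                for p, exclusive in self.user_namespaces
            )

    service = ToyAppService([("@irc_.*:example\\.org", True)])
    assert service.is_interested_in_user("@irc_alice:example.org")
    assert service.is_exclusive_user("@irc_alice:example.org")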
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 6d0db18e51..80f7ee3f12 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -16,12 +16,14 @@
 """Contains functions for performing events on rooms."""
 from twisted.internet import defer
 
+from ._base import BaseHandler
+
 from synapse.types import UserID, RoomAlias, RoomID
 from synapse.api.constants import EventTypes, Membership, JoinRules
 from synapse.api.errors import StoreError, SynapseError
 from synapse.util import stringutils
 from synapse.util.async import run_on_reactor
-from ._base import BaseHandler
+from synapse.events.utils import serialize_event
 
 import logging
 
@@ -64,7 +66,7 @@ class RoomCreationHandler(BaseHandler):
         invite_list = config.get("invite", [])
         for i in invite_list:
             try:
-                self.hs.parse_userid(i)
+                UserID.from_string(i)
             except:
                 raise SynapseError(400, "Invalid user_id: %s" % (i,))
 
@@ -114,7 +116,7 @@ class RoomCreationHandler(BaseHandler):
                 servers=[self.hs.hostname],
             )
 
-        user = self.hs.parse_userid(user_id)
+        user = UserID.from_string(user_id)
         creation_events = self._create_events_for_new_room(
             user, room_id, is_public=is_public
         )
@@ -246,11 +248,9 @@ class RoomMemberHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def get_room_members(self, room_id):
-        hs = self.hs
-
         users = yield self.store.get_users_in_room(room_id)
 
-        defer.returnValue([hs.parse_userid(u) for u in users])
+        defer.returnValue([UserID.from_string(u) for u in users])
 
     @defer.inlineCallbacks
     def fetch_room_distributions_into(self, room_id, localusers=None,
@@ -295,8 +295,9 @@ class RoomMemberHandler(BaseHandler):
         yield self.auth.check_joined_room(room_id, user_id)
 
         member_list = yield self.store.get_room_members(room_id=room_id)
+        time_now = self.clock.time_msec()
         event_list = [
-            self.hs.serialize_event(entry)
+            serialize_event(entry, time_now)
             for entry in member_list
         ]
         chunk_data = {
@@ -368,7 +369,7 @@ class RoomMemberHandler(BaseHandler):
             )
 
             if prev_state and prev_state.membership == Membership.JOIN:
-                user = self.hs.parse_userid(event.user_id)
+                user = UserID.from_string(event.user_id)
                 self.distributor.fire(
                     "user_left_room", user=user, room_id=event.room_id
                 )
@@ -388,8 +389,6 @@ class RoomMemberHandler(BaseHandler):
         if not hosts:
             raise SynapseError(404, "No known servers")
 
-        host = hosts[0]
-
         # If event doesn't include a display name, add one.
         yield self.distributor.fire(
             "collect_presencelike_data", joinee, content
@@ -406,13 +405,13 @@ class RoomMemberHandler(BaseHandler):
         })
         event, context = yield self._create_new_client_event(builder)
 
-        yield self._do_join(event, context, room_host=host, do_auth=True)
+        yield self._do_join(event, context, room_hosts=hosts, do_auth=True)
 
         defer.returnValue({"room_id": room_id})
 
     @defer.inlineCallbacks
-    def _do_join(self, event, context, room_host=None, do_auth=True):
-        joinee = self.hs.parse_userid(event.state_key)
+    def _do_join(self, event, context, room_hosts=None, do_auth=True):
+        joinee = UserID.from_string(event.state_key)
         # room_id = RoomID.from_string(event.room_id, self.hs)
         room_id = event.room_id
 
@@ -440,7 +439,7 @@ class RoomMemberHandler(BaseHandler):
 
         if is_host_in_room:
             should_do_dance = False
-        elif room_host:  # TODO: Shouldn't this be remote_room_host?
+        elif room_hosts:  # TODO: Shouldn't this be remote_room_host?
             should_do_dance = True
         else:
             # TODO(markjh): get prev_state from snapshot
@@ -452,7 +451,7 @@ class RoomMemberHandler(BaseHandler):
                 inviter = UserID.from_string(prev_state.user_id)
 
                 should_do_dance = not self.hs.is_mine(inviter)
-                room_host = inviter.domain
+                room_hosts = [inviter.domain]
             else:
                 # return the same error as join_room_alias does
                 raise SynapseError(404, "No known servers")
@@ -460,10 +459,10 @@ class RoomMemberHandler(BaseHandler):
         if should_do_dance:
             handler = self.hs.get_handlers().federation_handler
             yield handler.do_invite_join(
-                room_host,
+                room_hosts,
                 room_id,
                 event.user_id,
-                event.get_dict()["content"],  # FIXME To get a non-frozen dict
+                event.content,  # FIXME To get a non-frozen dict
                 context
             )
         else:
@@ -476,7 +475,7 @@ class RoomMemberHandler(BaseHandler):
                 do_auth=do_auth,
             )
 
-        user = self.hs.parse_userid(event.user_id)
+        user = UserID.from_string(event.user_id)
         yield self.distributor.fire(
             "user_joined_room", user=user, room_id=room_id
         )
@@ -511,9 +510,16 @@ class RoomMemberHandler(BaseHandler):
     def get_rooms_for_user(self, user, membership_list=[Membership.JOIN]):
         """Returns a list of roomids that the user has any of the given
         membership states in."""
-        rooms = yield self.store.get_rooms_for_user_where_membership_is(
-            user_id=user.to_string(), membership_list=membership_list
+
+        app_service = yield self.store.get_app_service_by_user_id(
+            user.to_string()
         )
+        if app_service:
+            rooms = yield self.store.get_app_service_rooms(app_service)
+        else:
+            rooms = yield self.store.get_rooms_for_user_where_membership_is(
+                user_id=user.to_string(), membership_list=membership_list
+            )
 
         # For some reason the list of events contains duplicates
         # TODO(paul): work out why because I really don't think it should
@@ -526,7 +532,7 @@ class RoomMemberHandler(BaseHandler):
                                     do_auth):
         yield run_on_reactor()
 
-        target_user = self.hs.parse_userid(event.state_key)
+        target_user = UserID.from_string(event.state_key)
 
         yield self.handle_new_client_event(
             event,
@@ -560,13 +566,24 @@ class RoomEventSource(object):
 
         to_key = yield self.get_current_key()
 
-        events, end_key = yield self.store.get_room_events_stream(
-            user_id=user.to_string(),
-            from_key=from_key,
-            to_key=to_key,
-            room_id=None,
-            limit=limit,
+        app_service = yield self.store.get_app_service_by_user_id(
+            user.to_string()
         )
+        if app_service:
+            events, end_key = yield self.store.get_appservice_room_stream(
+                service=app_service,
+                from_key=from_key,
+                to_key=to_key,
+                limit=limit,
+            )
+        else:
+            events, end_key = yield self.store.get_room_events_stream(
+                user_id=user.to_string(),
+                from_key=from_key,
+                to_key=to_key,
+                room_id=None,
+                limit=limit,
+            )
 
         defer.returnValue((events, end_key))
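The room_host -> room_hosts change above means a remote join is no longer pinned to the first known server. A plausible consumer, sketched here with make_join as a stand-in for the federation round trip (the real do_invite_join lives in synapse/handlers/federation.py):

    from twisted.internet import defer

    from synapse.api.errors import SynapseError

    @defer.inlineCallbacks
    def join_via_any_host(room_hosts, room_id, user_id, content, make_join):
        # Try each candidate server in turn until one accepts the join.
        last_error = None
        for host in room_hosts:
            try:
                result = yield make_join(host, room_id, user_id, content)
            except Exception as e:
                last_error = e
                continue
            defer.returnValue(result)
        if last_error is not None:
            raise last_error
        raise SynapseError(404, "No known servers")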
 
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
new file mode 100644
index 0000000000..7883bbd834
--- /dev/null
+++ b/synapse/handlers/sync.py
@@ -0,0 +1,439 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import BaseHandler
+
+from synapse.streams.config import PaginationConfig
+from synapse.api.constants import Membership, EventTypes
+
+from twisted.internet import defer
+
+import collections
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+SyncConfig = collections.namedtuple("SyncConfig", [
+    "user",
+    "client_info",
+    "limit",
+    "gap",
+    "sort",
+    "backfill",
+    "filter",
+])
+
+
+class RoomSyncResult(collections.namedtuple("RoomSyncResult", [
+    "room_id",
+    "limited",
+    "published",
+    "events",
+    "state",
+    "prev_batch",
+    "ephemeral",
+])):
+    __slots__ = []
+
+    def __nonzero__(self):
+        """Make the result appear empty if there are no updates. This is used
+        to tell if the room needs to be part of the sync result.
+        """
+        return bool(self.events or self.state or self.ephemeral)
+
+
+class SyncResult(collections.namedtuple("SyncResult", [
+    "next_batch",  # Token for the next sync
+    "private_user_data",  # List of private events for the user.
+    "public_user_data",  # List of public events for all users.
+    "rooms",  # RoomSyncResult for each room.
+])):
+    __slots__ = []
+
+    def __nonzero__(self):
+        """Make the result appear empty if there are no updates. This is used
+        to tell if the notifier needs to wait for more events when polling for
+        events.
+        """
+        return bool(
+            self.private_user_data or self.public_user_data or self.rooms
+        )
+
+
+class SyncHandler(BaseHandler):
+
+    def __init__(self, hs):
+        super(SyncHandler, self).__init__(hs)
+        self.event_sources = hs.get_event_sources()
+        self.clock = hs.get_clock()
+
+    @defer.inlineCallbacks
+    def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0):
+        """Get the sync for a client if we have new data for it now. Otherwise
+        wait for new data to arrive on the server. If the timeout expires, then
+        return an empty sync result.
+        Returns:
+            A Deferred SyncResult.
+        """
+        if timeout == 0 or since_token is None:
+            result = yield self.current_sync_for_user(sync_config, since_token)
+            defer.returnValue(result)
+        else:
+            def current_sync_callback():
+                return self.current_sync_for_user(sync_config, since_token)
+
+            rm_handler = self.hs.get_handlers().room_member_handler
+            room_ids = yield rm_handler.get_rooms_for_user(sync_config.user)
+            result = yield self.notifier.wait_for_events(
+                sync_config.user, room_ids,
+                sync_config.filter, timeout, current_sync_callback
+            )
+            defer.returnValue(result)
+
+    def current_sync_for_user(self, sync_config, since_token=None):
+        """Get the sync for client needed to match what the server has now.
+        Returns:
+            A Deferred SyncResult.
+        """
+        if since_token is None:
+            return self.initial_sync(sync_config)
+        else:
+            if sync_config.gap:
+                return self.incremental_sync_with_gap(sync_config, since_token)
+            else:
+                # TODO(mjark): Handle gapless sync
+                raise NotImplementedError()
+
+    @defer.inlineCallbacks
+    def initial_sync(self, sync_config):
+        """Get a sync for a client which is starting without any state
+        Returns:
+            A Deferred SyncResult.
+        """
+        if sync_config.sort == "timeline,desc":
+            # TODO(mjark): Handle going through events in reverse order?
+            # What does "most recent events" mean when applying the limits
+            # in this case?
+            raise NotImplementedError()
+
+        now_token = yield self.event_sources.get_current_token()
+
+        presence_stream = self.event_sources.sources["presence"]
+        # TODO (mjark): This looks wrong, shouldn't we be getting the presence
+        # UP to the present rather than after the present?
+        pagination_config = PaginationConfig(from_token=now_token)
+        presence, _ = yield presence_stream.get_pagination_rows(
+            user=sync_config.user,
+            pagination_config=pagination_config.get_source_config("presence"),
+            key=None
+        )
+        room_list = yield self.store.get_rooms_for_user_where_membership_is(
+            user_id=sync_config.user.to_string(),
+            membership_list=[Membership.INVITE, Membership.JOIN]
+        )
+
+        # TODO (mjark): Does public mean "published"?
+        published_rooms = yield self.store.get_rooms(is_public=True)
+        published_room_ids = set(r["room_id"] for r in published_rooms)
+
+        rooms = []
+        for event in room_list:
+            room_sync = yield self.initial_sync_for_room(
+                event.room_id, sync_config, now_token, published_room_ids
+            )
+            rooms.append(room_sync)
+
+        defer.returnValue(SyncResult(
+            public_user_data=presence,
+            private_user_data=[],
+            rooms=rooms,
+            next_batch=now_token,
+        ))
+
+    @defer.inlineCallbacks
+    def initial_sync_for_room(self, room_id, sync_config, now_token,
+                              published_room_ids):
+        """Sync a room for a client which is starting without any state
+        Returns:
+            A Deferred RoomSyncResult.
+        """
+
+        recents, prev_batch_token, limited = yield self.load_filtered_recents(
+            room_id, sync_config, now_token,
+        )
+
+        current_state = yield self.state_handler.get_current_state(
+            room_id
+        )
+        current_state_events = current_state.values()
+
+        defer.returnValue(RoomSyncResult(
+            room_id=room_id,
+            published=room_id in published_room_ids,
+            events=recents,
+            prev_batch=prev_batch_token,
+            state=current_state_events,
+            limited=limited,
+            ephemeral=[],
+        ))
+
+    @defer.inlineCallbacks
+    def incremental_sync_with_gap(self, sync_config, since_token):
+        """ Get the incremental delta needed to bring the client up to
+        date with the server.
+        Returns:
+            A Deferred SyncResult.
+        """
+        if sync_config.sort == "timeline,desc":
+            # TODO(mjark): Handle going through events in reverse order?
+            # What does "most recent events" mean when applying the limits
+            # in this case?
+            raise NotImplementedError()
+
+        now_token = yield self.event_sources.get_current_token()
+
+        presence_source = self.event_sources.sources["presence"]
+        presence, presence_key = yield presence_source.get_new_events_for_user(
+            user=sync_config.user,
+            from_key=since_token.presence_key,
+            limit=sync_config.limit,
+        )
+        now_token = now_token.copy_and_replace("presence_key", presence_key)
+
+        typing_source = self.event_sources.sources["typing"]
+        typing, typing_key = yield typing_source.get_new_events_for_user(
+            user=sync_config.user,
+            from_key=since_token.typing_key,
+            limit=sync_config.limit,
+        )
+        now_token = now_token.copy_and_replace("typing_key", typing_key)
+
+        typing_by_room = {event["room_id"]: [event] for event in typing}
+        for event in typing:
+            event.pop("room_id")
+        logger.debug("Typing %r", typing_by_room)
+
+        rm_handler = self.hs.get_handlers().room_member_handler
+        room_ids = yield rm_handler.get_rooms_for_user(sync_config.user)
+
+        # TODO (mjark): Does public mean "published"?
+        published_rooms = yield self.store.get_rooms(is_public=True)
+        published_room_ids = set(r["room_id"] for r in published_rooms)
+
+        room_events, _ = yield self.store.get_room_events_stream(
+            sync_config.user.to_string(),
+            from_key=since_token.room_key,
+            to_key=now_token.room_key,
+            room_id=None,
+            limit=sync_config.limit + 1,
+        )
+
+        rooms = []
+        if len(room_events) <= sync_config.limit:
+            # There is no gap in any of the rooms. Therefore we can just
+            # partition the new events by room and return them.
+            events_by_room_id = {}
+            for event in room_events:
+                events_by_room_id.setdefault(event.room_id, []).append(event)
+
+            for room_id in room_ids:
+                recents = events_by_room_id.get(room_id, [])
+                state = [event for event in recents if event.is_state()]
+                if recents:
+                    prev_batch = now_token.copy_and_replace(
+                        "room_key", recents[0].internal_metadata.before
+                    )
+                else:
+                    prev_batch = now_token
+
+                state = yield self.check_joined_room(
+                    sync_config, room_id, state
+                )
+
+                room_sync = RoomSyncResult(
+                    room_id=room_id,
+                    published=room_id in published_room_ids,
+                    events=recents,
+                    prev_batch=prev_batch,
+                    state=state,
+                    limited=False,
+                    ephemeral=typing_by_room.get(room_id, [])
+                )
+                if room_sync:
+                    rooms.append(room_sync)
+        else:
+            for room_id in room_ids:
+                room_sync = yield self.incremental_sync_with_gap_for_room(
+                    room_id, sync_config, since_token, now_token,
+                    published_room_ids, typing_by_room
+                )
+                if room_sync:
+                    rooms.append(room_sync)
+
+        defer.returnValue(SyncResult(
+            public_user_data=presence,
+            private_user_data=[],
+            rooms=rooms,
+            next_batch=now_token,
+        ))
+
+    @defer.inlineCallbacks
+    def load_filtered_recents(self, room_id, sync_config, now_token,
+                              since_token=None):
+        limited = True
+        recents = []
+        filtering_factor = 2
+        load_limit = max(sync_config.limit * filtering_factor, 100)
+        max_repeat = 3  # Only try a few times per room, otherwise give up.
+        room_key = now_token.room_key
+        end_key = room_key
+
+        while limited and len(recents) < sync_config.limit and max_repeat:
+            events, keys = yield self.store.get_recent_events_for_room(
+                room_id,
+                limit=load_limit + 1,
+                from_token=since_token.room_key if since_token else None,
+                end_token=end_key,
+            )
+            (room_key, _) = keys
+            end_key = "s" + room_key.split('-')[-1]
+            loaded_recents = sync_config.filter.filter_room_events(events)
+            loaded_recents.extend(recents)
+            recents = loaded_recents
+            if len(events) <= load_limit:
+                limited = False
+            max_repeat -= 1
+
+        if len(recents) > sync_config.limit:
+            recents = recents[-sync_config.limit:]
+            room_key = recents[0].internal_metadata.before
+
+        prev_batch_token = now_token.copy_and_replace(
+            "room_key", room_key
+        )
+
+        defer.returnValue((recents, prev_batch_token, limited))
+
+    @defer.inlineCallbacks
+    def incremental_sync_with_gap_for_room(self, room_id, sync_config,
+                                           since_token, now_token,
+                                           published_room_ids, typing_by_room):
+        """ Get the incremental delta needed to bring the client up to date for
+        the room. Gives the client the most recent events and the changes to
+        state.
+        Returns:
+            A Deferred RoomSyncResult
+        """
+
+        # TODO(mjark): Check for redactions we might have missed.
+
+        recents, prev_batch_token, limited = yield self.load_filtered_recents(
+            room_id, sync_config, now_token, since_token,
+        )
+
+        logging.debug("Recents %r", recents)
+
+        # TODO(mjark): This seems racy since this isn't being passed a
+        # token to indicate what point in the stream this is
+        current_state = yield self.state_handler.get_current_state(
+            room_id
+        )
+        current_state_events = current_state.values()
+
+        state_at_previous_sync = yield self.get_state_at_previous_sync(
+            room_id, since_token=since_token
+        )
+
+        state_events_delta = yield self.compute_state_delta(
+            since_token=since_token,
+            previous_state=state_at_previous_sync,
+            current_state=current_state_events,
+        )
+
+        state_events_delta = yield self.check_joined_room(
+            sync_config, room_id, state_events_delta
+        )
+
+        room_sync = RoomSyncResult(
+            room_id=room_id,
+            published=room_id in published_room_ids,
+            events=recents,
+            prev_batch=prev_batch_token,
+            state=state_events_delta,
+            limited=limited,
+            ephemeral=typing_by_room.get(room_id, [])
+        )
+
+        logging.debug("Room sync: %r", room_sync)
+
+        defer.returnValue(room_sync)
+
+    @defer.inlineCallbacks
+    def get_state_at_previous_sync(self, room_id, since_token):
+        """ Get the room state at the previous sync the client made.
+        Returns:
+            A Deferred list of Events.
+        """
+        last_events, token = yield self.store.get_recent_events_for_room(
+            room_id, end_token=since_token.room_key, limit=1,
+        )
+
+        if last_events:
+            last_event = last_events[0]
+            last_context = yield self.state_handler.compute_event_context(
+                last_event
+            )
+            if last_event.is_state():
+                state = [last_event] + last_context.current_state.values()
+            else:
+                state = last_context.current_state.values()
+        else:
+            state = ()
+        defer.returnValue(state)
+
+    def compute_state_delta(self, since_token, previous_state, current_state):
+        """ Works out the differnce in state between the current state and the
+        state the client got when it last performed a sync.
+        Returns:
+            A list of events.
+        """
+        # TODO(mjark) Check if the state events were received by the server
+        # after the previous sync, since we need to include those state
+        # updates even if they occurred logically before the previous event.
+        # TODO(mjark) Check for new redactions in the state events.
+        previous_dict = {event.event_id: event for event in previous_state}
+        state_delta = []
+        for event in current_state:
+            if event.event_id not in previous_dict:
+                state_delta.append(event)
+        return state_delta
+
+    @defer.inlineCallbacks
+    def check_joined_room(self, sync_config, room_id, state_delta):
+        joined = False
+        for event in state_delta:
+            if (
+                event.type == EventTypes.Member
+                and event.state_key == sync_config.user.to_string()
+            ):
+                if event.content["membership"] == Membership.JOIN:
+                    joined = True
+
+        if joined:
+            res = yield self.state_handler.get_current_state(room_id)
+            state_delta = res.values()
+
+        defer.returnValue(state_delta)
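compute_state_delta above is a pure set difference keyed on event_id. A tiny worked example of what it returns:

    previous_state = [{"event_id": "$a"}, {"event_id": "$b"}]
    current_state = [{"event_id": "$b"}, {"event_id": "$c"}]

    previous_ids = set(e["event_id"] for e in previous_state)
    # Only state events the client has not already seen survive.
    delta = [e for e in current_state if e["event_id"] not in previous_ids]
    assert [e["event_id"] for e in delta] == ["$c"]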
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index cd9638dd04..c2762f92c7 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -18,6 +18,7 @@ from twisted.internet import defer
 from ._base import BaseHandler
 
 from synapse.api.errors import SynapseError, AuthError
+from synapse.types import UserID
 
 import logging
 
@@ -180,12 +181,12 @@ class TypingNotificationHandler(BaseHandler):
                 },
             ))
 
-        yield defer.DeferredList(deferreds, consumeErrors=False)
+        yield defer.DeferredList(deferreds, consumeErrors=True)
 
     @defer.inlineCallbacks
     def _recv_edu(self, origin, content):
         room_id = content["room_id"]
-        user = self.homeserver.parse_userid(content["user_id"])
+        user = UserID.from_string(content["user_id"])
 
         localusers = set()
 
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 7793bab106..b53a07aa2d 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -13,8 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from synapse.api.errors import CodeMessageException
+from syutil.jsonutil import encode_canonical_json
 
-from synapse.http.agent_name import AGENT_NAME
 from twisted.internet import defer, reactor
 from twisted.web.client import (
     Agent, readBody, FileBodyProducer, PartialDownloadError
@@ -23,7 +24,7 @@ from twisted.web.http_headers import Headers
 
 from StringIO import StringIO
 
-import json
+import simplejson as json
 import logging
 import urllib
 
@@ -42,6 +43,7 @@ class SimpleHttpClient(object):
         # BrowserLikePolicyForHTTPS which will do regular cert validation
         # 'like a browser'
         self.agent = Agent(reactor)
+        self.version_string = hs.version_string
 
     @defer.inlineCallbacks
     def post_urlencoded_get_json(self, uri, args={}):
@@ -53,7 +55,7 @@ class SimpleHttpClient(object):
             uri.encode("ascii"),
             headers=Headers({
                 b"Content-Type": [b"application/x-www-form-urlencoded"],
-                b"User-Agent": [AGENT_NAME],
+                b"User-Agent": [self.version_string],
             }),
             bodyProducer=FileBodyProducer(StringIO(query_bytes))
         )
@@ -63,8 +65,27 @@ class SimpleHttpClient(object):
         defer.returnValue(json.loads(body))
 
     @defer.inlineCallbacks
+    def post_json_get_json(self, uri, post_json):
+        json_str = encode_canonical_json(post_json)
+
+        logger.info("HTTP POST %s -> %s", json_str, uri)
+
+        response = yield self.agent.request(
+            "POST",
+            uri.encode("ascii"),
+            headers=Headers({
+                "Content-Type": ["application/json"]
+            }),
+            bodyProducer=FileBodyProducer(StringIO(json_str))
+        )
+
+        body = yield readBody(response)
+
+        defer.returnValue(json.loads(body))
+
+    @defer.inlineCallbacks
     def get_json(self, uri, args={}):
-        """ Get's some json from the given host and path
+        """ Gets some json from the given URI.
 
         Args:
             uri (str): The URI to request, not including query parameters
@@ -72,15 +93,13 @@ class SimpleHttpClient(object):
                 None.
                 **Note**: The value of each key is assumed to be an iterable
                 and *not* a string.
-
         Returns:
-            Deferred: Succeeds when we get *any* HTTP response.
-
-            The result of the deferred is a tuple of `(code, response)`,
-            where `response` is a dict representing the decoded JSON body.
+            Deferred: Succeeds when we get *any* 2xx HTTP response, with the
+            HTTP body as JSON.
+        Raises:
+            On a non-2xx HTTP response. The response body will be used as the
+            error message.
         """
-
-        yield
         if len(args):
             query_bytes = urllib.urlencode(args, True)
             uri = "%s?%s" % (uri, query_bytes)
@@ -89,13 +108,62 @@ class SimpleHttpClient(object):
             "GET",
             uri.encode("ascii"),
             headers=Headers({
-                b"User-Agent": [AGENT_NAME],
+                b"User-Agent": [self.version_string],
             })
         )
 
         body = yield readBody(response)
 
-        defer.returnValue(json.loads(body))
+        if 200 <= response.code < 300:
+            defer.returnValue(json.loads(body))
+        else:
+            # NB: This is explicitly not json.loads(body)'d because the contract
+            # of CodeMessageException is a *string* message. Callers can always
+            # load it into JSON if they want.
+            raise CodeMessageException(response.code, body)
+
+    @defer.inlineCallbacks
+    def put_json(self, uri, json_body, args={}):
+        """ Puts some json to the given URI.
+
+        Args:
+            uri (str): The URI to request, not including query parameters
+            json_body (dict): The JSON to put in the HTTP body.
+            args (dict): A dictionary used to create query strings, defaults to
+                an empty dict.
+                **Note**: The value of each key is assumed to be an iterable
+                and *not* a string.
+        Returns:
+            Deferred: Succeeds when we get *any* 2xx HTTP response, with the
+            HTTP body as JSON.
+        Raises:
+            On a non-2xx HTTP response.
+        """
+        if len(args):
+            query_bytes = urllib.urlencode(args, True)
+            uri = "%s?%s" % (uri, query_bytes)
+
+        json_str = encode_canonical_json(json_body)
+
+        response = yield self.agent.request(
+            "PUT",
+            uri.encode("ascii"),
+            headers=Headers({
+                b"User-Agent": [self.version_string],
+                "Content-Type": ["application/json"]
+            }),
+            bodyProducer=FileBodyProducer(StringIO(json_str))
+        )
+
+        body = yield readBody(response)
+
+        if 200 <= response.code < 300:
+            defer.returnValue(json.loads(body))
+        else:
+            # NB: This is explicitly not json.loads(body)'d because the contract
+            # of CodeMessageException is a *string* message. Callers can always
+            # load it into JSON if they want.
+            raise CodeMessageException(response.code, body)
 
 
 class CaptchaServerHttpClient(SimpleHttpClient):
@@ -114,7 +182,7 @@ class CaptchaServerHttpClient(SimpleHttpClient):
             bodyProducer=FileBodyProducer(StringIO(query_bytes)),
             headers=Headers({
                 b"Content-Type": [b"application/x-www-form-urlencoded"],
-                b"User-Agent": [AGENT_NAME],
+                b"User-Agent": [self.version_string],
             })
         )
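The NB comments above pin down a deliberate contract: on a non-2xx response, get_json and put_json raise CodeMessageException carrying the raw body as a *string*, and each caller decides whether to decode it. The register handler's _threepid_from_creds uses exactly this shape:

    import simplejson as json

    from twisted.internet import defer

    from synapse.api.errors import CodeMessageException

    @defer.inlineCallbacks
    def get_json_or_error_body(http_client, uri, args):
        # Callers opt in to JSON-decoding the error body themselves.
        try:
            data = yield http_client.get_json(uri, args)
        except CodeMessageException as e:
            data = json.loads(e.msg)
        defer.returnValue(data)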
 
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 1dda3ba2c7..7db001cc63 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -20,18 +20,19 @@ from twisted.web.client import readBody, _AgentBase, _URI
 from twisted.web.http_headers import Headers
 from twisted.web._newclient import ResponseDone
 
-from synapse.http.agent_name import AGENT_NAME
 from synapse.http.endpoint import matrix_federation_endpoint
 from synapse.util.async import sleep
 from synapse.util.logcontext import PreserveLoggingContext
 
 from syutil.jsonutil import encode_canonical_json
 
-from synapse.api.errors import CodeMessageException, SynapseError, Codes
+from synapse.api.errors import (
+    SynapseError, Codes, HttpResponseException,
+)
 
 from syutil.crypto.jsonsign import sign_json
 
-import json
+import simplejson as json
 import logging
 import urllib
 import urlparse
@@ -77,6 +78,8 @@ class MatrixFederationHttpClient(object):
         self.signing_key = hs.config.signing_key[0]
         self.server_name = hs.hostname
         self.agent = MatrixFederationHttpAgent(reactor)
+        self.clock = hs.get_clock()
+        self.version_string = hs.version_string
 
     @defer.inlineCallbacks
     def _create_request(self, destination, method, path_bytes,
@@ -84,7 +87,7 @@ class MatrixFederationHttpClient(object):
                         query_bytes=b"", retry_on_dns_fail=True):
         """ Creates and sends a request to the given url
         """
-        headers_dict[b"User-Agent"] = [AGENT_NAME]
+        headers_dict[b"User-Agent"] = [self.version_string]
         headers_dict[b"Host"] = [destination]
 
         url_bytes = urlparse.urlunparse(
@@ -116,7 +119,7 @@ class MatrixFederationHttpClient(object):
 
             try:
                 with PreserveLoggingContext():
-                    response = yield self.agent.request(
+                    request_deferred = self.agent.request(
                         destination,
                         endpoint,
                         method,
@@ -127,6 +130,11 @@ class MatrixFederationHttpClient(object):
                         producer
                     )
 
+                    response = yield self.clock.time_bound_deferred(
+                        request_deferred,
+                        time_out=60,
+                    )
+
                 logger.debug("Got response to %s", method)
                 break
             except Exception as e:
@@ -136,16 +144,16 @@ class MatrixFederationHttpClient(object):
                         destination,
                         e
                     )
-                    raise SynapseError(400, "Domain specified not found.")
+                    raise
 
                 logger.warn(
-                    "Sending request failed to %s: %s %s : %s",
+                    "Sending request failed to %s: %s %s: %s - %s",
                     destination,
                     method,
                     url_bytes,
-                    e
+                    type(e).__name__,
+                    _flatten_response_never_received(e),
                 )
-                _print_ex(e)
 
                 if retries_left:
                     yield sleep(2 ** (5 - retries_left))
@@ -163,13 +171,13 @@ class MatrixFederationHttpClient(object):
         )
 
         if 200 <= response.code < 300:
-            # We need to update the transactions table to say it was sent?
             pass
         else:
             # :'(
             # Update transactions table?
-            raise CodeMessageException(
-                response.code, response.phrase
+            body = yield readBody(response)
+            raise HttpResponseException(
+                response.code, response.phrase, body
             )
 
         defer.returnValue(response)
@@ -238,11 +246,66 @@ class MatrixFederationHttpClient(object):
             headers_dict={"Content-Type": ["application/json"]},
         )
 
+        if 200 <= response.code < 300:
+            # We need to update the transactions table to say it was sent?
+            c_type = response.headers.getRawHeaders("Content-Type")
+
+            if "application/json" not in c_type:
+                raise RuntimeError(
+                    "Content-Type not application/json"
+                )
+
         logger.debug("Getting resp body")
         body = yield readBody(response)
         logger.debug("Got resp body")
 
-        defer.returnValue((response.code, body))
+        defer.returnValue(json.loads(body))
+
+    @defer.inlineCallbacks
+    def post_json(self, destination, path, data={}):
+        """ Sends the specifed json data using POST
+
+        Args:
+            destination (str): The remote server to send the HTTP request
+                to.
+            path (str): The HTTP path.
+            data (dict): A dict containing the data that will be used as
+                the request body. This will be encoded as JSON.
+
+        Returns:
+            Deferred: Succeeds when we get a 2xx HTTP response. The result
+            will be the decoded JSON body. On a 4xx or 5xx error response a
+            CodeMessageException is raised.
+        """
+
+        def body_callback(method, url_bytes, headers_dict):
+            self.sign_request(
+                destination, method, url_bytes, headers_dict, data
+            )
+            return _JsonProducer(data)
+
+        response = yield self._create_request(
+            destination.encode("ascii"),
+            "POST",
+            path.encode("ascii"),
+            body_callback=body_callback,
+            headers_dict={"Content-Type": ["application/json"]},
+        )
+
+        if 200 <= response.code < 300:
+            # We need to update the transactions table to say it was sent?
+            c_type = response.headers.getRawHeaders("Content-Type")
+
+            if "application/json" not in c_type:
+                raise RuntimeError(
+                    "Content-Type not application/json"
+                )
+
+        logger.debug("Getting resp body")
+        body = yield readBody(response)
+        logger.debug("Got resp body")
+
+        defer.returnValue(json.loads(body))
 
     @defer.inlineCallbacks
     def get_json(self, destination, path, args={}, retry_on_dns_fail=True):
@@ -284,7 +347,18 @@ class MatrixFederationHttpClient(object):
             retry_on_dns_fail=retry_on_dns_fail
         )
 
+        if 200 <= response.code < 300:
+            # We need to update the transactions table to say it was sent?
+            c_type = response.headers.getRawHeaders("Content-Type")
+
+            if "application/json" not in c_type:
+                raise RuntimeError(
+                    "Content-Type not application/json"
+                )
+
+        logger.debug("Getting resp body")
         body = yield readBody(response)
+        logger.debug("Got resp body")
 
         defer.returnValue(json.loads(body))
 
@@ -373,14 +447,6 @@ def _readBodyToFile(response, stream, max_size):
     return d
 
 
-def _print_ex(e):
-    if hasattr(e, "reasons") and e.reasons:
-        for ex in e.reasons:
-            _print_ex(ex)
-    else:
-        logger.warn(e)
-
-
 class _JsonProducer(object):
     """ Used by the twisted http client to create the HTTP body from json
     """
@@ -400,3 +466,13 @@ class _JsonProducer(object):
 
     def stopProducing(self):
         pass
+
+
+def _flatten_response_never_received(e):
+    if hasattr(e, "reasons"):
+        return ", ".join(
+            _flatten_response_never_received(f.value)
+            for f in e.reasons
+        )
+    else:
+        return "%s: %s" % (type(e).__name__, e.message,)
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 8015a22edf..767c3ef79b 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -14,9 +14,8 @@
 # limitations under the License.
 
 
-from synapse.http.agent_name import AGENT_NAME
 from synapse.api.errors import (
-    cs_exception, SynapseError, CodeMessageException
+    cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError
 )
 from synapse.util.logcontext import LoggingContext
 
@@ -69,10 +68,12 @@ class JsonResource(HttpServer, resource.Resource):
 
     _PathEntry = collections.namedtuple("_PathEntry", ["pattern", "callback"])
 
-    def __init__(self):
+    def __init__(self, hs):
         resource.Resource.__init__(self)
 
+        self.clock = hs.get_clock()
         self.path_regexs = {}
+        self.version_string = hs.version_string
 
     def register_path(self, method, path_pattern, callback):
         self.path_regexs.setdefault(method, []).append(
@@ -111,6 +112,8 @@ class JsonResource(HttpServer, resource.Resource):
             This checks if anyone has registered a callback for that method and
             path.
         """
+        code = None
+        start = self.clock.time_msec()
         try:
             # Just say yes to OPTIONS.
             if request.method == "OPTIONS":
@@ -121,37 +124,42 @@ class JsonResource(HttpServer, resource.Resource):
             # and path regex match
             for path_entry in self.path_regexs.get(request.method, []):
                 m = path_entry.pattern.match(request.path)
-                if m:
-                    # We found a match! Trigger callback and then return the
-                    # returned response. We pass both the request and any
-                    # matched groups from the regex to the callback.
+                if not m:
+                    continue
 
-                    args = [
-                        urllib.unquote(u).decode("UTF-8") for u in m.groups()
-                    ]
+                # We found a match! Trigger callback and then return the
+                # returned response. We pass both the request and any
+                # matched groups from the regex to the callback.
 
-                    code, response = yield path_entry.callback(
-                        request,
-                        *args
-                    )
+                args = [
+                    urllib.unquote(u).decode("UTF-8") for u in m.groups()
+                ]
 
-                    self._send_response(request, code, response)
-                    return
+                logger.info(
+                    "Received request: %s %s",
+                    request.method, request.path
+                )
+
+                code, response = yield path_entry.callback(
+                    request,
+                    *args
+                )
+
+                self._send_response(request, code, response)
+                return
 
             # Huh. No one wanted to handle that? Fiiiiiine. Send 400.
-            self._send_response(
-                request,
-                400,
-                {"error": "Unrecognized request"}
-            )
+            raise UnrecognizedRequestError()
         except CodeMessageException as e:
             if isinstance(e, SynapseError):
                 logger.info("%s SynapseError: %s - %s", request, e.code, e.msg)
             else:
                 logger.exception(e)
+
+            code = e.code
             self._send_response(
                 request,
-                e.code,
+                code,
                 cs_exception(e),
                 response_code_message=e.response_code_message
             )
@@ -162,6 +170,14 @@ class JsonResource(HttpServer, resource.Resource):
                 500,
                 {"error": "Internal server error"}
             )
+        finally:
+            code = str(code) if code else "-"
+
+            end = self.clock.time_msec()
+            logger.info(
+                "Processed request: %dms %s %s %s",
+                end - start, code, request.method, request.path
+            )
 
     def _send_response(self, request, code, response_json_object,
                        response_code_message=None):
@@ -175,9 +191,13 @@ class JsonResource(HttpServer, resource.Resource):
             return
 
         # TODO: Only enable CORS for the requests that need it.
-        respond_with_json(request, code, response_json_object, send_cors=True,
-                          response_code_message=response_code_message,
-                          pretty_print=self._request_user_agent_is_curl)
+        respond_with_json(
+            request, code, response_json_object,
+            send_cors=True,
+            response_code_message=response_code_message,
+            pretty_print=self._request_user_agent_is_curl,
+            version_string=self.version_string,
+        )
 
     @staticmethod
     def _request_user_agent_is_curl(request):
@@ -207,18 +227,23 @@ class RootRedirect(resource.Resource):
 
 
 def respond_with_json(request, code, json_object, send_cors=False,
-                      response_code_message=None, pretty_print=False):
+                      response_code_message=None, pretty_print=False,
+                      version_string=""):
     if pretty_print:
         json_bytes = encode_pretty_printed_json(json_object)
     else:
         json_bytes = encode_canonical_json(json_object)
 
-    return respond_with_json_bytes(request, code, json_bytes, send_cors,
-                                   response_code_message=response_code_message)
+    return respond_with_json_bytes(
+        request, code, json_bytes,
+        send_cors=send_cors,
+        response_code_message=response_code_message,
+        version_string=version_string
+    )
 
 
 def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
-                            response_code_message=None):
+                            version_string="", response_code_message=None):
     """Sends encoded JSON in response to the given request.
 
     Args:
@@ -232,7 +257,7 @@ def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
 
     request.setResponseCode(code, message=response_code_message)
     request.setHeader(b"Content-Type", b"application/json")
-    request.setHeader(b"Server", AGENT_NAME)
+    request.setHeader(b"Server", version_string)
     request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),))
 
     if send_cors:
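
The timing instrumentation added to the request handler above follows a simple pattern: record a start timestamp, let the handler run (or raise), and emit one log line from a finally block so that failed requests are timed too. A minimal sketch of the same pattern, assuming only a millisecond clock like the time_msec() used in the diff:

    import logging
    import time

    logger = logging.getLogger(__name__)


    def timed_render(request, callback):
        # illustrative stand-in for JsonResource's timing logic
        code = None
        start = int(time.time() * 1000)  # clock.time_msec() equivalent
        try:
            code, response = callback(request)
            return response
        finally:
            end = int(time.time() * 1000)
            # code stays None if callback raised, and is logged as "-"
            logger.info(
                "Processed request: %dms %s %s %s",
                end - start, str(code) if code else "-",
                request.method, request.path,
            )
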
diff --git a/synapse/http/server_key_resource.py b/synapse/http/server_key_resource.py
index 4fc491dc82..71e9a51f5c 100644
--- a/synapse/http/server_key_resource.py
+++ b/synapse/http/server_key_resource.py
@@ -50,6 +50,7 @@ class LocalKey(Resource):
 
     def __init__(self, hs):
         self.hs = hs
+        self.version_string = hs.version_string
         self.response_body = encode_canonical_json(
             self.response_json_object(hs.config)
         )
@@ -82,7 +83,10 @@ class LocalKey(Resource):
         return json_object
 
     def render_GET(self, request):
-        return respond_with_json_bytes(request, 200, self.response_body)
+        return respond_with_json_bytes(
+            request, 200, self.response_body,
+            version_string=self.version_string
+        )
 
     def getChild(self, name, request):
         if name == '':
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
new file mode 100644
index 0000000000..a4eb6c817c
--- /dev/null
+++ b/synapse/http/servlet.py
@@ -0,0 +1,113 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" This module contains base REST classes for constructing REST servlets. """
+
+from synapse.api.errors import SynapseError
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class RestServlet(object):
+
+    """ A Synapse REST Servlet.
+
+    An implementing class can either provide its own custom 'register' method,
+    or use the automatic pattern handling provided by the base class.
+
+    To use the latter, the implementing class instead provides a `PATTERN`
+    class attribute containing a pre-compiled regular expression. The automatic
+    register method will then use this pattern to register any of the following
+    instance methods associated with the corresponding HTTP method:
+
+      on_GET
+      on_PUT
+      on_POST
+      on_DELETE
+      on_OPTIONS
+
+    Automatically handles turning CodeMessageExceptions thrown by these methods
+    into the appropriate HTTP response.
+    """
+
+    def register(self, http_server):
+        """ Register this servlet with the given HTTP server. """
+        if hasattr(self, "PATTERN"):
+            pattern = self.PATTERN
+
+            for method in ("GET", "PUT", "POST", "OPTIONS", "DELETE"):
+                if hasattr(self, "on_%s" % (method)):
+                    method_handler = getattr(self, "on_%s" % (method))
+                    http_server.register_path(method, pattern, method_handler)
+        else:
+            raise NotImplementedError("RestServlet must register something.")
+
+    @staticmethod
+    def parse_integer(request, name, default=None, required=False):
+        if name in request.args:
+            try:
+                return int(request.args[name][0])
+            except ValueError:
+                message = "Query parameter %r must be an integer" % (name,)
+                raise SynapseError(400, message)
+        else:
+            if required:
+                message = "Missing integer query parameter %r" % (name,)
+                raise SynapseError(400, message)
+            else:
+                return default
+
+    @staticmethod
+    def parse_boolean(request, name, default=None, required=False):
+        if name in request.args:
+            try:
+                return {
+                    "true": True,
+                    "false": False,
+                }[request.args[name][0]]
+            except KeyError:
+                message = (
+                    "Boolean query parameter %r must be one of"
+                    " ['true', 'false']"
+                ) % (name,)
+                raise SynapseError(400, message)
+        else:
+            if required:
+                message = "Missing boolean query parameter %r" % (name,)
+                raise SynapseError(400, message)
+            else:
+                return default
+
+    @staticmethod
+    def parse_string(request, name, default=None, required=False,
+                     allowed_values=None, param_type="string"):
+        if name in request.args:
+            value = request.args[name][0]
+            if allowed_values is not None and value not in allowed_values:
+                message = "Query parameter %r must be one of [%s]" % (
+                    name, ", ".join(repr(v) for v in allowed_values)
+                )
+                raise SynapseError(400, message)
+            else:
+                return value
+        else:
+            if required:
+                message = "Missing %s query parameter %r" % (param_type, name)
+                raise SynapseError(400, message)
+            else:
+                return default
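
To make the registration flow concrete: a servlet subclass only needs a PATTERN and one or more on_METHOD handlers, and register() wires each handler into the HTTP server via register_path. A hypothetical servlet (the name, path and handler body are illustrative, not from this changeset):

    import re

    from synapse.http.servlet import RestServlet


    class EchoRestServlet(RestServlet):
        # hypothetical: matches e.g. GET /echo/hello?times=2
        PATTERN = re.compile("^/echo/(?P<word>[^/]*)$")

        def on_GET(self, request, word):
            # the static helpers raise SynapseError(400, ...) on bad input
            times = RestServlet.parse_integer(request, "times", default=1)
            return 200, {"echo": [word] * times}

JsonResource unquotes the matched regex groups and passes them as positional arguments, which is why on_GET receives word after request.
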
diff --git a/synapse/notifier.py b/synapse/notifier.py
index 3aec1d4af2..df13e8ddb6 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -18,6 +18,7 @@ from twisted.internet import defer
 from synapse.util.logutils import log_function
 from synapse.util.logcontext import PreserveLoggingContext
 from synapse.util.async import run_on_reactor
+from synapse.types import StreamToken
 
 import logging
 
@@ -35,8 +36,10 @@ class _NotificationListener(object):
     so that it can remove itself from the indexes in the Notifier class.
     """
 
-    def __init__(self, user, rooms, from_token, limit, timeout, deferred):
+    def __init__(self, user, rooms, from_token, limit, timeout, deferred,
+                 appservice=None):
         self.user = user
+        self.appservice = appservice
         self.from_token = from_token
         self.limit = limit
         self.timeout = timeout
@@ -60,10 +63,14 @@ class _NotificationListener(object):
             pass
 
         for room in self.rooms:
-            lst = notifier.rooms_to_listeners.get(room, set())
+            lst = notifier.room_to_listeners.get(room, set())
             lst.discard(self)
 
         notifier.user_to_listeners.get(self.user, set()).discard(self)
+        if self.appservice:
+            notifier.appservice_to_listeners.get(
+                self.appservice, set()
+            ).discard(self)
 
 
 class Notifier(object):
@@ -76,8 +83,9 @@ class Notifier(object):
     def __init__(self, hs):
         self.hs = hs
 
-        self.rooms_to_listeners = {}
+        self.room_to_listeners = {}
         self.user_to_listeners = {}
+        self.appservice_to_listeners = {}
 
         self.event_sources = hs.get_event_sources()
 
@@ -98,15 +106,32 @@ class Notifier(object):
         `extra_users` param.
         """
         yield run_on_reactor()
+
+        # poke any interested application service.
+        self.hs.get_handlers().appservice_handler.notify_interested_services(
+            event
+        )
+
         room_id = event.room_id
 
         room_source = self.event_sources.sources["room"]
 
-        listeners = self.rooms_to_listeners.get(room_id, set()).copy()
+        listeners = self.room_to_listeners.get(room_id, set()).copy()
 
         for user in extra_users:
             listeners |= self.user_to_listeners.get(user, set()).copy()
 
+        for appservice in self.appservice_to_listeners:
+            # TODO (kegan): Redundant appservice listener checks?
+            # App services will already be in the room_to_listeners set, but
+            # that isn't enough. They need to be checked here in order to
+            # receive *invites* for users they are interested in. Does this
+            # make the room_to_listeners check somewhat obsolete?
+            if appservice.is_interested(event):
+                listeners |= self.appservice_to_listeners.get(
+                    appservice, set()
+                ).copy()
+
         logger.debug("on_new_room_event listeners %s", listeners)
 
         # TODO (erikj): Can we make this more efficient by hitting the
@@ -134,7 +159,8 @@ class Notifier(object):
 
         with PreserveLoggingContext():
             yield defer.DeferredList(
-                [notify(l).addErrback(eb) for l in listeners]
+                [notify(l).addErrback(eb) for l in listeners],
+                consumeErrors=True,
             )
 
     @defer.inlineCallbacks
@@ -158,7 +184,7 @@ class Notifier(object):
             listeners |= self.user_to_listeners.get(user, set()).copy()
 
         for room in rooms:
-            listeners |= self.rooms_to_listeners.get(room, set()).copy()
+            listeners |= self.room_to_listeners.get(room, set()).copy()
 
         @defer.inlineCallbacks
         def notify(listener):
@@ -202,9 +228,57 @@ class Notifier(object):
 
         with PreserveLoggingContext():
             yield defer.DeferredList(
-                [notify(l).addErrback(eb) for l in listeners]
+                [notify(l).addErrback(eb) for l in listeners],
+                consumeErrors=True,
             )
 
+    @defer.inlineCallbacks
+    def wait_for_events(self, user, rooms, filter, timeout, callback):
+        """Wait until the callback returns a non empty response or the
+        timeout fires.
+        """
+
+        deferred = defer.Deferred()
+
+        from_token = StreamToken("s0", "0", "0")
+
+        listener = [_NotificationListener(
+            user=user,
+            rooms=rooms,
+            from_token=from_token,
+            limit=1,
+            timeout=timeout,
+            deferred=deferred,
+        )]
+
+        if timeout:
+            self._register_with_keys(listener[0])
+
+        result = yield callback()
+        if timeout:
+            timed_out = [False]
+
+            def _timeout_listener():
+                timed_out[0] = True
+                listener[0].notify(self, [], from_token, from_token)
+
+            self.clock.call_later(timeout/1000., _timeout_listener)
+            while not result and not timed_out[0]:
+                yield deferred
+                deferred = defer.Deferred()
+                listener[0] = _NotificationListener(
+                    user=user,
+                    rooms=rooms,
+                    from_token=from_token,
+                    limit=1,
+                    timeout=timeout,
+                    deferred=deferred,
+                )
+                self._register_with_keys(listener[0])
+                result = yield callback()
+
+        defer.returnValue(result)
+
     def get_events_for(self, user, rooms, pagination_config, timeout):
         """ For the given user and rooms, return any new events for them. If
         there are no new events wait for up to `timeout` milliseconds for any
@@ -224,6 +298,10 @@ class Notifier(object):
         if not from_token:
             from_token = yield self.event_sources.get_current_token()
 
+        appservice = yield self.hs.get_datastore().get_app_service_by_user_id(
+            user.to_string()
+        )
+
         listener = _NotificationListener(
             user,
             rooms,
@@ -231,6 +309,7 @@ class Notifier(object):
             limit,
             timeout,
             deferred,
+            appservice=appservice
         )
 
         def _timeout_listener():
@@ -258,11 +337,16 @@ class Notifier(object):
     @log_function
     def _register_with_keys(self, listener):
         for room in listener.rooms:
-            s = self.rooms_to_listeners.setdefault(room, set())
+            s = self.room_to_listeners.setdefault(room, set())
             s.add(listener)
 
         self.user_to_listeners.setdefault(listener.user, set()).add(listener)
 
+        if listener.appservice:
+            self.appservice_to_listeners.setdefault(
+                listener.appservice, set()
+            ).add(listener)
+
     @defer.inlineCallbacks
     @log_function
     def _check_for_updates(self, listener):
@@ -296,5 +380,5 @@ class Notifier(object):
     def _user_joined_room(self, user, room_id):
         new_listeners = self.user_to_listeners.get(user, set())
 
-        listeners = self.rooms_to_listeners.setdefault(room_id, set())
+        listeners = self.room_to_listeners.setdefault(room_id, set())
         listeners |= new_listeners
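
wait_for_events above is a long-poll primitive: run the callback once; while it returns nothing and the timer has not fired, park a fresh listener, wait to be woken, and re-run the callback. A condensed sketch of that control flow, with the notifier and listener plumbing stubbed out:

    from twisted.internet import defer


    @defer.inlineCallbacks
    def long_poll(callback, wait_for_wakeup, timed_out):
        """Sketch: callback() -> result, wait_for_wakeup() -> a Deferred
        that fires on notify-or-timeout, timed_out() -> bool set by the
        timer. Names are illustrative, not the notifier's API."""
        result = yield callback()
        while not result and not timed_out():
            yield wait_for_wakeup()    # parked until notified or timed out
            result = yield callback()  # something may have changed: re-check
        defer.returnValue(result)
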
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
new file mode 100644
index 0000000000..3da0ce8703
--- /dev/null
+++ b/synapse/push/__init__.py
@@ -0,0 +1,427 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.streams.config import PaginationConfig
+from synapse.types import StreamToken, UserID
+
+import synapse.util.async
+import baserules
+
+import logging
+import simplejson as json
+import re
+
+logger = logging.getLogger(__name__)
+
+
+class Pusher(object):
+    INITIAL_BACKOFF = 1000
+    MAX_BACKOFF = 60 * 60 * 1000
+    GIVE_UP_AFTER = 24 * 60 * 60 * 1000
+    DEFAULT_ACTIONS = ['dont_notify']
+
+    INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
+
+    def __init__(self, _hs, profile_tag, user_name, app_id,
+                 app_display_name, device_display_name, pushkey, pushkey_ts,
+                 data, last_token, last_success, failing_since):
+        self.hs = _hs
+        self.evStreamHandler = self.hs.get_handlers().event_stream_handler
+        self.store = self.hs.get_datastore()
+        self.clock = self.hs.get_clock()
+        self.profile_tag = profile_tag
+        self.user_name = user_name
+        self.app_id = app_id
+        self.app_display_name = app_display_name
+        self.device_display_name = device_display_name
+        self.pushkey = pushkey
+        self.pushkey_ts = pushkey_ts
+        self.data = data
+        self.last_token = last_token
+        self.last_success = last_success  # not actually used
+        self.backoff_delay = Pusher.INITIAL_BACKOFF
+        self.failing_since = failing_since
+        self.alive = True
+
+        # The last value of last_active_time that we saw
+        self.last_last_active_time = 0
+        self.has_unread = True
+
+    @defer.inlineCallbacks
+    def _actions_for_event(self, ev):
+        """
+        This should take into account notification settings that the user
+        has configured both globally and per-room when we have the ability
+        to do such things.
+        """
+        if ev['user_id'] == self.user_name:
+            # let's assume you probably know about messages you sent yourself
+            defer.returnValue(['dont_notify'])
+
+        rawrules = yield self.store.get_push_rules_for_user(self.user_name)
+
+        for r in rawrules:
+            r['conditions'] = json.loads(r['conditions'])
+            r['actions'] = json.loads(r['actions'])
+
+        enabled_map = yield self.store.get_push_rules_enabled_for_user(self.user_name)
+
+        user = UserID.from_string(self.user_name)
+
+        rules = baserules.list_with_base_rules(rawrules, user)
+
+        # get *our* member event for display name matching
+        member_events_for_room = yield self.store.get_current_state(
+            room_id=ev['room_id'],
+            event_type='m.room.member',
+            state_key=None
+        )
+        my_display_name = None
+        room_member_count = 0
+        for mev in member_events_for_room:
+            if mev.content['membership'] != 'join':
+                continue
+
+            # This loop does two things:
+            # 1) Find our current display name
+            if mev.state_key == self.user_name and 'displayname' in mev.content:
+                my_display_name = mev.content['displayname']
+
+            # and 2) Get the number of people in that room
+            room_member_count += 1
+
+        for r in rules:
+            if r['rule_id'] in enabled_map and not enabled_map[r['rule_id']]:
+                continue
+            matches = True
+
+            conditions = r['conditions']
+            actions = r['actions']
+
+            for c in conditions:
+                matches &= self._event_fulfills_condition(
+                    ev, c, display_name=my_display_name,
+                    room_member_count=room_member_count
+                )
+            logger.debug(
+                "Rule %s %s",
+                r['rule_id'], "matches" if matches else "doesn't match"
+            )
+            # ignore rules with no actions (we have an explicit 'dont_notify')
+            if len(actions) == 0:
+                logger.warn(
+                    "Ignoring rule id %s with no actions for user %s",
+                    r['rule_id'], r['user_name']
+                )
+                continue
+            if matches:
+                defer.returnValue(actions)
+
+        defer.returnValue(Pusher.DEFAULT_ACTIONS)
+
+    @staticmethod
+    def _glob_to_regexp(glob):
+        r = re.escape(glob)
+        r = re.sub(r'\\\*', r'.*?', r)
+        r = re.sub(r'\\\?', r'.', r)
+
+        # handle [abc], [a-z] and [!a-z] style ranges.
+        r = re.sub(r'\\\[(\\\!|)(.*)\\\]',
+                   lambda x: ('[%s%s]' % (x.group(1) and '^' or '',
+                                          re.sub(r'\\\-', '-', x.group(2)))), r)
+        return r
+
+    def _event_fulfills_condition(self, ev, condition, display_name, room_member_count):
+        if condition['kind'] == 'event_match':
+            if 'pattern' not in condition:
+                logger.warn("event_match condition with no pattern")
+                return False
+            # XXX: optimisation: cache our pattern regexps
+            if condition['key'] == 'content.body':
+                r = r'\b%s\b' % self._glob_to_regexp(condition['pattern'])
+            else:
+                r = r'^%s$' % self._glob_to_regexp(condition['pattern'])
+            val = _value_for_dotted_key(condition['key'], ev)
+            if val is None:
+                return False
+            return re.search(r, val, flags=re.IGNORECASE) is not None
+
+        elif condition['kind'] == 'device':
+            if 'profile_tag' not in condition:
+                return True
+            return condition['profile_tag'] == self.profile_tag
+
+        elif condition['kind'] == 'contains_display_name':
+            # This is special because display names can be different
+            # between rooms and so you can't really hard code it in a rule.
+            # Optimisation: we should cache these names and update them from
+            # the event stream.
+            if 'content' not in ev or 'body' not in ev['content']:
+                return False
+            if not display_name:
+                return False
+            return re.search(
+                "\b%s\b" % re.escape(display_name), ev['content']['body'],
+                flags=re.IGNORECASE
+            ) is not None
+
+        elif condition['kind'] == 'room_member_count':
+            if 'is' not in condition:
+                return False
+            m = Pusher.INEQUALITY_EXPR.match(condition['is'])
+            if not m:
+                return False
+            ineq = m.group(1)
+            rhs = m.group(2)
+            if not rhs.isdigit():
+                return False
+            rhs = int(rhs)
+
+            if ineq == '' or ineq == '==':
+                return room_member_count == rhs
+            elif ineq == '<':
+                return room_member_count < rhs
+            elif ineq == '>':
+                return room_member_count > rhs
+            elif ineq == '>=':
+                return room_member_count >= rhs
+            elif ineq == '<=':
+                return room_member_count <= rhs
+            else:
+                return False
+        else:
+            return True
+
+    @defer.inlineCallbacks
+    def get_context_for_event(self, ev):
+        name_aliases = yield self.store.get_room_name_and_aliases(
+            ev['room_id']
+        )
+
+        ctx = {'aliases': name_aliases[1]}
+        if name_aliases[0] is not None:
+            ctx['name'] = name_aliases[0]
+
+        their_member_events_for_room = yield self.store.get_current_state(
+            room_id=ev['room_id'],
+            event_type='m.room.member',
+            state_key=ev['user_id']
+        )
+        for mev in their_member_events_for_room:
+            if mev.content['membership'] == 'join' and 'displayname' in mev.content:
+                dn = mev.content['displayname']
+                if dn is not None:
+                    ctx['sender_display_name'] = dn
+
+        defer.returnValue(ctx)
+
+    @defer.inlineCallbacks
+    def start(self):
+        if not self.last_token:
+            # First-time setup: get a token to start from (we can't
+            # just start from no token, i.e. 'now',
+            # because we need the result to be reproducible in case
+            # we fail to dispatch the push)
+            config = PaginationConfig(from_token=None, limit='1')
+            chunk = yield self.evStreamHandler.get_stream(
+                self.user_name, config, timeout=0)
+            self.last_token = chunk['end']
+            self.store.update_pusher_last_token(
+                self.app_id, self.pushkey, self.last_token)
+            logger.info("Pusher %s for user %s starting from token %s",
+                        self.pushkey, self.user_name, self.last_token)
+
+        while self.alive:
+            from_tok = StreamToken.from_string(self.last_token)
+            config = PaginationConfig(from_token=from_tok, limit='1')
+            chunk = yield self.evStreamHandler.get_stream(
+                self.user_name, config,
+                timeout=100*365*24*60*60*1000, affect_presence=False
+            )
+
+            # limiting to 1 may get 1 event plus 1 presence event, so
+            # pick out the actual event
+            single_event = None
+            for c in chunk['chunk']:
+                if 'event_id' in c:  # Hmmm...
+                    single_event = c
+                    break
+            if not single_event:
+                self.last_token = chunk['end']
+                continue
+
+            if not self.alive:
+                continue
+
+            processed = False
+            actions = yield self._actions_for_event(single_event)
+            tweaks = _tweaks_for_actions(actions)
+
+            if len(actions) == 0:
+                logger.warn("Empty actions! Using default action.")
+                actions = Pusher.DEFAULT_ACTIONS
+            if 'notify' not in actions and 'dont_notify' not in actions:
+                logger.warn("Neither notify nor dont_notify in actions: adding default")
+                actions.extend(Pusher.DEFAULT_ACTIONS)
+            if 'dont_notify' in actions:
+                logger.debug(
+                    "%s for %s: dont_notify",
+                    single_event['event_id'], self.user_name
+                )
+                processed = True
+            else:
+                rejected = yield self.dispatch_push(single_event, tweaks)
+                self.has_unread = True
+                if isinstance(rejected, list) or isinstance(rejected, tuple):
+                    processed = True
+                    for pk in rejected:
+                        if pk != self.pushkey:
+                            # for sanity, we only remove the pushkey if it
+                            # was the one we actually sent...
+                            logger.warn(
+                                ("Ignoring rejected pushkey %s because we"
+                                 " didn't send it"), pk
+                            )
+                        else:
+                            logger.info(
+                                "Pushkey %s was rejected: removing",
+                                pk
+                            )
+                            yield self.hs.get_pusherpool().remove_pusher(
+                                self.app_id, pk
+                            )
+
+            if not self.alive:
+                continue
+
+            if processed:
+                self.backoff_delay = Pusher.INITIAL_BACKOFF
+                self.last_token = chunk['end']
+                self.store.update_pusher_last_token_and_success(
+                    self.app_id,
+                    self.pushkey,
+                    self.last_token,
+                    self.clock.time_msec()
+                )
+                if self.failing_since:
+                    self.failing_since = None
+                    self.store.update_pusher_failing_since(
+                        self.app_id,
+                        self.pushkey,
+                        self.failing_since)
+            else:
+                if not self.failing_since:
+                    self.failing_since = self.clock.time_msec()
+                    self.store.update_pusher_failing_since(
+                        self.app_id,
+                        self.pushkey,
+                        self.failing_since
+                    )
+
+                if (self.failing_since and
+                   self.failing_since <
+                   self.clock.time_msec() - Pusher.GIVE_UP_AFTER):
+                    # we really only give up so that if the URL gets
+                    # fixed, we don't suddenly deliver a load
+                    # of old notifications.
+                    logger.warn("Giving up on a notification to user %s, "
+                                "pushkey %s",
+                                self.user_name, self.pushkey)
+                    self.backoff_delay = Pusher.INITIAL_BACKOFF
+                    self.last_token = chunk['end']
+                    self.store.update_pusher_last_token(
+                        self.app_id,
+                        self.pushkey,
+                        self.last_token
+                    )
+
+                    self.failing_since = None
+                    self.store.update_pusher_failing_since(
+                        self.app_id,
+                        self.pushkey,
+                        self.failing_since
+                    )
+                else:
+                    logger.warn("Failed to dispatch push for user %s "
+                                "(failing for %dms)."
+                                "Trying again in %dms",
+                                self.user_name,
+                                self.clock.time_msec() - self.failing_since,
+                                self.backoff_delay)
+                    yield synapse.util.async.sleep(self.backoff_delay / 1000.0)
+                    self.backoff_delay *= 2
+                    if self.backoff_delay > Pusher.MAX_BACKOFF:
+                        self.backoff_delay = Pusher.MAX_BACKOFF
+
+    def stop(self):
+        self.alive = False
+
+    def dispatch_push(self, p, tweaks):
+        """
+        Overridden by implementing classes to actually deliver the notification
+        Args:
+            p: The event to notify for as a single event from the event stream
+        Returns: If the notification was delivered, an array containing any
+                 pushkeys that were rejected by the push gateway.
+                 False if the notification could not be delivered (ie.
+                 should be retried).
+        """
+        pass
+
+    def reset_badge_count(self):
+        pass
+
+    def presence_changed(self, state):
+        """
+        We clear badge counts whenever a user's last_active time is bumped
+        This is by no means perfect but I think it's the best we can do
+        without read receipts.
+        """
+        if 'last_active' in state.state:
+            last_active = state.state['last_active']
+            if last_active > self.last_last_active_time:
+                self.last_last_active_time = last_active
+                if self.has_unread:
+                    logger.info("Resetting badge count for %s", self.user_name)
+                    self.reset_badge_count()
+                    self.has_unread = False
+
+
+def _value_for_dotted_key(dotted_key, event):
+    parts = dotted_key.split(".")
+    val = event
+    while len(parts) > 0:
+        if parts[0] not in val:
+            return None
+        val = val[parts[0]]
+        parts = parts[1:]
+    return val
+
+
+def _tweaks_for_actions(actions):
+    tweaks = {}
+    for a in actions:
+        if not isinstance(a, dict):
+            continue
+        if 'set_tweak' in a and 'value' in a:
+            tweaks[a['set_tweak']] = a['value']
+    return tweaks
+
+
+class PusherConfigException(Exception):
+    def __init__(self, msg):
+        super(PusherConfigException, self).__init__(msg)
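
Two helpers above do most of the matching work: _value_for_dotted_key walks nested event keys ('content.body'), and _glob_to_regexp converts a rule's glob into a regexp, which is then word-bounded for content.body and anchored for every other key. A worked example of those semantics (the event is invented):

    import re


    def glob_to_regexp(glob):
        # same transformation as Pusher._glob_to_regexp, minus [] ranges
        r = re.escape(glob)
        r = re.sub(r'\\\*', r'.*?', r)
        r = re.sub(r'\\\?', r'.', r)
        return r


    event = {'content': {'body': 'anyone up for lunchtime?'}}

    # an event_match condition on content.body with pattern 'lunch*'
    regexp = r'\b%s\b' % glob_to_regexp('lunch*')
    body = event['content']['body']
    print(re.search(regexp, body, flags=re.IGNORECASE) is not None)  # True
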
diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py
new file mode 100644
index 0000000000..6e333a3d21
--- /dev/null
+++ b/synapse/push/baserules.py
@@ -0,0 +1,209 @@
+from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
+
+
+def list_with_base_rules(rawrules, user_name):
+    ruleslist = []
+
+    # shove the server default rules for each kind onto the end of its
+    # priority class
+    current_prio_class = max(PRIORITY_CLASS_INVERSE_MAP.keys())
+    for r in rawrules:
+        if r['priority_class'] < current_prio_class:
+            while r['priority_class'] < current_prio_class:
+                ruleslist.extend(make_base_rules(
+                    user_name,
+                    PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
+                ))
+                current_prio_class -= 1
+
+        ruleslist.append(r)
+
+    while current_prio_class > 0:
+        ruleslist.extend(make_base_rules(
+            user_name,
+            PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
+        ))
+        current_prio_class -= 1
+
+    return ruleslist
+
+
+def make_base_rules(user, kind):
+    rules = []
+
+    if kind == 'override':
+        rules = make_base_override_rules()
+    elif kind == 'underride':
+        rules = make_base_underride_rules(user)
+    elif kind == 'content':
+        rules = make_base_content_rules(user)
+
+    for r in rules:
+        r['priority_class'] = PRIORITY_CLASS_MAP[kind]
+        r['default'] = True  # Deprecated, left for backwards compat
+
+    return rules
+
+
+def make_base_content_rules(user):
+    return [
+        {
+            'rule_id': 'global/content/.m.rule.contains_user_name',
+            'conditions': [
+                {
+                    'kind': 'event_match',
+                    'key': 'content.body',
+                    'pattern': user.localpart,  # Matrix ID match
+                }
+            ],
+            'actions': [
+                'notify',
+                {
+                    'set_tweak': 'sound',
+                    'value': 'default',
+                }, {
+                    'set_tweak': 'highlight'
+                }
+            ]
+        },
+    ]
+
+
+def make_base_override_rules():
+    return [
+        {
+            'rule_id': 'global/override/.m.rule.call',
+            'conditions': [
+                {
+                    'kind': 'event_match',
+                    'key': 'type',
+                    'pattern': 'm.call.invite',
+                }
+            ],
+            'actions': [
+                'notify',
+                {
+                    'set_tweak': 'sound',
+                    'value': 'ring'
+                }, {
+                    'set_tweak': 'highlight',
+                    'value': 'false'
+                }
+            ]
+        },
+        {
+            'rule_id': 'global/override/.m.rule.suppress_notices',
+            'conditions': [
+                {
+                    'kind': 'event_match',
+                    'key': 'content.msgtype',
+                    'pattern': 'm.notice',
+                }
+            ],
+            'actions': [
+                'dont_notify',
+            ]
+        },
+        {
+            'rule_id': 'global/override/.m.rule.contains_display_name',
+            'conditions': [
+                {
+                    'kind': 'contains_display_name'
+                }
+            ],
+            'actions': [
+                'notify',
+                {
+                    'set_tweak': 'sound',
+                    'value': 'default'
+                }, {
+                    'set_tweak': 'highlight'
+                }
+            ]
+        },
+        {
+            'rule_id': 'global/override/.m.rule.room_one_to_one',
+            'conditions': [
+                {
+                    'kind': 'room_member_count',
+                    'is': '2'
+                }
+            ],
+            'actions': [
+                'notify',
+                {
+                    'set_tweak': 'sound',
+                    'value': 'default'
+                }, {
+                    'set_tweak': 'highlight',
+                    'value': 'false'
+                }
+            ]
+        }
+    ]
+
+
+def make_base_underride_rules(user):
+    return [
+        {
+            'rule_id': 'global/underride/.m.rule.invite_for_me',
+            'conditions': [
+                {
+                    'kind': 'event_match',
+                    'key': 'type',
+                    'pattern': 'm.room.member',
+                },
+                {
+                    'kind': 'event_match',
+                    'key': 'content.membership',
+                    'pattern': 'invite',
+                },
+                {
+                    'kind': 'event_match',
+                    'key': 'state_key',
+                    'pattern': user.to_string(),
+                },
+            ],
+            'actions': [
+                'notify',
+                {
+                    'set_tweak': 'sound',
+                    'value': 'default'
+                }, {
+                    'set_tweak': 'highlight',
+                    'value': 'false'
+                }
+            ]
+        },
+        {
+            'rule_id': 'global/underride/.m.rule.member_event',
+            'conditions': [
+                {
+                    'kind': 'event_match',
+                    'key': 'type',
+                    'pattern': 'm.room.member',
+                }
+            ],
+            'actions': [
+                'notify', {
+                    'set_tweak': 'highlight',
+                    'value': 'false'
+                }
+            ]
+        },
+        {
+            'rule_id': 'global/underride/.m.rule.message',
+            'conditions': [
+                {
+                    'kind': 'event_match',
+                    'key': 'type',
+                    'pattern': 'm.room.message',
+                }
+            ],
+            'actions': [
+                'notify', {
+                    'set_tweak': 'highlight',
+                    'value': 'false'
+                }
+            ]
+        }
+    ]
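
list_with_base_rules assumes the user's rules arrive sorted from the highest priority class down, and splices the server defaults for each class in at the right point; with no user rules at all the result is just the defaults, override first and underride last. A small sketch, assuming the imports resolve as in this changeset:

    from synapse.push import baserules
    from synapse.types import UserID

    user = UserID.from_string("@alice:example.org")

    # no user-defined rules: output is the server defaults ordered by
    # priority class: override (5), content (4), then underride (1);
    # the 'room' and 'sender' classes have no defaults yet
    rules = baserules.list_with_base_rules([], user)
    print([r['rule_id'] for r in rules])
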
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
new file mode 100644
index 0000000000..a02fed57b4
--- /dev/null
+++ b/synapse/push/httppusher.py
@@ -0,0 +1,148 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.push import Pusher, PusherConfigException
+from synapse.http.client import SimpleHttpClient
+
+from twisted.internet import defer
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class HttpPusher(Pusher):
+    def __init__(self, _hs, profile_tag, user_name, app_id,
+                 app_display_name, device_display_name, pushkey, pushkey_ts,
+                 data, last_token, last_success, failing_since):
+        super(HttpPusher, self).__init__(
+            _hs,
+            profile_tag,
+            user_name,
+            app_id,
+            app_display_name,
+            device_display_name,
+            pushkey,
+            pushkey_ts,
+            data,
+            last_token,
+            last_success,
+            failing_since
+        )
+        if 'url' not in data:
+            raise PusherConfigException(
+                "'url' required in data for HTTP pusher"
+            )
+        self.url = data['url']
+        self.httpCli = SimpleHttpClient(self.hs)
+        self.data_minus_url = {}
+        self.data_minus_url.update(self.data)
+        del self.data_minus_url['url']
+
+    @defer.inlineCallbacks
+    def _build_notification_dict(self, event, tweaks):
+        # we probably do not want to push for every presence update
+        # (we may want to be able to set up notifications when specific
+        # people sign in, but we'd want to only deliver the pertinent ones)
+        # Actually, presence events will not get this far now because we
+        # need to filter them out in the main Pusher code.
+        if 'event_id' not in event:
+            defer.returnValue(None)
+
+        ctx = yield self.get_context_for_event(event)
+
+        d = {
+            'notification': {
+                'id': event['event_id'],
+                'room_id': event['room_id'],
+                'type': event['type'],
+                'sender': event['user_id'],
+                'counts': {  # -- we don't mark messages as read yet so
+                             # we have no way of knowing
+                    # Just set the badge to 1 until we have read receipts
+                    'unread': 1,
+                    # 'missed_calls': 2
+                },
+                'devices': [
+                    {
+                        'app_id': self.app_id,
+                        'pushkey': self.pushkey,
+                        'pushkey_ts': long(self.pushkey_ts / 1000),
+                        'data': self.data_minus_url,
+                        'tweaks': tweaks
+                    }
+                ]
+            }
+        }
+        if event['type'] == 'm.room.member':
+            d['notification']['membership'] = event['content']['membership']
+            d['notification']['user_is_target'] = event['state_key'] == self.user_name
+        if 'content' in event:
+            d['notification']['content'] = event['content']
+
+        if len(ctx['aliases']):
+            d['notification']['room_alias'] = ctx['aliases'][0]
+        if 'sender_display_name' in ctx and len(ctx['sender_display_name']) > 0:
+            d['notification']['sender_display_name'] = ctx['sender_display_name']
+        if 'name' in ctx and len(ctx['name']) > 0:
+            d['notification']['room_name'] = ctx['name']
+
+        defer.returnValue(d)
+
+    @defer.inlineCallbacks
+    def dispatch_push(self, event, tweaks):
+        notification_dict = yield self._build_notification_dict(event, tweaks)
+        if not notification_dict:
+            defer.returnValue([])
+        try:
+            resp = yield self.httpCli.post_json_get_json(self.url, notification_dict)
+        except:
+            logger.warn("Failed to push %s ", self.url)
+            defer.returnValue(False)
+        rejected = []
+        if 'rejected' in resp:
+            rejected = resp['rejected']
+        defer.returnValue(rejected)
+
+    @defer.inlineCallbacks
+    def reset_badge_count(self):
+        d = {
+            'notification': {
+                'id': '',
+                'type': None,
+                'sender': '',
+                'counts': {
+                    'unread': 0,
+                    'missed_calls': 0
+                },
+                'devices': [
+                    {
+                        'app_id': self.app_id,
+                        'pushkey': self.pushkey,
+                        'pushkey_ts': long(self.pushkey_ts / 1000),
+                        'data': self.data_minus_url,
+                    }
+                ]
+            }
+        }
+        try:
+            resp = yield self.httpCli.post_json_get_json(self.url, d)
+        except:
+            logger.exception("Failed to push %s ", self.url)
+            defer.returnValue(False)
+        rejected = []
+        if 'rejected' in resp:
+            rejected = resp['rejected']
+        defer.returnValue(rejected)
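
For reference, the dict built by _build_notification_dict is POSTed as JSON to the pusher's configured url, and the gateway replies with a list of rejected pushkeys (possibly empty), which the Pusher loop then prunes. An illustrative payload for an m.room.message event (all values invented):

    # hypothetical request body; the response is expected to look like
    # {"rejected": []} or {"rejected": ["<pushkey>", ...]}
    notification = {
        'notification': {
            'id': '$event_id:example.org',
            'room_id': '!room_id:example.org',
            'type': 'm.room.message',
            'sender': '@alice:example.org',
            'counts': {'unread': 1},
            'devices': [{
                'app_id': 'org.example.app',
                'pushkey': 'a_push_key',
                'pushkey_ts': 1422584852,
                'data': {},  # the pusher's data minus its 'url' key
                'tweaks': {'sound': 'default'},
            }],
            'content': {'msgtype': 'm.text', 'body': 'hello'},
            'room_alias': '#lunch:example.org',
        },
    }
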
diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py
new file mode 100644
index 0000000000..90babd7224
--- /dev/null
+++ b/synapse/push/pusherpool.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from httppusher import HttpPusher
+from synapse.push import PusherConfigException
+
+from syutil.jsonutil import encode_canonical_json
+
+import logging
+import simplejson as json
+
+logger = logging.getLogger(__name__)
+
+
+class PusherPool:
+    def __init__(self, _hs):
+        self.hs = _hs
+        self.store = self.hs.get_datastore()
+        self.pushers = {}
+        self.last_pusher_started = -1
+
+        distributor = self.hs.get_distributor()
+        distributor.observe(
+            "user_presence_changed", self.user_presence_changed
+        )
+
+    @defer.inlineCallbacks
+    def user_presence_changed(self, user, state):
+        user_name = user.to_string()
+
+        # until we have read receipts, pushers use this to reset a user's
+        # badge counters to zero
+        for p in self.pushers.values():
+            if p.user_name == user_name:
+                yield p.presence_changed(state)
+
+    @defer.inlineCallbacks
+    def start(self):
+        pushers = yield self.store.get_all_pushers()
+        for p in pushers:
+            p['data'] = json.loads(p['data'])
+        self._start_pushers(pushers)
+
+    @defer.inlineCallbacks
+    def add_pusher(self, user_name, profile_tag, kind, app_id,
+                   app_display_name, device_display_name, pushkey, lang, data):
+        # we try to create the pusher just to validate the config: it
+        # will then get pulled out of the database,
+        # recreated, added and started: this means we have only one
+        # code path adding pushers.
+        self._create_pusher({
+            "user_name": user_name,
+            "kind": kind,
+            "profile_tag": profile_tag,
+            "app_id": app_id,
+            "app_display_name": app_display_name,
+            "device_display_name": device_display_name,
+            "pushkey": pushkey,
+            "pushkey_ts": self.hs.get_clock().time_msec(),
+            "lang": lang,
+            "data": data,
+            "last_token": None,
+            "last_success": None,
+            "failing_since": None
+        })
+        yield self._add_pusher_to_store(
+            user_name, profile_tag, kind, app_id,
+            app_display_name, device_display_name,
+            pushkey, lang, data
+        )
+
+    @defer.inlineCallbacks
+    def _add_pusher_to_store(self, user_name, profile_tag, kind, app_id,
+                             app_display_name, device_display_name,
+                             pushkey, lang, data):
+        yield self.store.add_pusher(
+            user_name=user_name,
+            profile_tag=profile_tag,
+            kind=kind,
+            app_id=app_id,
+            app_display_name=app_display_name,
+            device_display_name=device_display_name,
+            pushkey=pushkey,
+            pushkey_ts=self.hs.get_clock().time_msec(),
+            lang=lang,
+            data=encode_canonical_json(data).decode("UTF-8"),
+        )
+        self._refresh_pusher((app_id, pushkey))
+
+    def _create_pusher(self, pusherdict):
+        if pusherdict['kind'] == 'http':
+            return HttpPusher(
+                self.hs,
+                profile_tag=pusherdict['profile_tag'],
+                user_name=pusherdict['user_name'],
+                app_id=pusherdict['app_id'],
+                app_display_name=pusherdict['app_display_name'],
+                device_display_name=pusherdict['device_display_name'],
+                pushkey=pusherdict['pushkey'],
+                pushkey_ts=pusherdict['pushkey_ts'],
+                data=pusherdict['data'],
+                last_token=pusherdict['last_token'],
+                last_success=pusherdict['last_success'],
+                failing_since=pusherdict['failing_since']
+            )
+        else:
+            raise PusherConfigException(
+                "Unknown pusher type '%s' for user %s" %
+                (pusherdict['kind'], pusherdict['user_name'])
+            )
+
+    @defer.inlineCallbacks
+    def _refresh_pusher(self, app_id_pushkey):
+        p = yield self.store.get_pushers_by_app_id_and_pushkey(
+            app_id_pushkey
+        )
+        p['data'] = json.loads(p['data'])
+
+        self._start_pushers([p])
+
+    def _start_pushers(self, pushers):
+        logger.info("Starting %d pushers", len(pushers))
+        for pusherdict in pushers:
+            p = self._create_pusher(pusherdict)
+            if p:
+                fullid = "%s:%s" % (pusherdict['app_id'], pusherdict['pushkey'])
+                if fullid in self.pushers:
+                    self.pushers[fullid].stop()
+                self.pushers[fullid] = p
+                p.start()
+
+    @defer.inlineCallbacks
+    def remove_pusher(self, app_id, pushkey):
+        fullid = "%s:%s" % (app_id, pushkey)
+        if fullid in self.pushers:
+            logger.info("Stopping pusher %s", fullid)
+            self.pushers[fullid].stop()
+            del self.pushers[fullid]
+        yield self.store.delete_pusher_by_app_id_pushkey(app_id, pushkey)
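
The pool deliberately has one creation path: add_pusher builds a throwaway pusher purely to validate the config, persists it, then re-reads it from the store and starts it. A hypothetical call (all values illustrative):

    from twisted.internet import defer


    @defer.inlineCallbacks
    def add_example_pusher(hs):
        # hs is the HomeServer; raises PusherConfigException if, e.g.,
        # 'url' is missing from data for an http pusher
        yield hs.get_pusherpool().add_pusher(
            user_name="@alice:example.org",
            profile_tag="",
            kind="http",
            app_id="org.example.app",
            app_display_name="Example App",
            device_display_name="Alice's phone",
            pushkey="a_push_key",
            lang="en",
            data={"url": "https://push.example.org/notify"},
        )
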
diff --git a/synapse/push/rulekinds.py b/synapse/push/rulekinds.py
new file mode 100644
index 0000000000..660aa4e10e
--- /dev/null
+++ b/synapse/push/rulekinds.py
@@ -0,0 +1,8 @@
+PRIORITY_CLASS_MAP = {
+    'underride': 1,
+    'sender': 2,
+    'room': 3,
+    'content': 4,
+    'override': 5,
+}
+PRIORITY_CLASS_INVERSE_MAP = {v: k for k, v in PRIORITY_CLASS_MAP.items()}
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index b1fae991e0..5fe8a825e3 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -4,9 +4,9 @@ from distutils.version import LooseVersion
 logger = logging.getLogger(__name__)
 
 REQUIREMENTS = {
-    "syutil==0.0.2": ["syutil"],
-    "matrix_angular_sdk==0.6.0": ["syweb==0.6.0"],
-    "Twisted>=14.0.0": ["twisted>=14.0.0"],
+    "syutil>=0.0.3": ["syutil"],
+    "matrix_angular_sdk>=0.6.4": ["syweb>=0.6.4"],
+    "Twisted==14.0.2": ["twisted==14.0.2"],
     "service_identity>=1.0.0": ["service_identity>=1.0.0"],
     "pyopenssl>=0.14": ["OpenSSL>=0.14"],
     "pyyaml": ["yaml"],
@@ -16,9 +16,32 @@ REQUIREMENTS = {
     "py-bcrypt": ["bcrypt"],
     "frozendict>=0.4": ["frozendict"],
     "pillow": ["PIL"],
+    "pydenticon": ["pydenticon"],
 }
 
 
+def github_link(project, version, egg):
+    return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)
+
+DEPENDENCY_LINKS = [
+    github_link(
+        project="pyca/pynacl",
+        version="d4d3175589b892f6ea7c22f466e0e223853516fa",
+        egg="pynacl-0.3.0",
+    ),
+    github_link(
+        project="matrix-org/syutil",
+        version="v0.0.3",
+        egg="syutil-0.0.3",
+    ),
+    github_link(
+        project="matrix-org/matrix-angular-sdk",
+        version="v0.6.4",
+        egg="matrix_angular_sdk-0.6.4",
+    ),
+]
+
+
 class MissingRequirementError(Exception):
     pass
 
@@ -78,3 +101,24 @@ def check_requirements():
                         "Unexpected version of %r in %r. %r != %r"
                         % (dependency, file_path, version, required_version)
                     )
+
+
+def list_requirements():
+    result = []
+    linked = []
+    for link in DEPENDENCY_LINKS:
+        egg = link.split("#egg=")[1]
+        linked.append(egg.split('-')[0])
+        result.append(link)
+    for requirement in REQUIREMENTS:
+        is_linked = False
+        for link in linked:
+            if requirement.replace('-', '_').startswith(link):
+                is_linked = True
+        if not is_linked:
+            result.append(requirement)
+    return result
+
+if __name__ == "__main__":
+    import sys
+    sys.stdout.writelines(req + "\n" for req in list_requirements())
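
list_requirements folds DEPENDENCY_LINKS into REQUIREMENTS: any project that has a tarball link is emitted as the link, and its plain requirement string is suppressed. Running the module prints the combined list, one entry per line:

    from synapse.python_dependencies import list_requirements

    for requirement in list_requirements():
        print(requirement)
    # the three github tarball links come first; "syutil>=0.0.3" and
    # "matrix_angular_sdk>=0.6.4" are skipped because their projects
    # already have links, while entries like "pyyaml" print as-is
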
diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py
index 88ec9cd27d..1a84d94cd9 100644
--- a/synapse/rest/__init__.py
+++ b/synapse/rest/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014, 2015 OpenMarket Ltd
+# Copyright 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,36 +12,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-from . import (
-    room, events, register, login, profile, presence, initial_sync, directory,
-    voip, admin,
-)
-
-
-class RestServletFactory(object):
-
-    """ A factory for creating REST servlets.
-
-    These REST servlets represent the entire client-server REST API. Generally
-    speaking, they serve as wrappers around events and the handlers that
-    process them.
-
-    See synapse.events for information on synapse events.
-    """
-
-    def __init__(self, hs):
-        client_resource = hs.get_resource_for_client()
-
-        # TODO(erikj): There *must* be a better way of doing this.
-        room.register_servlets(hs, client_resource)
-        events.register_servlets(hs, client_resource)
-        register.register_servlets(hs, client_resource)
-        login.register_servlets(hs, client_resource)
-        profile.register_servlets(hs, client_resource)
-        presence.register_servlets(hs, client_resource)
-        initial_sync.register_servlets(hs, client_resource)
-        directory.register_servlets(hs, client_resource)
-        voip.register_servlets(hs, client_resource)
-        admin.register_servlets(hs, client_resource)
diff --git a/synapse/rest/appservice/__init__.py b/synapse/rest/appservice/__init__.py
new file mode 100644
index 0000000000..1a84d94cd9
--- /dev/null
+++ b/synapse/rest/appservice/__init__.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/synapse/rest/appservice/v1/__init__.py b/synapse/rest/appservice/v1/__init__.py
new file mode 100644
index 0000000000..a7877609ad
--- /dev/null
+++ b/synapse/rest/appservice/v1/__init__.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from . import register
+
+from synapse.http.server import JsonResource
+
+
+class AppServiceRestResource(JsonResource):
+    """A resource for version 1 of the matrix application service API."""
+
+    def __init__(self, hs):
+        JsonResource.__init__(self, hs)
+        self.register_servlets(self, hs)
+
+    @staticmethod
+    def register_servlets(appservice_resource, hs):
+        register.register_servlets(hs, appservice_resource)
diff --git a/synapse/rest/appservice/v1/base.py b/synapse/rest/appservice/v1/base.py
new file mode 100644
index 0000000000..65d5bcf9be
--- /dev/null
+++ b/synapse/rest/appservice/v1/base.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module contains base REST classes for constructing client v1 servlets.
+"""
+
+from synapse.http.servlet import RestServlet
+from synapse.api.urls import APP_SERVICE_PREFIX
+import re
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+def as_path_pattern(path_regex):
+    """Creates a regex compiled appservice path with the correct path
+    prefix.
+
+    Args:
+        path_regex (str): The regex string to match. This should NOT have a ^
+        as this will be prefixed.
+    Returns:
+        SRE_Pattern
+    """
+    return re.compile("^" + APP_SERVICE_PREFIX + path_regex)
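+# A quick illustration (the prefix value here is hypothetical):
+# as_path_pattern("/register$") compiles "^" + APP_SERVICE_PREFIX + "/register$",
+# e.g. matching a request path like "/_matrix/appservice/v1/register".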
+
+
+class AppServiceRestServlet(RestServlet):
+    """A base Synapse REST Servlet for the application services version 1 API.
+    """
+
+    def __init__(self, hs):
+        self.hs = hs
+        self.handler = hs.get_handlers().appservice_handler
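+        # (The shared appservice handler; servlets below call e.g.
+        # self.handler.register() / self.handler.unregister() on it.)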
diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py
new file mode 100644
index 0000000000..a4f6159773
--- /dev/null
+++ b/synapse/rest/appservice/v1/register.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module contains REST servlets to do with registration: /register"""
+from twisted.internet import defer
+
+from base import AppServiceRestServlet, as_path_pattern
+from synapse.api.errors import CodeMessageException, SynapseError
+from synapse.storage.appservice import ApplicationService
+
+import json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class RegisterRestServlet(AppServiceRestServlet):
+    """Handles AS registration with the home server.
+    """
+
+    PATTERN = as_path_pattern("/register$")
+
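+    # A sketch of the expected request body (field values illustrative):
+    #   {
+    #       "as_token": "<opaque token>",
+    #       "url": "https://my.appservice.example",
+    #       "namespaces": {...}
+    #   }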
+    @defer.inlineCallbacks
+    def on_POST(self, request):
+        params = _parse_json(request)
+
+        # sanity check required params
+        try:
+            as_token = params["as_token"]
+            as_url = params["url"]
+            if (not isinstance(as_token, basestring) or
+                    not isinstance(as_url, basestring)):
+                raise ValueError
+        except (KeyError, ValueError):
+            raise SynapseError(
+                400, "Missing required keys: as_token(str) / url(str)."
+            )
+
+        try:
+            app_service = ApplicationService(
+                as_token, as_url, params["namespaces"]
+            )
+        except ValueError as e:
+            raise SynapseError(400, e.message)
+
+        app_service = yield self.handler.register(app_service)
+        hs_token = app_service.hs_token
+
+        defer.returnValue((200, {
+            "hs_token": hs_token
+        }))
+
+
+class UnregisterRestServlet(AppServiceRestServlet):
+    """Handles AS registration with the home server.
+    """
+
+    PATTERN = as_path_pattern("/unregister$")
+
+    @defer.inlineCallbacks
+    def on_POST(self, request):
+        params = _parse_json(request)
+        try:
+            as_token = params["as_token"]
+            if not isinstance(as_token, basestring):
+                raise ValueError
+        except (KeyError, ValueError):
+            raise SynapseError(400, "Missing required key: as_token(str)")
+
+        yield self.handler.unregister(as_token)
+
+        raise CodeMessageException(500, "Not implemented")
+
+
+def _parse_json(request):
+    try:
+        content = json.loads(request.content.read())
+        if type(content) != dict:
+            raise SynapseError(400, "Content must be a JSON object.")
+        return content
+    except ValueError:
+        raise SynapseError(400, "Content not JSON.")
+
+
+def register_servlets(hs, http_server):
+    RegisterRestServlet(hs).register(http_server)
+    UnregisterRestServlet(hs).register(http_server)
diff --git a/synapse/rest/base.py b/synapse/rest/base.py
deleted file mode 100644
index c583945527..0000000000
--- a/synapse/rest/base.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014, 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-""" This module contains base REST classes for constructing REST servlets. """
-from synapse.api.urls import CLIENT_PREFIX
-from synapse.rest.transactions import HttpTransactionStore
-import re
-
-import logging
-
-
-logger = logging.getLogger(__name__)
-
-
-def client_path_pattern(path_regex):
-    """Creates a regex compiled client path with the correct client path
-    prefix.
-
-    Args:
-        path_regex (str): The regex string to match. This should NOT have a ^
-        as this will be prefixed.
-    Returns:
-        SRE_Pattern
-    """
-    return re.compile("^" + CLIENT_PREFIX + path_regex)
-
-
-class RestServlet(object):
-
-    """ A Synapse REST Servlet.
-
-    An implementing class can either provide its own custom 'register' method,
-    or use the automatic pattern handling provided by the base class.
-
-    To use this latter, the implementing class instead provides a `PATTERN`
-    class attribute containing a pre-compiled regular expression. The automatic
-    register method will then use this method to register any of the following
-    instance methods associated with the corresponding HTTP method:
-
-      on_GET
-      on_PUT
-      on_POST
-      on_DELETE
-      on_OPTIONS
-
-    Automatically handles turning CodeMessageExceptions thrown by these methods
-    into the appropriate HTTP response.
-    """
-
-    def __init__(self, hs):
-        self.hs = hs
-
-        self.handlers = hs.get_handlers()
-        self.builder_factory = hs.get_event_builder_factory()
-        self.auth = hs.get_auth()
-        self.txns = HttpTransactionStore()
-
-    def register(self, http_server):
-        """ Register this servlet with the given HTTP server. """
-        if hasattr(self, "PATTERN"):
-            pattern = self.PATTERN
-
-            for method in ("GET", "PUT", "POST", "OPTIONS", "DELETE"):
-                if hasattr(self, "on_%s" % (method)):
-                    method_handler = getattr(self, "on_%s" % (method))
-                    http_server.register_path(method, pattern, method_handler)
-        else:
-            raise NotImplementedError("RestServlet must register something.")
diff --git a/synapse/rest/client/__init__.py b/synapse/rest/client/__init__.py
new file mode 100644
index 0000000000..1a84d94cd9
--- /dev/null
+++ b/synapse/rest/client/__init__.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/synapse/rest/client/v1/__init__.py b/synapse/rest/client/v1/__init__.py
new file mode 100644
index 0000000000..21876b3487
--- /dev/null
+++ b/synapse/rest/client/v1/__init__.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import (
+    room, events, register, login, profile, presence, initial_sync, directory,
+    voip, admin, pusher, push_rule
+)
+
+from synapse.http.server import JsonResource
+
+
+class ClientV1RestResource(JsonResource):
+    """A resource for version 1 of the matrix client API."""
+
+    def __init__(self, hs):
+        JsonResource.__init__(self, hs)
+        self.register_servlets(self, hs)
+
+    @staticmethod
+    def register_servlets(client_resource, hs):
+        room.register_servlets(hs, client_resource)
+        events.register_servlets(hs, client_resource)
+        register.register_servlets(hs, client_resource)
+        login.register_servlets(hs, client_resource)
+        profile.register_servlets(hs, client_resource)
+        presence.register_servlets(hs, client_resource)
+        initial_sync.register_servlets(hs, client_resource)
+        directory.register_servlets(hs, client_resource)
+        voip.register_servlets(hs, client_resource)
+        admin.register_servlets(hs, client_resource)
+        pusher.register_servlets(hs, client_resource)
+        push_rule.register_servlets(hs, client_resource)
diff --git a/synapse/rest/admin.py b/synapse/rest/client/v1/admin.py
index 0aa83514c8..2ce754b028 100644
--- a/synapse/rest/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -16,20 +16,22 @@
 from twisted.internet import defer
 
 from synapse.api.errors import AuthError, SynapseError
-from base import RestServlet, client_path_pattern
+from synapse.types import UserID
+
+from base import ClientV1RestServlet, client_path_pattern
 
 import logging
 
 logger = logging.getLogger(__name__)
 
 
-class WhoisRestServlet(RestServlet):
+class WhoisRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/admin/whois/(?P<user_id>[^/]*)")
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
-        target_user = self.hs.parse_userid(user_id)
-        auth_user = yield self.auth.get_user_by_req(request)
+        target_user = UserID.from_string(user_id)
+        auth_user, client = yield self.auth.get_user_by_req(request)
         is_admin = yield self.auth.is_server_admin(auth_user)
 
         if not is_admin and target_user != auth_user:
diff --git a/synapse/rest/client/v1/base.py b/synapse/rest/client/v1/base.py
new file mode 100644
index 0000000000..72332bdb10
--- /dev/null
+++ b/synapse/rest/client/v1/base.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module contains base REST classes for constructing client v1 servlets.
+"""
+
+from synapse.http.servlet import RestServlet
+from synapse.api.urls import CLIENT_PREFIX
+from .transactions import HttpTransactionStore
+import re
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+def client_path_pattern(path_regex):
+    """Creates a regex compiled client path with the correct client path
+    prefix.
+
+    Args:
+        path_regex (str): The regex string to match. This should NOT have a ^
+        as this will be prefixed.
+    Returns:
+        SRE_Pattern
+    """
+    return re.compile("^" + CLIENT_PREFIX + path_regex)
+
+
+class ClientV1RestServlet(RestServlet):
+    """A base Synapse REST Servlet for the client version 1 API.
+    """
+
+    def __init__(self, hs):
+        self.hs = hs
+        self.handlers = hs.get_handlers()
+        self.builder_factory = hs.get_event_builder_factory()
+        self.auth = hs.get_auth()
+        self.txns = HttpTransactionStore()
diff --git a/synapse/rest/directory.py b/synapse/rest/client/v1/directory.py
index 7ff44fdd9e..6758a888b3 100644
--- a/synapse/rest/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -17,9 +17,10 @@
 from twisted.internet import defer
 
 from synapse.api.errors import AuthError, SynapseError, Codes
-from base import RestServlet, client_path_pattern
+from synapse.types import RoomAlias
+from .base import ClientV1RestServlet, client_path_pattern
 
-import json
+import simplejson as json
 import logging
 
 
@@ -30,12 +31,12 @@ def register_servlets(hs, http_server):
     ClientDirectoryServer(hs).register(http_server)
 
 
-class ClientDirectoryServer(RestServlet):
+class ClientDirectoryServer(ClientV1RestServlet):
     PATTERN = client_path_pattern("/directory/room/(?P<room_alias>[^/]*)$")
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_alias):
-        room_alias = self.hs.parse_roomalias(room_alias)
+        room_alias = RoomAlias.from_string(room_alias)
 
         dir_handler = self.handlers.directory_handler
         res = yield dir_handler.get_association(room_alias)
@@ -44,16 +45,14 @@ class ClientDirectoryServer(RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, room_alias):
-        user = yield self.auth.get_user_by_req(request)
-
         content = _parse_json(request)
-        if not "room_id" in content:
+        if "room_id" not in content:
             raise SynapseError(400, "Missing room_id key",
                                errcode=Codes.BAD_JSON)
 
         logger.debug("Got content: %s", content)
 
-        room_alias = self.hs.parse_roomalias(room_alias)
+        room_alias = RoomAlias.from_string(room_alias)
 
         logger.debug("Got room name: %s", room_alias.to_string())
 
@@ -69,34 +68,70 @@ class ClientDirectoryServer(RestServlet):
         dir_handler = self.handlers.directory_handler
 
         try:
-            user_id = user.to_string()
-            yield dir_handler.create_association(
-                user_id, room_alias, room_id, servers
+            # try to auth as a user
+            user, client = yield self.auth.get_user_by_req(request)
+            try:
+                user_id = user.to_string()
+                yield dir_handler.create_association(
+                    user_id, room_alias, room_id, servers
+                )
+                yield dir_handler.send_room_alias_update_event(user_id, room_id)
+            except SynapseError as e:
+                raise e
+            except:
+                logger.exception("Failed to create association")
+                raise
+        except AuthError:
+            # try to auth as an application service
+            service = yield self.auth.get_appservice_by_req(request)
+            yield dir_handler.create_appservice_association(
+                service, room_alias, room_id, servers
+            )
+            logger.info(
+                "Application service at %s created alias %s pointing to %s",
+                service.url,
+                room_alias.to_string(),
+                room_id
             )
-            yield dir_handler.send_room_alias_update_event(user_id, room_id)
-        except SynapseError as e:
-            raise e
-        except:
-            logger.exception("Failed to create association")
-            raise
 
         defer.returnValue((200, {}))
 
     @defer.inlineCallbacks
     def on_DELETE(self, request, room_alias):
-        user = yield self.auth.get_user_by_req(request)
+        dir_handler = self.handlers.directory_handler
+
+        try:
+            service = yield self.auth.get_appservice_by_req(request)
+            room_alias = RoomAlias.from_string(room_alias)
+            yield dir_handler.delete_appservice_association(
+                service, room_alias
+            )
+            logger.info(
+                "Application service at %s deleted alias %s",
+                service.url,
+                room_alias.to_string()
+            )
+            defer.returnValue((200, {}))
+        except AuthError:
+            # fallback to default user behaviour if they aren't an AS
+            pass
+
+        user, client = yield self.auth.get_user_by_req(request)
 
         is_admin = yield self.auth.is_server_admin(user)
         if not is_admin:
             raise AuthError(403, "You need to be a server admin")
 
-        dir_handler = self.handlers.directory_handler
-
-        room_alias = self.hs.parse_roomalias(room_alias)
+        room_alias = RoomAlias.from_string(room_alias)
 
         yield dir_handler.delete_association(
             user.to_string(), room_alias
         )
+        logger.info(
+            "User %s deleted alias %s",
+            user.to_string(),
+            room_alias.to_string()
+        )
 
         defer.returnValue((200, {}))
 
diff --git a/synapse/rest/events.py b/synapse/rest/client/v1/events.py
index bedcb2bcc6..77b7c25a03 100644
--- a/synapse/rest/events.py
+++ b/synapse/rest/client/v1/events.py
@@ -18,7 +18,8 @@ from twisted.internet import defer
 
 from synapse.api.errors import SynapseError
 from synapse.streams.config import PaginationConfig
-from synapse.rest.base import RestServlet, client_path_pattern
+from .base import ClientV1RestServlet, client_path_pattern
+from synapse.events.utils import serialize_event
 
 import logging
 
@@ -26,14 +27,14 @@ import logging
 logger = logging.getLogger(__name__)
 
 
-class EventStreamRestServlet(RestServlet):
+class EventStreamRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/events$")
 
     DEFAULT_LONGPOLL_TIME_MS = 30000
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        auth_user = yield self.auth.get_user_by_req(request)
+        auth_user, client = yield self.auth.get_user_by_req(request)
         try:
             handler = self.handlers.event_stream_handler
             pagin_config = PaginationConfig.from_request(request)
@@ -61,17 +62,22 @@ class EventStreamRestServlet(RestServlet):
 
 
 # TODO: Unit test gets, with and without auth, with different kinds of events.
-class EventRestServlet(RestServlet):
+class EventRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/events/(?P<event_id>[^/]*)$")
 
+    def __init__(self, hs):
+        super(EventRestServlet, self).__init__(hs)
+        self.clock = hs.get_clock()
+
     @defer.inlineCallbacks
     def on_GET(self, request, event_id):
-        auth_user = yield self.auth.get_user_by_req(request)
+        auth_user, client = yield self.auth.get_user_by_req(request)
         handler = self.handlers.event_handler
         event = yield handler.get_event(auth_user, event_id)
 
+        time_now = self.clock.time_msec()
         if event:
-            defer.returnValue((200, self.hs.serialize_event(event)))
+            defer.returnValue((200, serialize_event(event, time_now)))
         else:
             defer.returnValue((404, "Event not found."))
 
diff --git a/synapse/rest/initial_sync.py b/synapse/rest/client/v1/initial_sync.py
index b13d56b286..4a259bba64 100644
--- a/synapse/rest/initial_sync.py
+++ b/synapse/rest/client/v1/initial_sync.py
@@ -16,16 +16,16 @@
 from twisted.internet import defer
 
 from synapse.streams.config import PaginationConfig
-from base import RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_pattern
 
 
 # TODO: Needs unit testing
-class InitialSyncRestServlet(RestServlet):
+class InitialSyncRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/initialSync$")
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        user = yield self.auth.get_user_by_req(request)
+        user, client = yield self.auth.get_user_by_req(request)
         with_feedback = "feedback" in request.args
         as_client_event = "raw" not in request.args
         pagination_config = PaginationConfig.from_request(request)
diff --git a/synapse/rest/login.py b/synapse/rest/client/v1/login.py
index 6b8deff67b..b2257b749d 100644
--- a/synapse/rest/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -17,12 +17,12 @@ from twisted.internet import defer
 
 from synapse.api.errors import SynapseError
 from synapse.types import UserID
-from base import RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_pattern
 
-import json
+import simplejson as json
 
 
-class LoginRestServlet(RestServlet):
+class LoginRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/login$")
     PASS_TYPE = "m.login.password"
 
@@ -64,7 +64,7 @@ class LoginRestServlet(RestServlet):
         defer.returnValue((200, result))
 
 
-class LoginFallbackRestServlet(RestServlet):
+class LoginFallbackRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/login/fallback$")
 
     def on_GET(self, request):
@@ -73,7 +73,7 @@ class LoginFallbackRestServlet(RestServlet):
         return (200, {})
 
 
-class PasswordResetRestServlet(RestServlet):
+class PasswordResetRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/login/reset")
 
     @defer.inlineCallbacks
diff --git a/synapse/rest/presence.py b/synapse/rest/client/v1/presence.py
index ca4d2d21f0..78d4f2b128 100644
--- a/synapse/rest/presence.py
+++ b/synapse/rest/client/v1/presence.py
@@ -18,21 +18,22 @@
 from twisted.internet import defer
 
 from synapse.api.errors import SynapseError
-from base import RestServlet, client_path_pattern
+from synapse.types import UserID
+from .base import ClientV1RestServlet, client_path_pattern
 
-import json
+import simplejson as json
 import logging
 
 logger = logging.getLogger(__name__)
 
 
-class PresenceStatusRestServlet(RestServlet):
+class PresenceStatusRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/presence/(?P<user_id>[^/]*)/status")
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
-        auth_user = yield self.auth.get_user_by_req(request)
-        user = self.hs.parse_userid(user_id)
+        auth_user, client = yield self.auth.get_user_by_req(request)
+        user = UserID.from_string(user_id)
 
         state = yield self.handlers.presence_handler.get_state(
             target_user=user, auth_user=auth_user)
@@ -41,8 +42,8 @@ class PresenceStatusRestServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, user_id):
-        auth_user = yield self.auth.get_user_by_req(request)
-        user = self.hs.parse_userid(user_id)
+        auth_user, client = yield self.auth.get_user_by_req(request)
+        user = UserID.from_string(user_id)
 
         state = {}
         try:
@@ -71,13 +72,13 @@ class PresenceStatusRestServlet(RestServlet):
         return (200, {})
 
 
-class PresenceListRestServlet(RestServlet):
+class PresenceListRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/presence/list/(?P<user_id>[^/]*)")
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
-        auth_user = yield self.auth.get_user_by_req(request)
-        user = self.hs.parse_userid(user_id)
+        auth_user, client = yield self.auth.get_user_by_req(request)
+        user = UserID.from_string(user_id)
 
         if not self.hs.is_mine(user):
             raise SynapseError(400, "User not hosted on this Home Server")
@@ -96,8 +97,8 @@ class PresenceListRestServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request, user_id):
-        auth_user = yield self.auth.get_user_by_req(request)
-        user = self.hs.parse_userid(user_id)
+        auth_user, client = yield self.auth.get_user_by_req(request)
+        user = UserID.from_string(user_id)
 
         if not self.hs.is_mine(user):
             raise SynapseError(400, "User not hosted on this Home Server")
@@ -118,7 +119,7 @@ class PresenceListRestServlet(RestServlet):
                     raise SynapseError(400, "Bad invite value.")
                 if len(u) == 0:
                     continue
-                invited_user = self.hs.parse_userid(u)
+                invited_user = UserID.from_string(u)
                 yield self.handlers.presence_handler.send_invite(
                     observer_user=user, observed_user=invited_user
                 )
@@ -129,7 +130,7 @@ class PresenceListRestServlet(RestServlet):
                     raise SynapseError(400, "Bad drop value.")
                 if len(u) == 0:
                     continue
-                dropped_user = self.hs.parse_userid(u)
+                dropped_user = UserID.from_string(u)
                 yield self.handlers.presence_handler.drop(
                     observer_user=user, observed_user=dropped_user
                 )
diff --git a/synapse/rest/profile.py b/synapse/rest/client/v1/profile.py
index dc6eb424b0..1e77eb49cf 100644
--- a/synapse/rest/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -16,17 +16,18 @@
 """ This module contains REST servlets to do with profile: /profile/<paths> """
 from twisted.internet import defer
 
-from base import RestServlet, client_path_pattern
+from .base import ClientV1RestServlet, client_path_pattern
+from synapse.types import UserID
 
-import json
+import simplejson as json
 
 
-class ProfileDisplaynameRestServlet(RestServlet):
+class ProfileDisplaynameRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)/displayname")
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
-        user = self.hs.parse_userid(user_id)
+        user = UserID.from_string(user_id)
 
         displayname = yield self.handlers.profile_handler.get_displayname(
             user,
@@ -36,8 +37,8 @@ class ProfileDisplaynameRestServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, user_id):
-        auth_user = yield self.auth.get_user_by_req(request)
-        user = self.hs.parse_userid(user_id)
+        auth_user, client = yield self.auth.get_user_by_req(request)
+        user = UserID.from_string(user_id)
 
         try:
             content = json.loads(request.content.read())
@@ -54,12 +55,12 @@ class ProfileDisplaynameRestServlet(RestServlet):
         return (200, {})
 
 
-class ProfileAvatarURLRestServlet(RestServlet):
+class ProfileAvatarURLRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)/avatar_url")
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
-        user = self.hs.parse_userid(user_id)
+        user = UserID.from_string(user_id)
 
         avatar_url = yield self.handlers.profile_handler.get_avatar_url(
             user,
@@ -69,8 +70,8 @@ class ProfileAvatarURLRestServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_PUT(self, request, user_id):
-        auth_user = yield self.auth.get_user_by_req(request)
-        user = self.hs.parse_userid(user_id)
+        auth_user, client = yield self.auth.get_user_by_req(request)
+        user = UserID.from_string(user_id)
 
         try:
             content = json.loads(request.content.read())
@@ -87,12 +88,12 @@ class ProfileAvatarURLRestServlet(RestServlet):
         return (200, {})
 
 
-class ProfileRestServlet(RestServlet):
+class ProfileRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)")
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
-        user = self.hs.parse_userid(user_id)
+        user = UserID.from_string(user_id)
 
         displayname = yield self.handlers.profile_handler.get_displayname(
             user,
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
new file mode 100644
index 0000000000..fef0eb6572
--- /dev/null
+++ b/synapse/rest/client/v1/push_rule.py
@@ -0,0 +1,456 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.errors import (
+    SynapseError, Codes, UnrecognizedRequestError, NotFoundError, StoreError
+)
+from .base import ClientV1RestServlet, client_path_pattern
+from synapse.storage.push_rule import (
+    InconsistentRuleException, RuleNotFoundException
+)
+import synapse.push.baserules as baserules
+from synapse.push.rulekinds import (
+    PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
+)
+
+import simplejson as json
+
+
+class PushRuleRestServlet(ClientV1RestServlet):
+    PATTERN = client_path_pattern("/pushrules/.*$")
+    SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR = (
+        "Unrecognised request: You probably wanted a trailing slash")
+
+    @defer.inlineCallbacks
+    def on_PUT(self, request):
+        spec = _rule_spec_from_path(request.postpath)
+        try:
+            priority_class = _priority_class_from_spec(spec)
+        except InvalidRuleException as e:
+            raise SynapseError(400, e.message)
+
+        user, _ = yield self.auth.get_user_by_req(request)
+
+        if '/' in spec['rule_id'] or '\\' in spec['rule_id']:
+            raise SynapseError(400, "rule_id may not contain slashes")
+
+        content = _parse_json(request)
+
+        if 'attr' in spec:
+            yield self.set_rule_attr(user.to_string(), spec, content)
+            defer.returnValue((200, {}))
+
+        try:
+            (conditions, actions) = _rule_tuple_from_request_object(
+                spec['template'],
+                spec['rule_id'],
+                content,
+                device=spec['profile_tag'] if 'profile_tag' in spec else None
+            )
+        except InvalidRuleException as e:
+            raise SynapseError(400, e.message)
+
+        before = request.args.get("before", None)
+        if before and len(before):
+            before = before[0]
+        after = request.args.get("after", None)
+        if after and len(after):
+            after = after[0]
+
+        try:
+            yield self.hs.get_datastore().add_push_rule(
+                user_name=user.to_string(),
+                rule_id=_namespaced_rule_id_from_spec(spec),
+                priority_class=priority_class,
+                conditions=conditions,
+                actions=actions,
+                before=before,
+                after=after
+            )
+        except InconsistentRuleException as e:
+            raise SynapseError(400, e.message)
+        except RuleNotFoundException as e:
+            raise SynapseError(400, e.message)
+
+        defer.returnValue((200, {}))
+
+    @defer.inlineCallbacks
+    def on_DELETE(self, request):
+        spec = _rule_spec_from_path(request.postpath)
+
+        user, _ = yield self.auth.get_user_by_req(request)
+
+        namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
+
+        try:
+            yield self.hs.get_datastore().delete_push_rule(
+                user.to_string(), namespaced_rule_id
+            )
+            defer.returnValue((200, {}))
+        except StoreError as e:
+            if e.code == 404:
+                raise NotFoundError()
+            else:
+                raise
+
+    @defer.inlineCallbacks
+    def on_GET(self, request):
+        user, _ = yield self.auth.get_user_by_req(request)
+
+        # We build up the full structure and then decide which bits of it
+        # to send, which means doing unnecessary work sometimes, but it is
+        # probably not going to make a whole lot of difference.
+        rawrules = yield self.hs.get_datastore().get_push_rules_for_user(
+            user.to_string()
+        )
+
+        for r in rawrules:
+            r["conditions"] = json.loads(r["conditions"])
+            r["actions"] = json.loads(r["actions"])
+
+        ruleslist = baserules.list_with_base_rules(rawrules, user)
+
+        rules = {'global': {}, 'device': {}}
+
+        rules['global'] = _add_empty_priority_class_arrays(rules['global'])
+
+        enabled_map = yield self.hs.get_datastore().\
+            get_push_rules_enabled_for_user(user.to_string())
+
+        for r in ruleslist:
+            rulearray = None
+
+            template_name = _priority_class_to_template_name(r['priority_class'])
+
+            if r['priority_class'] > PRIORITY_CLASS_MAP['override']:
+                # per-device rule
+                profile_tag = _profile_tag_from_conditions(r["conditions"])
+                r = _strip_device_condition(r)
+                if not profile_tag:
+                    continue
+                if profile_tag not in rules['device']:
+                    rules['device'][profile_tag] = {}
+                    rules['device'][profile_tag] = (
+                        _add_empty_priority_class_arrays(
+                            rules['device'][profile_tag]
+                        )
+                    )
+
+                rulearray = rules['device'][profile_tag][template_name]
+            else:
+                rulearray = rules['global'][template_name]
+
+            template_rule = _rule_to_template(r)
+            if template_rule:
+                template_rule['enabled'] = True
+                if r['rule_id'] in enabled_map:
+                    template_rule['enabled'] = enabled_map[r['rule_id']]
+                rulearray.append(template_rule)
+
+        path = request.postpath[1:]
+
+        if path == []:
+            # we're a reference impl: pedantry is our job.
+            raise UnrecognizedRequestError(
+                PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
+            )
+
+        if path[0] == '':
+            defer.returnValue((200, rules))
+        elif path[0] == 'global':
+            path = path[1:]
+            result = _filter_ruleset_with_path(rules['global'], path)
+            defer.returnValue((200, result))
+        elif path[0] == 'device':
+            path = path[1:]
+            if path == []:
+                raise UnrecognizedRequestError(
+                    PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
+                )
+            if path[0] == '':
+                defer.returnValue((200, rules['device']))
+
+            profile_tag = path[0]
+            path = path[1:]
+            if profile_tag not in rules['device']:
+                ret = {}
+                ret = _add_empty_priority_class_arrays(ret)
+                defer.returnValue((200, ret))
+            ruleset = rules['device'][profile_tag]
+            result = _filter_ruleset_with_path(ruleset, path)
+            defer.returnValue((200, result))
+        else:
+            raise UnrecognizedRequestError()
+
+    def on_OPTIONS(self, _):
+        return 200, {}
+
+    def set_rule_attr(self, user_name, spec, val):
+        if spec['attr'] == 'enabled':
+            if not isinstance(val, bool):
+                raise SynapseError(400, "Value for 'enabled' must be boolean")
+            namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
+            return self.hs.get_datastore().set_push_rule_enabled(
+                user_name, namespaced_rule_id, val
+            )
+        else:
+            raise UnrecognizedRequestError()
+
+    def get_rule_attr(self, user_name, namespaced_rule_id, attr):
+        if attr == 'enabled':
+            return self.hs.get_datastore().get_push_rule_enabled_by_user_rule_id(
+                user_name, namespaced_rule_id
+            )
+        else:
+            raise UnrecognizedRequestError()
+
+
+def _rule_spec_from_path(path):
+    if len(path) < 2:
+        raise UnrecognizedRequestError()
+    if path[0] != 'pushrules':
+        raise UnrecognizedRequestError()
+
+    scope = path[1]
+    path = path[2:]
+    if scope not in ['global', 'device']:
+        raise UnrecognizedRequestError()
+
+    device = None
+    if scope == 'device':
+        if len(path) == 0:
+            raise UnrecognizedRequestError()
+        device = path[0]
+        path = path[1:]
+
+    if len(path) == 0:
+        raise UnrecognizedRequestError()
+
+    template = path[0]
+    path = path[1:]
+
+    if len(path) == 0 or len(path[0]) == 0:
+        raise UnrecognizedRequestError()
+
+    rule_id = path[0]
+
+    spec = {
+        'scope': scope,
+        'template': template,
+        'rule_id': rule_id
+    }
+    if device:
+        spec['profile_tag'] = device
+
+    path = path[1:]
+
+    if len(path) > 0 and len(path[0]) > 0:
+        spec['attr'] = path[0]
+
+    return spec
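+# e.g. (a sketch) postpath ['pushrules', 'device', 'work_phone', 'room',
+# '!abc:example.com', 'enabled'] yields {'scope': 'device', 'profile_tag':
+# 'work_phone', 'template': 'room', 'rule_id': '!abc:example.com',
+# 'attr': 'enabled'}.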
+
+
+def _rule_tuple_from_request_object(rule_template, rule_id, req_obj, device=None):
+    if rule_template in ['override', 'underride']:
+        if 'conditions' not in req_obj:
+            raise InvalidRuleException("Missing 'conditions'")
+        conditions = req_obj['conditions']
+        for c in conditions:
+            if 'kind' not in c:
+                raise InvalidRuleException("Condition without 'kind'")
+    elif rule_template == 'room':
+        conditions = [{
+            'kind': 'event_match',
+            'key': 'room_id',
+            'pattern': rule_id
+        }]
+    elif rule_template == 'sender':
+        conditions = [{
+            'kind': 'event_match',
+            'key': 'user_id',
+            'pattern': rule_id
+        }]
+    elif rule_template == 'content':
+        if 'pattern' not in req_obj:
+            raise InvalidRuleException("Content rule missing 'pattern'")
+        pat = req_obj['pattern']
+
+        conditions = [{
+            'kind': 'event_match',
+            'key': 'content.body',
+            'pattern': pat
+        }]
+    else:
+        raise InvalidRuleException("Unknown rule template: %s" % (rule_template,))
+
+    if device:
+        conditions.append({
+            'kind': 'device',
+            'profile_tag': device
+        })
+
+    if 'actions' not in req_obj:
+        raise InvalidRuleException("No actions found")
+    actions = req_obj['actions']
+
+    for a in actions:
+        if a in ['notify', 'dont_notify', 'coalesce']:
+            pass
+        elif isinstance(a, dict) and 'set_tweak' in a:
+            pass
+        else:
+            raise InvalidRuleException("Unrecognised action")
+
+    return conditions, actions
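+# e.g. (a sketch) the 'content' template with req_obj
+# {"pattern": "lunch", "actions": ["notify"]} yields a single event_match
+# condition on content.body plus the supplied actions.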
+
+
+def _add_empty_priority_class_arrays(d):
+    for pc in PRIORITY_CLASS_MAP.keys():
+        d[pc] = []
+    return d
+
+
+def _profile_tag_from_conditions(conditions):
+    """
+    Given a list of conditions, return the profile tag of the
+    device rule if there is one
+    """
+    for c in conditions:
+        if c['kind'] == 'device':
+            return c['profile_tag']
+    return None
+
+
+def _filter_ruleset_with_path(ruleset, path):
+    if path == []:
+        raise UnrecognizedRequestError(
+            PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
+        )
+
+    if path[0] == '':
+        return ruleset
+    template_kind = path[0]
+    if template_kind not in ruleset:
+        raise UnrecognizedRequestError()
+    path = path[1:]
+    if path == []:
+        raise UnrecognizedRequestError(
+            PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
+        )
+    if path[0] == '':
+        return ruleset[template_kind]
+    rule_id = path[0]
+
+    the_rule = None
+    for r in ruleset[template_kind]:
+        if r['rule_id'] == rule_id:
+            the_rule = r
+    if the_rule is None:
+        raise NotFoundError
+
+    path = path[1:]
+    if len(path) == 0:
+        return the_rule
+
+    attr = path[0]
+    if attr in the_rule:
+        return the_rule[attr]
+    else:
+        raise UnrecognizedRequestError()
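+# e.g. (a sketch) path ['content', 'my_rule', 'actions'] drills into
+# ruleset['content'], finds the rule whose rule_id is 'my_rule' and returns
+# just its 'actions' value.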
+
+
+def _priority_class_from_spec(spec):
+    if spec['template'] not in PRIORITY_CLASS_MAP.keys():
+        raise InvalidRuleException("Unknown template: %s" % (spec['kind']))
+    pc = PRIORITY_CLASS_MAP[spec['template']]
+
+    if spec['scope'] == 'device':
+        pc += len(PRIORITY_CLASS_MAP)
+
+    return pc
+
+
+def _priority_class_to_template_name(pc):
+    if pc > PRIORITY_CLASS_MAP['override']:
+        # per-device
+        prio_class_index = pc - len(PRIORITY_CLASS_MAP)
+        return PRIORITY_CLASS_INVERSE_MAP[prio_class_index]
+    else:
+        return PRIORITY_CLASS_INVERSE_MAP[pc]
+
+
+def _rule_to_template(rule):
+    unscoped_rule_id = None
+    if 'rule_id' in rule:
+        unscoped_rule_id = _rule_id_from_namespaced(rule['rule_id'])
+
+    template_name = _priority_class_to_template_name(rule['priority_class'])
+    if template_name in ['override', 'underride']:
+        templaterule = {k: rule[k] for k in ["conditions", "actions"]}
+    elif template_name in ["sender", "room"]:
+        templaterule = {'actions': rule['actions']}
+        unscoped_rule_id = rule['conditions'][0]['pattern']
+    elif template_name == 'content':
+        if len(rule["conditions"]) != 1:
+            return None
+        thecond = rule["conditions"][0]
+        if "pattern" not in thecond:
+            return None
+        templaterule = {'actions': rule['actions']}
+        templaterule["pattern"] = thecond["pattern"]
+
+    if unscoped_rule_id:
+        templaterule['rule_id'] = unscoped_rule_id
+    if 'default' in rule:
+        templaterule['default'] = rule['default']
+    return templaterule
+
+
+def _strip_device_condition(rule):
+    for i, c in enumerate(rule['conditions']):
+        if c['kind'] == 'device':
+            del rule['conditions'][i]
+    return rule
+
+
+def _namespaced_rule_id_from_spec(spec):
+    if spec['scope'] == 'global':
+        scope = 'global'
+    else:
+        scope = 'device/%s' % (spec['profile_tag'])
+    return "%s/%s/%s" % (scope, spec['template'], spec['rule_id'])
+
+
+def _rule_id_from_namespaced(in_rule_id):
+    return in_rule_id.split('/')[-1]
+
+
+class InvalidRuleException(Exception):
+    pass
+
+
+# XXX: C+ped from rest/room.py - surely this should be common?
+def _parse_json(request):
+    try:
+        content = json.loads(request.content.read())
+        return content
+    except ValueError:
+        raise SynapseError(400, "Content not JSON.", errcode=Codes.NOT_JSON)
+
+
+def register_servlets(hs, http_server):
+    PushRuleRestServlet(hs).register(http_server)
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
new file mode 100644
index 0000000000..6045e86f34
--- /dev/null
+++ b/synapse/rest/client/v1/pusher.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.errors import SynapseError, Codes
+from synapse.push import PusherConfigException
+from .base import ClientV1RestServlet, client_path_pattern
+
+import simplejson as json
+
+
+class PusherRestServlet(ClientV1RestServlet):
+    PATTERN = client_path_pattern("/pushers/set$")
+
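+    # A sketch of the expected body (values illustrative). POSTing the same
+    # app_id/pushkey with "kind": null deletes the pusher instead:
+    #   {
+    #       "profile_tag": "...", "kind": "http", "app_id": "...",
+    #       "app_display_name": "...", "device_display_name": "...",
+    #       "pushkey": "...", "lang": "en", "data": {...}
+    #   }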
+    @defer.inlineCallbacks
+    def on_POST(self, request):
+        user, _ = yield self.auth.get_user_by_req(request)
+
+        content = _parse_json(request)
+
+        pusher_pool = self.hs.get_pusherpool()
+
+        if ('pushkey' in content and 'app_id' in content
+                and 'kind' in content and
+                content['kind'] is None):
+            yield pusher_pool.remove_pusher(
+                content['app_id'], content['pushkey']
+            )
+            defer.returnValue((200, {}))
+
+        reqd = ['profile_tag', 'kind', 'app_id', 'app_display_name',
+                'device_display_name', 'pushkey', 'lang', 'data']
+        missing = []
+        for i in reqd:
+            if i not in content:
+                missing.append(i)
+        if len(missing):
+            raise SynapseError(400, "Missing parameters: "+','.join(missing),
+                               errcode=Codes.MISSING_PARAM)
+
+        try:
+            yield pusher_pool.add_pusher(
+                user_name=user.to_string(),
+                profile_tag=content['profile_tag'],
+                kind=content['kind'],
+                app_id=content['app_id'],
+                app_display_name=content['app_display_name'],
+                device_display_name=content['device_display_name'],
+                pushkey=content['pushkey'],
+                lang=content['lang'],
+                data=content['data']
+            )
+        except PusherConfigException as pce:
+            raise SynapseError(400, "Config Error: "+pce.message,
+                               errcode=Codes.MISSING_PARAM)
+
+        defer.returnValue((200, {}))
+
+    def on_OPTIONS(self, _):
+        return 200, {}
+
+
+# XXX: C+ped from rest/room.py - surely this should be common?
+def _parse_json(request):
+    try:
+        content = json.loads(request.content.read())
+        if type(content) != dict:
+            raise SynapseError(400, "Content must be a JSON object.",
+                               errcode=Codes.NOT_JSON)
+        return content
+    except ValueError:
+        raise SynapseError(400, "Content not JSON.", errcode=Codes.NOT_JSON)
+
+
+def register_servlets(hs, http_server):
+    PusherRestServlet(hs).register(http_server)
diff --git a/synapse/rest/register.py b/synapse/rest/client/v1/register.py
index e3b26902d9..f5acfb945f 100644
--- a/synapse/rest/register.py
+++ b/synapse/rest/client/v1/register.py
@@ -18,14 +18,14 @@ from twisted.internet import defer
 
 from synapse.api.errors import SynapseError, Codes
 from synapse.api.constants import LoginType
-from base import RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_pattern
 import synapse.util.stringutils as stringutils
 
 from synapse.util.async import run_on_reactor
 
 from hashlib import sha1
 import hmac
-import json
+import simplejson as json
 import logging
 import urllib
 
@@ -42,7 +42,7 @@ else:
     compare_digest = lambda a, b: a == b
 
 
-class RegisterRestServlet(RestServlet):
+class RegisterRestServlet(ClientV1RestServlet):
     """Handles registration with the home server.
 
     This servlet is in control of the registration flow; the registration
@@ -59,6 +59,7 @@ class RegisterRestServlet(RestServlet):
         # }
         # TODO: persistent storage
         self.sessions = {}
+        self.disable_registration = hs.config.disable_registration
 
     def on_GET(self, request):
         if self.hs.config.enable_registration_captcha:
@@ -107,10 +108,16 @@ class RegisterRestServlet(RestServlet):
 
         try:
             login_type = register_json["type"]
+
+            is_application_server = login_type == LoginType.APPLICATION_SERVICE
+            if self.disable_registration and not is_application_server:
+                raise SynapseError(403, "Registration has been disabled")
+
             stages = {
                 LoginType.RECAPTCHA: self._do_recaptcha,
                 LoginType.PASSWORD: self._do_password,
-                LoginType.EMAIL_IDENTITY: self._do_email_identity
+                LoginType.EMAIL_IDENTITY: self._do_email_identity,
+                LoginType.APPLICATION_SERVICE: self._do_app_service
             }
 
             session_info = self._get_session_info(request, session)
@@ -276,6 +283,27 @@ class RegisterRestServlet(RestServlet):
         self._remove_session(session)
         defer.returnValue(result)
 
+    @defer.inlineCallbacks
+    def _do_app_service(self, request, register_json, session):
+        if "access_token" not in request.args:
+            raise SynapseError(400, "Expected application service token.")
+        if "user" not in register_json:
+            raise SynapseError(400, "Expected 'user' key.")
+
+        as_token = request.args["access_token"][0]
+        user_localpart = register_json["user"].encode("utf-8")
+
+        handler = self.handlers.registration_handler
+        (user_id, token) = yield handler.appservice_register(
+            user_localpart, as_token
+        )
+        self._remove_session(session)
+        defer.returnValue({
+            "user_id": user_id,
+            "access_token": token,
+            "home_server": self.hs.hostname,
+        })
+
 
 def _parse_json(request):
     try:
diff --git a/synapse/rest/room.py b/synapse/rest/client/v1/room.py
index 48bba2a5f3..0346afb1b4 100644
--- a/synapse/rest/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -16,12 +16,14 @@
 """ This module contains REST servlets to do with rooms: /rooms/<paths> """
 from twisted.internet import defer
 
-from base import RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_pattern
 from synapse.api.errors import SynapseError, Codes
 from synapse.streams.config import PaginationConfig
 from synapse.api.constants import EventTypes, Membership
+from synapse.types import UserID, RoomID, RoomAlias
+from synapse.events.utils import serialize_event
 
-import json
+import simplejson as json
 import logging
 import urllib
 
@@ -29,7 +31,7 @@ import urllib
 logger = logging.getLogger(__name__)
 
 
-class RoomCreateRestServlet(RestServlet):
+class RoomCreateRestServlet(ClientV1RestServlet):
     # No PATTERN; we have custom dispatch rules here
 
     def register(self, http_server):
@@ -60,7 +62,7 @@ class RoomCreateRestServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_POST(self, request):
-        auth_user = yield self.auth.get_user_by_req(request)
+        auth_user, client = yield self.auth.get_user_by_req(request)
 
         room_config = self.get_room_config(request)
         info = yield self.make_room(room_config, auth_user, None)
@@ -93,7 +95,7 @@ class RoomCreateRestServlet(RestServlet):
 
 
 # TODO: Needs unit testing for generic events
-class RoomStateEventRestServlet(RestServlet):
+class RoomStateEventRestServlet(ClientV1RestServlet):
     def register(self, http_server):
         # /room/$roomid/state/$eventtype
         no_state_key = "/rooms/(?P<room_id>[^/]*)/state/(?P<event_type>[^/]*)$"
@@ -123,7 +125,7 @@ class RoomStateEventRestServlet(RestServlet):
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id, event_type, state_key):
-        user = yield self.auth.get_user_by_req(request)
+        user, client = yield self.auth.get_user_by_req(request)
 
         msg_handler = self.handlers.message_handler
         data = yield msg_handler.get_room_data(
@@ -140,8 +142,8 @@ class RoomStateEventRestServlet(RestServlet):
         defer.returnValue((200, data.get_dict()["content"]))
 
     @defer.inlineCallbacks
-    def on_PUT(self, request, room_id, event_type, state_key):
-        user = yield self.auth.get_user_by_req(request)
+    def on_PUT(self, request, room_id, event_type, state_key, txn_id=None):
+        user, client = yield self.auth.get_user_by_req(request)
 
         content = _parse_json(request)
 
@@ -156,13 +158,15 @@ class RoomStateEventRestServlet(RestServlet):
             event_dict["state_key"] = state_key
 
         msg_handler = self.handlers.message_handler
-        yield msg_handler.create_and_send_event(event_dict)
+        yield msg_handler.create_and_send_event(
+            event_dict, client=client, txn_id=txn_id,
+        )
 
         defer.returnValue((200, {}))
 
 
 # TODO: Needs unit testing for generic events + feedback
-class RoomSendEventRestServlet(RestServlet):
+class RoomSendEventRestServlet(ClientV1RestServlet):
 
     def register(self, http_server):
         # /rooms/$roomid/send/$event_type[/$txn_id]
@@ -170,8 +174,8 @@ class RoomSendEventRestServlet(RestServlet):
         register_txn_path(self, PATTERN, http_server, with_get=True)
 
     @defer.inlineCallbacks
-    def on_POST(self, request, room_id, event_type):
-        user = yield self.auth.get_user_by_req(request)
+    def on_POST(self, request, room_id, event_type, txn_id=None):
+        user, client = yield self.auth.get_user_by_req(request)
         content = _parse_json(request)
 
         msg_handler = self.handlers.message_handler
@@ -181,7 +185,9 @@ class RoomSendEventRestServlet(RestServlet):
                 "content": content,
                 "room_id": room_id,
                 "sender": user.to_string(),
-            }
+            },
+            client=client,
+            txn_id=txn_id,
         )
 
         defer.returnValue((200, {"event_id": event.event_id}))
@@ -198,14 +204,14 @@ class RoomSendEventRestServlet(RestServlet):
         except KeyError:
             pass
 
-        response = yield self.on_POST(request, room_id, event_type)
+        response = yield self.on_POST(request, room_id, event_type, txn_id)
 
         self.txns.store_client_transaction(request, txn_id, response)
         defer.returnValue(response)
 
 
 # TODO: Needs unit testing for room ID + alias joins
-class JoinRoomAliasServlet(RestServlet):
+class JoinRoomAliasServlet(ClientV1RestServlet):
 
     def register(self, http_server):
         # /join/$room_identifier[/$txn_id]
@@ -213,8 +219,8 @@ class JoinRoomAliasServlet(RestServlet):
         register_txn_path(self, PATTERN, http_server)
 
     @defer.inlineCallbacks
-    def on_POST(self, request, room_identifier):
-        user = yield self.auth.get_user_by_req(request)
+    def on_POST(self, request, room_identifier, txn_id=None):
+        user, client = yield self.auth.get_user_by_req(request)
 
         # the identifier could be a room alias or a room id. Try one then the
         # other if it fails to parse, without swallowing other valid
@@ -223,10 +229,10 @@ class JoinRoomAliasServlet(RestServlet):
         identifier = None
         is_room_alias = False
         try:
-            identifier = self.hs.parse_roomalias(room_identifier)
+            identifier = RoomAlias.from_string(room_identifier)
             is_room_alias = True
         except SynapseError:
-            identifier = self.hs.parse_roomid(room_identifier)
+            identifier = RoomID.from_string(room_identifier)
 
         # TODO: Support for specifying the home server to join with?
 
@@ -243,7 +249,9 @@ class JoinRoomAliasServlet(RestServlet):
                     "room_id": identifier.to_string(),
                     "sender": user.to_string(),
                     "state_key": user.to_string(),
-                }
+                },
+                client=client,
+                txn_id=txn_id,
             )
 
             defer.returnValue((200, {"room_id": identifier.to_string()}))
@@ -257,14 +265,14 @@ class JoinRoomAliasServlet(RestServlet):
         except KeyError:
             pass
 
-        response = yield self.on_POST(request, room_identifier)
+        response = yield self.on_POST(request, room_identifier, txn_id)
 
         self.txns.store_client_transaction(request, txn_id, response)
         defer.returnValue(response)
 
 
 # TODO: Needs unit testing
-class PublicRoomListRestServlet(RestServlet):
+class PublicRoomListRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/publicRooms$")
 
     @defer.inlineCallbacks
@@ -275,13 +283,13 @@ class PublicRoomListRestServlet(RestServlet):
 
 
 # TODO: Needs unit testing
-class RoomMemberListRestServlet(RestServlet):
+class RoomMemberListRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/members$")
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
         # TODO support Pagination stream API (limit/tokens)
-        user = yield self.auth.get_user_by_req(request)
+        user, client = yield self.auth.get_user_by_req(request)
         handler = self.handlers.room_member_handler
         members = yield handler.get_room_members_as_pagination_chunk(
             room_id=room_id,
@@ -289,7 +297,7 @@ class RoomMemberListRestServlet(RestServlet):
 
         for event in members["chunk"]:
             # FIXME: should probably be state_key here, not user_id
-            target_user = self.hs.parse_userid(event["user_id"])
+            target_user = UserID.from_string(event["user_id"])
             # Presence is an optional cache; don't fail if we can't fetch it
             try:
                 presence_handler = self.handlers.presence_handler
@@ -304,12 +312,12 @@ class RoomMemberListRestServlet(RestServlet):
 
 
 # TODO: Needs unit testing
-class RoomMessageListRestServlet(RestServlet):
+class RoomMessageListRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/messages$")
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
-        user = yield self.auth.get_user_by_req(request)
+        user, client = yield self.auth.get_user_by_req(request)
         pagination_config = PaginationConfig.from_request(
             request, default_limit=10,
         )
@@ -328,12 +336,12 @@ class RoomMessageListRestServlet(RestServlet):
 
 
 # TODO: Needs unit testing
-class RoomStateRestServlet(RestServlet):
+class RoomStateRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/state$")
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
-        user = yield self.auth.get_user_by_req(request)
+        user, client = yield self.auth.get_user_by_req(request)
         handler = self.handlers.message_handler
         # Get all the current state for this room
         events = yield handler.get_state_events(
@@ -344,12 +352,12 @@ class RoomStateRestServlet(RestServlet):
 
 
 # TODO: Needs unit testing
-class RoomInitialSyncRestServlet(RestServlet):
+class RoomInitialSyncRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/initialSync$")
 
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
-        user = yield self.auth.get_user_by_req(request)
+        user, client = yield self.auth.get_user_by_req(request)
         pagination_config = PaginationConfig.from_request(request)
         content = yield self.handlers.message_handler.room_initial_sync(
             room_id=room_id,
@@ -359,9 +367,13 @@ class RoomInitialSyncRestServlet(RestServlet):
         defer.returnValue((200, content))
 
 
-class RoomTriggerBackfill(RestServlet):
+class RoomTriggerBackfill(ClientV1RestServlet):
     PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/backfill$")
 
+    def __init__(self, hs):
+        super(RoomTriggerBackfill, self).__init__(hs)
+        self.clock = hs.get_clock()
+
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
         remote_server = urllib.unquote(
@@ -373,12 +385,14 @@ class RoomTriggerBackfill(RestServlet):
         handler = self.handlers.federation_handler
         events = yield handler.backfill(remote_server, room_id, limit)
 
-        res = [self.hs.serialize_event(event) for event in events]
+        time_now = self.clock.time_msec()
+
+        res = [serialize_event(event, time_now) for event in events]
         defer.returnValue((200, res))
 
 
 # TODO: Needs unit testing
-class RoomMembershipRestServlet(RestServlet):
+class RoomMembershipRestServlet(ClientV1RestServlet):
 
     def register(self, http_server):
         # /rooms/$roomid/[invite|join|leave]
@@ -387,8 +401,8 @@ class RoomMembershipRestServlet(RestServlet):
         register_txn_path(self, PATTERN, http_server)
 
     @defer.inlineCallbacks
-    def on_POST(self, request, room_id, membership_action):
-        user = yield self.auth.get_user_by_req(request)
+    def on_POST(self, request, room_id, membership_action, txn_id=None):
+        user, client = yield self.auth.get_user_by_req(request)
 
         content = _parse_json(request)
 
@@ -410,7 +424,9 @@ class RoomMembershipRestServlet(RestServlet):
                 "room_id": room_id,
                 "sender": user.to_string(),
                 "state_key": state_key,
-            }
+            },
+            client=client,
+            txn_id=txn_id,
         )
 
         defer.returnValue((200, {}))
@@ -424,20 +440,22 @@ class RoomMembershipRestServlet(RestServlet):
         except KeyError:
             pass
 
-        response = yield self.on_POST(request, room_id, membership_action)
+        response = yield self.on_POST(
+            request, room_id, membership_action, txn_id
+        )
 
         self.txns.store_client_transaction(request, txn_id, response)
         defer.returnValue(response)
 
 
-class RoomRedactEventRestServlet(RestServlet):
+class RoomRedactEventRestServlet(ClientV1RestServlet):
     def register(self, http_server):
         PATTERN = ("/rooms/(?P<room_id>[^/]*)/redact/(?P<event_id>[^/]*)")
         register_txn_path(self, PATTERN, http_server)
 
     @defer.inlineCallbacks
-    def on_POST(self, request, room_id, event_id):
-        user = yield self.auth.get_user_by_req(request)
+    def on_POST(self, request, room_id, event_id, txn_id=None):
+        user, client = yield self.auth.get_user_by_req(request)
         content = _parse_json(request)
 
         msg_handler = self.handlers.message_handler
@@ -448,7 +466,9 @@ class RoomRedactEventRestServlet(RestServlet):
                 "room_id": room_id,
                 "sender": user.to_string(),
                 "redacts": event_id,
-            }
+            },
+            client=client,
+            txn_id=txn_id,
         )
 
         defer.returnValue((200, {"event_id": event.event_id}))
@@ -462,23 +482,23 @@ class RoomRedactEventRestServlet(RestServlet):
         except KeyError:
             pass
 
-        response = yield self.on_POST(request, room_id, event_id)
+        response = yield self.on_POST(request, room_id, event_id, txn_id)
 
         self.txns.store_client_transaction(request, txn_id, response)
         defer.returnValue(response)
 
 
-class RoomTypingRestServlet(RestServlet):
+class RoomTypingRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern(
         "/rooms/(?P<room_id>[^/]*)/typing/(?P<user_id>[^/]*)$"
     )
 
     @defer.inlineCallbacks
     def on_PUT(self, request, room_id, user_id):
-        auth_user = yield self.auth.get_user_by_req(request)
+        auth_user, client = yield self.auth.get_user_by_req(request)
 
         room_id = urllib.unquote(room_id)
-        target_user = self.hs.parse_userid(urllib.unquote(user_id))
+        target_user = UserID.from_string(urllib.unquote(user_id))
 
         content = _parse_json(request)
 
diff --git a/synapse/rest/transactions.py b/synapse/rest/client/v1/transactions.py
index d933fea18a..d933fea18a 100644
--- a/synapse/rest/transactions.py
+++ b/synapse/rest/client/v1/transactions.py
diff --git a/synapse/rest/voip.py b/synapse/rest/client/v1/voip.py
index 011c35e69b..11d08fbced 100644
--- a/synapse/rest/voip.py
+++ b/synapse/rest/client/v1/voip.py
@@ -15,7 +15,7 @@
 
 from twisted.internet import defer
 
-from base import RestServlet, client_path_pattern
+from base import ClientV1RestServlet, client_path_pattern
 
 
 import hmac
@@ -23,12 +23,12 @@ import hashlib
 import base64
 
 
-class VoipRestServlet(RestServlet):
+class VoipRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/voip/turnServer$")
 
     @defer.inlineCallbacks
     def on_GET(self, request):
-        auth_user = yield self.auth.get_user_by_req(request)
+        auth_user, client = yield self.auth.get_user_by_req(request)
 
         turnUris = self.hs.config.turn_uris
         turnSecret = self.hs.config.turn_shared_secret
diff --git a/synapse/rest/client/v2_alpha/__init__.py b/synapse/rest/client/v2_alpha/__init__.py
new file mode 100644
index 0000000000..bca65f2a6a
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/__init__.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import (
+    sync,
+    filter
+)
+
+from synapse.http.server import JsonResource
+
+
+class ClientV2AlphaRestResource(JsonResource):
+    """A resource for version 2 alpha of the matrix client API."""
+
+    def __init__(self, hs):
+        JsonResource.__init__(self, hs)
+        self.register_servlets(self, hs)
+
+    @staticmethod
+    def register_servlets(client_resource, hs):
+        sync.register_servlets(hs, client_resource)
+        filter.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/v2_alpha/_base.py b/synapse/rest/client/v2_alpha/_base.py
new file mode 100644
index 0000000000..22dc5cb862
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/_base.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module contains base REST classes for constructing client v1 servlets.
+"""
+
+from synapse.api.urls import CLIENT_V2_ALPHA_PREFIX
+import re
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+def client_v2_pattern(path_regex):
+    """Creates a regex compiled client path with the correct client path
+    prefix.
+
+    Args:
+        path_regex (str): The regex string to match. This should NOT have a ^
+        as this will be prefixed.
+    Returns:
+        SRE_Pattern
+    """
+    return re.compile("^" + CLIENT_V2_ALPHA_PREFIX + path_regex)
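
To make the prefixing concrete, here is a standalone sketch with the prefix inlined (assuming CLIENT_V2_ALPHA_PREFIX is "/_matrix/client/v2_alpha", as synapse.api.urls defines it):

    import re

    CLIENT_V2_ALPHA_PREFIX = "/_matrix/client/v2_alpha"  # assumed value

    def client_v2_pattern(path_regex):
        # The ^ anchor is added here, which is why callers must omit it.
        return re.compile("^" + CLIENT_V2_ALPHA_PREFIX + path_regex)

    pattern = client_v2_pattern("/sync$")
    assert pattern.match("/_matrix/client/v2_alpha/sync")
    assert not pattern.match("/_matrix/client/api/v1/sync")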
diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py
new file mode 100644
index 0000000000..703250cea8
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/filter.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.errors import AuthError, SynapseError
+from synapse.http.servlet import RestServlet
+from synapse.types import UserID
+
+from ._base import client_v2_pattern
+
+import simplejson as json
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class GetFilterRestServlet(RestServlet):
+    PATTERN = client_v2_pattern("/user/(?P<user_id>[^/]*)/filter/(?P<filter_id>[^/]*)")
+
+    def __init__(self, hs):
+        super(GetFilterRestServlet, self).__init__()
+        self.hs = hs
+        self.auth = hs.get_auth()
+        self.filtering = hs.get_filtering()
+
+    @defer.inlineCallbacks
+    def on_GET(self, request, user_id, filter_id):
+        target_user = UserID.from_string(user_id)
+        auth_user, client = yield self.auth.get_user_by_req(request)
+
+        if target_user != auth_user:
+            raise AuthError(403, "Cannot get filters for other users")
+
+        if not self.hs.is_mine(target_user):
+            raise SynapseError(400, "Can only get filters for local users")
+
+        try:
+            filter_id = int(filter_id)
+        except:
+            raise SynapseError(400, "Invalid filter_id")
+
+        try:
+            filter = yield self.filtering.get_user_filter(
+                user_localpart=target_user.localpart,
+                filter_id=filter_id,
+            )
+
+            defer.returnValue((200, filter.filter_json))
+        except KeyError:
+            raise SynapseError(400, "No such filter")
+
+
+class CreateFilterRestServlet(RestServlet):
+    PATTERN = client_v2_pattern("/user/(?P<user_id>[^/]*)/filter")
+
+    def __init__(self, hs):
+        super(CreateFilterRestServlet, self).__init__()
+        self.hs = hs
+        self.auth = hs.get_auth()
+        self.filtering = hs.get_filtering()
+
+    @defer.inlineCallbacks
+    def on_POST(self, request, user_id):
+        target_user = UserID.from_string(user_id)
+        auth_user, client = yield self.auth.get_user_by_req(request)
+
+        if target_user != auth_user:
+            raise AuthError(403, "Cannot create filters for other users")
+
+        if not self.hs.is_mine(target_user):
+            raise SynapseError(400, "Can only create filters for local users")
+
+        try:
+            content = json.loads(request.content.read())
+
+            # TODO(paul): check for required keys and invalid keys
+        except:
+            raise SynapseError(400, "Invalid filter definition")
+
+        filter_id = yield self.filtering.add_user_filter(
+            user_localpart=target_user.localpart,
+            user_filter=content,
+        )
+
+        defer.returnValue((200, {"filter_id": str(filter_id)}))
+
+
+def register_servlets(hs, http_server):
+    GetFilterRestServlet(hs).register(http_server)
+    CreateFilterRestServlet(hs).register(http_server)
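
The two servlets give a create-then-fetch round trip over HTTP. A hedged client-side sketch using the requests library; the hostname, access token, and filter body are placeholders, and (per the TODO above) the filter definition is not yet validated server-side:

    import requests

    BASE = "https://hs.example.org/_matrix/client/v2_alpha"  # placeholder
    AUTH = {"access_token": "<token>"}                       # placeholder
    USER = "@alice:hs.example.org"

    # POST the definition; the server replies with an opaque filter_id.
    resp = requests.post(
        "%s/user/%s/filter" % (BASE, USER),
        params=AUTH,
        json={"types": ["m.room.message"]},  # illustrative body only
    )
    filter_id = resp.json()["filter_id"]

    # GET the same definition back by id.
    resp = requests.get(
        "%s/user/%s/filter/%s" % (BASE, USER, filter_id), params=AUTH,
    )
    print(resp.json())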
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
new file mode 100644
index 0000000000..3056ec45cf
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -0,0 +1,207 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.http.servlet import RestServlet
+from synapse.handlers.sync import SyncConfig
+from synapse.types import StreamToken
+from synapse.events.utils import (
+    serialize_event, format_event_for_client_v2_without_event_id,
+)
+from synapse.api.filtering import Filter
+from ._base import client_v2_pattern
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class SyncRestServlet(RestServlet):
+    """
+
+    GET parameters::
+        timeout(int): How long to wait for new events in milliseconds.
+        limit(int): Maximum number of events per room to return.
+        gap(bool): Create gaps in the message history if the limit is
+            exceeded, to ensure that the client has the most recent messages.
+            Defaults to "true".
+        sort(str,str): tuple of sort key (e.g. "timeline") and direction
+            (e.g. "asc", "desc"). Defaults to "timeline,asc".
+        since(batch_token): Batch token when asking for incremental deltas.
+        set_presence(str): What state the device presence should be set to.
+            Defaults to "online".
+        backfill(bool): Should the HS request message history from other
+            servers. This may take a long time, making it unsuitable for
+            clients expecting a prompt response. Defaults to "false".
+        filter(filter_id): A filter to apply to the events returned.
+        filter_*: Filter override parameters.
+
+    Response JSON::
+        {
+            "next_batch": // batch token for the next /sync
+            "private_user_data": // private events for this user.
+            "public_user_data": // public events for all users including the
+                                // public events for this user.
+            "rooms": [{ // List of rooms with updates.
+                "room_id": // Id of the room being updated
+                "limited": // Was the per-room event limit exceeded?
+                "published": // Is the room published by our HS?
+                "event_map": // Map of EventID -> event JSON.
+                "events": { // The recent events in the room if gap is "true"
+                            // otherwise the next events in the room.
+                    "batch": [] // list of EventIDs in the "event_map".
+                    "prev_batch": // back token for getting previous events.
+                }
+                "state": [] // list of EventIDs updating the current state to
+                            // be what it should be at the end of the batch.
+                "ephemeral": []
+            }]
+        }
+    """
+
+    PATTERN = client_v2_pattern("/sync$")
+    ALLOWED_SORT = set(["timeline,asc", "timeline,desc"])
+    ALLOWED_PRESENCE = set(["online", "offline", "idle"])
+
+    def __init__(self, hs):
+        super(SyncRestServlet, self).__init__()
+        self.auth = hs.get_auth()
+        self.sync_handler = hs.get_handlers().sync_handler
+        self.clock = hs.get_clock()
+        self.filtering = hs.get_filtering()
+
+    @defer.inlineCallbacks
+    def on_GET(self, request):
+        user, client = yield self.auth.get_user_by_req(request)
+
+        timeout = self.parse_integer(request, "timeout", default=0)
+        limit = self.parse_integer(request, "limit", required=True)
+        gap = self.parse_boolean(request, "gap", default=True)
+        sort = self.parse_string(
+            request, "sort", default="timeline,asc",
+            allowed_values=self.ALLOWED_SORT
+        )
+        since = self.parse_string(request, "since")
+        set_presence = self.parse_string(
+            request, "set_presence", default="online",
+            allowed_values=self.ALLOWED_PRESENCE
+        )
+        backfill = self.parse_boolean(request, "backfill", default=False)
+        filter_id = self.parse_string(request, "filter", default=None)
+
+        logger.info(
+            "/sync: user=%r, timeout=%r, limit=%r, gap=%r, sort=%r, since=%r,"
+            " set_presence=%r, backfill=%r, filter_id=%r" % (
+                user, timeout, limit, gap, sort, since, set_presence,
+                backfill, filter_id
+            )
+        )
+
+        # TODO(mjark): Load filter and apply overrides.
+        try:
+            filter = yield self.filtering.get_user_filter(
+                user.localpart, filter_id
+            )
+        except:
+            filter = Filter({})
+        # filter = filter.apply_overrides(http_request)
+        # if filter.matches(event):
+        #   # stuff
+
+        sync_config = SyncConfig(
+            user=user,
+            client_info=client,
+            gap=gap,
+            limit=limit,
+            sort=sort,
+            backfill=backfill,
+            filter=filter,
+        )
+
+        if since is not None:
+            since_token = StreamToken.from_string(since)
+        else:
+            since_token = None
+
+        sync_result = yield self.sync_handler.wait_for_sync_for_user(
+            sync_config, since_token=since_token, timeout=timeout
+        )
+
+        time_now = self.clock.time_msec()
+
+        response_content = {
+            "public_user_data": self.encode_user_data(
+                sync_result.public_user_data, filter, time_now
+            ),
+            "private_user_data": self.encode_user_data(
+                sync_result.private_user_data, filter, time_now
+            ),
+            "rooms": self.encode_rooms(
+                sync_result.rooms, filter, time_now, client.token_id
+            ),
+            "next_batch": sync_result.next_batch.to_string(),
+        }
+
+        defer.returnValue((200, response_content))
+
+    def encode_user_data(self, events, filter, time_now):
+        return events
+
+    def encode_rooms(self, rooms, filter, time_now, token_id):
+        return [
+            self.encode_room(room, filter, time_now, token_id)
+            for room in rooms
+        ]
+
+    @staticmethod
+    def encode_room(room, filter, time_now, token_id):
+        event_map = {}
+        state_events = filter.filter_room_state(room.state)
+        recent_events = filter.filter_room_events(room.events)
+        state_event_ids = []
+        recent_event_ids = []
+        for event in state_events:
+            # TODO(mjark): Respect formatting requirements in the filter.
+            event_map[event.event_id] = serialize_event(
+                event, time_now, token_id=token_id,
+                event_format=format_event_for_client_v2_without_event_id,
+            )
+            state_event_ids.append(event.event_id)
+
+        for event in recent_events:
+            # TODO(mjark): Respect formatting requirements in the filter.
+            event_map[event.event_id] = serialize_event(
+                event, time_now, token_id=token_id,
+                event_format=format_event_for_client_v2_without_event_id,
+            )
+            recent_event_ids.append(event.event_id)
+        result = {
+            "room_id": room.room_id,
+            "event_map": event_map,
+            "events": {
+                "batch": recent_event_ids,
+                "prev_batch": room.prev_batch.to_string(),
+            },
+            "state": state_event_ids,
+            "limited": room.limited,
+            "published": room.published,
+            "ephemeral": room.ephemeral,
+        }
+        return result
+
+
+def register_servlets(hs, http_server):
+    SyncRestServlet(hs).register(http_server)
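
Putting the GET parameters and response shape together, a hedged polling sketch (placeholder host and token; limit is the only required parameter, and each response's next_batch becomes the next request's since token):

    import requests

    BASE = "https://hs.example.org/_matrix/client/v2_alpha"  # placeholder
    params = {
        "access_token": "<token>",  # placeholder
        "limit": 10,
        "timeout": 30000,  # long-poll for up to 30s of new events
    }

    since = None
    while True:
        if since is not None:
            params["since"] = since
        body = requests.get(BASE + "/sync", params=params).json()
        for room in body["rooms"]:
            print(room["room_id"], len(room["events"]["batch"]))
        # Feed next_batch back in to get an incremental delta next time.
        since = body["next_batch"]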
diff --git a/synapse/media/__init__.py b/synapse/rest/media/__init__.py
index e69de29bb2..e69de29bb2 100644
--- a/synapse/media/__init__.py
+++ b/synapse/rest/media/__init__.py
diff --git a/synapse/media/v0/__init__.py b/synapse/rest/media/v0/__init__.py
index e69de29bb2..e69de29bb2 100644
--- a/synapse/media/v0/__init__.py
+++ b/synapse/rest/media/v0/__init__.py
diff --git a/synapse/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py
index 79ae0e3d74..e77a20fb2e 100644
--- a/synapse/media/v0/content_repository.py
+++ b/synapse/rest/media/v0/content_repository.py
@@ -25,7 +25,7 @@ from twisted.web import server, resource
 from twisted.internet import defer
 
 import base64
-import json
+import simplejson as json
 import logging
 import os
 import re
@@ -66,7 +66,7 @@ class ContentRepoResource(resource.Resource):
     @defer.inlineCallbacks
     def map_request_to_name(self, request):
         # auth the user
-        auth_user = yield self.auth.get_user_by_req(request)
+        auth_user, client = yield self.auth.get_user_by_req(request)
 
         # namespace all file uploads on the user
         prefix = base64.urlsafe_b64encode(
diff --git a/synapse/media/v1/__init__.py b/synapse/rest/media/v1/__init__.py
index d6c6690577..d6c6690577 100644
--- a/synapse/media/v1/__init__.py
+++ b/synapse/rest/media/v1/__init__.py
diff --git a/synapse/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py
index 688e7376ad..b10cbddb81 100644
--- a/synapse/media/v1/base_resource.py
+++ b/synapse/rest/media/v1/base_resource.py
@@ -54,7 +54,7 @@ class BaseMediaResource(Resource):
             try:
                 yield request_handler(self, request)
             except CodeMessageException as e:
-                logger.exception(e)
+                logger.info("Responding with error: %r", e)
                 respond_with_json(
                     request, e.code, cs_exception(e), send_cors=True
                 )
@@ -82,7 +82,7 @@ class BaseMediaResource(Resource):
             raise SynapseError(
                 404,
                 "Invalid media id token %r" % (request.postpath,),
-                Codes.UNKKOWN,
+                Codes.UNKNOWN,
             )
 
     @staticmethod
diff --git a/synapse/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index c585bb11f7..c585bb11f7 100644
--- a/synapse/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
diff --git a/synapse/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py
index ed9a58e9d9..ed9a58e9d9 100644
--- a/synapse/media/v1/filepath.py
+++ b/synapse/rest/media/v1/filepath.py
diff --git a/synapse/rest/media/v1/identicon_resource.py b/synapse/rest/media/v1/identicon_resource.py
new file mode 100644
index 0000000000..912856386a
--- /dev/null
+++ b/synapse/rest/media/v1/identicon_resource.py
@@ -0,0 +1,51 @@
+from pydenticon import Generator
+from twisted.web.resource import Resource
+
+FOREGROUND = [
+    "rgb(45,79,255)",
+    "rgb(254,180,44)",
+    "rgb(226,121,234)",
+    "rgb(30,179,253)",
+    "rgb(232,77,65)",
+    "rgb(49,203,115)",
+    "rgb(141,69,170)"
+]
+
+BACKGROUND = "rgb(224,224,224)"
+SIZE = 5
+
+
+class IdenticonResource(Resource):
+    isLeaf = True
+
+    def __init__(self):
+        Resource.__init__(self)
+        self.generator = Generator(
+            SIZE, SIZE, foreground=FOREGROUND, background=BACKGROUND,
+        )
+
+    def generate_identicon(self, name, width, height):
+        v_padding = width % SIZE
+        h_padding = height % SIZE
+        top_padding = v_padding // 2
+        left_padding = h_padding // 2
+        bottom_padding = v_padding - top_padding
+        right_padding = h_padding - left_padding
+        width -= v_padding
+        height -= h_padding
+        padding = (top_padding, bottom_padding, left_padding, right_padding)
+        identicon = self.generator.generate(
+            name, width, height, padding=padding
+        )
+        return identicon
+
+    def render_GET(self, request):
+        name = "/".join(request.postpath)
+        width = int(request.args.get("width", [96])[0])
+        height = int(request.args.get("height", [96])[0])
+        identicon_bytes = self.generate_identicon(name, width, height)
+        request.setHeader(b"Content-Type", b"image/png")
+        request.setHeader(
+            b"Cache-Control", b"public,max-age=86400,s-maxage=86400"
+        )
+        return identicon_bytes
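
Worked through for the default 96x96 request, the padding arithmetic rounds each dimension down to a multiple of SIZE and returns the remainder as padding (note the naming is slightly crossed: v_padding is derived from width; this sketch just traces the numbers as written):

    SIZE = 5
    width = height = 96  # the default request

    v_padding = width % SIZE                                  # 96 % 5 == 1
    h_padding = height % SIZE                                 # 96 % 5 == 1
    top, bottom = v_padding // 2, v_padding - v_padding // 2  # 0 and 1
    left, right = h_padding // 2, h_padding - h_padding // 2  # 0 and 1
    inner_w, inner_h = width - v_padding, height - h_padding  # 95 and 95

    # pydenticon draws a grid-aligned 95x95 identicon; the padding then
    # restores the full 96x96 output the client asked for.
    assert inner_w + left + right == 96
    assert inner_h + top + bottom == 96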
diff --git a/synapse/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 461cc001f1..9ca4d884dd 100644
--- a/synapse/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -16,6 +16,7 @@
 from .upload_resource import UploadResource
 from .download_resource import DownloadResource
 from .thumbnail_resource import ThumbnailResource
+from .identicon_resource import IdenticonResource
 from .filepath import MediaFilePaths
 
 from twisted.web.resource import Resource
@@ -33,6 +34,7 @@ class MediaRepositoryResource(Resource):
 
         => POST /_matrix/media/v1/upload HTTP/1.1
            Content-Type: <media-type>
+           Content-Length: <content-length>
 
            <media>
 
@@ -75,3 +77,4 @@ class MediaRepositoryResource(Resource):
         self.putChild("upload", UploadResource(hs, filepaths))
         self.putChild("download", DownloadResource(hs, filepaths))
         self.putChild("thumbnail", ThumbnailResource(hs, filepaths))
+        self.putChild("identicon", IdenticonResource())
diff --git a/synapse/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py
index 84f5e3463c..84f5e3463c 100644
--- a/synapse/media/v1/thumbnail_resource.py
+++ b/synapse/rest/media/v1/thumbnail_resource.py
diff --git a/synapse/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py
index 28404f2b7b..28404f2b7b 100644
--- a/synapse/media/v1/thumbnailer.py
+++ b/synapse/rest/media/v1/thumbnailer.py
diff --git a/synapse/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index b1718a630b..e5aba3af4c 100644
--- a/synapse/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -40,9 +40,39 @@ class UploadResource(BaseMediaResource):
         return NOT_DONE_YET
 
     @defer.inlineCallbacks
+    def create_content(self, media_type, upload_name, content, content_length,
+                       auth_user):
+        media_id = random_string(24)
+
+        fname = self.filepaths.local_media_filepath(media_id)
+        self._makedirs(fname)
+
+        # This shouldn't block for very long because the content will have
+        # already been uploaded at this point.
+        with open(fname, "wb") as f:
+            f.write(content)
+
+        yield self.store.store_local_media(
+            media_id=media_id,
+            media_type=media_type,
+            time_now_ms=self.clock.time_msec(),
+            upload_name=upload_name,
+            media_length=content_length,
+            user_id=auth_user,
+        )
+        media_info = {
+            "media_type": media_type,
+            "media_length": content_length,
+        }
+
+        yield self._generate_local_thumbnails(media_id, media_info)
+
+        defer.returnValue("mxc://%s/%s" % (self.server_name, media_id))
+
+    @defer.inlineCallbacks
     def _async_render_POST(self, request):
         try:
-            auth_user = yield self.auth.get_user_by_req(request)
+            auth_user, client = yield self.auth.get_user_by_req(request)
             # TODO: The checks here are a bit late. The content will have
             # already been uploaded to a tmp file at this point
             content_length = request.getHeader("Content-Length")
@@ -66,36 +96,14 @@ class UploadResource(BaseMediaResource):
                     code=400,
                 )
 
-            #if headers.hasHeader("Content-Disposition"):
-            #    disposition = headers.getRawHeaders("Content-Disposition")[0]
+            # if headers.hasHeader("Content-Disposition"):
+            #     disposition = headers.getRawHeaders("Content-Disposition")[0]
             # TODO(markjh): parse content-disposition
 
-            media_id = random_string(24)
-
-            fname = self.filepaths.local_media_filepath(media_id)
-            self._makedirs(fname)
-
-            # This shouldn't block for very long because the content will have
-            # already been uploaded at this point.
-            with open(fname, "wb") as f:
-                f.write(request.content.read())
-
-            yield self.store.store_local_media(
-                media_id=media_id,
-                media_type=media_type,
-                time_now_ms=self.clock.time_msec(),
-                upload_name=None,
-                media_length=content_length,
-                user_id=auth_user,
+            content_uri = yield self.create_content(
+                media_type, None, request.content.read(),
+                content_length, auth_user
             )
-            media_info = {
-                "media_type": media_type,
-                "media_length": content_length,
-            }
-
-            yield self._generate_local_thumbnails(media_id, media_info)
-
-            content_uri = "mxc://%s/%s" % (self.server_name, media_id)
 
             respond_with_json(
                 request, 200, {"content_uri": content_uri}, send_cors=True
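
Pulling the storage steps out into create_content means other server-side code paths can store media without an HTTP upload. A hedged sketch of such a caller (the surrounding function is hypothetical; create_content returns the mxc:// URI, as above):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def store_generated_avatar(upload_resource, png_bytes, auth_user):
        # Reuses the upload path: write the file, record it in the media
        # store, and kick off local thumbnailing, all without a request.
        content_uri = yield upload_resource.create_content(
            media_type="image/png",
            upload_name="avatar.png",
            content=png_bytes,
            content_length=len(png_bytes),
            auth_user=auth_user,
        )
        defer.returnValue(content_uri)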
diff --git a/synapse/server.py b/synapse/server.py
index d861efd2fd..cb8610a1b4 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -20,21 +20,20 @@
 
 # Imports required for the default HomeServer() implementation
 from synapse.federation import initialize_http_replication
-from synapse.events.utils import serialize_event
 from synapse.notifier import Notifier
 from synapse.api.auth import Auth
 from synapse.handlers import Handlers
-from synapse.rest import RestServletFactory
 from synapse.state import StateHandler
 from synapse.storage import DataStore
-from synapse.types import UserID, RoomAlias, RoomID, EventID
 from synapse.util import Clock
 from synapse.util.distributor import Distributor
 from synapse.util.lockutils import LockManager
 from synapse.streams.events import EventSources
 from synapse.api.ratelimiting import Ratelimiter
 from synapse.crypto.keyring import Keyring
+from synapse.push.pusherpool import PusherPool
 from synapse.events.builder import EventBuilderFactory
+from synapse.api.filtering import Filtering
 
 
 class BaseHomeServer(object):
@@ -72,15 +71,20 @@ class BaseHomeServer(object):
         'notifier',
         'distributor',
         'resource_for_client',
+        'resource_for_client_v2_alpha',
         'resource_for_federation',
+        'resource_for_static_content',
         'resource_for_web_client',
         'resource_for_content_repo',
         'resource_for_server_key',
         'resource_for_media_repository',
+        'resource_for_app_services',
         'event_sources',
         'ratelimiter',
         'keyring',
+        'pusherpool',
         'event_builder_factory',
+        'filtering',
     ]
 
     def __init__(self, hostname, **kwargs):
@@ -125,33 +129,6 @@ class BaseHomeServer(object):
 
         setattr(BaseHomeServer, "get_%s" % (depname), _get)
 
-    # TODO: Why are these parse_ methods so high up along with other globals?
-    # Surely these should be in a util package or in the api package?
-
-    # Other utility methods
-    def parse_userid(self, s):
-        """Parse the string given by 's' as a User ID and return a UserID
-        object."""
-        return UserID.from_string(s)
-
-    def parse_roomalias(self, s):
-        """Parse the string given by 's' as a Room Alias and return a RoomAlias
-        object."""
-        return RoomAlias.from_string(s)
-
-    def parse_roomid(self, s):
-        """Parse the string given by 's' as a Room ID and return a RoomID
-        object."""
-        return RoomID.from_string(s)
-
-    def parse_eventid(self, s):
-        """Parse the string given by 's' as a Event ID and return a EventID
-        object."""
-        return EventID.from_string(s)
-
-    def serialize_event(self, e, as_client_event=True):
-        return serialize_event(self, e, as_client_event)
-
     def get_ip_from_request(self, request):
         # May be an X-Forwarded-For header depending on config
         ip_addr = request.getClientIP()
@@ -203,9 +180,6 @@ class HomeServer(BaseHomeServer):
     def build_auth(self):
         return Auth(self)
 
-    def build_rest_servlet_factory(self):
-        return RestServletFactory(self)
-
     def build_state_handler(self):
         return StateHandler(self)
 
@@ -230,8 +204,8 @@ class HomeServer(BaseHomeServer):
             hostname=self.hostname,
         )
 
-    def register_servlets(self):
-        """ Register all servlets associated with this HomeServer.
-        """
-        # Simply building the ServletFactory is sufficient to have it register
-        self.get_rest_servlet_factory()
+    def build_filtering(self):
+        return Filtering(self)
+
+    def build_pusherpool(self):
+        return PusherPool(self)
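
The DEPENDENCIES list drives the dependency-injection scheme visible above: for each name, BaseHomeServer synthesizes a cached get_<name> accessor that falls back to a build_<name> factory on first use. A simplified self-contained sketch of that mechanism (the real class also accepts pre-built dependencies as constructor keyword arguments, which this mimics):

    class MiniHomeServer(object):
        DEPENDENCIES = ["clock", "filtering"]

        def __init__(self, **kwargs):
            # Pre-built dependencies may be injected directly.
            for name, dep in kwargs.items():
                setattr(self, "_" + name, dep)

        def build_filtering(self):
            # Factory used on the first get_filtering() call.
            return object()

    def _make_getter(depname):
        def _get(self):
            if not hasattr(self, "_" + depname):
                # Lazily build and cache on first access.
                setattr(self, "_" + depname,
                        getattr(self, "build_%s" % depname)())
            return getattr(self, "_" + depname)
        return _get

    for _depname in MiniHomeServer.DEPENDENCIES:
        setattr(MiniHomeServer, "get_%s" % _depname, _make_getter(_depname))

Every call to hs.get_filtering() then returns the same cached instance, which is what lets handlers share state through the homeserver object.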
diff --git a/synapse/state.py b/synapse/state.py
index 8144fa02b4..80cced351d 100644
--- a/synapse/state.py
+++ b/synapse/state.py
@@ -18,7 +18,9 @@ from twisted.internet import defer
 
 from synapse.util.logutils import log_function
 from synapse.util.async import run_on_reactor
+from synapse.util.expiringcache import ExpiringCache
 from synapse.api.constants import EventTypes
+from synapse.api.errors import AuthError
 from synapse.events.snapshot import EventContext
 
 from collections import namedtuple
@@ -36,12 +38,46 @@ def _get_state_key_from_event(event):
 KeyStateTuple = namedtuple("KeyStateTuple", ("context", "type", "state_key"))
 
 
+AuthEventTypes = (
+    EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,
+    EventTypes.JoinRules,
+)
+
+
+SIZE_OF_CACHE = 1000
+EVICTION_TIMEOUT_SECONDS = 20
+
+
+class _StateCacheEntry(object):
+    def __init__(self, state, state_group, ts):
+        self.state = state
+        self.state_group = state_group
+        self.ts = ts
+
+
 class StateHandler(object):
     """ Responsible for doing state conflict resolution.
     """
 
     def __init__(self, hs):
+        self.clock = hs.get_clock()
         self.store = hs.get_datastore()
+        self.hs = hs
+
+        # dict of set of event_ids -> _StateCacheEntry.
+        self._state_cache = None
+
+    def start_caching(self):
+        logger.debug("start_caching")
+
+        self._state_cache = ExpiringCache(
+            cache_name="state_cache",
+            clock=self.clock,
+            max_len=SIZE_OF_CACHE,
+            expiry_ms=EVICTION_TIMEOUT_SECONDS*1000,
+            reset_expiry_on_get=True,
+        )
+
+        self._state_cache.start()
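
A minimal sketch of the cache semantics relied on here, assuming ExpiringCache behaves like a dict whose entries are evicted expiry_ms after they were last touched, with reset_expiry_on_get=True making reads count as touches so hot state stays resident (this mimics, rather than reproduces, synapse.util.expiringcache):

    import time

    class MiniExpiringCache(object):
        def __init__(self, expiry_ms, reset_expiry_on_get=True):
            self._expiry_s = expiry_ms / 1000.0
            self._reset_on_get = reset_expiry_on_get
            self._entries = {}  # key -> (value, last_touched)

        def __setitem__(self, key, value):
            self._entries[key] = (value, time.time())

        def get(self, key, default=None):
            self._evict()
            entry = self._entries.get(key)
            if entry is None:
                return default
            value, _ = entry
            if self._reset_on_get:
                # Reading refreshes the expiry, keeping hot entries alive.
                self._entries[key] = (value, time.time())
            return value

        def _evict(self):
            now = time.time()
            for key, (_, touched) in list(self._entries.items()):
                if now - touched > self._expiry_s:
                    del self._entries[key]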
 
     @defer.inlineCallbacks
     def get_current_state(self, room_id, event_type=None, state_key=""):
@@ -62,13 +98,22 @@ class StateHandler(object):
             for e_id, _, _ in events
         ]
 
-        res = yield self.resolve_state_groups(event_ids)
+        cache = None
+        if self._state_cache is not None:
+            cache = self._state_cache.get(frozenset(event_ids), None)
+
+        if cache:
+            cache.ts = self.clock.time_msec()
+            state = cache.state
+        else:
+            res = yield self.resolve_state_groups(event_ids)
+            state = res[1]
 
         if event_type:
-            defer.returnValue(res[1].get((event_type, state_key)))
+            defer.returnValue(state.get((event_type, state_key)))
             return
 
-        defer.returnValue(res[1].values())
+        defer.returnValue(state)
 
     @defer.inlineCallbacks
     def compute_event_context(self, event, old_state=None):
@@ -95,7 +140,9 @@ class StateHandler(object):
             context.state_group = None
 
             if hasattr(event, "auth_events") and event.auth_events:
-                auth_ids = zip(*event.auth_events)[0]
+                auth_ids = self.hs.get_auth().compute_auth_events(
+                    event, context.current_state
+                )
                 context.auth_events = {
                     k: v
                     for k, v in context.current_state.items()
@@ -141,7 +188,9 @@ class StateHandler(object):
                 event.unsigned["replaces_state"] = replaces.event_id
 
         if hasattr(event, "auth_events") and event.auth_events:
-            auth_ids = zip(*event.auth_events)[0]
+            auth_ids = self.hs.get_auth().compute_auth_events(
+                event, context.current_state
+            )
             context.auth_events = {
                 k: v
                 for k, v in context.current_state.items()
@@ -163,10 +212,31 @@ class StateHandler(object):
         first is the name of a state group if one and only one is involved,
         otherwise `None`.
         """
+        logger.debug("resolve_state_groups event_ids %s", event_ids)
+
+        if self._state_cache is not None:
+            cache = self._state_cache.get(frozenset(event_ids), None)
+            if cache and cache.state_group:
+                cache.ts = self.clock.time_msec()
+                prev_state = cache.state.get((event_type, state_key), None)
+                if prev_state:
+                    prev_state = prev_state.event_id
+                    prev_states = [prev_state]
+                else:
+                    prev_states = []
+                defer.returnValue(
+                    (cache.state_group, cache.state, prev_states)
+                )
+
         state_groups = yield self.store.get_state_groups(
             event_ids
         )
 
+        logger.debug(
+            "resolve_state_groups state_groups %s",
+            state_groups.keys()
+        )
+
         group_names = set(state_groups.keys())
         if len(group_names) == 1:
             name, state_list = state_groups.items().pop()
@@ -181,15 +251,48 @@ class StateHandler(object):
             else:
                 prev_states = []
 
+            if self._state_cache is not None:
+                cache = _StateCacheEntry(
+                    state=state,
+                    state_group=name,
+                    ts=self.clock.time_msec()
+                )
+
+                self._state_cache[frozenset(event_ids)] = cache
+
             defer.returnValue((name, state, prev_states))
 
+        new_state, prev_states = self._resolve_events(
+            state_groups.values(), event_type, state_key
+        )
+
+        if self._state_cache is not None:
+            cache = _StateCacheEntry(
+                state=new_state,
+                state_group=None,
+                ts=self.clock.time_msec()
+            )
+
+            self._state_cache[frozenset(event_ids)] = cache
+
+        defer.returnValue((None, new_state, prev_states))
+
+    def resolve_events(self, state_sets, event):
+        if event.is_state():
+            return self._resolve_events(
+                state_sets, event.type, event.state_key
+            )
+        else:
+            return self._resolve_events(state_sets)
+
+    def _resolve_events(self, state_sets, event_type=None, state_key=""):
         state = {}
-        for group, g_state in state_groups.items():
-            for s in g_state:
+        for st in state_sets:
+            for e in st:
                 state.setdefault(
-                    (s.type, s.state_key),
+                    (e.type, e.state_key),
                     {}
-                )[s.event_id] = s
+                )[e.event_id] = e
 
         unconflicted_state = {
             k: v.values()[0] for k, v in state.items()
@@ -210,64 +313,102 @@ class StateHandler(object):
         else:
             prev_states = []
 
+        auth_events = {
+            k: e for k, e in unconflicted_state.items()
+            if k[0] in AuthEventTypes
+        }
+
         try:
-            new_state = {}
-            new_state.update(unconflicted_state)
-            for key, events in conflicted_state.items():
-                new_state[key] = self._resolve_state_events(events)
+            resolved_state = self._resolve_state_events(
+                conflicted_state, auth_events
+            )
         except:
             logger.exception("Failed to resolve state")
             raise
 
-        defer.returnValue((None, new_state, prev_states))
-
-    def _get_power_level_from_event_state(self, event, user_id):
-        if hasattr(event, "old_state_events") and event.old_state_events:
-            key = (EventTypes.PowerLevels, "", )
-            power_level_event = event.old_state_events.get(key)
-            level = None
-            if power_level_event:
-                level = power_level_event.content.get("users", {}).get(
-                    user_id
-                )
-                if not level:
-                    level = power_level_event.content.get("users_default", 0)
+        new_state = unconflicted_state
+        new_state.update(resolved_state)
 
-            return level
-        else:
-            return 0
+        return new_state, prev_states
 
     @log_function
-    def _resolve_state_events(self, events):
-        curr_events = events
+    def _resolve_state_events(self, conflicted_state, auth_events):
+        """ This is where we actually decide which of the conflicted state to
+        use.
+
+        We resolve conflicts in the following order:
+            1. power levels
+            2. memberships
+            3. other events.
+        """
+        resolved_state = {}
+        power_key = (EventTypes.PowerLevels, "")
+        if power_key in conflicted_state:
+            power_levels = conflicted_state[power_key]
+            resolved_state[power_key] = self._resolve_auth_events(power_levels)
+
+        auth_events.update(resolved_state)
+
+        for key, events in conflicted_state.items():
+            if key[0] == EventTypes.JoinRules:
+                resolved_state[key] = self._resolve_auth_events(
+                    events,
+                    auth_events
+                )
 
-        new_powers = [
-            self._get_power_level_from_event_state(e, e.user_id)
-            for e in curr_events
-        ]
+        auth_events.update(resolved_state)
 
-        new_powers = [
-            int(p) if p else 0 for p in new_powers
-        ]
+        for key, events in conflicted_state.items():
+            if key[0] == EventTypes.Member:
+                resolved_state[key] = self._resolve_auth_events(
+                    events,
+                    auth_events
+                )
 
-        max_power = max(new_powers)
+        auth_events.update(resolved_state)
 
-        curr_events = [
-            z[0] for z in zip(curr_events, new_powers)
-            if z[1] == max_power
-        ]
+        for key, events in conflicted_state.items():
+            if key not in resolved_state:
+                resolved_state[key] = self._resolve_normal_events(
+                    events, auth_events
+                )
 
-        if not curr_events:
-            raise RuntimeError("Max didn't get a max?")
-        elif len(curr_events) == 1:
-            return curr_events[0]
-
-        # TODO: For now, just choose the one with the largest event_id.
-        return (
-            sorted(
-                curr_events,
-                key=lambda e: hashlib.sha1(
-                    e.event_id + e.user_id + e.room_id + e.type
-                ).hexdigest()
-            )[0]
-        )
+        return resolved_state
+
+    def _resolve_auth_events(self, events, auth_events):
+        reverse = [i for i in reversed(self._ordered_events(events))]
+
+        auth_events = dict(auth_events)
+
+        prev_event = reverse[0]
+        for event in reverse[1:]:
+            auth_events[(prev_event.type, prev_event.state_key)] = prev_event
+            try:
+                # FIXME: hs.get_auth() is bad style, but we need to do it to
+                # get around circular deps.
+                self.hs.get_auth().check(event, auth_events)
+                prev_event = event
+            except AuthError:
+                return prev_event
+
+        return event
+
+    def _resolve_normal_events(self, events, auth_events):
+        for event in self._ordered_events(events):
+            try:
+                # FIXME: hs.get_auth() is bad style, but we need to do it to
+                # get around circular deps.
+                self.hs.get_auth().check(event, auth_events)
+                return event
+            except AuthError:
+                pass
+
+        # Use the last event (the one with the least depth) if they all fail
+        # the auth check.
+        return event
+
+    def _ordered_events(self, events):
+        def key_func(e):
+            return -int(e.depth), hashlib.sha1(e.event_id).hexdigest()
+
+        return sorted(events, key=key_func)
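
_ordered_events is the heart of the tie-breaking: candidates are sorted by descending depth, with the SHA-1 of the event_id as a stable tie-break, so the deepest event is tried first against the auth checks. A toy trace of just the ordering (FakeEvent is hypothetical; the encode() is needed on Python 3, where sha1 wants bytes):

    import hashlib
    from collections import namedtuple

    FakeEvent = namedtuple("FakeEvent", ["event_id", "depth"])

    def ordered_events(events):
        def key_func(e):
            # Deepest first; hash of the event_id as a stable tie-break.
            return (-int(e.depth),
                    hashlib.sha1(e.event_id.encode("ascii")).hexdigest())
        return sorted(events, key=key_func)

    events = [FakeEvent("$a:hs", 3), FakeEvent("$b:hs", 5),
              FakeEvent("$c:hs", 5)]
    print([e.event_id for e in ordered_events(events)])
    # The two depth-5 events sort first, ordered by their id hashes,
    # followed by the depth-3 event.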
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index 4beb951b9f..a3ff995695 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -18,6 +18,7 @@ from twisted.internet import defer
 from synapse.util.logutils import log_function
 from synapse.api.constants import EventTypes
 
+from .appservice import ApplicationServiceStore
 from .directory import DirectoryStore
 from .feedback import FeedbackStore
 from .presence import PresenceStore
@@ -29,10 +30,14 @@ from .stream import StreamStore
 from .transactions import TransactionStore
 from .keys import KeyStore
 from .event_federation import EventFederationStore
+from .pusher import PusherStore
+from .push_rule import PushRuleStore
 from .media_repository import MediaRepositoryStore
+from .rejections import RejectionsStore
 
 from .state import StateStore
 from .signatures import SignatureStore
+from .filtering import FilteringStore
 
 from syutil.base64util import decode_base64
 from syutil.jsonutil import encode_canonical_json
@@ -40,33 +45,21 @@ from syutil.jsonutil import encode_canonical_json
 from synapse.crypto.event_signing import compute_event_reference_hash
 
 
-import json
+import fnmatch
+import imp
 import logging
 import os
+import re
 
 
 logger = logging.getLogger(__name__)
 
 
-SCHEMAS = [
-    "transactions",
-    "users",
-    "profiles",
-    "presence",
-    "im",
-    "room_aliases",
-    "keys",
-    "redactions",
-    "state",
-    "event_edges",
-    "event_signatures",
-    "media_repository",
-]
+# Remember to update this number every time a change is made to database
+# schema files, so the users will be informed on server restarts.
+SCHEMA_VERSION = 14
 
-
-# Remember to update this number every time an incompatible change is made to
-# database schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 11
+dir_path = os.path.abspath(os.path.dirname(__file__))
 
 
 class _RollbackButIsFineException(Exception):
@@ -80,8 +73,13 @@ class DataStore(RoomMemberStore, RoomStore,
                 RegistrationStore, StreamStore, ProfileStore, FeedbackStore,
                 PresenceStore, TransactionStore,
                 DirectoryStore, KeyStore, StateStore, SignatureStore,
+                ApplicationServiceStore,
                 EventFederationStore,
                 MediaRepositoryStore,
+                RejectionsStore,
+                FilteringStore,
+                PusherStore,
+                PushRuleStore
                 ):
 
     def __init__(self, hs):
@@ -117,21 +115,147 @@ class DataStore(RoomMemberStore, RoomStore,
             pass
 
     @defer.inlineCallbacks
-    def get_event(self, event_id, allow_none=False):
-        events = yield self._get_events([event_id])
+    def get_event(self, event_id, check_redacted=True,
+                  get_prev_content=False, allow_rejected=False,
+                  allow_none=False):
+        """Get an event from the database by event_id.
+
+        Args:
+            event_id (str): The event_id of the event to fetch
+            check_redacted (bool): If True, check if event has been redacted
+                and redact it.
+            get_prev_content (bool): If True and the event is a state event,
+                include the previous state's content in the unsigned field.
+            allow_rejected (bool): If True, return rejected events.
+            allow_none (bool): If True, return None if no event is found; if
+                False, raise an exception.
+
+        Returns:
+            Deferred: A FrozenEvent.
+        """
+        event = yield self.runInteraction(
+            "get_event", self._get_event_txn,
+            event_id,
+            check_redacted=check_redacted,
+            get_prev_content=get_prev_content,
+            allow_rejected=allow_rejected,
+        )
 
-        if not events:
-            if allow_none:
-                defer.returnValue(None)
-            else:
-                raise RuntimeError("Could not find event %s" % (event_id,))
+        if not event and not allow_none:
+            raise RuntimeError("Could not find event %s" % (event_id,))
 
-        defer.returnValue(events[0])
+        defer.returnValue(event)
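
A short usage sketch of the widened signature from calling code (hedged; store stands for any DataStore instance, and the helper name is illustrative):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def maybe_get_event(store, event_id):
        # allow_none=True returns None on a miss instead of raising
        # RuntimeError; check_redacted applies any redaction first.
        event = yield store.get_event(
            event_id, check_redacted=True, allow_none=True,
        )
        defer.returnValue(event)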
 
     @log_function
     def _persist_event_txn(self, txn, event, context, backfilled,
                            stream_ordering=None, is_new_state=True,
                            current_state=None):
+
+        # Remove any existing cache entries for the event_id
+        self._get_event_cache.pop(event.event_id)
+
+        # We purposefully do this first since if we include a `current_state`
+        # key, we *want* to update the `current_state_events` table
+        if current_state:
+            txn.execute(
+                "DELETE FROM current_state_events WHERE room_id = ?",
+                (event.room_id,)
+            )
+
+            for s in current_state:
+                self._simple_insert_txn(
+                    txn,
+                    "current_state_events",
+                    {
+                        "event_id": s.event_id,
+                        "room_id": s.room_id,
+                        "type": s.type,
+                        "state_key": s.state_key,
+                    },
+                    or_replace=True,
+                )
+
+        if event.is_state() and is_new_state:
+            if not backfilled and not context.rejected:
+                self._simple_insert_txn(
+                    txn,
+                    table="state_forward_extremities",
+                    values={
+                        "event_id": event.event_id,
+                        "room_id": event.room_id,
+                        "type": event.type,
+                        "state_key": event.state_key,
+                    },
+                    or_replace=True,
+                )
+
+                for prev_state_id, _ in event.prev_state:
+                    self._simple_delete_txn(
+                        txn,
+                        table="state_forward_extremities",
+                        keyvalues={
+                            "event_id": prev_state_id,
+                        }
+                    )
+
+        outlier = event.internal_metadata.is_outlier()
+
+        if not outlier:
+            self._store_state_groups_txn(txn, event, context)
+
+            self._update_min_depth_for_room_txn(
+                txn,
+                event.room_id,
+                event.depth
+            )
+
+        self._handle_prev_events(
+            txn,
+            outlier=outlier,
+            event_id=event.event_id,
+            prev_events=event.prev_events,
+            room_id=event.room_id,
+        )
+
+        have_persisted = self._simple_select_one_onecol_txn(
+            txn,
+            table="event_json",
+            keyvalues={"event_id": event.event_id},
+            retcol="event_id",
+            allow_none=True,
+        )
+
+        metadata_json = encode_canonical_json(
+            event.internal_metadata.get_dict()
+        )
+
+        # If we have already persisted this event, we don't need to do any
+        # more processing.
+        # The processing above must be done on every call to persist an
+        # event, since it might not have happened on previous calls. For
+        # example, we may be persisting an event that we had previously
+        # persisted as an outlier, but which is no longer one.
+        if have_persisted:
+            if not outlier:
+                sql = (
+                    "UPDATE event_json SET internal_metadata = ?"
+                    " WHERE event_id = ?"
+                )
+                txn.execute(
+                    sql,
+                    (metadata_json.decode("UTF-8"), event.event_id,)
+                )
+
+                sql = (
+                    "UPDATE events SET outlier = 0"
+                    " WHERE event_id = ?"
+                )
+                txn.execute(
+                    sql,
+                    (event.event_id,)
+                )
+            return
+
         if event.type == EventTypes.Member:
             self._store_room_member_txn(txn, event)
         elif event.type == EventTypes.Feedback:
@@ -143,8 +267,6 @@ class DataStore(RoomMemberStore, RoomStore,
         elif event.type == EventTypes.Redaction:
             self._store_redaction(txn, event)
 
-        outlier = event.internal_metadata.is_outlier()
-
         event_dict = {
             k: v
             for k, v in event.get_dict().items()
@@ -154,10 +276,6 @@ class DataStore(RoomMemberStore, RoomStore,
             ]
         }
 
-        metadata_json = encode_canonical_json(
-            event.internal_metadata.get_dict()
-        )
-
         self._simple_insert_txn(
             txn,
             table="event_json",
@@ -170,12 +288,16 @@ class DataStore(RoomMemberStore, RoomStore,
             or_replace=True,
         )
 
+        content = encode_canonical_json(
+            event.content
+        ).decode("UTF-8")
+
         vals = {
             "topological_ordering": event.depth,
             "event_id": event.event_id,
             "type": event.type,
             "room_id": event.room_id,
-            "content": json.dumps(event.get_dict()["content"]),
+            "content": content,
             "processed": True,
             "outlier": outlier,
             "depth": event.depth,
@@ -195,7 +317,10 @@ class DataStore(RoomMemberStore, RoomStore,
                 "prev_events",
             ]
         }
-        vals["unrecognized_keys"] = json.dumps(unrec)
+
+        vals["unrecognized_keys"] = encode_canonical_json(
+            unrec
+        ).decode("UTF-8")
 
         try:
             self._simple_insert_txn(
@@ -213,38 +338,10 @@ class DataStore(RoomMemberStore, RoomStore,
             )
             raise _RollbackButIsFineException("_persist_event")
 
-        self._handle_prev_events(
-            txn,
-            outlier=outlier,
-            event_id=event.event_id,
-            prev_events=event.prev_events,
-            room_id=event.room_id,
-        )
-
-        if not outlier:
-            self._store_state_groups_txn(txn, event, context)
-
-        if current_state:
-            txn.execute(
-                "DELETE FROM current_state_events WHERE room_id = ?",
-                (event.room_id,)
-            )
-
-            for s in current_state:
-                self._simple_insert_txn(
-                    txn,
-                    "current_state_events",
-                    {
-                        "event_id": s.event_id,
-                        "room_id": s.room_id,
-                        "type": s.type,
-                        "state_key": s.state_key,
-                    },
-                    or_replace=True,
-                )
+        if context.rejected:
+            self._store_rejections_txn(txn, event.event_id, context.rejected)
 
-        is_state = hasattr(event, "state_key") and event.state_key is not None
-        if is_state:
+        if event.is_state():
             vals = {
                 "event_id": event.event_id,
                 "room_id": event.room_id,
@@ -252,6 +349,7 @@ class DataStore(RoomMemberStore, RoomStore,
                 "state_key": event.state_key,
             }
 
+            # TODO: How does this work with backfilling?
             if hasattr(event, "replaces_state"):
                 vals["prev_state"] = event.replaces_state
 
@@ -262,7 +360,7 @@ class DataStore(RoomMemberStore, RoomStore,
                 or_replace=True,
             )
 
-            if is_new_state:
+            if is_new_state and not context.rejected:
                 self._simple_insert_txn(
                     txn,
                     "current_state_events",
@@ -288,28 +386,6 @@ class DataStore(RoomMemberStore, RoomStore,
                     or_ignore=True,
                 )
 
-            if not backfilled:
-                self._simple_insert_txn(
-                    txn,
-                    table="state_forward_extremities",
-                    values={
-                        "event_id": event.event_id,
-                        "room_id": event.room_id,
-                        "type": event.type,
-                        "state_key": event.state_key,
-                    },
-                    or_replace=True,
-                )
-
-                for prev_state_id, _ in event.prev_state:
-                    self._simple_delete_txn(
-                        txn,
-                        table="state_forward_extremities",
-                        keyvalues={
-                            "event_id": prev_state_id,
-                        }
-                    )
-
         for hash_alg, hash_base64 in event.hashes.items():
             hash_bytes = decode_base64(hash_base64)
             self._store_event_content_hash_txn(
@@ -340,14 +416,9 @@ class DataStore(RoomMemberStore, RoomStore,
             txn, event.event_id, ref_alg, ref_hash_bytes
         )
 
-        if not outlier:
-            self._update_min_depth_for_room_txn(
-                txn,
-                event.room_id,
-                event.depth
-            )
-
     def _store_redaction(self, txn, event):
+        # invalidate the cache for the redacted event
+        self._get_event_cache.pop(event.redacts)
         txn.execute(
             "INSERT OR IGNORE INTO redactions "
             "(event_id, redacts) VALUES (?,?)",
@@ -370,9 +441,12 @@ class DataStore(RoomMemberStore, RoomStore,
             "redacted": del_sql,
         }
 
-        if event_type:
+        if event_type and state_key is not None:
             sql += " AND s.type = ? AND s.state_key = ? "
             args = (room_id, event_type, state_key)
+        elif event_type:
+            sql += " AND s.type = ?"
+            args = (room_id, event_type)
         else:
             args = (room_id, )
 
@@ -382,6 +456,41 @@ class DataStore(RoomMemberStore, RoomStore,
         defer.returnValue(events)
 
     @defer.inlineCallbacks
+    def get_room_name_and_aliases(self, room_id):
+        del_sql = (
+            "SELECT event_id FROM redactions WHERE redacts = e.event_id "
+            "LIMIT 1"
+        )
+
+        sql = (
+            "SELECT e.*, (%(redacted)s) AS redacted FROM events as e "
+            "INNER JOIN current_state_events as c ON e.event_id = c.event_id "
+            "INNER JOIN state_events as s ON e.event_id = s.event_id "
+            "WHERE c.room_id = ? "
+        ) % {
+            "redacted": del_sql,
+        }
+
+        sql += " AND ((s.type = 'm.room.name' AND s.state_key = '')"
+        sql += " OR s.type = 'm.room.aliases')"
+        args = (room_id,)
+
+        results = yield self._execute_and_decode(sql, *args)
+
+        events = yield self._parse_events(results)
+
+        name = None
+        aliases = []
+
+        for e in events:
+            if e.type == 'm.room.name':
+                name = e.content['name']
+            elif e.type == 'm.room.aliases':
+                aliases.extend(e.content['aliases'])
+
+        defer.returnValue((name, aliases))
+
+    @defer.inlineCallbacks
     def _get_min_token(self):
         row = yield self._execute(
             None,
@@ -417,30 +526,48 @@ class DataStore(RoomMemberStore, RoomStore,
             ],
         )
 
+    def have_events(self, event_ids):
+        """Given a list of event ids, check if we have already processed them.
+
+        Returns:
+            dict: Has an entry for each event id we have already seen. Maps to
+            the rejection reason string if we rejected the event, else maps to
+            None.
+        """
+        if not event_ids:
+            return defer.succeed({})
+
+        def f(txn):
+            sql = (
+                "SELECT e.event_id, reason FROM events as e "
+                "LEFT JOIN rejections as r ON e.event_id = r.event_id "
+                "WHERE e.event_id = ?"
+            )
 
-def schema_path(schema):
-    """ Get a filesystem path for the named database schema
+            res = {}
+            for event_id in event_ids:
+                txn.execute(sql, (event_id,))
+                row = txn.fetchone()
+                if row:
+                    _, rejected = row
+                    res[event_id] = rejected
 
-    Args:
-        schema: Name of the database schema.
-    Returns:
-        A filesystem path pointing at a ".sql" file.
+            return res
 
-    """
-    dir_path = os.path.dirname(__file__)
-    schemaPath = os.path.join(dir_path, "schema", schema + ".sql")
-    return schemaPath
+        return self.runInteraction(
+            "have_events", f,
+        )
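A brief sketch of consuming the result shape described above (the event IDs are illustrative): an id mapping to None was accepted, an id mapping to a string was rejected for that reason, and an absent id has never been seen.

    @defer.inlineCallbacks
    def example(store):
        ids = ["$abc:example.com", "$def:example.com"]
        seen = yield store.have_events(ids)
        # e.g. seen == {"$abc:example.com": None} means $abc was persisted
        # and accepted, while $def has never been seen at all
        missing = [e for e in ids if e not in seen]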
 
 
-def read_schema(schema):
+def read_schema(path):
     """ Read the named database schema.
 
     Args:
-        schema: Name of the datbase schema.
+        path: Path of the database schema.
     Returns:
         A string containing the database schema.
     """
-    with open(schema_path(schema)) as schema_file:
+    with open(path) as schema_file:
         return schema_file.read()
 
 
@@ -453,46 +580,275 @@ class UpgradeDatabaseException(PrepareDatabaseException):
 
 
 def prepare_database(db_conn):
-    """ Set up all the dbs. Since all the *.sql have IF NOT EXISTS, so we
-    don't have to worry about overwriting existing content.
+    """Prepares a database for usage. Will either create all necessary tables
+    or upgrade from an older schema version.
     """
-    c = db_conn.cursor()
-    c.execute("PRAGMA user_version")
-    row = c.fetchone()
+    try:
+        cur = db_conn.cursor()
+        version_info = _get_or_create_schema_state(cur)
+
+        if version_info:
+            user_version, delta_files, upgraded = version_info
+            _upgrade_existing_database(cur, user_version, delta_files, upgraded)
+        else:
+            _setup_new_database(cur)
 
-    if row and row[0]:
-        user_version = row[0]
+        cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,))
 
-        if user_version > SCHEMA_VERSION:
-            raise ValueError(
-                "Cannot use this database as it is too " +
-                "new for the server to understand"
-            )
-        elif user_version < SCHEMA_VERSION:
-            logger.info(
-                "Upgrading database from version %d",
-                user_version
+        cur.close()
+        db_conn.commit()
+    except:
+        db_conn.rollback()
+        raise
+
+
+def _setup_new_database(cur):
+    """Sets up the database by finding a base set of "full schemas" and then
+    applying any necessary deltas.
+
+    The "full_schemas" directory has subdirectories named after versions. This
+    function searches for the highest version less than or equal to
+    `SCHEMA_VERSION` and executes all .sql files in that directory.
+
+    The function will then apply all deltas for all versions after the base
+    version.
+
+    Example directory structure:
+
+        schema/
+            delta/
+                ...
+            full_schemas/
+                3/
+                    test.sql
+                    ...
+                11/
+                    foo.sql
+                    bar.sql
+                ...
+
+    In the example foo.sql and bar.sql would be run, and then any delta files
+    for versions strictly greater than 11.
+    """
+    current_dir = os.path.join(dir_path, "schema", "full_schemas")
+    directory_entries = os.listdir(current_dir)
+
+    valid_dirs = []
+    pattern = re.compile(r"^\d+(\.sql)?$")
+    for filename in directory_entries:
+        match = pattern.match(filename)
+        abs_path = os.path.join(current_dir, filename)
+        if match and os.path.isdir(abs_path):
+            ver = int(match.group(0))
+            if ver <= SCHEMA_VERSION:
+                valid_dirs.append((ver, abs_path))
+        else:
+            logger.warn("Unexpected entry in 'full_schemas': %s", filename)
+
+    if not valid_dirs:
+        raise PrepareDatabaseException(
+            "Could not find a suitable base set of full schemas"
+        )
+
+    max_current_ver, sql_dir = max(valid_dirs, key=lambda x: x[0])
+
+    logger.debug("Initialising schema v%d", max_current_ver)
+
+    directory_entries = os.listdir(sql_dir)
+
+    sql_script = "BEGIN TRANSACTION;\n"
+    for filename in fnmatch.filter(directory_entries, "*.sql"):
+        sql_loc = os.path.join(sql_dir, filename)
+        logger.debug("Applying schema %s", sql_loc)
+        sql_script += read_schema(sql_loc)
+        sql_script += "\n"
+    sql_script += "COMMIT TRANSACTION;"
+    cur.executescript(sql_script)
+
+    cur.execute(
+        "INSERT OR REPLACE INTO schema_version (version, upgraded)"
+        " VALUES (?,?)",
+        (max_current_ver, False)
+    )
+
+    _upgrade_existing_database(
+        cur,
+        current_version=max_current_ver,
+        applied_delta_files=[],
+        upgraded=False
+    )
+
+
+def _upgrade_existing_database(cur, current_version, applied_delta_files,
+                               upgraded):
+    """Upgrades an existing database.
+
+    Delta files can either be SQL stored in *.sql files, or python modules
+    in *.py.
+
+    There can be multiple delta files per version. Synapse will keep track of
+    which delta files have been applied, and will apply any that haven't been
+    even if there has been no version bump. This is useful for development
+    where orthogonal schema changes may happen on separate branches.
+
+    Different delta files for the same version *must* be orthogonal and give
+    the same result when applied in any order. No guarantees are made on the
+    order of execution of these scripts.
+
+    This is a no-op if current_version == SCHEMA_VERSION.
+
+    Example directory structure:
+
+        schema/
+            delta/
+                11/
+                    foo.sql
+                    ...
+                12/
+                    foo.sql
+                    bar.py
+                ...
+            full_schemas/
+                ...
+
+    In the example, if current_version is 11, then `11/foo.sql` will be run if
+    and only if `upgraded` is True. Then `12/foo.sql` and `12/bar.py` would be
+    run in some arbitrary order.
+
+    Args:
+        cur (Cursor)
+        current_version (int): The current version of the schema.
+        applied_delta_files (list): A list of deltas that have already been
+            applied.
+        upgraded (bool): Whether the current version was generated by having
+            applied deltas or from a full schema file. If `False` the function
+            will never apply delta files for the given `current_version`, since
+            the full schema for `current_version` already includes those changes.
+    """
+
+    if current_version > SCHEMA_VERSION:
+        raise ValueError(
+            "Cannot use this database as it is too " +
+            "new for the server to understand"
+        )
+
+    start_ver = current_version
+    if not upgraded:
+        start_ver += 1
+
+    for v in range(start_ver, SCHEMA_VERSION + 1):
+        logger.debug("Upgrading schema to v%d", v)
+
+        delta_dir = os.path.join(dir_path, "schema", "delta", str(v))
+
+        try:
+            directory_entries = os.listdir(delta_dir)
+        except OSError:
+            logger.exception("Could not open delta dir for version %d", v)
+            raise UpgradeDatabaseException(
+                "Could not open delta dir for version %d" % (v,)
             )
 
-            # Run every version since after the current version.
-            for v in range(user_version + 1, SCHEMA_VERSION + 1):
-                if v == 10:
-                    raise UpgradeDatabaseException(
-                        "No delta for version 10"
+        directory_entries.sort()
+        for file_name in directory_entries:
+            relative_path = os.path.join(str(v), file_name)
+            if relative_path in applied_delta_files:
+                continue
+
+            absolute_path = os.path.join(
+                dir_path, "schema", "delta", relative_path,
+            )
+            root_name, ext = os.path.splitext(file_name)
+            if ext == ".py":
+                # This is a python upgrade module. We need to import into some
+                # package and then execute its `run_upgrade` function.
+                module_name = "synapse.storage.v%d_%s" % (
+                    v, root_name
+                )
+                with open(absolute_path) as python_file:
+                    module = imp.load_source(
+                        module_name, absolute_path, python_file
                     )
-                sql_script = read_schema("delta/v%d" % (v))
-                c.executescript(sql_script)
-
-            db_conn.commit()
-
-    else:
-        sql_script = "BEGIN TRANSACTION;\n"
-        for sql_loc in SCHEMAS:
-            sql_script += read_schema(sql_loc)
-            sql_script += "\n"
-        sql_script += "COMMIT TRANSACTION;"
-        c.executescript(sql_script)
-        db_conn.commit()
-        c.execute("PRAGMA user_version = %d" % SCHEMA_VERSION)
+                logger.debug("Running script %s", relative_path)
+                module.run_upgrade(cur)
+            elif ext == ".sql":
+                # A plain old .sql file, just read and execute it
+                delta_schema = read_schema(absolute_path)
+                logger.debug("Applying schema %s", relative_path)
+                cur.executescript(delta_schema)
+            else:
+                # Not a valid delta file.
+                logger.warn(
+                    "Found directory entry that did not end in .py or"
+                    " .sql: %s",
+                    relative_path,
+                )
+                continue
+
+            # Mark as done.
+            cur.execute(
+                "INSERT INTO applied_schema_deltas (version, file)"
+                " VALUES (?,?)",
+                (v, relative_path)
+            )
 
-    c.close()
+            cur.execute(
+                "INSERT OR REPLACE INTO schema_version (version, upgraded)"
+                " VALUES (?,?)",
+                (v, True)
+            )
+
+
+def _get_or_create_schema_state(txn):
+    schema_path = os.path.join(
+        dir_path, "schema", "schema_version.sql",
+    )
+    create_schema = read_schema(schema_path)
+    txn.executescript(create_schema)
+
+    txn.execute("SELECT version, upgraded FROM schema_version")
+    row = txn.fetchone()
+    current_version = int(row[0]) if row else None
+    upgraded = bool(row[1]) if row else None
+
+    if current_version:
+        txn.execute(
+            "SELECT file FROM applied_schema_deltas WHERE version >= ?",
+            (current_version,)
+        )
+        applied_deltas = [d for d, in txn.fetchall()]
+        return current_version, applied_deltas, upgraded
+
+    return None
+
+
+def prepare_sqlite3_database(db_conn):
+    """This function should be called before `prepare_database` on sqlite3
+    databases.
+
+    Since we changed the way we store the current schema version and handle
+    updates to schemas, we need a way to upgrade from the old method to the
+    new. This only affects sqlite databases since they were the only ones
+    supported at the time.
+    """
+    with db_conn:
+        schema_path = os.path.join(
+            dir_path, "schema", "schema_version.sql",
+        )
+        create_schema = read_schema(schema_path)
+        db_conn.executescript(create_schema)
+
+        c = db_conn.execute("SELECT * FROM schema_version")
+        rows = c.fetchall()
+        c.close()
+
+        if not rows:
+            c = db_conn.execute("PRAGMA user_version")
+            row = c.fetchone()
+            c.close()
+
+            if row and row[0]:
+                db_conn.execute(
+                    "INSERT OR REPLACE INTO schema_version (version, upgraded)"
+                    " VALUES (?,?)",
+                    (row[0], False)
+                )
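Since the loader above imports each .py delta and calls its `run_upgrade(cur)` function, a delta module needs to expose only that one callable. A minimal hypothetical delta, purely illustrative and not part of this changeset, might look like:

    # schema/delta/12/example_add_flag.py (hypothetical)

    def run_upgrade(cur):
        # sqlite has no "ADD COLUMN IF NOT EXISTS", so a delta must only
        # ever run once -- which is exactly what the applied_schema_deltas
        # bookkeeping guarantees.
        cur.execute("ALTER TABLE events ADD COLUMN example_flag INTEGER")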
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index ce63f12008..3725c9795d 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -19,11 +19,12 @@ from synapse.events import FrozenEvent
 from synapse.events.utils import prune_event
 from synapse.util.logutils import log_function
 from synapse.util.logcontext import PreserveLoggingContext, LoggingContext
+from synapse.util.lrucache import LruCache
 
 from twisted.internet import defer
 
-import collections
-import json
+from collections import namedtuple, OrderedDict
+import simplejson as json
 import sys
 import time
 
@@ -34,6 +35,52 @@ sql_logger = logging.getLogger("synapse.storage.SQL")
 transaction_logger = logging.getLogger("synapse.storage.txn")
 
 
+# TODO(paul):
+#  * more generic key management
+#  * export monitoring stats
+#  * consider other eviction strategies - LRU?
+def cached(max_entries=1000):
+    """ A method decorator that applies a memoizing cache around the function.
+
+    The function is presumed to take one additional argument, which is used as
+    the key for the cache. Cache hits are served directly from the cache;
+    misses use the function body to generate the value.
+
+    The wrapped function has an additional member, a callable called
+    "invalidate". This can be used to remove individual entries from the cache.
+
+    The wrapped function has another additional callable, called "prefill",
+    which can be used to insert values into the cache specifically, without
+    calling the calculation function.
+    """
+    def wrap(orig):
+        cache = OrderedDict()
+
+        def prefill(key, value):
+            while len(cache) > max_entries:
+                cache.popitem(last=False)
+
+            cache[key] = value
+
+        @defer.inlineCallbacks
+        def wrapped(self, key):
+            if key in cache:
+                defer.returnValue(cache[key])
+
+            ret = yield orig(self, key)
+            prefill(key, ret)
+            defer.returnValue(ret)
+
+        def invalidate(key):
+            cache.pop(key, None)
+
+        wrapped.invalidate = invalidate
+        wrapped.prefill = prefill
+        return wrapped
+
+    return wrap
+
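A minimal usage sketch for the decorator above, assuming a hypothetical single-key lookup method (the table and column names are illustrative):

    class ExampleStore(SQLBaseStore):
        @cached(max_entries=500)
        def get_displayname(self, user_id):
            # misses fall through to the database; the result is inserted
            # into the cache via prefill() once the deferred resolves
            return self._simple_select_one_onecol(
                table="profiles",
                keyvalues={"user_id": user_id},
                retcol="displayname",
                allow_none=True,
            )

    # after writing a new displayname elsewhere, drop the stale entry:
    #     store.get_displayname.invalidate(user_id)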
+
 class LoggingTransaction(object):
     """An object that almost-transparently proxies for the 'txn' object
     passed to the constructor. Adds logging to the .execute() method."""
@@ -77,6 +124,43 @@ class LoggingTransaction(object):
             sql_logger.debug("[SQL time] {%s} %f", self.name, end - start)
 
 
+class PerformanceCounters(object):
+    def __init__(self):
+        self.current_counters = {}
+        self.previous_counters = {}
+
+    def update(self, key, start_time, end_time=None):
+        if end_time is None:
+            end_time = time.time() * 1000
+        duration = end_time - start_time
+        count, cum_time = self.current_counters.get(key, (0, 0))
+        count += 1
+        cum_time += duration
+        self.current_counters[key] = (count, cum_time)
+        return end_time
+
+    def interval(self, interval_duration, limit=3):
+        counters = []
+        for name, (count, cum_time) in self.current_counters.items():
+            prev_count, prev_time = self.previous_counters.get(name, (0, 0))
+            counters.append((
+                (cum_time - prev_time) / interval_duration,
+                count - prev_count,
+                name
+            ))
+
+        self.previous_counters = dict(self.current_counters)
+
+        counters.sort(reverse=True)
+
+        top_n_counters = ", ".join(
+            "%s(%d): %.3f%%" % (name, count, 100 * ratio)
+            for ratio, count, name in counters[:limit]
+        )
+
+        return top_n_counters
+
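To illustrate the report format, a small worked example with assumed timings (all values in milliseconds):

    pc = PerformanceCounters()
    pc.update("persist_event", start_time=0.0, end_time=40.0)  # one 40ms txn
    pc.update("get_event", start_time=0.0, end_time=10.0)      # one 10ms txn

    # share of a 100ms interval spent in each counter, busiest first
    print(pc.interval(100.0))
    # -> "persist_event(1): 40.000%, get_event(1): 10.000%"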
+
 class SQLBaseStore(object):
     _TXN_ID = 0
 
@@ -85,6 +169,43 @@ class SQLBaseStore(object):
         self._db_pool = hs.get_db_pool()
         self._clock = hs.get_clock()
 
+        self._previous_txn_total_time = 0
+        self._current_txn_total_time = 0
+        self._previous_loop_ts = 0
+        self._txn_perf_counters = PerformanceCounters()
+        self._get_event_counters = PerformanceCounters()
+
+        self._get_event_cache = LruCache(hs.config.event_cache_size)
+
+    def start_profiling(self):
+        self._previous_loop_ts = self._clock.time_msec()
+
+        def loop():
+            curr = self._current_txn_total_time
+            prev = self._previous_txn_total_time
+            self._previous_txn_total_time = curr
+
+            time_now = self._clock.time_msec()
+            time_then = self._previous_loop_ts
+            self._previous_loop_ts = time_now
+
+            ratio = (curr - prev)/(time_now - time_then)
+
+            top_three_counters = self._txn_perf_counters.interval(
+                time_now - time_then, limit=3
+            )
+
+            top_3_event_counters = self._get_event_counters.interval(
+                time_now - time_then, limit=3
+            )
+
+            logger.info(
+                "Total database time: %.3f%% {%s} {%s}",
+                ratio * 100, top_three_counters, top_3_event_counters
+            )
+
+        self._clock.looping_call(loop, 10000)
+
     @defer.inlineCallbacks
     def runInteraction(self, desc, func, *args, **kwargs):
         """Wraps the .runInteraction() method on the underlying db_pool."""
@@ -94,8 +215,7 @@ class SQLBaseStore(object):
             with LoggingContext("runInteraction") as context:
                 current_context.copy_to(context)
                 start = time.time() * 1000
-                txn_id = SQLBaseStore._TXN_ID
-                SQLBaseStore._TXN_ID += 1
+                txn_id = self._TXN_ID
 
                 # We don't really need these to be unique, so lets stop it from
                 # growing really large.
@@ -115,6 +235,10 @@ class SQLBaseStore(object):
                         "[TXN END] {%s} %f",
                         name, end - start
                     )
+
+                    self._current_txn_total_time += end - start
+                    self._txn_perf_counters.update(desc, start, end)
+
         with PreserveLoggingContext():
             result = yield self._db_pool.runInteraction(
                 inner_func, *args, **kwargs
@@ -194,6 +318,50 @@ class SQLBaseStore(object):
         txn.execute(sql, values.values())
         return txn.lastrowid
 
+    def _simple_upsert(self, table, keyvalues, values):
+        """
+        Args:
+            table (str): The table to upsert into
+            keyvalues (dict): The unique key tables and their new values
+            values (dict): The nonunique columns and their new values
+        Returns: A deferred
+        """
+        return self.runInteraction(
+            "_simple_upsert",
+            self._simple_upsert_txn, table, keyvalues, values
+        )
+
+    def _simple_upsert_txn(self, txn, table, keyvalues, values):
+        # Try to update
+        sql = "UPDATE %s SET %s WHERE %s" % (
+            table,
+            ", ".join("%s = ?" % (k,) for k in values),
+            " AND ".join("%s = ?" % (k,) for k in keyvalues)
+        )
+        sqlargs = values.values() + keyvalues.values()
+        logger.debug(
+            "[SQL] %s Args=%s",
+            sql, sqlargs,
+        )
+
+        txn.execute(sql, sqlargs)
+        if txn.rowcount == 0:
+            # We didn't update any rows, so insert a new one
+            allvalues = {}
+            allvalues.update(keyvalues)
+            allvalues.update(values)
+
+            sql = "INSERT INTO %s (%s) VALUES (%s)" % (
+                table,
+                ", ".join(k for k in allvalues),
+                ", ".join("?" for _ in allvalues)
+            )
+            logger.debug(
+                "[SQL] %s Args=%s",
+                sql, allvalues.values(),
+            )
+            txn.execute(sql, allvalues.values())
+
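`set_push_rule_enabled` further down in this changeset uses this helper to flip a flag without caring whether the row exists yet; a standalone sketch (the user and rule IDs are illustrative), noting that the UPDATE-then-INSERT pair runs inside a single runInteraction transaction:

    yield store._simple_upsert(
        table="push_rules_enable",
        keyvalues={"user_name": "@alice:example.com", "rule_id": "example_rule"},
        values={"enabled": False},
    )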
     def _simple_select_one(self, table, keyvalues, retcols,
                            allow_none=False):
         """Executes a SELECT query on the named table, which is expected to
@@ -282,7 +450,8 @@ class SQLBaseStore(object):
 
         Args:
             table : string giving the table name
-            keyvalues : dict of column names and values to select the rows with
+            keyvalues : dict of column names and values to select the rows
+                with, or None to not apply a WHERE clause.
             retcols : list of strings giving the names of the columns to return
         """
         return self.runInteraction(
@@ -301,13 +470,20 @@ class SQLBaseStore(object):
             keyvalues : dict of column names and values to select the rows with
             retcols : list of strings giving the names of the columns to return
         """
-        sql = "SELECT %s FROM %s WHERE %s ORDER BY rowid asc" % (
-            ", ".join(retcols),
-            table,
-            " AND ".join("%s = ?" % (k, ) for k in keyvalues)
-        )
+        if keyvalues:
+            sql = "SELECT %s FROM %s WHERE %s ORDER BY rowid asc" % (
+                ", ".join(retcols),
+                table,
+                " AND ".join("%s = ?" % (k, ) for k in keyvalues)
+            )
+            txn.execute(sql, keyvalues.values())
+        else:
+            sql = "SELECT %s FROM %s ORDER BY rowid asc" % (
+                ", ".join(retcols),
+                table
+            )
+            txn.execute(sql)
 
-        txn.execute(sql, keyvalues.values())
         return self.cursor_to_dict(txn)
 
     def _simple_update_one(self, table, keyvalues, updatevalues,
@@ -345,8 +521,8 @@ class SQLBaseStore(object):
         if updatevalues:
             update_sql = "UPDATE %s SET %s WHERE %s" % (
                 table,
-                ", ".join("%s = ?" % (k) for k in updatevalues),
-                " AND ".join("%s = ?" % (k) for k in keyvalues)
+                ", ".join("%s = ?" % (k,) for k in updatevalues),
+                " AND ".join("%s = ?" % (k,) for k in keyvalues)
             )
 
         def func(txn):
@@ -459,10 +635,26 @@ class SQLBaseStore(object):
         return [e for e in events if e]
 
     def _get_event_txn(self, txn, event_id, check_redacted=True,
-                       get_prev_content=False):
+                       get_prev_content=False, allow_rejected=False):
+
+        start_time = time.time() * 1000
+        update_counter = self._get_event_counters.update
+
+        cache = self._get_event_cache.setdefault(event_id, {})
+
+        try:
+            # Separate cache entries for each way to invoke _get_event_txn
+            return cache[(check_redacted, get_prev_content, allow_rejected)]
+        except KeyError:
+            pass
+        finally:
+            start_time = update_counter("event_cache", start_time)
+
         sql = (
-            "SELECT internal_metadata, json, r.event_id FROM event_json as e "
+            "SELECT e.internal_metadata, e.json, r.event_id, rej.reason "
+            "FROM event_json as e "
             "LEFT JOIN redactions as r ON e.event_id = r.redacts "
+            "LEFT JOIN rejections as rej on rej.event_id = e.event_id  "
             "WHERE e.event_id = ? "
             "LIMIT 1 "
         )
@@ -474,20 +666,35 @@ class SQLBaseStore(object):
         if not res:
             return None
 
-        internal_metadata, js, redacted = res
+        internal_metadata, js, redacted, rejected_reason = res
 
-        return self._get_event_from_row_txn(
-            txn, internal_metadata, js, redacted,
-            check_redacted=check_redacted,
-            get_prev_content=get_prev_content,
-        )
+        start_time = update_counter("select_event", start_time)
+
+        if allow_rejected or not rejected_reason:
+            result = self._get_event_from_row_txn(
+                txn, internal_metadata, js, redacted,
+                check_redacted=check_redacted,
+                get_prev_content=get_prev_content,
+            )
+            cache[(check_redacted, get_prev_content, allow_rejected)] = result
+            return result
+        else:
+            return None
 
     def _get_event_from_row_txn(self, txn, internal_metadata, js, redacted,
                                 check_redacted=True, get_prev_content=False):
+
+        start_time = time.time() * 1000
+        update_counter = self._get_event_counters.update
+
         d = json.loads(js)
+        start_time = update_counter("decode_json", start_time)
+
         internal_metadata = json.loads(internal_metadata)
+        start_time = update_counter("decode_internal", start_time)
 
         ev = FrozenEvent(d, internal_metadata_dict=internal_metadata)
+        start_time = update_counter("build_frozen_event", start_time)
 
         if check_redacted and redacted:
             ev = prune_event(ev)
@@ -503,6 +710,7 @@ class SQLBaseStore(object):
 
             if because:
                 ev.unsigned["redacted_because"] = because
+            start_time = update_counter("redact_event", start_time)
 
         if get_prev_content and "replaces_state" in ev.unsigned:
             prev = self._get_event_txn(
@@ -512,6 +720,7 @@ class SQLBaseStore(object):
             )
             if prev:
                 ev.unsigned["prev_content"] = prev.get_dict()["content"]
+            start_time = update_counter("get_prev_content", start_time)
 
         return ev
 
@@ -632,7 +841,7 @@ class JoinHelper(object):
         for table in self.tables:
             res += [f for f in table.fields if f not in res]
 
-        self.EntryType = collections.namedtuple("JoinHelperEntry", res)
+        self.EntryType = namedtuple("JoinHelperEntry", res)
 
     def get_fields(self, **prefixes):
         """Get a string representing a list of fields for use in SELECT
diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py
new file mode 100644
index 0000000000..e30265750a
--- /dev/null
+++ b/synapse/storage/appservice.py
@@ -0,0 +1,338 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import simplejson
+from simplejson import JSONDecodeError
+from twisted.internet import defer
+
+from synapse.api.constants import Membership
+from synapse.api.errors import StoreError
+from synapse.appservice import ApplicationService
+from synapse.storage.roommember import RoomsForUser
+from ._base import SQLBaseStore
+
+
+logger = logging.getLogger(__name__)
+
+
+def log_failure(failure):
+    logger.error("Failed to detect application services: %s", failure.value)
+    logger.error(failure.getTraceback())
+
+
+class ApplicationServiceStore(SQLBaseStore):
+
+    def __init__(self, hs):
+        super(ApplicationServiceStore, self).__init__(hs)
+        self.services_cache = []
+        self.cache_defer = self._populate_cache()
+        self.cache_defer.addErrback(log_failure)
+
+    @defer.inlineCallbacks
+    def unregister_app_service(self, token):
+        """Unregisters this service.
+
+        This removes all AS-specific regexes and the base URL. The token is the
+        only thing preserved for future registration attempts.
+        """
+        yield self.cache_defer  # make sure the cache is ready
+        yield self.runInteraction(
+            "unregister_app_service",
+            self._unregister_app_service_txn,
+            token,
+        )
+        # update cache TODO: Should this be in the txn?
+        for service in self.services_cache:
+            if service.token == token:
+                service.url = None
+                service.namespaces = None
+                service.hs_token = None
+
+    def _unregister_app_service_txn(self, txn, token):
+        # kill the url to prevent pushes
+        txn.execute(
+            "UPDATE application_services SET url=NULL WHERE token=?",
+            (token,)
+        )
+
+        # cleanup regex
+        as_id = self._get_as_id_txn(txn, token)
+        if not as_id:
+            logger.warning(
+                "unregister_app_service_txn: Failed to find as_id for token=",
+                token
+            )
+            return False
+
+        txn.execute(
+            "DELETE FROM application_services_regex WHERE as_id=?",
+            (as_id,)
+        )
+        return True
+
+    @defer.inlineCallbacks
+    def update_app_service(self, service):
+        """Update an application service, clobbering what was previously there.
+
+        Args:
+            service(ApplicationService): The updated service.
+        """
+        yield self.cache_defer  # make sure the cache is ready
+
+        # NB: There is no "insert" since we provide no public-facing API to
+        # allocate new ASes. It relies on the server admin inserting the AS
+        # token into the database manually.
+
+        if not service.token or not service.url:
+            raise StoreError(400, "Token and url must be specified.")
+
+        if not service.hs_token:
+            raise StoreError(500, "No HS token")
+
+        yield self.runInteraction(
+            "update_app_service",
+            self._update_app_service_txn,
+            service
+        )
+
+        # update cache TODO: Should this be in the txn?
+        for (index, cache_service) in enumerate(self.services_cache):
+            if service.token == cache_service.token:
+                self.services_cache[index] = service
+                logger.info("Updated: %s", service)
+                return
+        # new entry
+        self.services_cache.append(service)
+        logger.info("Updated(new): %s", service)
+
+    def _update_app_service_txn(self, txn, service):
+        as_id = self._get_as_id_txn(txn, service.token)
+        if not as_id:
+            logger.warning(
+                "update_app_service_txn: Failed to find as_id for token=",
+                service.token
+            )
+            return False
+
+        txn.execute(
+            "UPDATE application_services SET url=?, hs_token=?, sender=? "
+            "WHERE id=?",
+            (service.url, service.hs_token, service.sender, as_id,)
+        )
+        # cleanup regex
+        txn.execute(
+            "DELETE FROM application_services_regex WHERE as_id=?",
+            (as_id,)
+        )
+        for (ns_int, ns_str) in enumerate(ApplicationService.NS_LIST):
+            if ns_str in service.namespaces:
+                for regex_obj in service.namespaces[ns_str]:
+                    txn.execute(
+                        "INSERT INTO application_services_regex("
+                        "as_id, namespace, regex) values(?,?,?)",
+                        (as_id, ns_int, simplejson.dumps(regex_obj))
+                    )
+        return True
+
+    def _get_as_id_txn(self, txn, token):
+        cursor = txn.execute(
+            "SELECT id FROM application_services WHERE token=?",
+            (token,)
+        )
+        res = cursor.fetchone()
+        if res:
+            return res[0]
+
+    @defer.inlineCallbacks
+    def get_app_services(self):
+        yield self.cache_defer  # make sure the cache is ready
+        defer.returnValue(self.services_cache)
+
+    @defer.inlineCallbacks
+    def get_app_service_by_user_id(self, user_id):
+        """Retrieve an application service from their user ID.
+
+        Every application service has a particular user ID associated with it.
+        Nothing about the user ID itself indicates that it represents an
+        application service, so this function provides the mapping from a
+        user ID to an application service.
+
+        Args:
+            user_id(str): The user ID to see if it is an application service.
+        Returns:
+            synapse.appservice.ApplicationService or None.
+        """
+
+        yield self.cache_defer  # make sure the cache is ready
+
+        for service in self.services_cache:
+            if service.sender == user_id:
+                defer.returnValue(service)
+                return
+        defer.returnValue(None)
+
+    @defer.inlineCallbacks
+    def get_app_service_by_token(self, token, from_cache=True):
+        """Get the application service with the given appservice token.
+
+        Args:
+            token (str): The application service token.
+            from_cache (bool): True to get this service from the cache, False to
+                               check the database.
+        Raises:
+            StoreError if there was a problem retrieving this service.
+        """
+        yield self.cache_defer  # make sure the cache is ready
+
+        if from_cache:
+            for service in self.services_cache:
+                if service.token == token:
+                    defer.returnValue(service)
+                    return
+            defer.returnValue(None)
+
+        # TODO: The from_cache=False impl
+        # TODO: This should be JOINed with the application_services_regex table.
+
+    def get_app_service_rooms(self, service):
+        """Get a list of RoomsForUser for this application service.
+
+        Application services may be "interested" in lots of rooms depending on
+        the room ID, the room aliases, or the members in the room. This function
+        takes all of these into account and returns a list of RoomsForUser which
+        represent the entire list of room IDs that this application service
+        wants to know about.
+
+        Args:
+            service: The application service to get a room list for.
+        Returns:
+            A list of RoomsForUser.
+        """
+        return self.runInteraction(
+            "get_app_service_rooms",
+            self._get_app_service_rooms_txn,
+            service,
+        )
+
+    def _get_app_service_rooms_txn(self, txn, service):
+        # get all rooms matching the room ID regex.
+        room_entries = self._simple_select_list_txn(
+            txn=txn, table="rooms", keyvalues=None, retcols=["room_id"]
+        )
+        matching_room_list = set([
+            r["room_id"] for r in room_entries if
+            service.is_interested_in_room(r["room_id"])
+        ])
+
+        # resolve room IDs for matching room alias regex.
+        room_alias_mappings = self._simple_select_list_txn(
+            txn=txn, table="room_aliases", keyvalues=None,
+            retcols=["room_id", "room_alias"]
+        )
+        matching_room_list |= set([
+            r["room_id"] for r in room_alias_mappings if
+            service.is_interested_in_alias(r["room_alias"])
+        ])
+
+        # get all rooms for every user for this AS. This is scoped to users on
+        # this HS only.
+        user_list = self._simple_select_list_txn(
+            txn=txn, table="users", keyvalues=None, retcols=["name"]
+        )
+        user_list = [
+            u["name"] for u in user_list if
+            service.is_interested_in_user(u["name"])
+        ]
+        rooms_for_user_matching_user_id = set()  # RoomsForUser list
+        for user_id in user_list:
+            # FIXME: This assumes this store is linked with RoomMemberStore :(
+            rooms_for_user = self._get_rooms_for_user_where_membership_is_txn(
+                txn=txn,
+                user_id=user_id,
+                membership_list=[Membership.JOIN]
+            )
+            rooms_for_user_matching_user_id |= set(rooms_for_user)
+
+        # make RoomsForUser tuples for room ids and aliases which are not in
+        # the main rooms_for_user list, i.e. rooms which do not have any
+        # AS-registered users in them.
+        known_room_ids = [r.room_id for r in rooms_for_user_matching_user_id]
+        missing_rooms_for_user = [
+            RoomsForUser(r, service.sender, "join") for r in
+            matching_room_list if r not in known_room_ids
+        ]
+        rooms_for_user_matching_user_id |= set(missing_rooms_for_user)
+
+        return rooms_for_user_matching_user_id
+
+    @defer.inlineCallbacks
+    def _populate_cache(self):
+        """Populates the ApplicationServiceCache from the database."""
+        sql = ("SELECT * FROM application_services LEFT JOIN "
+               "application_services_regex ON application_services.id = "
+               "application_services_regex.as_id")
+        # SQL results in the form:
+        # [
+        #   {
+        #     'regex': "something",
+        #     'url': "something",
+        #     'namespace': enum,
+        #     'as_id': 0,
+        #     'token': "something",
+        #     'hs_token': "otherthing",
+        #     'id': 0
+        #   }
+        # ]
+        services = {}
+        results = yield self._execute_and_decode(sql)
+        for res in results:
+            as_token = res["token"]
+            if as_token not in services:
+                # add the service
+                services[as_token] = {
+                    "url": res["url"],
+                    "token": as_token,
+                    "hs_token": res["hs_token"],
+                    "sender": res["sender"],
+                    "namespaces": {
+                        ApplicationService.NS_USERS: [],
+                        ApplicationService.NS_ALIASES: [],
+                        ApplicationService.NS_ROOMS: []
+                    }
+                }
+            # add the namespace regex if one exists
+            ns_int = res["namespace"]
+            if ns_int is None:
+                continue
+            try:
+                services[as_token]["namespaces"][
+                    ApplicationService.NS_LIST[ns_int]].append(
+                    simplejson.loads(res["regex"])
+                )
+            except IndexError:
+                logger.error("Bad namespace enum '%s'. %s", ns_int, res)
+            except JSONDecodeError:
+                logger.error("Bad regex object '%s'", res["regex"])
+
+        # TODO: get the last successful txn id for each service
+        for service in services.values():
+            logger.info("Found application service: %s", service)
+            self.services_cache.append(ApplicationService(
+                token=service["token"],
+                url=service["url"],
+                namespaces=service["namespaces"],
+                hs_token=service["hs_token"],
+                sender=service["sender"]
+            ))
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index 0cbcdd1b55..2deda8ac50 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -55,17 +55,19 @@ class EventFederationStore(SQLBaseStore):
         results = set()
 
         base_sql = (
-            "SELECT auth_id FROM event_auth WHERE %s"
+            "SELECT auth_id FROM event_auth WHERE event_id = ?"
         )
 
         front = set(event_ids)
         while front:
-            sql = base_sql % (
-                " OR ".join(["event_id=?"] * len(front)),
-            )
+            new_front = set()
+            for f in front:
+                txn.execute(base_sql, (f,))
+                new_front.update([r[0] for r in txn.fetchall()])
+
+            new_front -= results
 
-            txn.execute(sql, list(front))
-            front = [r[0] for r in txn.fetchall()]
+            front = new_front
             results.update(front)
 
         return list(results)
@@ -379,3 +381,51 @@ class EventFederationStore(SQLBaseStore):
             event_results += new_front
 
         return self._get_events_txn(txn, event_results)
+
+    def get_missing_events(self, room_id, earliest_events, latest_events,
+                           limit, min_depth):
+        return self.runInteraction(
+            "get_missing_events",
+            self._get_missing_events,
+            room_id, earliest_events, latest_events, limit, min_depth
+        )
+
+    def _get_missing_events(self, txn, room_id, earliest_events, latest_events,
+                            limit, min_depth):
+
+        earliest_events = set(earliest_events)
+        front = set(latest_events) - earliest_events
+
+        event_results = set()
+
+        query = (
+            "SELECT prev_event_id FROM event_edges "
+            "WHERE room_id = ? AND event_id = ? AND is_state = 0 "
+            "LIMIT ?"
+        )
+
+        while front and len(event_results) < limit:
+            new_front = set()
+            for event_id in front:
+                txn.execute(
+                    query,
+                    (room_id, event_id, limit - len(event_results))
+                )
+
+                for e_id, in txn.fetchall():
+                    new_front.add(e_id)
+
+            new_front -= earliest_events
+            new_front -= event_results
+
+            front = new_front
+            event_results |= new_front
+
+        events = self._get_events_txn(txn, event_results)
+
+        events = sorted(
+            [ev for ev in events if ev.depth >= min_depth],
+            key=lambda e: e.depth,
+        )
+
+        return events[:limit]
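A hedged sketch of a call site (the room and event IDs are illustrative): given our current backward extremities and the events a remote server referenced, this walks event_edges backwards and returns the gap, shallowest events first:

    events = yield store.get_missing_events(
        room_id="!room:example.com",
        earliest_events=["$already_have:example.com"],
        latest_events=["$referenced_by_remote:example.com"],
        limit=10,
        min_depth=0,
    )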
diff --git a/synapse/storage/filtering.py b/synapse/storage/filtering.py
new file mode 100644
index 0000000000..457a11fd02
--- /dev/null
+++ b/synapse/storage/filtering.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from ._base import SQLBaseStore
+
+import simplejson as json
+
+
+class FilteringStore(SQLBaseStore):
+    @defer.inlineCallbacks
+    def get_user_filter(self, user_localpart, filter_id):
+        def_json = yield self._simple_select_one_onecol(
+            table="user_filters",
+            keyvalues={
+                "user_id": user_localpart,
+                "filter_id": filter_id,
+            },
+            retcol="filter_json",
+            allow_none=False,
+        )
+
+        defer.returnValue(json.loads(def_json))
+
+    def add_user_filter(self, user_localpart, user_filter):
+        def_json = json.dumps(user_filter)
+
+        # Need an atomic transaction to SELECT the maximal ID so far then
+        # INSERT a new one
+        def _do_txn(txn):
+            sql = (
+                "SELECT MAX(filter_id) FROM user_filters "
+                "WHERE user_id = ?"
+            )
+            txn.execute(sql, (user_localpart,))
+            max_id = txn.fetchone()[0]
+            if max_id is None:
+                filter_id = 0
+            else:
+                filter_id = max_id + 1
+
+            sql = (
+                "INSERT INTO user_filters (user_id, filter_id, filter_json)"
+                "VALUES(?, ?, ?)"
+            )
+            txn.execute(sql, (user_localpart, filter_id, def_json))
+
+            return filter_id
+
+        return self.runInteraction("add_user_filter", _do_txn)
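A round-trip sketch through the two methods above (the user localpart and filter contents are illustrative):

    @defer.inlineCallbacks
    def example(store):
        # allocates the next per-user filter_id atomically and stores the JSON
        filter_id = yield store.add_user_filter(
            "alice", {"room": {"timeline": {"limit": 10}}}
        )
        definition = yield store.get_user_filter("alice", filter_id)
        # definition == {"room": {"timeline": {"limit": 10}}}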
diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py
new file mode 100644
index 0000000000..bbf322cc84
--- /dev/null
+++ b/synapse/storage/push_rule.py
@@ -0,0 +1,264 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+
+from ._base import SQLBaseStore, Table
+from twisted.internet import defer
+
+import logging
+import copy
+import simplejson as json
+
+logger = logging.getLogger(__name__)
+
+
+class PushRuleStore(SQLBaseStore):
+    @defer.inlineCallbacks
+    def get_push_rules_for_user(self, user_name):
+        sql = (
+            "SELECT "+",".join(PushRuleTable.fields)+" "
+            "FROM "+PushRuleTable.table_name+" "
+            "WHERE user_name = ? "
+            "ORDER BY priority_class DESC, priority DESC"
+        )
+        rows = yield self._execute(None, sql, user_name)
+
+        dicts = []
+        for r in rows:
+            d = {}
+            for i, f in enumerate(PushRuleTable.fields):
+                d[f] = r[i]
+            dicts.append(d)
+
+        defer.returnValue(dicts)
+
+    @defer.inlineCallbacks
+    def get_push_rules_enabled_for_user(self, user_name):
+        results = yield self._simple_select_list(
+            PushRuleEnableTable.table_name,
+            {'user_name': user_name},
+            PushRuleEnableTable.fields
+        )
+        defer.returnValue(
+            {r['rule_id']: False if r['enabled'] == 0 else True for r in results}
+        )
+
+    @defer.inlineCallbacks
+    def get_push_rule_enabled_by_user_rule_id(self, user_name, rule_id):
+        results = yield self._simple_select_list(
+            PushRuleEnableTable.table_name,
+            {'user_name': user_name, 'rule_id': rule_id},
+            ['enabled']
+        )
+        if not results:
+            defer.returnValue(True)
+        defer.returnValue(bool(results[0]['enabled']))
+
+    @defer.inlineCallbacks
+    def add_push_rule(self, before, after, **kwargs):
+        vals = copy.copy(kwargs)
+        if 'conditions' in vals:
+            vals['conditions'] = json.dumps(vals['conditions'])
+        if 'actions' in vals:
+            vals['actions'] = json.dumps(vals['actions'])
+        # we could check the rest of the keys are valid column names
+        # but sqlite will do that anyway so I think it's just pointless.
+        if 'id' in vals:
+            del vals['id']
+
+        if before or after:
+            ret = yield self.runInteraction(
+                "_add_push_rule_relative_txn",
+                self._add_push_rule_relative_txn,
+                before=before,
+                after=after,
+                **vals
+            )
+            defer.returnValue(ret)
+        else:
+            ret = yield self.runInteraction(
+                "_add_push_rule_highest_priority_txn",
+                self._add_push_rule_highest_priority_txn,
+                **vals
+            )
+            defer.returnValue(ret)
+
+    def _add_push_rule_relative_txn(self, txn, user_name, **kwargs):
+        after = None
+        relative_to_rule = None
+        if 'after' in kwargs and kwargs['after']:
+            after = kwargs['after']
+            relative_to_rule = after
+        if 'before' in kwargs and kwargs['before']:
+            relative_to_rule = kwargs['before']
+
+        # get the priority of the rule we're inserting after/before
+        sql = (
+            "SELECT priority_class, priority FROM ? "
+            "WHERE user_name = ? and rule_id = ?" % (PushRuleTable.table_name,)
+        )
+        txn.execute(sql, (user_name, relative_to_rule))
+        res = txn.fetchall()
+        if not res:
+            raise RuleNotFoundException(
+                "before/after rule not found: %s" % (relative_to_rule,)
+            )
+        priority_class, base_rule_priority = res[0]
+
+        if 'priority_class' in kwargs and kwargs['priority_class'] != priority_class:
+            raise InconsistentRuleException(
+                "Given priority class does not match class of relative rule"
+            )
+
+        new_rule = copy.copy(kwargs)
+        if 'before' in new_rule:
+            del new_rule['before']
+        if 'after' in new_rule:
+            del new_rule['after']
+        new_rule['priority_class'] = priority_class
+        new_rule['user_name'] = user_name
+
+        # check if the priority before/after is free
+        new_rule_priority = base_rule_priority
+        if after:
+            new_rule_priority -= 1
+        else:
+            new_rule_priority += 1
+
+        new_rule['priority'] = new_rule_priority
+
+        sql = (
+            "SELECT COUNT(*) FROM " + PushRuleTable.table_name +
+            " WHERE user_name = ? AND priority_class = ? AND priority = ?"
+        )
+        txn.execute(sql, (user_name, priority_class, new_rule_priority))
+        res = txn.fetchall()
+        num_conflicting = res[0][0]
+
+        # if there are conflicting rules, bump everything
+        if num_conflicting:
+            sql = "UPDATE "+PushRuleTable.table_name+" SET priority = priority "
+            if after:
+                sql += "-1"
+            else:
+                sql += "+1"
+            sql += " WHERE user_name = ? AND priority_class = ? AND priority "
+            if after:
+                sql += "<= ?"
+            else:
+                sql += ">= ?"
+
+            txn.execute(sql, (user_name, priority_class, new_rule_priority))
+
+        # now insert the new rule
+        sql = "INSERT OR REPLACE INTO "+PushRuleTable.table_name+" ("
+        sql += ",".join(new_rule.keys())+") VALUES ("
+        sql += ", ".join(["?" for _ in new_rule.keys()])+")"
+
+        txn.execute(sql, new_rule.values())
+
+    def _add_push_rule_highest_priority_txn(self, txn, user_name,
+                                            priority_class, **kwargs):
+        # find the highest priority rule in that class
+        sql = (
+            "SELECT COUNT(*), MAX(priority) FROM " + PushRuleTable.table_name +
+            " WHERE user_name = ? and priority_class = ?"
+        )
+        txn.execute(sql, (user_name, priority_class))
+        res = txn.fetchall()
+        (how_many, highest_prio) = res[0]
+
+        new_prio = 0
+        if how_many > 0:
+            new_prio = highest_prio + 1
+
+        # and insert the new rule
+        new_rule = copy.copy(kwargs)
+        if 'id' in new_rule:
+            del new_rule['id']
+        new_rule['user_name'] = user_name
+        new_rule['priority_class'] = priority_class
+        new_rule['priority'] = new_prio
+
+        sql = "INSERT OR REPLACE INTO "+PushRuleTable.table_name+" ("
+        sql += ",".join(new_rule.keys())+") VALUES ("
+        sql += ", ".join(["?" for _ in new_rule.keys()])+")"
+
+        txn.execute(sql, new_rule.values())
+
+    @defer.inlineCallbacks
+    def delete_push_rule(self, user_name, rule_id):
+        """
+        Delete a push rule. Args specify the row to be deleted and can be
+        any of the columns in the push_rule table, but below are the
+        standard ones
+
+        Args:
+            user_name (str): The matrix ID of the push rule owner
+            rule_id (str): The rule_id of the rule to be deleted
+        """
+        yield self._simple_delete_one(
+            PushRuleTable.table_name,
+            {'user_name': user_name, 'rule_id': rule_id}
+        )
+
+    @defer.inlineCallbacks
+    def set_push_rule_enabled(self, user_name, rule_id, enabled):
+        if enabled:
+            yield self._simple_delete_one(
+                PushRuleEnableTable.table_name,
+                {'user_name': user_name, 'rule_id': rule_id}
+            )
+        else:
+            yield self._simple_upsert(
+                PushRuleEnableTable.table_name,
+                {'user_name': user_name, 'rule_id': rule_id},
+                {'enabled': False}
+            )
+
+
+class RuleNotFoundException(Exception):
+    pass
+
+
+class InconsistentRuleException(Exception):
+    pass
+
+
+class PushRuleTable(Table):
+    table_name = "push_rules"
+
+    fields = [
+        "id",
+        "user_name",
+        "rule_id",
+        "priority_class",
+        "priority",
+        "conditions",
+        "actions",
+    ]
+
+    EntryType = collections.namedtuple("PushRuleEntry", fields)
+
+
+class PushRuleEnableTable(Table):
+    table_name = "push_rules_enable"
+
+    fields = [
+        "user_name",
+        "rule_id",
+        "enabled"
+    ]
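
A note on the relative-insert logic in _add_push_rule_relative_txn above: the new rule inherits the anchor rule's priority_class and takes a priority one step beyond it ('before' steps up, 'after' steps down), shifting any conflicting rules in the same direction first. A minimal standalone sketch of the 'before' case, assuming a plain sqlite3 connection and the push_rules table from delta 12 (insert_before is illustrative, not part of this patch):

    import sqlite3

    def insert_before(conn, user_name, anchor_rule_id, new_rule_id,
                      conditions, actions):
        # Find the anchor rule's slot, as _add_push_rule_relative_txn does.
        row = conn.execute(
            "SELECT priority_class, priority FROM push_rules"
            " WHERE user_name = ? AND rule_id = ?",
            (user_name, anchor_rule_id),
        ).fetchone()
        if row is None:
            raise ValueError("before/after rule not found")
        priority_class, priority = row
        new_priority = priority + 1  # 'before' means a higher priority number

        # Shift any rules already at or above the target slot up by one
        # (a no-op when the slot is free).
        conn.execute(
            "UPDATE push_rules SET priority = priority + 1"
            " WHERE user_name = ? AND priority_class = ? AND priority >= ?",
            (user_name, priority_class, new_priority),
        )
        conn.execute(
            "INSERT OR REPLACE INTO push_rules"
            " (user_name, rule_id, priority_class, priority, conditions, actions)"
            " VALUES (?, ?, ?, ?, ?, ?)",
            (user_name, new_rule_id, priority_class, new_priority,
             conditions, actions),
        )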
diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py
new file mode 100644
index 0000000000..6622b4d18a
--- /dev/null
+++ b/synapse/storage/pusher.py
@@ -0,0 +1,173 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+
+from ._base import SQLBaseStore, Table
+from twisted.internet import defer
+
+from synapse.api.errors import StoreError
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class PusherStore(SQLBaseStore):
+    @defer.inlineCallbacks
+    def get_pushers_by_app_id_and_pushkey(self, app_id_and_pushkey):
+        sql = (
+            "SELECT id, user_name, kind, profile_tag, app_id,"
+            "app_display_name, device_display_name, pushkey, ts, data, "
+            "last_token, last_success, failing_since "
+            "FROM pushers "
+            "WHERE app_id = ? AND pushkey = ?"
+        )
+
+        rows = yield self._execute(
+            None, sql, app_id_and_pushkey[0], app_id_and_pushkey[1]
+        )
+
+        ret = [
+            {
+                "id": r[0],
+                "user_name": r[1],
+                "kind": r[2],
+                "profile_tag": r[3],
+                "app_id": r[4],
+                "app_display_name": r[5],
+                "device_display_name": r[6],
+                "pushkey": r[7],
+                "pushkey_ts": r[8],
+                "data": r[9],
+                "last_token": r[10],
+                "last_success": r[11],
+                "failing_since": r[12]
+            }
+            for r in rows
+        ]
+
+        # NB: raises IndexError if no pusher matches the (app_id, pushkey) pair
+        defer.returnValue(ret[0])
+
+    @defer.inlineCallbacks
+    def get_all_pushers(self):
+        sql = (
+            "SELECT id, user_name, kind, profile_tag, app_id,"
+            "app_display_name, device_display_name, pushkey, ts, data, "
+            "last_token, last_success, failing_since "
+            "FROM pushers"
+        )
+
+        rows = yield self._execute(None, sql)
+
+        ret = [
+            {
+                "id": r[0],
+                "user_name": r[1],
+                "kind": r[2],
+                "profile_tag": r[3],
+                "app_id": r[4],
+                "app_display_name": r[5],
+                "device_display_name": r[6],
+                "pushkey": r[7],
+                "pushkey_ts": r[8],
+                "data": r[9],
+                "last_token": r[10],
+                "last_success": r[11],
+                "failing_since": r[12]
+            }
+            for r in rows
+        ]
+
+        defer.returnValue(ret)
+
+    @defer.inlineCallbacks
+    def add_pusher(self, user_name, profile_tag, kind, app_id,
+                   app_display_name, device_display_name,
+                   pushkey, pushkey_ts, lang, data):
+        try:
+            yield self._simple_upsert(
+                PushersTable.table_name,
+                dict(
+                    app_id=app_id,
+                    pushkey=pushkey,
+                ),
+                dict(
+                    user_name=user_name,
+                    kind=kind,
+                    profile_tag=profile_tag,
+                    app_display_name=app_display_name,
+                    device_display_name=device_display_name,
+                    ts=pushkey_ts,
+                    lang=lang,
+                    data=data
+                ))
+        except Exception as e:
+            logger.error("create_pusher with failed: %s", e)
+            raise StoreError(500, "Problem creating pusher.")
+
+    @defer.inlineCallbacks
+    def delete_pusher_by_app_id_pushkey(self, app_id, pushkey):
+        yield self._simple_delete_one(
+            PushersTable.table_name,
+            dict(app_id=app_id, pushkey=pushkey)
+        )
+
+    @defer.inlineCallbacks
+    def update_pusher_last_token(self, app_id, pushkey, last_token):
+        yield self._simple_update_one(
+            PushersTable.table_name,
+            {'app_id': app_id, 'pushkey': pushkey},
+            {'last_token': last_token}
+        )
+
+    @defer.inlineCallbacks
+    def update_pusher_last_token_and_success(self, app_id, pushkey,
+                                             last_token, last_success):
+        yield self._simple_update_one(
+            PushersTable.table_name,
+            {'app_id': app_id, 'pushkey': pushkey},
+            {'last_token': last_token, 'last_success': last_success}
+        )
+
+    @defer.inlineCallbacks
+    def update_pusher_failing_since(self, app_id, pushkey, failing_since):
+        yield self._simple_update_one(
+            PushersTable.table_name,
+            {'app_id': app_id, 'pushkey': pushkey},
+            {'failing_since': failing_since}
+        )
+
+
+class PushersTable(Table):
+    table_name = "pushers"
+
+    fields = [
+        "id",
+        "user_name",
+        "kind",
+        "profile_tag",
+        "app_id",
+        "app_display_name",
+        "device_display_name",
+        "pushkey",
+        "pushkey_ts",
+        "data",
+        "last_token",
+        "last_success",
+        "failing_since"
+    ]
+
+    EntryType = collections.namedtuple("PusherEntry", fields)
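
For reference, both SELECTs above hydrate pusher rows positionally into dicts, with the ts column surfacing as pushkey_ts. A usage sketch for the store API (all values hypothetical), assuming an initialised PusherStore:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def register_example_pusher(store):
        # Upserts on (app_id, pushkey), per the _simple_upsert key above.
        yield store.add_pusher(
            user_name="@alice:example.com",
            profile_tag="",
            kind="http",
            app_id="com.example.app",
            app_display_name="Example App",
            device_display_name="Alice's phone",
            pushkey="abc123",
            pushkey_ts=1420070400000,
            lang="en",
            data='{"url": "https://push.example.com/notify"}',
        )
        pushers = yield store.get_all_pushers()
        defer.returnValue(pushers)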
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 75dffa4db2..029b07cc66 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -122,7 +122,8 @@ class RegistrationStore(SQLBaseStore):
 
     def _query_for_auth(self, txn, token):
         sql = (
-            "SELECT users.name, users.admin, access_tokens.device_id"
+            "SELECT users.name, users.admin,"
+            " access_tokens.device_id, access_tokens.id as token_id"
             " FROM users"
             " INNER JOIN access_tokens on users.id = access_tokens.user_id"
             " WHERE token = ?"
diff --git a/synapse/storage/rejections.py b/synapse/storage/rejections.py
new file mode 100644
index 0000000000..4e1a9a2783
--- /dev/null
+++ b/synapse/storage/rejections.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import SQLBaseStore
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class RejectionsStore(SQLBaseStore):
+    def _store_rejections_txn(self, txn, event_id, reason):
+        self._simple_insert_txn(
+            txn,
+            table="rejections",
+            values={
+                "event_id": event_id,
+                "reason": reason,
+                "last_check": self._clock.time_msec(),
+            }
+        )
+
+    def get_rejection_reason(self, event_id):
+        return self._simple_select_one_onecol(
+            table="rejections",
+            retcol="reason",
+            keyvalues={
+                "event_id": event_id,
+            },
+            allow_none=True,
+        )
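
The rejections table lets the server distinguish "we saw this event and rejected it" from "we never saw it". _store_rejections_txn runs inside an existing transaction, while get_rejection_reason is a plain read returning None for unknown events. A small sketch of the read side, assuming an initialised store:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def describe_event(store, event_id):
        # reason is None when the event was never rejected.
        reason = yield store.get_rejection_reason(event_id)
        defer.returnValue("rejected: %s" % reason if reason else "not rejected")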
diff --git a/synapse/storage/room.py b/synapse/storage/room.py
index 6542f8e4f8..750b17a45f 100644
--- a/synapse/storage/room.py
+++ b/synapse/storage/room.py
@@ -82,38 +82,45 @@ class RoomStore(SQLBaseStore):
             "topic" key if one is set, and a "name" key if one is set
         """
 
-        topic_subquery = (
-            "SELECT topics.event_id as event_id, "
-            "topics.room_id as room_id, topic "
-            "FROM topics "
-            "INNER JOIN current_state_events as c "
-            "ON c.event_id = topics.event_id "
-        )
+        def f(txn):
+            topic_subquery = (
+                "SELECT topics.event_id as event_id, "
+                "topics.room_id as room_id, topic "
+                "FROM topics "
+                "INNER JOIN current_state_events as c "
+                "ON c.event_id = topics.event_id "
+            )
 
-        name_subquery = (
-            "SELECT room_names.event_id as event_id, "
-            "room_names.room_id as room_id, name "
-            "FROM room_names "
-            "INNER JOIN current_state_events as c "
-            "ON c.event_id = room_names.event_id "
-        )
+            name_subquery = (
+                "SELECT room_names.event_id as event_id, "
+                "room_names.room_id as room_id, name "
+                "FROM room_names "
+                "INNER JOIN current_state_events as c "
+                "ON c.event_id = room_names.event_id "
+            )
 
-        # We use non printing ascii character US () as a seperator
-        sql = (
-            "SELECT r.room_id, n.name, t.topic, "
-            "group_concat(a.room_alias, '') "
-            "FROM rooms AS r "
-            "LEFT JOIN (%(topic)s) AS t ON t.room_id = r.room_id "
-            "LEFT JOIN (%(name)s) AS n ON n.room_id = r.room_id "
-            "INNER JOIN room_aliases AS a ON a.room_id = r.room_id "
-            "WHERE r.is_public = ? "
-            "GROUP BY r.room_id "
-        ) % {
-            "topic": topic_subquery,
-            "name": name_subquery,
-        }
-
-        rows = yield self._execute(None, sql, is_public)
+            # We use the non-printing ascii character US (0x1f) as a separator
+            sql = (
+                "SELECT r.room_id, n.name, t.topic, "
+                "group_concat(a.room_alias, '') "
+                "FROM rooms AS r "
+                "LEFT JOIN (%(topic)s) AS t ON t.room_id = r.room_id "
+                "LEFT JOIN (%(name)s) AS n ON n.room_id = r.room_id "
+                "INNER JOIN room_aliases AS a ON a.room_id = r.room_id "
+                "WHERE r.is_public = ? "
+                "GROUP BY r.room_id "
+            ) % {
+                "topic": topic_subquery,
+                "name": name_subquery,
+            }
+
+            c = txn.execute(sql, (is_public,))
+
+            return c.fetchall()
+
+        rows = yield self.runInteraction(
+            "get_rooms", f
+        )
 
         ret = [
             {
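
Wrapping the query in runInteraction names the transaction for logging and keeps cursor handling inside the transaction runner rather than the bare _execute(None, ...) path. The aliases column still arrives as one group_concat'd string; a sketch of unpacking a result row, assuming the separator is the ASCII unit separator (0x1f) mentioned in the comment:

    UNIT_SEP = "\x1f"  # non-printing US character used in the group_concat above

    def parse_room_row(row):
        # row matches the SELECT: (room_id, name, topic, concatenated aliases)
        room_id, name, topic, aliases = row
        return {
            "room_id": room_id,
            "name": name,
            "topic": topic,
            "aliases": aliases.split(UNIT_SEP) if aliases else [],
        }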
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index e59e65529b..65ffb4627f 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -17,9 +17,10 @@ from twisted.internet import defer
 
 from collections import namedtuple
 
-from ._base import SQLBaseStore
+from ._base import SQLBaseStore, cached
 
 from synapse.api.constants import Membership
+from synapse.types import UserID
 
 import logging
 
@@ -39,7 +40,7 @@ class RoomMemberStore(SQLBaseStore):
         """
         try:
             target_user_id = event.state_key
-            domain = self.hs.parse_userid(target_user_id).domain
+            domain = UserID.from_string(target_user_id).domain
         except:
             logger.exception(
                 "Failed to parse target_user_id=%s", target_user_id
@@ -84,7 +85,7 @@ class RoomMemberStore(SQLBaseStore):
             for e in member_events:
                 try:
                     joined_domains.add(
-                        self.hs.parse_userid(e.state_key).domain
+                        UserID.from_string(e.state_key).domain
                     )
                 except:
                     # FIXME: How do we deal with invalid user ids in the db?
@@ -97,6 +98,8 @@ class RoomMemberStore(SQLBaseStore):
 
                 txn.execute(sql, (event.room_id, domain))
 
+        self.get_rooms_for_user.invalidate(target_user_id)
+
     @defer.inlineCallbacks
     def get_room_member(self, user_id, room_id):
         """Retrieve the current state of a room member.
@@ -177,6 +180,14 @@ class RoomMemberStore(SQLBaseStore):
         if not membership_list:
             return defer.succeed(None)
 
+        return self.runInteraction(
+            "get_rooms_for_user_where_membership_is",
+            self._get_rooms_for_user_where_membership_is_txn,
+            user_id, membership_list
+        )
+
+    def _get_rooms_for_user_where_membership_is_txn(self, txn, user_id,
+                                                    membership_list):
         where_clause = "user_id = ? AND (%s)" % (
             " OR ".join(["membership = ?" for _ in membership_list]),
         )
@@ -184,24 +195,18 @@ class RoomMemberStore(SQLBaseStore):
         args = [user_id]
         args.extend(membership_list)
 
-        def f(txn):
-            sql = (
-                "SELECT m.room_id, m.sender, m.membership"
-                " FROM room_memberships as m"
-                " INNER JOIN current_state_events as c"
-                " ON m.event_id = c.event_id"
-                " WHERE %s"
-            ) % (where_clause,)
-
-            txn.execute(sql, args)
-            return [
-                RoomsForUser(**r) for r in self.cursor_to_dict(txn)
-            ]
+        sql = (
+            "SELECT m.room_id, m.sender, m.membership"
+            " FROM room_memberships as m"
+            " INNER JOIN current_state_events as c"
+            " ON m.event_id = c.event_id"
+            " WHERE %s"
+        ) % (where_clause,)
 
-        return self.runInteraction(
-            "get_rooms_for_user_where_membership_is",
-            f
-        )
+        txn.execute(sql, args)
+        return [
+            RoomsForUser(**r) for r in self.cursor_to_dict(txn)
+        ]
 
     def get_joined_hosts_for_room(self, room_id):
         return self._simple_select_onecol(
@@ -239,28 +244,32 @@ class RoomMemberStore(SQLBaseStore):
         results = self._parse_events_txn(txn, rows)
         return results
 
+    @cached()
+    def get_rooms_for_user(self, user_id):
+        return self.get_rooms_for_user_where_membership_is(
+            user_id, membership_list=[Membership.JOIN],
+        )
+
+    @defer.inlineCallbacks
     def user_rooms_intersect(self, user_id_list):
         """ Checks whether all the users whose IDs are given in a list share a
         room.
+
+        This is a "hot path" function that's called a lot, e.g. by presence for
+        generating the event stream. As such, it is implemented locally by
+        wrapping logic around heavily-cached database queries.
         """
-        def interaction(txn):
-            user_list_clause = " OR ".join(["m.user_id = ?"] * len(user_id_list))
-            sql = (
-                "SELECT m.room_id FROM room_memberships as m "
-                "INNER JOIN current_state_events as c "
-                "ON m.event_id = c.event_id "
-                "WHERE m.membership = 'join' "
-                "AND (%(clause)s) "
-                # TODO(paul): We've got duplicate rows in the database somewhere
-                #   so we have to DISTINCT m.user_id here
-                "GROUP BY m.room_id HAVING COUNT(DISTINCT m.user_id) = ?"
-            ) % {"clause": user_list_clause}
+        if len(user_id_list) < 2:
+            defer.returnValue(True)
+
+        deferreds = [self.get_rooms_for_user(u) for u in user_id_list]
 
-            args = list(user_id_list)
-            args.append(len(user_id_list))
+        results = yield defer.DeferredList(deferreds, consumeErrors=True)
 
-            txn.execute(sql, args)
+        # A list of sets of strings giving room IDs for each user
+        room_id_lists = [set([r.room_id for r in result[1]]) for result in results]
 
-            return len(txn.fetchall()) > 0
+        # There isn't a set_intersection(*list_of_sets) builtin, so pop the
+        # first set off and intersect it with the rest
+        ret = len(room_id_lists.pop(0).intersection(*room_id_lists)) > 0
 
-        return self.runInteraction("user_rooms_intersect", interaction)
+        defer.returnValue(ret)
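
The rewrite of user_rooms_intersect answers "do all these users share at least one room?" from per-user cached room lists instead of a JOIN over room_memberships. Stripped of the Deferred plumbing, it is just a set intersection; a standalone sketch with plain lists of room IDs (hypothetical data):

    def rooms_intersect(room_id_lists):
        # One iterable of room IDs per user; vacuously true for fewer than two.
        sets = [set(rooms) for rooms in room_id_lists]
        if len(sets) < 2:
            return True
        first = sets.pop(0)
        return len(first.intersection(*sets)) > 0

    assert rooms_intersect([["!a:hs", "!b:hs"], ["!b:hs", "!c:hs"]])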
diff --git a/synapse/storage/schema/delta/v11.sql b/synapse/storage/schema/delta/11/v11.sql
index 313592221b..313592221b 100644
--- a/synapse/storage/schema/delta/v11.sql
+++ b/synapse/storage/schema/delta/11/v11.sql
diff --git a/synapse/storage/schema/delta/12/v12.sql b/synapse/storage/schema/delta/12/v12.sql
new file mode 100644
index 0000000000..b87ef1fe79
--- /dev/null
+++ b/synapse/storage/schema/delta/12/v12.sql
@@ -0,0 +1,67 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE IF NOT EXISTS rejections(
+    event_id TEXT NOT NULL,
+    reason TEXT NOT NULL,
+    last_check TEXT NOT NULL,
+    CONSTRAINT ev_id UNIQUE (event_id) ON CONFLICT REPLACE
+);
+
+-- Push notification endpoints that users have configured
+CREATE TABLE IF NOT EXISTS pushers (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  user_name TEXT NOT NULL,
+  profile_tag varchar(32) NOT NULL,
+  kind varchar(8) NOT NULL,
+  app_id varchar(64) NOT NULL,
+  app_display_name varchar(64) NOT NULL,
+  device_display_name varchar(128) NOT NULL,
+  pushkey blob NOT NULL,
+  ts BIGINT NOT NULL,
+  lang varchar(8),
+  data blob,
+  last_token TEXT,
+  last_success BIGINT,
+  failing_since BIGINT,
+  FOREIGN KEY(user_name) REFERENCES users(name),
+  UNIQUE (app_id, pushkey)
+);
+
+CREATE TABLE IF NOT EXISTS push_rules (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  user_name TEXT NOT NULL,
+  rule_id TEXT NOT NULL,
+  priority_class TINYINT NOT NULL,
+  priority INTEGER NOT NULL DEFAULT 0,
+  conditions TEXT NOT NULL,
+  actions TEXT NOT NULL,
+  UNIQUE(user_name, rule_id)
+);
+
+CREATE INDEX IF NOT EXISTS push_rules_user_name on push_rules (user_name);
+
+CREATE TABLE IF NOT EXISTS user_filters(
+  user_id TEXT,
+  filter_id INTEGER,
+  filter_json TEXT,
+  FOREIGN KEY(user_id) REFERENCES users(id)
+);
+
+CREATE INDEX IF NOT EXISTS user_filters_by_user_id_filter_id ON user_filters(
+  user_id, filter_id
+);
+
+PRAGMA user_version = 12;
diff --git a/synapse/storage/schema/delta/v3.sql b/synapse/storage/schema/delta/13/v13.sql
index c67e38ff52..e491ad5aec 100644
--- a/synapse/storage/schema/delta/v3.sql
+++ b/synapse/storage/schema/delta/13/v13.sql
@@ -1,4 +1,4 @@
-/* Copyright 2014, 2015 OpenMarket Ltd
+/* Copyright 2015 OpenMarket Ltd
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,15 +13,22 @@
  * limitations under the License.
  */
 
+CREATE TABLE IF NOT EXISTS application_services(
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    url TEXT,
+    token TEXT,
+    hs_token TEXT,
+    sender TEXT,
+    UNIQUE(token) ON CONFLICT ROLLBACK
+);
 
-CREATE INDEX IF NOT EXISTS room_aliases_alias ON room_aliases(room_alias);
-CREATE INDEX IF NOT EXISTS room_aliases_id ON room_aliases(room_id);
+CREATE TABLE IF NOT EXISTS application_services_regex(
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    as_id INTEGER NOT NULL,
+    namespace INTEGER,  /* enum[room_id|room_alias|user_id] */
+    regex TEXT,
+    FOREIGN KEY(as_id) REFERENCES application_services(id)
+);
 
 
-CREATE INDEX IF NOT EXISTS room_alias_servers_alias ON room_alias_servers(room_alias);
 
-DELETE FROM room_aliases WHERE rowid NOT IN (SELECT max(rowid) FROM room_aliases GROUP BY room_alias, room_id);
-
-CREATE UNIQUE INDEX IF NOT EXISTS room_aliases_uniq ON room_aliases(room_alias, room_id);
-
-PRAGMA user_version = 3;
diff --git a/synapse/storage/schema/delta/14/upgrade_appservice_db.py b/synapse/storage/schema/delta/14/upgrade_appservice_db.py
new file mode 100644
index 0000000000..847b1c5b89
--- /dev/null
+++ b/synapse/storage/schema/delta/14/upgrade_appservice_db.py
@@ -0,0 +1,23 @@
+import json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def run_upgrade(cur):
+    cur.execute("SELECT id, regex FROM application_services_regex")
+    for row in cur.fetchall():
+        try:
+            logger.debug("Checking %s..." % row[0])
+            json.loads(row[1])
+        except ValueError:
+            # row isn't in json, make it so.
+            string_regex = row[1]
+            new_regex = json.dumps({
+                "regex": string_regex,
+                "exclusive": True
+            })
+            cur.execute(
+                "UPDATE application_services_regex SET regex=? WHERE id=?",
+                (new_regex, row[0])
+            )
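
run_upgrade is idempotent: rows whose regex column already parses as JSON are left alone, and legacy plain-string rows are wrapped in a {"regex": ..., "exclusive": True} object. The same transform in isolation (to_regex_object is illustrative, not part of the migration):

    import json

    def to_regex_object(value):
        # Already-migrated rows are valid JSON; pass them through unchanged.
        try:
            json.loads(value)
            return value
        except ValueError:
            # Legacy rows hold a bare regex string; wrap it.
            return json.dumps({"regex": value, "exclusive": True})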
diff --git a/synapse/storage/schema/delta/14/v14.sql b/synapse/storage/schema/delta/14/v14.sql
new file mode 100644
index 0000000000..0212726448
--- /dev/null
+++ b/synapse/storage/schema/delta/14/v14.sql
@@ -0,0 +1,9 @@
+CREATE TABLE IF NOT EXISTS push_rules_enable (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  user_name TEXT NOT NULL,
+  rule_id TEXT NOT NULL,
+  enabled TINYINT,
+  UNIQUE(user_name, rule_id)
+);
+
+CREATE INDEX IF NOT EXISTS push_rules_enable_user_name on push_rules_enable (user_name);
diff --git a/synapse/storage/schema/delta/v2.sql b/synapse/storage/schema/delta/v2.sql
deleted file mode 100644
index f740f6dd5d..0000000000
--- a/synapse/storage/schema/delta/v2.sql
+++ /dev/null
@@ -1,168 +0,0 @@
-/* Copyright 2014, 2015 OpenMarket Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-CREATE TABLE IF NOT EXISTS events(
-    stream_ordering INTEGER PRIMARY KEY AUTOINCREMENT,
-    topological_ordering INTEGER NOT NULL,
-    event_id TEXT NOT NULL,
-    type TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    content TEXT NOT NULL,
-    unrecognized_keys TEXT,
-    processed BOOL NOT NULL,
-    outlier BOOL NOT NULL,
-    CONSTRAINT ev_uniq UNIQUE (event_id)
-);
-
-CREATE INDEX IF NOT EXISTS events_event_id ON events (event_id);
-CREATE INDEX IF NOT EXISTS events_stream_ordering ON events (stream_ordering);
-CREATE INDEX IF NOT EXISTS events_topological_ordering ON events (topological_ordering);
-CREATE INDEX IF NOT EXISTS events_room_id ON events (room_id);
-
-CREATE TABLE IF NOT EXISTS state_events(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    type TEXT NOT NULL,
-    state_key TEXT NOT NULL,
-    prev_state TEXT
-);
-
-CREATE UNIQUE INDEX IF NOT EXISTS state_events_event_id ON state_events (event_id);
-CREATE INDEX IF NOT EXISTS state_events_room_id ON state_events (room_id);
-CREATE INDEX IF NOT EXISTS state_events_type ON state_events (type);
-CREATE INDEX IF NOT EXISTS state_events_state_key ON state_events (state_key);
-
-
-CREATE TABLE IF NOT EXISTS current_state_events(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    type TEXT NOT NULL,
-    state_key TEXT NOT NULL,
-    CONSTRAINT curr_uniq UNIQUE (room_id, type, state_key) ON CONFLICT REPLACE
-);
-
-CREATE INDEX IF NOT EXISTS curr_events_event_id ON current_state_events (event_id);
-CREATE INDEX IF NOT EXISTS current_state_events_room_id ON current_state_events (room_id);
-CREATE INDEX IF NOT EXISTS current_state_events_type ON current_state_events (type);
-CREATE INDEX IF NOT EXISTS current_state_events_state_key ON current_state_events (state_key);
-
-CREATE TABLE IF NOT EXISTS room_memberships(
-    event_id TEXT NOT NULL,
-    user_id TEXT NOT NULL,
-    sender TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    membership TEXT NOT NULL
-);
-
-CREATE INDEX IF NOT EXISTS room_memberships_event_id ON room_memberships (event_id);
-CREATE INDEX IF NOT EXISTS room_memberships_room_id ON room_memberships (room_id);
-CREATE INDEX IF NOT EXISTS room_memberships_user_id ON room_memberships (user_id);
-
-CREATE TABLE IF NOT EXISTS feedback(
-    event_id TEXT NOT NULL,
-    feedback_type TEXT,
-    target_event_id TEXT,
-    sender TEXT,
-    room_id TEXT
-);
-
-CREATE TABLE IF NOT EXISTS topics(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    topic TEXT NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS room_names(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    name TEXT NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS rooms(
-    room_id TEXT PRIMARY KEY NOT NULL,
-    is_public INTEGER,
-    creator TEXT
-);
-
-CREATE TABLE IF NOT EXISTS room_join_rules(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    join_rule TEXT NOT NULL
-);
-CREATE INDEX IF NOT EXISTS room_join_rules_event_id ON room_join_rules(event_id);
-CREATE INDEX IF NOT EXISTS room_join_rules_room_id ON room_join_rules(room_id);
-
-
-CREATE TABLE IF NOT EXISTS room_power_levels(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    user_id TEXT NOT NULL,
-    level INTEGER NOT NULL
-);
-CREATE INDEX IF NOT EXISTS room_power_levels_event_id ON room_power_levels(event_id);
-CREATE INDEX IF NOT EXISTS room_power_levels_room_id ON room_power_levels(room_id);
-CREATE INDEX IF NOT EXISTS room_power_levels_room_user ON room_power_levels(room_id, user_id);
-
-
-CREATE TABLE IF NOT EXISTS room_default_levels(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    level INTEGER NOT NULL
-);
-
-CREATE INDEX IF NOT EXISTS room_default_levels_event_id ON room_default_levels(event_id);
-CREATE INDEX IF NOT EXISTS room_default_levels_room_id ON room_default_levels(room_id);
-
-
-CREATE TABLE IF NOT EXISTS room_add_state_levels(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    level INTEGER NOT NULL
-);
-
-CREATE INDEX IF NOT EXISTS room_add_state_levels_event_id ON room_add_state_levels(event_id);
-CREATE INDEX IF NOT EXISTS room_add_state_levels_room_id ON room_add_state_levels(room_id);
-
-
-CREATE TABLE IF NOT EXISTS room_send_event_levels(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    level INTEGER NOT NULL
-);
-
-CREATE INDEX IF NOT EXISTS room_send_event_levels_event_id ON room_send_event_levels(event_id);
-CREATE INDEX IF NOT EXISTS room_send_event_levels_room_id ON room_send_event_levels(room_id);
-
-
-CREATE TABLE IF NOT EXISTS room_ops_levels(
-    event_id TEXT NOT NULL,
-    room_id TEXT NOT NULL,
-    ban_level INTEGER,
-    kick_level INTEGER
-);
-
-CREATE INDEX IF NOT EXISTS room_ops_levels_event_id ON room_ops_levels(event_id);
-CREATE INDEX IF NOT EXISTS room_ops_levels_room_id ON room_ops_levels(room_id);
-
-
-CREATE TABLE IF NOT EXISTS room_hosts(
-    room_id TEXT NOT NULL,
-    host TEXT NOT NULL,
-    CONSTRAINT room_hosts_uniq UNIQUE (room_id, host) ON CONFLICT IGNORE
-);
-
-CREATE INDEX IF NOT EXISTS room_hosts_room_id ON room_hosts (room_id);
-
-PRAGMA user_version = 2;
diff --git a/synapse/storage/schema/delta/v4.sql b/synapse/storage/schema/delta/v4.sql
deleted file mode 100644
index d3807b7686..0000000000
--- a/synapse/storage/schema/delta/v4.sql
+++ /dev/null
@@ -1,26 +0,0 @@
-/* Copyright 2014, 2015 OpenMarket Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-CREATE TABLE IF NOT EXISTS redactions (
-    event_id TEXT NOT NULL,
-    redacts TEXT NOT NULL,
-    CONSTRAINT ev_uniq UNIQUE (event_id)
-);
-
-CREATE INDEX IF NOT EXISTS redactions_event_id ON redactions (event_id);
-CREATE INDEX IF NOT EXISTS redactions_redacts ON redactions (redacts);
-
-ALTER TABLE room_ops_levels ADD COLUMN redact_level INTEGER;
-
-PRAGMA user_version = 4;
diff --git a/synapse/storage/schema/delta/v5.sql b/synapse/storage/schema/delta/v5.sql
deleted file mode 100644
index 0874a15431..0000000000
--- a/synapse/storage/schema/delta/v5.sql
+++ /dev/null
@@ -1,30 +0,0 @@
-/* Copyright 2014, 2015 OpenMarket Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-CREATE TABLE IF NOT EXISTS user_ips (
-    user TEXT NOT NULL,
-    access_token TEXT NOT NULL,
-    device_id TEXT,
-    ip TEXT NOT NULL,
-    user_agent TEXT NOT NULL,
-    last_seen INTEGER NOT NULL,
-    CONSTRAINT user_ip UNIQUE (user, access_token, ip, user_agent) ON CONFLICT REPLACE
-);
-
-CREATE INDEX IF NOT EXISTS user_ips_user ON user_ips(user);
-
-ALTER TABLE users ADD COLUMN admin BOOL DEFAULT 0 NOT NULL;
-
-PRAGMA user_version = 5;
diff --git a/synapse/storage/schema/delta/v6.sql b/synapse/storage/schema/delta/v6.sql
deleted file mode 100644
index a9e0a4fe0d..0000000000
--- a/synapse/storage/schema/delta/v6.sql
+++ /dev/null
@@ -1,31 +0,0 @@
-/* Copyright 2014, 2015 OpenMarket Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-CREATE TABLE IF NOT EXISTS server_tls_certificates(
-  server_name TEXT, -- Server name.
-  fingerprint TEXT, -- Certificate fingerprint.
-  from_server TEXT, -- Which key server the certificate was fetched from.
-  ts_added_ms INTEGER, -- When the certifcate was added.
-  tls_certificate BLOB, -- DER encoded x509 certificate.
-  CONSTRAINT uniqueness UNIQUE (server_name, fingerprint)
-);
-
-CREATE TABLE IF NOT EXISTS server_signature_keys(
-  server_name TEXT, -- Server name.
-  key_id TEXT, -- Key version.
-  from_server TEXT, -- Which key server the key was fetched form.
-  ts_added_ms INTEGER, -- When the key was added.
-  verify_key BLOB, -- NACL verification key.
-  CONSTRAINT uniqueness UNIQUE (server_name, key_id)
-);
diff --git a/synapse/storage/schema/delta/v8.sql b/synapse/storage/schema/delta/v8.sql
deleted file mode 100644
index 1e9f8b18cb..0000000000
--- a/synapse/storage/schema/delta/v8.sql
+++ /dev/null
@@ -1,34 +0,0 @@
-/* Copyright 2014, 2015 OpenMarket Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
- CREATE TABLE IF NOT EXISTS event_signatures_2 (
-    event_id TEXT,
-    signature_name TEXT,
-    key_id TEXT,
-    signature BLOB,
-    CONSTRAINT uniqueness UNIQUE (event_id, signature_name, key_id)
-);
-
-INSERT INTO event_signatures_2 (event_id, signature_name, key_id, signature)
-SELECT event_id, signature_name, key_id, signature FROM event_signatures;
-
-DROP TABLE event_signatures;
-ALTER TABLE event_signatures_2 RENAME TO event_signatures;
-
-CREATE INDEX IF NOT EXISTS event_signatures_id ON event_signatures (
-    event_id
-);
-
-PRAGMA user_version = 8;
\ No newline at end of file
diff --git a/synapse/storage/schema/delta/v9.sql b/synapse/storage/schema/delta/v9.sql
deleted file mode 100644
index 455d51a70c..0000000000
--- a/synapse/storage/schema/delta/v9.sql
+++ /dev/null
@@ -1,79 +0,0 @@
-/* Copyright 2014, 2015 OpenMarket Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
--- To track destination health
-CREATE TABLE IF NOT EXISTS destinations(
-    destination TEXT PRIMARY KEY,
-    retry_last_ts INTEGER,
-    retry_interval INTEGER
-);
-
-
-CREATE TABLE IF NOT EXISTS local_media_repository (
-    media_id TEXT, -- The id used to refer to the media.
-    media_type TEXT, -- The MIME-type of the media.
-    media_length INTEGER, -- Length of the media in bytes.
-    created_ts INTEGER, -- When the content was uploaded in ms.
-    upload_name TEXT, -- The name the media was uploaded with.
-    user_id TEXT, -- The user who uploaded the file.
-    CONSTRAINT uniqueness UNIQUE (media_id)
-);
-
-CREATE TABLE IF NOT EXISTS local_media_repository_thumbnails (
-    media_id TEXT, -- The id used to refer to the media.
-    thumbnail_width INTEGER, -- The width of the thumbnail in pixels.
-    thumbnail_height INTEGER, -- The height of the thumbnail in pixels.
-    thumbnail_type TEXT, -- The MIME-type of the thumbnail.
-    thumbnail_method TEXT, -- The method used to make the thumbnail.
-    thumbnail_length INTEGER, -- The length of the thumbnail in bytes.
-    CONSTRAINT uniqueness UNIQUE (
-        media_id, thumbnail_width, thumbnail_height, thumbnail_type
-    )
-);
-
-CREATE INDEX IF NOT EXISTS local_media_repository_thumbnails_media_id
-    ON local_media_repository_thumbnails (media_id);
-
-CREATE TABLE IF NOT EXISTS remote_media_cache (
-    media_origin TEXT, -- The remote HS the media came from.
-    media_id TEXT, -- The id used to refer to the media on that server.
-    media_type TEXT, -- The MIME-type of the media.
-    created_ts INTEGER, -- When the content was uploaded in ms.
-    upload_name TEXT, -- The name the media was uploaded with.
-    media_length INTEGER, -- Length of the media in bytes.
-    filesystem_id TEXT, -- The name used to store the media on disk.
-    CONSTRAINT uniqueness UNIQUE (media_origin, media_id)
-);
-
-CREATE TABLE IF NOT EXISTS remote_media_cache_thumbnails (
-    media_origin TEXT, -- The remote HS the media came from.
-    media_id TEXT, -- The id used to refer to the media.
-    thumbnail_width INTEGER, -- The width of the thumbnail in pixels.
-    thumbnail_height INTEGER, -- The height of the thumbnail in pixels.
-    thumbnail_method TEXT, -- The method used to make the thumbnail
-    thumbnail_type TEXT, -- The MIME-type of the thumbnail.
-    thumbnail_length INTEGER, -- The length of the thumbnail in bytes.
-    filesystem_id TEXT, -- The name used to store the media on disk.
-    CONSTRAINT uniqueness UNIQUE (
-        media_origin, media_id, thumbnail_width, thumbnail_height,
-        thumbnail_type, thumbnail_type
-    )
-);
-
-CREATE INDEX IF NOT EXISTS remote_media_cache_thumbnails_media_id
-    ON local_media_repository_thumbnails (media_id);
-
-
-PRAGMA user_version = 9;
diff --git a/synapse/storage/schema/event_edges.sql b/synapse/storage/schema/full_schemas/11/event_edges.sql
index 1e766d6db2..1e766d6db2 100644
--- a/synapse/storage/schema/event_edges.sql
+++ b/synapse/storage/schema/full_schemas/11/event_edges.sql
diff --git a/synapse/storage/schema/event_signatures.sql b/synapse/storage/schema/full_schemas/11/event_signatures.sql
index c28c39c48a..c28c39c48a 100644
--- a/synapse/storage/schema/event_signatures.sql
+++ b/synapse/storage/schema/full_schemas/11/event_signatures.sql
diff --git a/synapse/storage/schema/im.sql b/synapse/storage/schema/full_schemas/11/im.sql
index dd00c1cd2f..dd00c1cd2f 100644
--- a/synapse/storage/schema/im.sql
+++ b/synapse/storage/schema/full_schemas/11/im.sql
diff --git a/synapse/storage/schema/keys.sql b/synapse/storage/schema/full_schemas/11/keys.sql
index a9e0a4fe0d..a9e0a4fe0d 100644
--- a/synapse/storage/schema/keys.sql
+++ b/synapse/storage/schema/full_schemas/11/keys.sql
diff --git a/synapse/storage/schema/media_repository.sql b/synapse/storage/schema/full_schemas/11/media_repository.sql
index afdf48cbfb..afdf48cbfb 100644
--- a/synapse/storage/schema/media_repository.sql
+++ b/synapse/storage/schema/full_schemas/11/media_repository.sql
diff --git a/synapse/storage/schema/presence.sql b/synapse/storage/schema/full_schemas/11/presence.sql
index f9f8db9697..f9f8db9697 100644
--- a/synapse/storage/schema/presence.sql
+++ b/synapse/storage/schema/full_schemas/11/presence.sql
diff --git a/synapse/storage/schema/profiles.sql b/synapse/storage/schema/full_schemas/11/profiles.sql
index f06a528b4d..f06a528b4d 100644
--- a/synapse/storage/schema/profiles.sql
+++ b/synapse/storage/schema/full_schemas/11/profiles.sql
diff --git a/synapse/storage/schema/redactions.sql b/synapse/storage/schema/full_schemas/11/redactions.sql
index 5011d95db8..5011d95db8 100644
--- a/synapse/storage/schema/redactions.sql
+++ b/synapse/storage/schema/full_schemas/11/redactions.sql
diff --git a/synapse/storage/schema/room_aliases.sql b/synapse/storage/schema/full_schemas/11/room_aliases.sql
index 0d2df01603..0d2df01603 100644
--- a/synapse/storage/schema/room_aliases.sql
+++ b/synapse/storage/schema/full_schemas/11/room_aliases.sql
diff --git a/synapse/storage/schema/state.sql b/synapse/storage/schema/full_schemas/11/state.sql
index 1fe8f1e430..1fe8f1e430 100644
--- a/synapse/storage/schema/state.sql
+++ b/synapse/storage/schema/full_schemas/11/state.sql
diff --git a/synapse/storage/schema/transactions.sql b/synapse/storage/schema/full_schemas/11/transactions.sql
index 2d30f99b06..2d30f99b06 100644
--- a/synapse/storage/schema/transactions.sql
+++ b/synapse/storage/schema/full_schemas/11/transactions.sql
diff --git a/synapse/storage/schema/users.sql b/synapse/storage/schema/full_schemas/11/users.sql
index 08ccfdac0a..08ccfdac0a 100644
--- a/synapse/storage/schema/users.sql
+++ b/synapse/storage/schema/full_schemas/11/users.sql
diff --git a/synapse/storage/schema/schema_version.sql b/synapse/storage/schema/schema_version.sql
new file mode 100644
index 0000000000..0431e2d051
--- /dev/null
+++ b/synapse/storage/schema/schema_version.sql
@@ -0,0 +1,30 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE IF NOT EXISTS schema_version(
+    Lock char(1) NOT NULL DEFAULT 'X',  -- Makes sure this table only has one row.
+    version INTEGER NOT NULL,
+    upgraded BOOL NOT NULL,  -- Whether we reached this version from an upgrade or an initial schema.
+    CONSTRAINT schema_version_lock_x CHECK (Lock='X')
+    CONSTRAINT schema_version_lock_uniq UNIQUE (Lock)
+);
+
+CREATE TABLE IF NOT EXISTS applied_schema_deltas(
+    version INTEGER NOT NULL,
+    file TEXT NOT NULL,
+    CONSTRAINT schema_deltas_ver_file UNIQUE (version, file) ON CONFLICT IGNORE
+);
+
+CREATE INDEX IF NOT EXISTS schema_deltas_ver ON applied_schema_deltas(version);
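
The Lock column pins schema_version to a single row: the CHECK forces the value 'X' and the UNIQUE constraint forbids a second one, so recording a new version is always a write to that one row. A sketch of bumping the version, assuming a sqlite3 connection (bump_schema_version is illustrative):

    def bump_schema_version(conn, version, upgraded):
        # UNIQUE(Lock) + CHECK(Lock='X') make INSERT OR REPLACE an upsert
        # of the table's only row.
        conn.execute(
            "INSERT OR REPLACE INTO schema_version (Lock, version, upgraded)"
            " VALUES ('X', ?, ?)",
            (version, 1 if upgraded else 0),
        )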
diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py
index 8ac2adab05..09bc522210 100644
--- a/synapse/storage/stream.py
+++ b/synapse/storage/stream.py
@@ -36,6 +36,7 @@ what sort order was used:
 from twisted.internet import defer
 
 from ._base import SQLBaseStore
+from synapse.api.constants import EventTypes
 from synapse.api.errors import SynapseError
 from synapse.util.logutils import log_function
 
@@ -82,10 +83,10 @@ class _StreamToken(namedtuple("_StreamToken", "topological stream")):
     def parse(cls, string):
         try:
             if string[0] == 's':
-                return cls(None, int(string[1:]))
+                return cls(topological=None, stream=int(string[1:]))
             if string[0] == 't':
                 parts = string[1:].split('-', 1)
-                return cls(int(parts[1]), int(parts[0]))
+                return cls(topological=int(parts[0]), stream=int(parts[1]))
         except:
             pass
         raise SynapseError(400, "Invalid token %r" % (string,))
@@ -94,7 +95,7 @@ class _StreamToken(namedtuple("_StreamToken", "topological stream")):
     def parse_stream_token(cls, string):
         try:
             if string[0] == 's':
-                return cls(None, int(string[1:]))
+                return cls(topological=None, stream=int(string[1:]))
         except:
             pass
         raise SynapseError(400, "Invalid token %r" % (string,))
@@ -127,6 +128,85 @@ class _StreamToken(namedtuple("_StreamToken", "topological stream")):
 
 
 class StreamStore(SQLBaseStore):
+
+    @defer.inlineCallbacks
+    def get_appservice_room_stream(self, service, from_key, to_key, limit=0):
+        # NB this lives here instead of appservice.py so we can reuse the
+        # 'private' StreamToken class in this file.
+        if limit:
+            limit = min(limit, MAX_STREAM_SIZE)
+        else:
+            limit = MAX_STREAM_SIZE
+
+        # From and to keys should be "s<stream_ordering>" tokens.
+        from_id = _StreamToken.parse_stream_token(from_key)
+        to_id = _StreamToken.parse_stream_token(to_key)
+
+        if from_key == to_key:
+            defer.returnValue(([], to_key))
+            return
+
+        # select all the events between from/to with a sensible limit
+        sql = (
+            "SELECT e.event_id, e.room_id, e.type, s.state_key, "
+            "e.stream_ordering FROM events AS e LEFT JOIN state_events as s ON "
+            "e.event_id = s.event_id "
+            "WHERE e.stream_ordering > ? AND e.stream_ordering <= ? "
+            "ORDER BY stream_ordering ASC LIMIT %(limit)d "
+        ) % {
+            "limit": limit
+        }
+
+        def f(txn):
+            # pull out all the events between the tokens
+            txn.execute(sql, (from_id.stream, to_id.stream,))
+            rows = self.cursor_to_dict(txn)
+
+            # Logic:
+            #  - We want ALL events which match the AS room_id regex
+            #  - We want ALL events which match the rooms represented by the AS
+            #    room_alias regex
+            #  - We want ALL events for rooms that AS users have joined.
+            # This is currently supported via get_app_service_rooms (which is
+            # used for the Notifier listener rooms). We can't reasonably make a
+            # SQL query for these room IDs, so we'll pull all the events between
+            # from/to and filter in python.
+            rooms_for_as = self._get_app_service_rooms_txn(txn, service)
+            room_ids_for_as = [r.room_id for r in rooms_for_as]
+
+            def app_service_interested(row):
+                if row["room_id"] in room_ids_for_as:
+                    return True
+
+                if row["type"] == EventTypes.Member:
+                    if service.is_interested_in_user(row.get("state_key")):
+                        return True
+                return False
+
+            ret = self._get_events_txn(
+                txn,
+                # apply the filter on the room id list
+                [
+                    r["event_id"] for r in rows
+                    if app_service_interested(r)
+                ],
+                get_prev_content=True
+            )
+
+            self._set_before_and_after(ret, rows)
+
+            if rows:
+                key = "s%d" % max(r["stream_ordering"] for r in rows)
+            else:
+                # Assume we didn't get anything because there was nothing to
+                # get.
+                key = to_key
+
+            return ret, key
+
+        results = yield self.runInteraction("get_appservice_room_stream", f)
+        defer.returnValue(results)
+
     @log_function
     def get_room_events_stream(self, user_id, from_key, to_key, room_id,
                                limit=0, with_feedback=False):
@@ -181,8 +261,10 @@ class StreamStore(SQLBaseStore):
                 get_prev_content=True
             )
 
+            self._set_before_and_after(ret, rows)
+
             if rows:
-                key = "s%d" % max([r["stream_ordering"] for r in rows])
+                key = "s%d" % max(r["stream_ordering"] for r in rows)
             else:
                 # Assume we didn't get anything because there was nothing to
                 # get.
@@ -260,22 +342,44 @@ class StreamStore(SQLBaseStore):
                 get_prev_content=True
             )
 
+            self._set_before_and_after(events, rows)
+
             return events, next_token,
 
         return self.runInteraction("paginate_room_events", f)
 
     def get_recent_events_for_room(self, room_id, limit, end_token,
-                                   with_feedback=False):
+                                   with_feedback=False, from_token=None):
         # TODO (erikj): Handle compressed feedback
 
-        sql = (
-            "SELECT stream_ordering, topological_ordering, event_id FROM events "
-            "WHERE room_id = ? AND stream_ordering <= ? AND outlier = 0 "
-            "ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ? "
-        )
+        end_token = _StreamToken.parse_stream_token(end_token)
 
-        def f(txn):
-            txn.execute(sql, (room_id, end_token, limit,))
+        if from_token is None:
+            sql = (
+                "SELECT stream_ordering, topological_ordering, event_id"
+                " FROM events"
+                " WHERE room_id = ? AND stream_ordering <= ? AND outlier = 0"
+                " ORDER BY topological_ordering DESC, stream_ordering DESC"
+                " LIMIT ?"
+            )
+        else:
+            from_token = _StreamToken.parse_stream_token(from_token)
+            sql = (
+                "SELECT stream_ordering, topological_ordering, event_id"
+                " FROM events"
+                " WHERE room_id = ? AND stream_ordering > ?"
+                " AND stream_ordering <= ? AND outlier = 0"
+                " ORDER BY topological_ordering DESC, stream_ordering DESC"
+                " LIMIT ?"
+            )
+
+        def get_recent_events_for_room_txn(txn):
+            if from_token is None:
+                txn.execute(sql, (room_id, end_token.stream, limit,))
+            else:
+                txn.execute(sql, (
+                    room_id, from_token.stream, end_token.stream, limit
+                ))
 
             rows = self.cursor_to_dict(txn)
 
@@ -291,9 +395,9 @@ class StreamStore(SQLBaseStore):
                 toke = rows[0]["stream_ordering"] - 1
                 start_token = str(_StreamToken(topo, toke))
 
-                token = (start_token, end_token)
+                token = (start_token, str(end_token))
             else:
-                token = (end_token, end_token)
+                token = (str(end_token), str(end_token))
 
             events = self._get_events_txn(
                 txn,
@@ -301,9 +405,13 @@ class StreamStore(SQLBaseStore):
                 get_prev_content=True
             )
 
+            self._set_before_and_after(events, rows)
+
             return events, token
 
-        return self.runInteraction("get_recent_events_for_room", f)
+        return self.runInteraction(
+            "get_recent_events_for_room", get_recent_events_for_room_txn
+        )
 
     def get_room_events_max_id(self):
         return self.runInteraction(
@@ -325,3 +433,12 @@ class StreamStore(SQLBaseStore):
 
         key = res[0]["m"]
         return "s%d" % (key,)
+
+    @staticmethod
+    def _set_before_and_after(events, rows):
+        for event, row in zip(events, rows):
+            stream = row["stream_ordering"]
+            topo = event.depth
+            internal = event.internal_metadata
+            internal.before = str(_StreamToken(topo, stream - 1))
+            internal.after = str(_StreamToken(topo, stream))
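
The token changes above are easy to misread: stream tokens serialise as "s<stream>" and topological tokens as "t<topological>-<stream>", and the old parse() read the two parts of a 't' token in the wrong order. A standalone sketch of the corrected parsing (parse_token is illustrative):

    def parse_token(string):
        # "s12"   -> (None, 12)
        # "t3-12" -> (3, 12)
        if string.startswith("s"):
            return (None, int(string[1:]))
        if string.startswith("t"):
            topo, stream = string[1:].split("-", 1)
            return (int(topo), int(stream))
        raise ValueError("Invalid token %r" % (string,))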
diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py
index e06ef35690..0b8a3b7a07 100644
--- a/synapse/storage/transactions.py
+++ b/synapse/storage/transactions.py
@@ -13,12 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from ._base import SQLBaseStore, Table
+from ._base import SQLBaseStore, Table, cached
 
 from collections import namedtuple
 
-from twisted.internet import defer
-
 import logging
 
 logger = logging.getLogger(__name__)
@@ -28,10 +26,6 @@ class TransactionStore(SQLBaseStore):
     """A collection of queries for handling PDUs.
     """
 
-    # a write-through cache of DestinationsTable.EntryType indexed by
-    # destination string
-    destination_retry_cache = {}
-
     def get_received_txn_response(self, transaction_id, origin):
         """For an incoming transaction from a given origin, check if we have
         already responded to it. If so, return the response code and response
@@ -211,6 +205,7 @@ class TransactionStore(SQLBaseStore):
 
         return ReceivedTransactionsTable.decode_results(txn.fetchall())
 
+    @cached()
     def get_destination_retry_timings(self, destination):
         """Gets the current retry timings (if any) for a given destination.
 
@@ -221,9 +216,6 @@ class TransactionStore(SQLBaseStore):
             None if not retrying
             Otherwise a DestinationsTable.EntryType for the retry scheme
         """
-        if destination in self.destination_retry_cache:
-            return defer.succeed(self.destination_retry_cache[destination])
-
         return self.runInteraction(
             "get_destination_retry_timings",
             self._get_destination_retry_timings, destination)
@@ -250,7 +242,9 @@ class TransactionStore(SQLBaseStore):
             retry_interval (int) - how long until next retry in ms
         """
 
-        self.destination_retry_cache[destination] = (
+        # As this is the new value, we might as well prefill the cache
+        self.get_destination_retry_timings.prefill(
+            destination,
             DestinationsTable.EntryType(
                 destination,
                 retry_last_ts,
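
This swaps the hand-rolled destination_retry_cache dict for the shared @cached decorator: reads are memoised, and set_destination_retry_timings warms the cache via prefill so the value just written is served from memory. A toy stand-in for the pattern (TinyCache is illustrative, not the real decorator):

    class TinyCache(object):
        def __init__(self, fn):
            self._fn = fn
            self._values = {}

        def __call__(self, key):
            if key not in self._values:
                self._values[key] = self._fn(key)
            return self._values[key]

        def prefill(self, key, value):
            # Write-through: record what we just wrote so the next read
            # never touches the backing store.
            self._values[key] = value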
diff --git a/synapse/types.py b/synapse/types.py
index faac729ff2..f6a1b0bbcf 100644
--- a/synapse/types.py
+++ b/synapse/types.py
@@ -119,3 +119,6 @@ class StreamToken(
         d = self._asdict()
         d[key] = new_value
         return StreamToken(**d)
+
+
+ClientInfo = namedtuple("ClientInfo", ("device_id", "token_id"))
diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index 4e837a918e..79109d0b19 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -15,9 +15,12 @@
 
 from synapse.util.logcontext import LoggingContext
 
-from twisted.internet import reactor
+from twisted.internet import defer, reactor, task
 
 import time
+import logging
+
+logger = logging.getLogger(__name__)
 
 
 class Clock(object):
@@ -35,6 +38,14 @@ class Clock(object):
         """Returns the current system time in miliseconds since epoch."""
         return self.time() * 1000
 
+    def looping_call(self, f, msec):
+        l = task.LoopingCall(f)
+        l.start(msec/1000.0, now=False)
+        return l
+
+    def stop_looping_call(self, loop):
+        loop.stop()
+
     def call_later(self, delay, callback):
         current_context = LoggingContext.current_context()
 
@@ -45,3 +56,51 @@ class Clock(object):
 
     def cancel_call_later(self, timer):
         timer.cancel()
+
+    def time_bound_deferred(self, given_deferred, time_out):
+        if given_deferred.called:
+            return given_deferred
+
+        ret_deferred = defer.Deferred()
+
+        def timed_out_fn():
+            try:
+                ret_deferred.errback(RuntimeError("Timed out"))
+            except:
+                pass
+
+            try:
+                given_deferred.cancel()
+            except:
+                pass
+
+        timer = None
+
+        def cancel(res):
+            try:
+                self.cancel_call_later(timer)
+            except:
+                pass
+            return res
+
+        ret_deferred.addBoth(cancel)
+
+        def success(res):
+            try:
+                ret_deferred.callback(res)
+            except:
+                pass
+
+            return res
+
+        def err(res):
+            try:
+                ret_deferred.errback(res)
+            except Exception:
+                pass
+
+        given_deferred.addCallbacks(callback=success, errback=err)
+
+        timer = self.call_later(time_out, timed_out_fn)
+
+        return ret_deferred
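
A short usage sketch for the new Clock helpers. The heartbeat callback and the
timeout are illustrative; note that looping_call takes milliseconds, while
time_bound_deferred's time_out is handed straight to call_later and is
therefore in seconds:

    from twisted.internet import defer

    from synapse.util import Clock

    clock = Clock()

    def heartbeat():
        print "still alive"

    # run heartbeat every 30s without blocking the reactor
    loop = clock.looping_call(heartbeat, 30 * 1000)

    # errbacks with RuntimeError("Timed out") if nothing fires within 10s
    guarded = clock.time_bound_deferred(defer.Deferred(), time_out=10)

    clock.stop_looping_call(loop)
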
diff --git a/synapse/util/expiringcache.py b/synapse/util/expiringcache.py
new file mode 100644
index 0000000000..1c7859297a
--- /dev/null
+++ b/synapse/util/expiringcache.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class ExpiringCache(object):
+    def __init__(self, cache_name, clock, max_len=0, expiry_ms=0,
+                 reset_expiry_on_get=False):
+        """
+        Args:
+            cache_name (str): Name of this cache, used for logging.
+            clock (Clock)
+            max_len (int): Max size of dict. If the dict grows larger than this
+                then the oldest items get automatically evicted. Default is 0,
+                which indicates there is no max limit.
+            expiry_ms (int): How long before an item is evicted from the cache
+                in milliseconds. Default is 0, indicating items never get
+                evicted based on time.
+            reset_expiry_on_get (bool): If true, will reset the expiry time for
+                an item on access. Defaults to False.
+
+        """
+        self._cache_name = cache_name
+
+        self._clock = clock
+
+        self._max_len = max_len
+        self._expiry_ms = expiry_ms
+
+        self._reset_expiry_on_get = reset_expiry_on_get
+
+        self._cache = {}
+
+    def start(self):
+        if not self._expiry_ms:
+            # Don't bother starting the loop if things never expire
+            return
+
+        def f():
+            self._prune_cache()
+
+        self._clock.looping_call(f, self._expiry_ms/2)
+
+    def __setitem__(self, key, value):
+        now = self._clock.time_msec()
+        self._cache[key] = _CacheEntry(now, value)
+
+        # Evict if there are now too many items
+        if self._max_len and len(self._cache) > self._max_len:
+            sorted_entries = sorted(
+                self._cache.items(),
+                key=lambda item: item[1].time,
+            )
+
+            for k, _ in sorted_entries[self._max_len:]:
+                self._cache.pop(k)
+
+    def __getitem__(self, key):
+        entry = self._cache[key]
+
+        if self._reset_expiry_on_get:
+            entry.time = self._clock.time_msec()
+
+        return entry.value
+
+    def get(self, key, default=None):
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def _prune_cache(self):
+        if not self._expiry_ms:
+            # zero expiry time means don't expire. This should never get called
+            # since we have this check in start too.
+            return
+        begin_length = len(self._cache)
+
+        now = self._clock.time_msec()
+
+        keys_to_delete = set()
+
+        for key, cache_entry in self._cache.items():
+            if now - cache_entry.time > self._expiry_ms:
+                keys_to_delete.add(key)
+
+        for k in keys_to_delete:
+            self._cache.pop(k)
+
+        logger.debug(
+            "[%s] _prune_cache before: %d, after len: %d",
+            self._cache_name, begin_length, len(self._cache.keys())
+        )
+
+
+class _CacheEntry(object):
+    def __init__(self, time, value):
+        self.time = time
+        self.value = value
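
A hedged usage sketch for ExpiringCache; the cache name, limits and key are
illustrative:

    from synapse.util import Clock
    from synapse.util.expiringcache import ExpiringCache

    cache = ExpiringCache(
        cache_name="room_state",       # appears in the prune log line
        clock=Clock(),
        max_len=100,                   # evict the oldest entries beyond 100
        expiry_ms=30 * 60 * 1000,      # prune anything older than 30 minutes
        reset_expiry_on_get=True,      # reads keep hot entries alive
    )
    cache.start()  # kicks off the background pruning loop

    cache["!room:example.org"] = {"topic": "hello"}
    state = cache.get("!room:example.org")  # None once expired or evicted
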
diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py
index a13a2015e4..9e10d37aec 100644
--- a/synapse/util/frozenutils.py
+++ b/synapse/util/frozenutils.py
@@ -21,6 +21,9 @@ def freeze(o):
     if t is dict:
         return frozendict({k: freeze(v) for k, v in o.items()})
 
+    if t is frozendict:
+        return o
+
     if t is str or t is unicode:
         return o
 
@@ -33,10 +36,11 @@ def freeze(o):
 
 
 def unfreeze(o):
-    if isinstance(o, frozendict) or isinstance(o, dict):
+    t = type(o)
+    if t is dict or t is frozendict:
         return dict({k: unfreeze(v) for k, v in o.items()})
 
-    if isinstance(o, basestring):
+    if t is str or t is unicode:
         return o
 
     try:
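
The net effect of the two changes: freeze() now short-circuits when handed an
already-frozen mapping, and unfreeze() dispatches on exact type instead of
isinstance checks. A small round-trip sketch:

    from synapse.util.frozenutils import freeze, unfreeze

    content = {"body": "hello", "tags": ["a", "b"]}

    frozen = freeze(content)         # frozendict, with the list made a tuple
    assert freeze(frozen) is frozen  # already-frozen input is returned as-is

    thawed = unfreeze(frozen)        # back to plain dict and list copies
    assert thawed == content
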
diff --git a/synapse/util/lrucache.py b/synapse/util/lrucache.py
new file mode 100644
index 0000000000..f115f50e50
--- /dev/null
+++ b/synapse/util/lrucache.py
@@ -0,0 +1,117 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class LruCache(object):
+    """Least-recently-used cache."""
+    # TODO(mjark) Add hit/miss counters
+    # TODO(mjark) Add mutex for linked list for thread safety.
+    def __init__(self, max_size):
+        cache = {}
+        list_root = []
+        list_root[:] = [list_root, list_root, None, None]
+
+        PREV, NEXT, KEY, VALUE = 0, 1, 2, 3
+
+        def add_node(key, value):
+            prev_node = list_root
+            next_node = prev_node[NEXT]
+            node = [prev_node, next_node, key, value]
+            prev_node[NEXT] = node
+            next_node[PREV] = node
+            cache[key] = node
+
+        def move_node_to_front(node):
+            prev_node = node[PREV]
+            next_node = node[NEXT]
+            prev_node[NEXT] = next_node
+            next_node[PREV] = prev_node
+            prev_node = list_root
+            next_node = prev_node[NEXT]
+            node[PREV] = prev_node
+            node[NEXT] = next_node
+            prev_node[NEXT] = node
+            next_node[PREV] = node
+
+        def delete_node(node):
+            prev_node = node[PREV]
+            next_node = node[NEXT]
+            prev_node[NEXT] = next_node
+            next_node[PREV] = prev_node
+            cache.pop(node[KEY], None)
+
+        def cache_get(key, default=None):
+            node = cache.get(key, None)
+            if node is not None:
+                move_node_to_front(node)
+                return node[VALUE]
+            else:
+                return default
+
+        def cache_set(key, value):
+            node = cache.get(key, None)
+            if node is not None:
+                move_node_to_front(node)
+                node[VALUE] = value
+            else:
+                add_node(key, value)
+                if len(cache) > max_size:
+                    delete_node(list_root[PREV])
+
+        def cache_set_default(key, value):
+            node = cache.get(key, None)
+            if node is not None:
+                return node[VALUE]
+            else:
+                add_node(key, value)
+                if len(cache) > max_size:
+                    delete_node(list_root[PREV])
+                return value
+
+        def cache_pop(key, default=None):
+            node = cache.get(key, None)
+            if node:
+                delete_node(node)
+                return node[VALUE]
+            else:
+                return default
+
+        def cache_len():
+            return len(cache)
+
+        self.sentinel = object()
+        self.get = cache_get
+        self.set = cache_set
+        self.setdefault = cache_set_default
+        self.pop = cache_pop
+        self.len = cache_len
+
+    def __getitem__(self, key):
+        result = self.get(key, self.sentinel)
+        if result is self.sentinel:
+            raise KeyError(key)
+        else:
+            return result
+
+    def __setitem__(self, key, value):
+        self.set(key, value)
+
+    def __delitem__(self, key):
+        result = self.pop(key, self.sentinel)
+        if result is self.sentinel:
+            raise KeyError(key)
+
+    def __len__(self):
+        return self.len()
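
A quick sketch of the eviction behaviour; the size and keys are illustrative:

    from synapse.util.lrucache import LruCache

    cache = LruCache(max_size=2)
    cache["a"] = 1
    cache["b"] = 2

    cache.get("a")   # touching "a" makes it the most-recently used entry
    cache["c"] = 3   # over capacity: evicts "b", the least-recently used

    print cache.get("b", "evicted")   # -> "evicted"
    print len(cache)                  # -> 2
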
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
new file mode 100644
index 0000000000..d4457af950
--- /dev/null
+++ b/synapse/util/ratelimitutils.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.errors import LimitExceededError
+
+from synapse.util.async import sleep
+
+import collections
+import contextlib
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class FederationRateLimiter(object):
+    def __init__(self, clock, window_size, sleep_limit, sleep_msec,
+                 reject_limit, concurrent_requests):
+        """
+        Args:
+            clock (Clock)
+            window_size (int): The window size in milliseconds.
+            sleep_limit (int): The number of requests received in the last
+                `window_size` milliseconds before we artificially start
+                delaying processing of requests.
+            sleep_msec (int): The number of milliseconds to delay processing
+                of incoming requests by.
+            reject_limit (int): The maximum number of requests that can be
+                queued for processing before we start rejecting requests with
+                a 429 Too Many Requests response.
+            concurrent_requests (int): The number of concurrent requests to
+                process.
+        """
+        self.clock = clock
+
+        self.window_size = window_size
+        self.sleep_limit = sleep_limit
+        self.sleep_msec = sleep_msec
+        self.reject_limit = reject_limit
+        self.concurrent_requests = concurrent_requests
+
+        self.ratelimiters = {}
+
+    def ratelimit(self, host):
+        """Used to ratelimit an incoming request from given host
+
+        Example usage:
+
+            with rate_limiter.ratelimit(origin) as wait_deferred:
+                yield wait_deferred
+                # Handle request ...
+
+        Args:
+            host (str): Origin of incoming request.
+
+        Returns:
+            _PerHostRatelimiter
+        """
+        return self.ratelimiters.setdefault(
+            host,
+            _PerHostRatelimiter(
+                clock=self.clock,
+                window_size=self.window_size,
+                sleep_limit=self.sleep_limit,
+                sleep_msec=self.sleep_msec,
+                reject_limit=self.reject_limit,
+                concurrent_requests=self.concurrent_requests,
+            )
+        ).ratelimit()
+
+
+class _PerHostRatelimiter(object):
+    def __init__(self, clock, window_size, sleep_limit, sleep_msec,
+                 reject_limit, concurrent_requests):
+        self.clock = clock
+
+        self.window_size = window_size
+        self.sleep_limit = sleep_limit
+        self.sleep_msec = sleep_msec
+        self.reject_limit = reject_limit
+        self.concurrent_requests = concurrent_requests
+
+        self.sleeping_requests = set()
+        self.ready_request_queue = collections.OrderedDict()
+        self.current_processing = set()
+        self.request_times = []
+
+    def is_empty(self):
+        time_now = self.clock.time_msec()
+        self.request_times[:] = [
+            r for r in self.request_times
+            if time_now - r < self.window_size
+        ]
+
+        return not (
+            self.ready_request_queue
+            or self.sleeping_requests
+            or self.current_processing
+            or self.request_times
+        )
+
+    @contextlib.contextmanager
+    def ratelimit(self):
+        # `contextlib.contextmanager` takes a generator and turns it into a
+        # context manager. The generator should only yield once with a value
+        # to be returned by manager.
+        # Exceptions will be reraised at the yield.
+
+        request_id = object()
+        ret = self._on_enter(request_id)
+        try:
+            yield ret
+        finally:
+            self._on_exit(request_id)
+
+    def _on_enter(self, request_id):
+        time_now = self.clock.time_msec()
+        self.request_times[:] = [
+            r for r in self.request_times
+            if time_now - r < self.window_size
+        ]
+
+        queue_size = len(self.ready_request_queue) + len(self.sleeping_requests)
+        if queue_size > self.reject_limit:
+            raise LimitExceededError(
+                retry_after_ms=int(
+                    self.window_size / self.sleep_limit
+                ),
+            )
+
+        self.request_times.append(time_now)
+
+        def queue_request():
+            if len(self.current_processing) > self.concurrent_requests:
+                logger.debug("Ratelimit [%s]: Queue req", id(request_id))
+                queue_defer = defer.Deferred()
+                self.ready_request_queue[request_id] = queue_defer
+                return queue_defer
+            else:
+                return defer.succeed(None)
+
+        logger.debug(
+            "Ratelimit [%s]: len(self.request_times)=%d",
+            id(request_id), len(self.request_times),
+        )
+
+        if len(self.request_times) > self.sleep_limit:
+            logger.debug(
+                "Ratelimit [%s]: sleeping req",
+                id(request_id),
+            )
+            ret_defer = sleep(self.sleep_msec/1000.0)
+
+            self.sleeping_requests.add(request_id)
+
+            def on_wait_finished(_):
+                logger.debug(
+                    "Ratelimit [%s]: Finished sleeping",
+                    id(request_id),
+                )
+                self.sleeping_requests.discard(request_id)
+                queue_defer = queue_request()
+                return queue_defer
+
+            ret_defer.addBoth(on_wait_finished)
+        else:
+            ret_defer = queue_request()
+
+        def on_start(r):
+            logger.debug(
+                "Ratelimit [%s]: Processing req",
+                id(request_id),
+            )
+            self.current_processing.add(request_id)
+            return r
+
+        def on_err(r):
+            self.current_processing.discard(request_id)
+            return r
+
+        def on_both(r):
+            # Ensure that we've properly cleaned up.
+            self.sleeping_requests.discard(request_id)
+            self.ready_request_queue.pop(request_id, None)
+            return r
+
+        ret_defer.addCallbacks(on_start, on_err)
+        ret_defer.addBoth(on_both)
+        return ret_defer
+
+    def _on_exit(self, request_id):
+        logger.debug(
+            "Ratelimit [%s]: Processed req",
+            id(request_id),
+        )
+        self.current_processing.discard(request_id)
+        try:
+            request_id, deferred = self.ready_request_queue.popitem(last=False)
+            self.current_processing.add(request_id)
+            deferred.callback(None)
+        except KeyError:
+            pass
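
A hedged sketch of wiring the limiter into a request path. The numeric limits
are illustrative, and clock and handle_request stand in for the homeserver's
real objects:

    from twisted.internet import defer

    from synapse.util.ratelimitutils import FederationRateLimiter

    limiter = FederationRateLimiter(
        clock=clock,            # a synapse.util.Clock
        window_size=1000,       # 1s accounting window
        sleep_limit=10,         # over 10 reqs per window: start sleeping them
        sleep_msec=500,         # ...for 500ms each
        reject_limit=50,        # over 50 queued: raise LimitExceededError
        concurrent_requests=3,  # roughly 3 in flight per host at once
    )

    @defer.inlineCallbacks
    def on_incoming_transaction(origin, transaction):
        with limiter.ratelimit(origin) as wait_deferred:
            yield wait_deferred          # may sleep or queue under load
            result = yield handle_request(transaction)
        defer.returnValue(result)
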
diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py
new file mode 100644
index 0000000000..4e82232796
--- /dev/null
+++ b/synapse/util/retryutils.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.errors import CodeMessageException
+
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class NotRetryingDestination(Exception):
+    def __init__(self, retry_last_ts, retry_interval, destination):
+        msg = "Not retrying server %s." % (destination,)
+        super(NotRetryingDestination, self).__init__(msg)
+
+        self.retry_last_ts = retry_last_ts
+        self.retry_interval = retry_interval
+        self.destination = destination
+
+
+@defer.inlineCallbacks
+def get_retry_limiter(destination, clock, store, **kwargs):
+    """For a given destination check if we have previously failed to
+    send a request there and are waiting before retrying the destination.
+    If we are not ready to retry the destination, this will raise a
+    NotRetryingDestination exception. Otherwise, this will return a context
+    manager that will mark the destination as down if an exception is thrown
+    (excluding CodeMessageException with code < 500).
+
+    Example usage:
+
+        try:
+            limiter = yield get_retry_limiter(destination, clock, store)
+            with limiter:
+                response = yield do_request()
+        except NotRetryingDestination:
+            # We aren't ready to retry that destination.
+            raise
+    """
+    retry_last_ts, retry_interval = (0, 0)
+
+    retry_timings = yield store.get_destination_retry_timings(
+        destination
+    )
+
+    if retry_timings:
+        retry_last_ts, retry_interval = (
+            retry_timings.retry_last_ts, retry_timings.retry_interval
+        )
+
+        now = int(clock.time_msec())
+
+        if retry_last_ts + retry_interval > now:
+            raise NotRetryingDestination(
+                retry_last_ts=retry_last_ts,
+                retry_interval=retry_interval,
+                destination=destination,
+            )
+
+    defer.returnValue(
+        RetryDestinationLimiter(
+            destination,
+            clock,
+            store,
+            retry_interval,
+            **kwargs
+        )
+    )
+
+
+class RetryDestinationLimiter(object):
+    def __init__(self, destination, clock, store, retry_interval,
+                 min_retry_interval=5000, max_retry_interval=60 * 60 * 1000,
+                 multiplier_retry_interval=2,):
+        """Marks the destination as "down" if an exception is thrown in the
+        context, except for CodeMessageException with code < 500.
+
+        If no exception is raised, marks the destination as "up".
+
+        Args:
+            destination (str)
+            clock (Clock)
+            store (DataStore)
+            retry_interval (int): The next retry interval taken from the
+                database in milliseconds, or zero if the last request was
+                successful.
+            min_retry_interval (int): The minimum retry interval to use after
+                a failed request, in milliseconds.
+            max_retry_interval (int): The maximum retry interval to use after
+                a failed request, in milliseconds.
+            multiplier_retry_interval (int): The multiplier to use to increase
+                the retry interval after a failed request.
+        """
+        self.clock = clock
+        self.store = store
+        self.destination = destination
+
+        self.retry_interval = retry_interval
+        self.min_retry_interval = min_retry_interval
+        self.max_retry_interval = max_retry_interval
+        self.multiplier_retry_interval = multiplier_retry_interval
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        def err(failure):
+            logger.error(
+                "Failed to store set_destination_retry_timings: %s",
+                failure.value
+            )
+
+        valid_err_code = False
+        if exc_type is CodeMessageException:
+            valid_err_code = 0 <= exc_val.code < 500
+
+        if exc_type is None or valid_err_code:
+            # We connected successfully.
+            if not self.retry_interval:
+                return
+
+            retry_last_ts = 0
+            self.retry_interval = 0
+        else:
+            # We couldn't connect.
+            if self.retry_interval:
+                self.retry_interval *= self.multiplier_retry_interval
+
+                if self.retry_interval >= self.max_retry_interval:
+                    self.retry_interval = self.max_retry_interval
+            else:
+                self.retry_interval = self.min_retry_interval
+
+            retry_last_ts = int(self.clock.time_msec())
+
+        self.store.set_destination_retry_timings(
+            self.destination, retry_last_ts, self.retry_interval
+        ).addErrback(err)
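
With the defaults above, a destination's retry interval grows 5s, 10s, 20s and
so on, capped at one hour; a successful request (or a CodeMessageException
with code < 500) resets it. A hedged sketch of a caller, with do_request
standing in for the real transport call:

    from twisted.internet import defer

    from synapse.util.retryutils import (
        NotRetryingDestination, get_retry_limiter
    )

    @defer.inlineCallbacks
    def send_to(destination, clock, store, do_request):
        try:
            limiter = yield get_retry_limiter(destination, clock, store)
        except NotRetryingDestination:
            return  # still backing off: don't even hit the network
        with limiter:
            # success resets the interval; failure doubles it (capped at 1h)
            response = yield do_request(destination)
        defer.returnValue(response)
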
diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py
new file mode 100644
index 0000000000..4f83db5e84
--- /dev/null
+++ b/tests/api/test_auth.py
@@ -0,0 +1,139 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from tests import unittest
+from twisted.internet import defer
+
+from mock import Mock
+
+from synapse.api.auth import Auth
+from synapse.api.errors import AuthError
+
+
+class AuthTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.state_handler = Mock()
+        self.store = Mock()
+
+        self.hs = Mock()
+        self.hs.get_datastore = Mock(return_value=self.store)
+        self.hs.get_state_handler = Mock(return_value=self.state_handler)
+        self.auth = Auth(self.hs)
+
+        self.test_user = "@foo:bar"
+        self.test_token = "_test_token_"
+
+    @defer.inlineCallbacks
+    def test_get_user_by_req_user_valid_token(self):
+        self.store.get_app_service_by_token = Mock(return_value=None)
+        user_info = {
+            "name": self.test_user,
+            "device_id": "nothing",
+            "token_id": "ditto",
+            "admin": False
+        }
+        self.store.get_user_by_token = Mock(return_value=user_info)
+
+        request = Mock(args={})
+        request.args["access_token"] = [self.test_token]
+        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
+        (user, info) = yield self.auth.get_user_by_req(request)
+        self.assertEquals(user.to_string(), self.test_user)
+
+    def test_get_user_by_req_user_bad_token(self):
+        self.store.get_app_service_by_token = Mock(return_value=None)
+        self.store.get_user_by_token = Mock(return_value=None)
+
+        request = Mock(args={})
+        request.args["access_token"] = [self.test_token]
+        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
+        d = self.auth.get_user_by_req(request)
+        self.failureResultOf(d, AuthError)
+
+    def test_get_user_by_req_user_missing_token(self):
+        self.store.get_app_service_by_token = Mock(return_value=None)
+        user_info = {
+            "name": self.test_user,
+            "device_id": "nothing",
+            "token_id": "ditto",
+            "admin": False
+        }
+        self.store.get_user_by_token = Mock(return_value=user_info)
+
+        request = Mock(args={})
+        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
+        d = self.auth.get_user_by_req(request)
+        self.failureResultOf(d, AuthError)
+
+    @defer.inlineCallbacks
+    def test_get_user_by_req_appservice_valid_token(self):
+        app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
+        self.store.get_app_service_by_token = Mock(return_value=app_service)
+        self.store.get_user_by_token = Mock(return_value=None)
+
+        request = Mock(args={})
+        request.args["access_token"] = [self.test_token]
+        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
+        (user, info) = yield self.auth.get_user_by_req(request)
+        self.assertEquals(user.to_string(), self.test_user)
+
+    def test_get_user_by_req_appservice_bad_token(self):
+        self.store.get_app_service_by_token = Mock(return_value=None)
+        self.store.get_user_by_token = Mock(return_value=None)
+
+        request = Mock(args={})
+        request.args["access_token"] = [self.test_token]
+        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
+        d = self.auth.get_user_by_req(request)
+        self.failureResultOf(d, AuthError)
+
+    def test_get_user_by_req_appservice_missing_token(self):
+        app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
+        self.store.get_app_service_by_token = Mock(return_value=app_service)
+        self.store.get_user_by_token = Mock(return_value=None)
+
+        request = Mock(args={})
+        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
+        d = self.auth.get_user_by_req(request)
+        self.failureResultOf(d, AuthError)
+
+    @defer.inlineCallbacks
+    def test_get_user_by_req_appservice_valid_token_valid_user_id(self):
+        masquerading_user_id = "@doppelganger:matrix.org"
+        app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
+        app_service.is_interested_in_user = Mock(return_value=True)
+        self.store.get_app_service_by_token = Mock(return_value=app_service)
+        self.store.get_user_by_token = Mock(return_value=None)
+
+        request = Mock(args={})
+        request.args["access_token"] = [self.test_token]
+        request.args["user_id"] = [masquerading_user_id]
+        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
+        (user, info) = yield self.auth.get_user_by_req(request)
+        self.assertEquals(user.to_string(), masquerading_user_id)
+
+    def test_get_user_by_req_appservice_valid_token_bad_user_id(self):
+        masquerading_user_id = "@doppelganger:matrix.org"
+        app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
+        app_service.is_interested_in_user = Mock(return_value=False)
+        self.store.get_app_service_by_token = Mock(return_value=app_service)
+        self.store.get_user_by_token = Mock(return_value=None)
+
+        request = Mock(args={})
+        request.args["access_token"] = [self.test_token]
+        request.args["user_id"] = [masquerading_user_id]
+        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
+        d = self.auth.get_user_by_req(request)
+        self.failureResultOf(d, AuthError)
diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py
new file mode 100644
index 0000000000..65b2f590c8
--- /dev/null
+++ b/tests/api/test_filtering.py
@@ -0,0 +1,502 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import namedtuple
+from tests import unittest
+from twisted.internet import defer
+
+from mock import Mock
+from tests.utils import (
+    MockHttpResource, DeferredMockCallable, setup_test_homeserver
+)
+
+from synapse.types import UserID
+from synapse.api.filtering import Filter
+
+user_localpart = "test_user"
+MockEvent = namedtuple("MockEvent", "sender type room_id")
+
+class FilteringTestCase(unittest.TestCase):
+
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.mock_federation_resource = MockHttpResource()
+
+        self.mock_http_client = Mock(spec=[])
+        self.mock_http_client.put_json = DeferredMockCallable()
+
+        hs = yield setup_test_homeserver(
+            handlers=None,
+            http_client=self.mock_http_client,
+            keyring=Mock(),
+        )
+
+        self.filtering = hs.get_filtering()
+        self.filter = Filter({})
+
+        self.datastore = hs.get_datastore()
+
+    def test_definition_types_works_with_literals(self):
+        definition = {
+            "types": ["m.room.message", "org.matrix.foo.bar"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.message",
+            room_id="!foo:bar"
+        )
+        self.assertTrue(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_types_works_with_wildcards(self):
+        definition = {
+            "types": ["m.*", "org.matrix.foo.bar"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.message",
+            room_id="!foo:bar"
+        )
+        self.assertTrue(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_types_works_with_unknowns(self):
+        definition = {
+            "types": ["m.room.message", "org.matrix.foo.bar"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="now.for.something.completely.different",
+            room_id="!foo:bar"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_types_works_with_literals(self):
+        definition = {
+            "not_types": ["m.room.message", "org.matrix.foo.bar"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.message",
+            room_id="!foo:bar"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_types_works_with_wildcards(self):
+        definition = {
+            "not_types": ["m.room.message", "org.matrix.*"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="org.matrix.custom.event",
+            room_id="!foo:bar"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_types_works_with_unknowns(self):
+        definition = {
+            "not_types": ["m.*", "org.*"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="com.nom.nom.nom",
+            room_id="!foo:bar"
+        )
+        self.assertTrue(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_types_takes_priority_over_types(self):
+        definition = {
+            "not_types": ["m.*", "org.*"],
+            "types": ["m.room.message", "m.room.topic"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.topic",
+            room_id="!foo:bar"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_senders_works_with_literals(self):
+        definition = {
+            "senders": ["@flibble:wibble"]
+        }
+        event = MockEvent(
+            sender="@flibble:wibble",
+            type="com.nom.nom.nom",
+            room_id="!foo:bar"
+        )
+        self.assertTrue(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_senders_works_with_unknowns(self):
+        definition = {
+            "senders": ["@flibble:wibble"]
+        }
+        event = MockEvent(
+            sender="@challenger:appears",
+            type="com.nom.nom.nom",
+            room_id="!foo:bar"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_senders_works_with_literals(self):
+        definition = {
+            "not_senders": ["@flibble:wibble"]
+        }
+        event = MockEvent(
+            sender="@flibble:wibble",
+            type="com.nom.nom.nom",
+            room_id="!foo:bar"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_senders_works_with_unknowns(self):
+        definition = {
+            "not_senders": ["@flibble:wibble"]
+        }
+        event = MockEvent(
+            sender="@challenger:appears",
+            type="com.nom.nom.nom",
+            room_id="!foo:bar"
+        )
+        self.assertTrue(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_senders_takes_priority_over_senders(self):
+        definition = {
+            "not_senders": ["@misspiggy:muppets"],
+            "senders": ["@kermit:muppets", "@misspiggy:muppets"]
+        }
+        event = MockEvent(
+            sender="@misspiggy:muppets",
+            type="m.room.topic",
+            room_id="!foo:bar"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_rooms_works_with_literals(self):
+        definition = {
+            "rooms": ["!secretbase:unknown"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.message",
+            room_id="!secretbase:unknown"
+        )
+        self.assertTrue(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_rooms_works_with_unknowns(self):
+        definition = {
+            "rooms": ["!secretbase:unknown"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.message",
+            room_id="!anothersecretbase:unknown"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_rooms_works_with_literals(self):
+        definition = {
+            "not_rooms": ["!anothersecretbase:unknown"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.message",
+            room_id="!anothersecretbase:unknown"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_rooms_works_with_unknowns(self):
+        definition = {
+            "not_rooms": ["!secretbase:unknown"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.message",
+            room_id="!anothersecretbase:unknown"
+        )
+        self.assertTrue(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_not_rooms_takes_priority_over_rooms(self):
+        definition = {
+            "not_rooms": ["!secretbase:unknown"],
+            "rooms": ["!secretbase:unknown"]
+        }
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.message",
+            room_id="!secretbase:unknown"
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_combined_event(self):
+        definition = {
+            "not_senders": ["@misspiggy:muppets"],
+            "senders": ["@kermit:muppets"],
+            "rooms": ["!stage:unknown"],
+            "not_rooms": ["!piggyshouse:muppets"],
+            "types": ["m.room.message", "muppets.kermit.*"],
+            "not_types": ["muppets.misspiggy.*"]
+        }
+        event = MockEvent(
+            sender="@kermit:muppets",  # yup
+            type="m.room.message",  # yup
+            room_id="!stage:unknown"  # yup
+        )
+        self.assertTrue(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_combined_event_bad_sender(self):
+        definition = {
+            "not_senders": ["@misspiggy:muppets"],
+            "senders": ["@kermit:muppets"],
+            "rooms": ["!stage:unknown"],
+            "not_rooms": ["!piggyshouse:muppets"],
+            "types": ["m.room.message", "muppets.kermit.*"],
+            "not_types": ["muppets.misspiggy.*"]
+        }
+        event = MockEvent(
+            sender="@misspiggy:muppets",  # nope
+            type="m.room.message",  # yup
+            room_id="!stage:unknown"  # yup
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_combined_event_bad_room(self):
+        definition = {
+            "not_senders": ["@misspiggy:muppets"],
+            "senders": ["@kermit:muppets"],
+            "rooms": ["!stage:unknown"],
+            "not_rooms": ["!piggyshouse:muppets"],
+            "types": ["m.room.message", "muppets.kermit.*"],
+            "not_types": ["muppets.misspiggy.*"]
+        }
+        event = MockEvent(
+            sender="@kermit:muppets",  # yup
+            type="m.room.message",  # yup
+            room_id="!piggyshouse:muppets"  # nope
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    def test_definition_combined_event_bad_type(self):
+        definition = {
+            "not_senders": ["@misspiggy:muppets"],
+            "senders": ["@kermit:muppets"],
+            "rooms": ["!stage:unknown"],
+            "not_rooms": ["!piggyshouse:muppets"],
+            "types": ["m.room.message", "muppets.kermit.*"],
+            "not_types": ["muppets.misspiggy.*"]
+        }
+        event = MockEvent(
+            sender="@kermit:muppets",  # yup
+            type="muppets.misspiggy.kisses",  # nope
+            room_id="!stage:unknown"  # yup
+        )
+        self.assertFalse(
+            self.filter._passes_definition(definition, event)
+        )
+
+    @defer.inlineCallbacks
+    def test_filter_public_user_data_match(self):
+        user_filter_json = {
+            "public_user_data": {
+                "types": ["m.*"]
+            }
+        }
+        user = UserID.from_string("@" + user_localpart + ":test")
+        filter_id = yield self.datastore.add_user_filter(
+            user_localpart=user_localpart,
+            user_filter=user_filter_json,
+        )
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.profile",
+            room_id="!foo:bar"
+        )
+        events = [event]
+
+        user_filter = yield self.filtering.get_user_filter(
+            user_localpart=user_localpart,
+            filter_id=filter_id,
+        )
+
+        results = user_filter.filter_public_user_data(events=events)
+        self.assertEquals(events, results)
+
+    @defer.inlineCallbacks
+    def test_filter_public_user_data_no_match(self):
+        user_filter_json = {
+            "public_user_data": {
+                "types": ["m.*"]
+            }
+        }
+        user = UserID.from_string("@" + user_localpart + ":test")
+        filter_id = yield self.datastore.add_user_filter(
+            user_localpart=user_localpart,
+            user_filter=user_filter_json,
+        )
+        event = MockEvent(
+            sender="@foo:bar",
+            type="custom.avatar.3d.crazy",
+            room_id="!foo:bar"
+        )
+        events = [event]
+
+        user_filter = yield self.filtering.get_user_filter(
+            user_localpart=user_localpart,
+            filter_id=filter_id,
+        )
+
+        results = user_filter.filter_public_user_data(events=events)
+        self.assertEquals([], results)
+
+    @defer.inlineCallbacks
+    def test_filter_room_state_match(self):
+        user_filter_json = {
+            "room": {
+                "state": {
+                    "types": ["m.*"]
+                }
+            }
+        }
+        user = UserID.from_string("@" + user_localpart + ":test")
+        filter_id = yield self.datastore.add_user_filter(
+            user_localpart=user_localpart,
+            user_filter=user_filter_json,
+        )
+        event = MockEvent(
+            sender="@foo:bar",
+            type="m.room.topic",
+            room_id="!foo:bar"
+        )
+        events = [event]
+
+        user_filter = yield self.filtering.get_user_filter(
+            user_localpart=user_localpart,
+            filter_id=filter_id,
+        )
+
+        results = user_filter.filter_room_state(events=events)
+        self.assertEquals(events, results)
+
+    @defer.inlineCallbacks
+    def test_filter_room_state_no_match(self):
+        user_filter_json = {
+            "room": {
+                "state": {
+                    "types": ["m.*"]
+                }
+            }
+        }
+        user = UserID.from_string("@" + user_localpart + ":test")
+        filter_id = yield self.datastore.add_user_filter(
+            user_localpart=user_localpart,
+            user_filter=user_filter_json,
+        )
+        event = MockEvent(
+            sender="@foo:bar",
+            type="org.matrix.custom.event",
+            room_id="!foo:bar"
+        )
+        events = [event]
+
+        user_filter = yield self.filtering.get_user_filter(
+            user_localpart=user_localpart,
+            filter_id=filter_id,
+        )
+
+        results = user_filter.filter_room_state(events)
+        self.assertEquals([], results)
+
+    @defer.inlineCallbacks
+    def test_add_filter(self):
+        user_filter_json = {
+            "room": {
+                "state": {
+                    "types": ["m.*"]
+                }
+            }
+        }
+
+        filter_id = yield self.filtering.add_user_filter(
+            user_localpart=user_localpart,
+            user_filter=user_filter_json,
+        )
+
+        self.assertEquals(filter_id, 0)
+        self.assertEquals(user_filter_json,
+            (yield self.datastore.get_user_filter(
+                user_localpart=user_localpart,
+                filter_id=0,
+            ))
+        )
+
+    @defer.inlineCallbacks
+    def test_get_filter(self):
+        user_filter_json = {
+            "room": {
+                "state": {
+                    "types": ["m.*"]
+                }
+            }
+        }
+
+        filter_id = yield self.datastore.add_user_filter(
+            user_localpart=user_localpart,
+            user_filter=user_filter_json,
+        )
+
+        filter = yield self.filtering.get_user_filter(
+            user_localpart=user_localpart,
+            filter_id=filter_id,
+        )
+
+        self.assertEquals(filter.filter_json, user_filter_json)
diff --git a/tests/appservice/__init__.py b/tests/appservice/__init__.py
new file mode 100644
index 0000000000..1a84d94cd9
--- /dev/null
+++ b/tests/appservice/__init__.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py
new file mode 100644
index 0000000000..eb7becf725
--- /dev/null
+++ b/tests/appservice/test_appservice.py
@@ -0,0 +1,225 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from synapse.appservice import ApplicationService
+
+from mock import Mock
+from tests import unittest
+
+
+def _regex(regex, exclusive=True):
+    return {
+        "regex": regex,
+        "exclusive": exclusive
+    }
+
+
+class ApplicationServiceTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.service = ApplicationService(
+            url="some_url",
+            token="some_token",
+            namespaces={
+                ApplicationService.NS_USERS: [],
+                ApplicationService.NS_ROOMS: [],
+                ApplicationService.NS_ALIASES: []
+            }
+        )
+        self.event = Mock(
+            type="m.something", room_id="!foo:bar", sender="@someone:somewhere"
+        )
+
+    def test_regex_user_id_prefix_match(self):
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*")
+        )
+        self.event.sender = "@irc_foobar:matrix.org"
+        self.assertTrue(self.service.is_interested(self.event))
+
+    def test_regex_user_id_prefix_no_match(self):
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*")
+        )
+        self.event.sender = "@someone_else:matrix.org"
+        self.assertFalse(self.service.is_interested(self.event))
+
+    def test_regex_room_member_is_checked(self):
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*")
+        )
+        self.event.sender = "@someone_else:matrix.org"
+        self.event.type = "m.room.member"
+        self.event.state_key = "@irc_foobar:matrix.org"
+        self.assertTrue(self.service.is_interested(self.event))
+
+    def test_regex_room_id_match(self):
+        self.service.namespaces[ApplicationService.NS_ROOMS].append(
+            _regex("!some_prefix.*some_suffix:matrix.org")
+        )
+        self.event.room_id = "!some_prefixs0m3th1nGsome_suffix:matrix.org"
+        self.assertTrue(self.service.is_interested(self.event))
+
+    def test_regex_room_id_no_match(self):
+        self.service.namespaces[ApplicationService.NS_ROOMS].append(
+            _regex("!some_prefix.*some_suffix:matrix.org")
+        )
+        self.event.room_id = "!XqBunHwQIXUiqCaoxq:matrix.org"
+        self.assertFalse(self.service.is_interested(self.event))
+
+    def test_regex_alias_match(self):
+        self.service.namespaces[ApplicationService.NS_ALIASES].append(
+            _regex("#irc_.*:matrix.org")
+        )
+        self.assertTrue(self.service.is_interested(
+            self.event,
+            aliases_for_event=["#irc_foobar:matrix.org", "#athing:matrix.org"]
+        ))
+
+    def test_non_exclusive_alias(self):
+        self.service.namespaces[ApplicationService.NS_ALIASES].append(
+            _regex("#irc_.*:matrix.org", exclusive=False)
+        )
+        self.assertFalse(self.service.is_exclusive_alias(
+            "#irc_foobar:matrix.org"
+        ))
+
+    def test_non_exclusive_room(self):
+        self.service.namespaces[ApplicationService.NS_ROOMS].append(
+            _regex("!irc_.*:matrix.org", exclusive=False)
+        )
+        self.assertFalse(self.service.is_exclusive_room(
+            "!irc_foobar:matrix.org"
+        ))
+
+    def test_non_exclusive_user(self):
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*:matrix.org", exclusive=False)
+        )
+        self.assertFalse(self.service.is_exclusive_user(
+            "@irc_foobar:matrix.org"
+        ))
+
+    def test_exclusive_alias(self):
+        self.service.namespaces[ApplicationService.NS_ALIASES].append(
+            _regex("#irc_.*:matrix.org", exclusive=True)
+        )
+        self.assertTrue(self.service.is_exclusive_alias(
+            "#irc_foobar:matrix.org"
+        ))
+
+    def test_exclusive_user(self):
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*:matrix.org", exclusive=True)
+        )
+        self.assertTrue(self.service.is_exclusive_user(
+            "@irc_foobar:matrix.org"
+        ))
+
+    def test_exclusive_room(self):
+        self.service.namespaces[ApplicationService.NS_ROOMS].append(
+            _regex("!irc_.*:matrix.org", exclusive=True)
+        )
+        self.assertTrue(self.service.is_exclusive_room(
+            "!irc_foobar:matrix.org"
+        ))
+
+    def test_regex_alias_no_match(self):
+        self.service.namespaces[ApplicationService.NS_ALIASES].append(
+            _regex("#irc_.*:matrix.org")
+        )
+        self.assertFalse(self.service.is_interested(
+            self.event,
+            aliases_for_event=["#xmpp_foobar:matrix.org", "#athing:matrix.org"]
+        ))
+
+    def test_regex_multiple_matches(self):
+        self.service.namespaces[ApplicationService.NS_ALIASES].append(
+            _regex("#irc_.*:matrix.org")
+        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*")
+        )
+        self.event.sender = "@irc_foobar:matrix.org"
+        self.assertTrue(self.service.is_interested(
+            self.event,
+            aliases_for_event=["#irc_barfoo:matrix.org"]
+        ))
+
+    def test_restrict_to_rooms(self):
+        self.service.namespaces[ApplicationService.NS_ROOMS].append(
+            _regex("!flibble_.*:matrix.org")
+        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*")
+        )
+        self.event.sender = "@irc_foobar:matrix.org"
+        self.event.room_id = "!wibblewoo:matrix.org"
+        self.assertFalse(self.service.is_interested(
+            self.event,
+            restrict_to=ApplicationService.NS_ROOMS
+        ))
+
+    def test_restrict_to_aliases(self):
+        self.service.namespaces[ApplicationService.NS_ALIASES].append(
+            _regex("#xmpp_.*:matrix.org")
+        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*")
+        )
+        self.event.sender = "@irc_foobar:matrix.org"
+        self.assertFalse(self.service.is_interested(
+            self.event,
+            restrict_to=ApplicationService.NS_ALIASES,
+            aliases_for_event=["#irc_barfoo:matrix.org"]
+        ))
+
+    def test_restrict_to_senders(self):
+        self.service.namespaces[ApplicationService.NS_ALIASES].append(
+            _regex("#xmpp_.*:matrix.org")
+        )
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*")
+        )
+        self.event.sender = "@xmpp_foobar:matrix.org"
+        self.assertFalse(self.service.is_interested(
+            self.event,
+            restrict_to=ApplicationService.NS_USERS,
+            aliases_for_event=["#xmpp_barfoo:matrix.org"]
+        ))
+
+    def test_member_list_match(self):
+        self.service.namespaces[ApplicationService.NS_USERS].append(
+            _regex("@irc_.*")
+        )
+        join_list = [
+            Mock(
+                type="m.room.member", room_id="!foo:bar", sender="@alice:here",
+                state_key="@alice:here"
+            ),
+            Mock(
+                type="m.room.member", room_id="!foo:bar", sender="@irc_fo:here",
+                state_key="@irc_fo:here"  # AS user
+            ),
+            Mock(
+                type="m.room.member", room_id="!foo:bar", sender="@bob:here",
+                state_key="@bob:here"
+            )
+        ]
+
+        self.event.sender = "@xmpp_foobar:matrix.org"
+        self.assertTrue(self.service.is_interested(
+            event=self.event,
+            member_list=join_list
+        ))
diff --git a/tests/federation/test_federation.py b/tests/federation/test_federation.py
index 3e484cd303..2ecd00d2ad 100644
--- a/tests/federation/test_federation.py
+++ b/tests/federation/test_federation.py
@@ -19,9 +19,8 @@ from tests import unittest
 # python imports
 from mock import Mock, ANY
 
-from ..utils import MockHttpResource, MockClock, MockKey
+from ..utils import MockHttpResource, MockClock, setup_test_homeserver
 
-from synapse.server import HomeServer
 from synapse.federation import initialize_http_replication
 from synapse.events import FrozenEvent
 
@@ -40,6 +39,7 @@ def make_pdu(prev_pdus=[], **kwargs):
 
 
 class FederationTestCase(unittest.TestCase):
+    @defer.inlineCallbacks
     def setUp(self):
         self.mock_resource = MockHttpResource()
         self.mock_http_client = Mock(spec=[
@@ -61,17 +61,12 @@ class FederationTestCase(unittest.TestCase):
             defer.succeed(DestinationsTable.EntryType("", 0, 0))
         )
         self.mock_persistence.get_auth_chain.return_value = []
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
         self.clock = MockClock()
-        hs = HomeServer(
-            "test",
+        hs = yield setup_test_homeserver(
             resource_for_federation=self.mock_resource,
             http_client=self.mock_http_client,
-            db_pool=None,
             datastore=self.mock_persistence,
             clock=self.clock,
-            config=self.mock_config,
             keyring=Mock(),
         )
         self.federation = initialize_http_replication(hs)
diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py
new file mode 100644
index 0000000000..a2c541317c
--- /dev/null
+++ b/tests/handlers/test_appservice.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+from .. import unittest
+
+from synapse.handlers.appservice import ApplicationServicesHandler
+
+from mock import Mock
+
+
+class AppServiceHandlerTestCase(unittest.TestCase):
+    """ Tests the ApplicationServicesHandler. """
+
+    def setUp(self):
+        self.mock_store = Mock()
+        self.mock_as_api = Mock()
+        hs = Mock()
+        hs.get_datastore = Mock(return_value=self.mock_store)
+        self.handler = ApplicationServicesHandler(
+            hs, self.mock_as_api
+        )
+
+    @defer.inlineCallbacks
+    def test_notify_interested_services(self):
+        interested_service = self._mkservice(is_interested=True)
+        services = [
+            self._mkservice(is_interested=False),
+            interested_service,
+            self._mkservice(is_interested=False)
+        ]
+
+        self.mock_store.get_app_services = Mock(return_value=services)
+        self.mock_store.get_user_by_id = Mock(return_value=[])
+
+        event = Mock(
+            sender="@someone:anywhere",
+            type="m.room.message",
+            room_id="!foo:bar"
+        )
+        self.mock_as_api.push = Mock()
+        yield self.handler.notify_interested_services(event)
+        self.mock_as_api.push.assert_called_once_with(interested_service, event)
+
+    @defer.inlineCallbacks
+    def test_query_room_alias_exists(self):
+        room_alias_str = "#foo:bar"
+        room_alias = Mock()
+        room_alias.to_string = Mock(return_value=room_alias_str)
+
+        room_id = "!alpha:bet"
+        servers = ["aperture"]
+        interested_service = self._mkservice(is_interested=True)
+        services = [
+            self._mkservice(is_interested=False),
+            interested_service,
+            self._mkservice(is_interested=False)
+        ]
+
+        self.mock_store.get_app_services = Mock(return_value=services)
+        self.mock_store.get_association_from_room_alias = Mock(
+            return_value=Mock(room_id=room_id, servers=servers)
+        )
+
+        result = yield self.handler.query_room_alias_exists(room_alias)
+
+        self.mock_as_api.query_alias.assert_called_once_with(
+            interested_service,
+            room_alias_str
+        )
+        self.assertEquals(result.room_id, room_id)
+        self.assertEquals(result.servers, servers)
+
+    def _mkservice(self, is_interested):
+        service = Mock()
+        service.is_interested = Mock(return_value=is_interested)
+        service.token = "mock_service_token"
+        service.url = "mock_service_url"
+        return service
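Note that the store mocks above return plain values (`Mock(return_value=services)`) even though the handler yields on them. That works because `@defer.inlineCallbacks` resumes the generator immediately when something other than a Deferred is yielded, so test doubles need not wrap everything in `defer.succeed()`. A self-contained demonstration:

```python
from twisted.internet import defer

class FakeStore(object):
    def get_app_services(self):
        return ["svc-a", "svc-b"]        # a plain list, no defer.succeed()

@defer.inlineCallbacks
def count_services(store):
    services = yield store.get_app_services()   # yielding a non-Deferred is fine
    defer.returnValue(len(services))

results = []
count_services(FakeStore()).addCallback(results.append)
assert results == [2]                    # fired synchronously
```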
diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py
index 8e164e4be0..27306ba427 100644
--- a/tests/handlers/test_directory.py
+++ b/tests/handlers/test_directory.py
@@ -19,10 +19,10 @@ from twisted.internet import defer
 
 from mock import Mock
 
-from synapse.server import HomeServer
 from synapse.handlers.directory import DirectoryHandler
+from synapse.types import RoomAlias
 
-from tests.utils import SQLiteMemoryDbPool, MockKey
+from tests.utils import setup_test_homeserver
 
 
 class DirectoryHandlers(object):
@@ -45,19 +45,10 @@ class DirectoryTestCase(unittest.TestCase):
             self.query_handlers[query_type] = handler
         self.mock_federation.register_query_handler = register_query_handler
 
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer(
-            "test",
-            db_pool=db_pool,
+        hs = yield setup_test_homeserver(
             http_client=None,
             resource_for_federation=Mock(),
             replication_layer=self.mock_federation,
-            config=self.mock_config,
         )
         hs.handlers = DirectoryHandlers(hs)
 
@@ -65,9 +56,9 @@ class DirectoryTestCase(unittest.TestCase):
 
         self.store = hs.get_datastore()
 
-        self.my_room = hs.parse_roomalias("#my-room:test")
-        self.your_room = hs.parse_roomalias("#your-room:test")
-        self.remote_room = hs.parse_roomalias("#another:remote")
+        self.my_room = RoomAlias.from_string("#my-room:test")
+        self.your_room = RoomAlias.from_string("#your-room:test")
+        self.remote_room = RoomAlias.from_string("#another:remote")
 
     @defer.inlineCallbacks
     def test_get_local_association(self):
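Here, as throughout this commit, parsing moves from homeserver methods (`hs.parse_roomalias`) to standalone types (`RoomAlias.from_string`), so tests no longer need an `hs` just to build identifiers. The real class lives in synapse/types.py and is not shown in this diff; a sketch of the general shape such a type takes:

```python
import collections

class RoomAlias(collections.namedtuple("RoomAlias", ["localpart", "domain"])):
    """Illustrative sketch of a domain-specific ID type, not Synapse's own."""
    SIGIL = "#"

    @classmethod
    def from_string(cls, s):
        if not s.startswith(cls.SIGIL) or ":" not in s[1:]:
            raise ValueError("expected '#localpart:domain', got %r" % (s,))
        localpart, domain = s[1:].split(":", 1)
        return cls(localpart=localpart, domain=domain)

    def to_string(self):
        return "%s%s:%s" % (self.SIGIL, self.localpart, self.domain)

alias = RoomAlias.from_string("#my-room:test")
assert (alias.localpart, alias.domain) == ("my-room", "test")
assert alias.to_string() == "#my-room:test"
```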
diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py
index ed21defd13..c13ade3286 100644
--- a/tests/handlers/test_federation.py
+++ b/tests/handlers/test_federation.py
@@ -19,20 +19,17 @@ from tests import unittest
 from synapse.api.constants import EventTypes
 from synapse.events import FrozenEvent
 from synapse.handlers.federation import FederationHandler
-from synapse.server import HomeServer
 
 from mock import NonCallableMock, ANY, Mock
 
-from ..utils import MockKey
+from ..utils import setup_test_homeserver
 
 
 class FederationTestCase(unittest.TestCase):
 
+    @defer.inlineCallbacks
     def setUp(self):
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
         self.state_handler = NonCallableMock(spec_set=[
             "compute_event_context",
         ])
@@ -43,15 +40,15 @@ class FederationTestCase(unittest.TestCase):
         ])
 
         self.hostname = "test"
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             self.hostname,
-            db_pool=None,
             datastore=NonCallableMock(spec_set=[
                 "persist_event",
                 "store_room",
                 "get_room",
                 "get_destination_retry_timings",
                 "set_destination_retry_timings",
+                "have_events",
             ]),
             resource_for_federation=NonCallableMock(),
             http_client=NonCallableMock(spec_set=[]),
@@ -60,7 +57,6 @@ class FederationTestCase(unittest.TestCase):
                 "room_member_handler",
                 "federation_handler",
             ]),
-            config=self.mock_config,
             auth=self.auth,
             state_handler=self.state_handler,
             keyring=Mock(),
@@ -91,6 +87,10 @@ class FederationTestCase(unittest.TestCase):
         self.datastore.get_room.return_value = defer.succeed(True)
         self.auth.check_host_in_room.return_value = defer.succeed(True)
 
+        def have_events(event_ids):
+            return defer.succeed({})
+        self.datastore.have_events.side_effect = have_events
+
         def annotate(ev, old_state=None):
             context = Mock()
             context.current_state = {}
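The `have_events` stub above shows the usual way to fake an asynchronous datastore call: a `side_effect` that manufactures a fresh `defer.succeed(...)` per call. A shared `return_value` Deferred would be fragile, since callbacks added by the code under test mutate a Deferred's result chain. In miniature:

```python
from mock import Mock
from twisted.internet import defer

datastore = Mock(spec_set=["have_events"])
datastore.have_events.side_effect = lambda event_ids: defer.succeed({})

results = []
datastore.have_events(["$ev1:test"]).addCallback(results.append)
assert results == [{}]    # each call gets its own already-fired Deferred
```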
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index c309fbb054..6ffc3c99cc 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -17,20 +17,19 @@
 from tests import unittest
 from twisted.internet import defer, reactor
 
-from mock import Mock, call, ANY, NonCallableMock, patch
+from mock import Mock, call, ANY, NonCallableMock
 import json
 
 from tests.utils import (
-    MockHttpResource, MockClock, DeferredMockCallable, SQLiteMemoryDbPool,
-    MockKey
+    MockHttpResource, MockClock, DeferredMockCallable, setup_test_homeserver
 )
 
-from synapse.server import HomeServer
 from synapse.api.constants import PresenceState
 from synapse.api.errors import SynapseError
 from synapse.handlers.presence import PresenceHandler, UserPresenceCache
 from synapse.streams.config import SourcePaginationConfig
 from synapse.storage.transactions import DestinationsTable
+from synapse.types import UserID
 
 OFFLINE = PresenceState.OFFLINE
 UNAVAILABLE = PresenceState.UNAVAILABLE
@@ -63,59 +62,50 @@ class JustPresenceHandlers(object):
 class PresenceTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
         self.clock = MockClock()
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
         self.mock_federation_resource = MockHttpResource()
 
         self.mock_http_client = Mock(spec=[])
         self.mock_http_client.put_json = DeferredMockCallable()
 
-        hs = HomeServer("test",
+        hs_kwargs = {}
+        if hasattr(self, "make_datastore_mock"):
+            hs_kwargs["datastore"] = self.make_datastore_mock()
+
+        hs = yield setup_test_homeserver(
             clock=self.clock,
-            db_pool=db_pool,
             handlers=None,
             resource_for_federation=self.mock_federation_resource,
             http_client=self.mock_http_client,
-            config=self.mock_config,
             keyring=Mock(),
+            **hs_kwargs
         )
         hs.handlers = JustPresenceHandlers(hs)
 
-        self.store = hs.get_datastore()
-
-        # Mock the RoomMemberHandler
-        room_member_handler = Mock(spec=[])
-        hs.handlers.room_member_handler = room_member_handler
-
-        # Some local users to test with
-        self.u_apple = hs.parse_userid("@apple:test")
-        self.u_banana = hs.parse_userid("@banana:test")
-        self.u_clementine = hs.parse_userid("@clementine:test")
+        self.datastore = hs.get_datastore()
 
-        for u in self.u_apple, self.u_banana, self.u_clementine:
-            yield self.store.create_presence(u.localpart)
+        self.setUp_roommemberhandler_mocks(hs.handlers)
 
-        yield self.store.set_presence_state(
-            self.u_apple.localpart, {"state": ONLINE, "status_msg": "Online"}
-        )
+        self.handler = hs.get_handlers().presence_handler
+        self.event_source = hs.get_event_sources().sources["presence"]
 
-        # ID of a local user that does not exist
-        self.u_durian = hs.parse_userid("@durian:test")
+        self.distributor = hs.get_distributor()
+        self.distributor.declare("user_joined_room")
 
-        # A remote user
-        self.u_cabbage = hs.parse_userid("@cabbage:elsewhere")
-
-        self.handler = hs.get_handlers().presence_handler
+        yield self.setUp_users(hs)
 
+    def setUp_roommemberhandler_mocks(self, handlers):
         self.room_id = "a-room"
         self.room_members = []
 
+        room_member_handler = handlers.room_member_handler = Mock(spec=[
+            "get_rooms_for_user",
+            "get_room_members",
+            "fetch_room_distributions_into",
+        ])
+        self.room_member_handler = room_member_handler
+
         def get_rooms_for_user(user):
             if user in self.room_members:
                 return defer.succeed([self.room_id])
@@ -130,22 +120,150 @@ class PresenceTestCase(unittest.TestCase):
                 return defer.succeed([])
         room_member_handler.get_room_members = get_room_members
 
+        @defer.inlineCallbacks
+        def fetch_room_distributions_into(room_id, localusers=None,
+                remotedomains=None, ignore_user=None):
+
+            members = yield get_room_members(room_id)
+            for member in members:
+                if ignore_user is not None and member == ignore_user:
+                    continue
+
+                if member.is_mine:
+                    if localusers is not None:
+                        localusers.add(member)
+                else:
+                    if remotedomains is not None:
+                        remotedomains.add(member.domain)
+        room_member_handler.fetch_room_distributions_into = (
+                fetch_room_distributions_into)
+
+        self.setUp_datastore_room_mocks(self.datastore)
+
+    def setUp_datastore_room_mocks(self, datastore):
+        def get_room_hosts(room_id):
+            if room_id == self.room_id:
+                hosts = set([u.domain for u in self.room_members])
+                return defer.succeed(hosts)
+            else:
+                return defer.succeed([])
+        datastore.get_joined_hosts_for_room = get_room_hosts
+
         def user_rooms_intersect(userlist):
             room_member_ids = map(lambda u: u.to_string(), self.room_members)
 
             shared = all(map(lambda i: i in room_member_ids, userlist))
             return defer.succeed(shared)
-        self.store.user_rooms_intersect = user_rooms_intersect
+        datastore.user_rooms_intersect = user_rooms_intersect
 
-        self.mock_start = Mock()
-        self.mock_stop = Mock()
+    @defer.inlineCallbacks
+    def setUp_users(self, hs):
+        # Some local users to test with
+        self.u_apple = UserID.from_string("@apple:test")
+        self.u_banana = UserID.from_string("@banana:test")
+        self.u_clementine = UserID.from_string("@clementine:test")
 
-        self.handler.start_polling_presence = self.mock_start
-        self.handler.stop_polling_presence = self.mock_stop
+        for u in self.u_apple, self.u_banana, self.u_clementine:
+            yield self.datastore.create_presence(u.localpart)
+
+        yield self.datastore.set_presence_state(
+            self.u_apple.localpart, {"state": ONLINE, "status_msg": "Online"}
+        )
+
+        # ID of a local user that does not exist
+        self.u_durian = UserID.from_string("@durian:test")
+
+        # A remote user
+        self.u_cabbage = UserID.from_string("@cabbage:elsewhere")
+
+
+class MockedDatastorePresenceTestCase(PresenceTestCase):
+    def make_datastore_mock(self):
+        datastore = Mock(spec=[
+            # Bits that Federation needs
+            "prep_send_transaction",
+            "delivered_txn",
+            "get_received_txn_response",
+            "set_received_txn_response",
+            "get_destination_retry_timings",
+        ])
+
+        self.setUp_datastore_federation_mocks(datastore)
+        self.setUp_datastore_presence_mocks(datastore)
+
+        return datastore
+
+    def setUp_datastore_federation_mocks(self, datastore):
+        datastore.get_destination_retry_timings.return_value = (
+            defer.succeed(DestinationsTable.EntryType("", 0, 0))
+        )
+
+        def get_received_txn_response(*args):
+            return defer.succeed(None)
+        datastore.get_received_txn_response = get_received_txn_response
+
+    def setUp_datastore_presence_mocks(self, datastore):
+        self.current_user_state = {
+            "apple": OFFLINE,
+            "banana": OFFLINE,
+            "clementine": OFFLINE,
+            "fig": OFFLINE,
+        }
+
+        def get_presence_state(user_localpart):
+            return defer.succeed(
+                    {"state": self.current_user_state[user_localpart],
+                     "status_msg": None,
+                     "mtime": 123456000}
+            )
+        datastore.get_presence_state = get_presence_state
+
+        def set_presence_state(user_localpart, new_state):
+            was = self.current_user_state[user_localpart]
+            self.current_user_state[user_localpart] = new_state["state"]
+            return defer.succeed({"state": was})
+        datastore.set_presence_state = set_presence_state
+
+        def get_presence_list(user_localpart, accepted):
+            if user_localpart not in self.PRESENCE_LIST:
+                return defer.succeed([])
+            return defer.succeed([
+                {"observed_user_id": u} for u in
+                self.PRESENCE_LIST[user_localpart]])
+        datastore.get_presence_list = get_presence_list
+
+        def is_presence_visible(observed_localpart, observer_userid):
+            return True
+        datastore.is_presence_visible = is_presence_visible
+
+    @defer.inlineCallbacks
+    def setUp_users(self, hs):
+        # Some local users to test with
+        self.u_apple = UserID.from_string("@apple:test")
+        self.u_banana = UserID.from_string("@banana:test")
+        self.u_clementine = UserID.from_string("@clementine:test")
+        self.u_durian = UserID.from_string("@durian:test")
+        self.u_elderberry = UserID.from_string("@elderberry:test")
+        self.u_fig = UserID.from_string("@fig:test")
+
+        # Remote user
+        self.u_onion = UserID.from_string("@onion:farm")
+        self.u_potato = UserID.from_string("@potato:remote")
+
+        yield  # no-op: keeps this @defer.inlineCallbacks method a generator
 
 
 class PresenceStateTestCase(PresenceTestCase):
     """ Tests presence management. """
+    @defer.inlineCallbacks
+    def setUp(self):
+        yield super(PresenceStateTestCase, self).setUp()
+
+        self.mock_start = Mock()
+        self.mock_stop = Mock()
+
+        self.handler.start_polling_presence = self.mock_start
+        self.handler.stop_polling_presence = self.mock_stop
 
     @defer.inlineCallbacks
     def test_get_my_state(self):
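Since the base class's `setUp` is now an `@defer.inlineCallbacks` method returning a Deferred, overriding subclasses like the one above must `yield super(...).setUp()` or they may run before the parent's asynchronous initialisation completes. The pattern in miniature:

```python
from twisted.internet import defer

class Base(object):
    @defer.inlineCallbacks
    def setUp(self):
        yield defer.succeed(None)   # stands in for real asynchronous set-up
        self.ready = True

class Child(Base):
    @defer.inlineCallbacks
    def setUp(self):
        yield super(Child, self).setUp()   # omitting the yield skips the wait
        assert self.ready                  # parent finished before we continue

Child().setUp()   # returns an already-fired Deferred in this toy example
```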
@@ -160,7 +278,7 @@ class PresenceStateTestCase(PresenceTestCase):
 
     @defer.inlineCallbacks
     def test_get_allowed_state(self):
-        yield self.store.allow_presence_visible(
+        yield self.datastore.allow_presence_visible(
             observed_localpart=self.u_apple.localpart,
             observer_userid=self.u_banana.to_string(),
         )
@@ -208,7 +326,7 @@ class PresenceStateTestCase(PresenceTestCase):
             {"state": UNAVAILABLE,
              "status_msg": "Away",
              "mtime": 1000000},
-            (yield self.store.get_presence_state(self.u_apple.localpart))
+            (yield self.datastore.get_presence_state(self.u_apple.localpart))
         )
 
         self.mock_start.assert_called_with(self.u_apple,
@@ -227,6 +345,15 @@ class PresenceStateTestCase(PresenceTestCase):
 
 class PresenceInvitesTestCase(PresenceTestCase):
     """ Tests presence management. """
+    @defer.inlineCallbacks
+    def setUp(self):
+        yield super(PresenceInvitesTestCase, self).setUp()
+
+        self.mock_start = Mock()
+        self.mock_stop = Mock()
+
+        self.handler.start_polling_presence = self.mock_start
+        self.handler.stop_polling_presence = self.mock_stop
 
     @defer.inlineCallbacks
     def test_invite_local(self):
@@ -238,10 +365,10 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
         self.assertEquals(
             [{"observed_user_id": "@banana:test", "accepted": 1}],
-            (yield self.store.get_presence_list(self.u_apple.localpart))
+            (yield self.datastore.get_presence_list(self.u_apple.localpart))
         )
         self.assertTrue(
-            (yield self.store.is_presence_visible(
+            (yield self.datastore.is_presence_visible(
                 observed_localpart=self.u_banana.localpart,
                 observer_userid=self.u_apple.to_string(),
             ))
@@ -257,19 +384,23 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
         self.assertEquals(
             [],
-            (yield self.store.get_presence_list(self.u_apple.localpart))
+            (yield self.datastore.get_presence_list(self.u_apple.localpart))
         )
 
     @defer.inlineCallbacks
     def test_invite_remote(self):
+        # Use a different destination, otherwise retry logic might fail the
+        # request
+        u_rocket = UserID.from_string("@rocket:there")
+
         put_json = self.mock_http_client.put_json
         put_json.expect_call_and_return(
-            call("elsewhere",
+            call("there",
                 path="/_matrix/federation/v1/send/1000000/",
-                data=_expect_edu("elsewhere", "m.presence_invite",
+                data=_expect_edu("there", "m.presence_invite",
                     content={
                         "observer_user": "@apple:test",
-                        "observed_user": "@cabbage:elsewhere",
+                        "observed_user": "@rocket:there",
                     }
                 ),
                 json_data_callback=ANY,
@@ -278,11 +409,11 @@ class PresenceInvitesTestCase(PresenceTestCase):
         )
 
         yield self.handler.send_invite(
-                observer_user=self.u_apple, observed_user=self.u_cabbage)
+                observer_user=self.u_apple, observed_user=u_rocket)
 
         self.assertEquals(
-            [{"observed_user_id": "@cabbage:elsewhere", "accepted": 0}],
-            (yield self.store.get_presence_list(self.u_apple.localpart))
+            [{"observed_user_id": "@rocket:there", "accepted": 0}],
+            (yield self.datastore.get_presence_list(self.u_apple.localpart))
         )
 
         yield put_json.await_calls()
@@ -291,13 +422,18 @@ class PresenceInvitesTestCase(PresenceTestCase):
     def test_accept_remote(self):
         # TODO(paul): This test will likely break if/when real auth permissions
         # are added; for now the HS will always accept any invite
+
+        # Use a different destination, otherwise retry logic might fail the
+        # request
+        u_rocket = UserID.from_string("@rocket:moon")
+
         put_json = self.mock_http_client.put_json
         put_json.expect_call_and_return(
-            call("elsewhere",
+            call("moon",
                 path="/_matrix/federation/v1/send/1000000/",
-                data=_expect_edu("elsewhere", "m.presence_accept",
+                data=_expect_edu("moon", "m.presence_accept",
                     content={
-                        "observer_user": "@cabbage:elsewhere",
+                        "observer_user": "@rocket:moon",
                         "observed_user": "@apple:test",
                     }
                 ),
@@ -310,16 +446,16 @@ class PresenceInvitesTestCase(PresenceTestCase):
             "/_matrix/federation/v1/send/1000000/",
             _make_edu_json("elsewhere", "m.presence_invite",
                 content={
-                    "observer_user": "@cabbage:elsewhere",
+                    "observer_user": "@rocket:moon",
                     "observed_user": "@apple:test",
                 }
             )
         )
 
         self.assertTrue(
-            (yield self.store.is_presence_visible(
+            (yield self.datastore.is_presence_visible(
                 observed_localpart=self.u_apple.localpart,
-                observer_userid=self.u_cabbage.to_string(),
+                observer_userid=u_rocket.to_string(),
             ))
         )
 
@@ -327,13 +463,17 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
     @defer.inlineCallbacks
     def test_invited_remote_nonexistant(self):
+        # Use a different destination, otherwise retry logic might fail the
+        # request
+        u_rocket = UserID.from_string("@rocket:sun")
+
         put_json = self.mock_http_client.put_json
         put_json.expect_call_and_return(
-            call("elsewhere",
+            call("sun",
                 path="/_matrix/federation/v1/send/1000000/",
-                data=_expect_edu("elsewhere", "m.presence_deny",
+                data=_expect_edu("sun", "m.presence_deny",
                     content={
-                        "observer_user": "@cabbage:elsewhere",
+                        "observer_user": "@rocket:sun",
                         "observed_user": "@durian:test",
                     }
                 ),
@@ -344,9 +484,9 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
         yield self.mock_federation_resource.trigger("PUT",
             "/_matrix/federation/v1/send/1000000/",
-            _make_edu_json("elsewhere", "m.presence_invite",
+            _make_edu_json("sun", "m.presence_invite",
                 content={
-                    "observer_user": "@cabbage:elsewhere",
+                    "observer_user": "@rocket:sun",
                     "observed_user": "@durian:test",
                 }
             )
@@ -356,7 +496,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
     @defer.inlineCallbacks
     def test_accepted_remote(self):
-        yield self.store.add_presence_list_pending(
+        yield self.datastore.add_presence_list_pending(
             observer_localpart=self.u_apple.localpart,
             observed_userid=self.u_cabbage.to_string(),
         )
@@ -373,7 +513,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
         self.assertEquals(
             [{"observed_user_id": "@cabbage:elsewhere", "accepted": 1}],
-            (yield self.store.get_presence_list(self.u_apple.localpart))
+            (yield self.datastore.get_presence_list(self.u_apple.localpart))
         )
 
         self.mock_start.assert_called_with(
@@ -381,7 +521,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
     @defer.inlineCallbacks
     def test_denied_remote(self):
-        yield self.store.add_presence_list_pending(
+        yield self.datastore.add_presence_list_pending(
             observer_localpart=self.u_apple.localpart,
             observed_userid="@eggplant:elsewhere",
         )
@@ -398,16 +538,16 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
         self.assertEquals(
             [],
-            (yield self.store.get_presence_list(self.u_apple.localpart))
+            (yield self.datastore.get_presence_list(self.u_apple.localpart))
         )
 
     @defer.inlineCallbacks
     def test_drop_local(self):
-        yield self.store.add_presence_list_pending(
+        yield self.datastore.add_presence_list_pending(
             observer_localpart=self.u_apple.localpart,
             observed_userid=self.u_banana.to_string(),
         )
-        yield self.store.set_presence_list_accepted(
+        yield self.datastore.set_presence_list_accepted(
             observer_localpart=self.u_apple.localpart,
             observed_userid=self.u_banana.to_string(),
         )
@@ -419,7 +559,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
         self.assertEquals(
             [],
-            (yield self.store.get_presence_list(self.u_apple.localpart))
+            (yield self.datastore.get_presence_list(self.u_apple.localpart))
         )
 
         self.mock_stop.assert_called_with(
@@ -427,11 +567,11 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
     @defer.inlineCallbacks
     def test_drop_remote(self):
-        yield self.store.add_presence_list_pending(
+        yield self.datastore.add_presence_list_pending(
             observer_localpart=self.u_apple.localpart,
             observed_userid=self.u_cabbage.to_string(),
         )
-        yield self.store.set_presence_list_accepted(
+        yield self.datastore.set_presence_list_accepted(
             observer_localpart=self.u_apple.localpart,
             observed_userid=self.u_cabbage.to_string(),
         )
@@ -443,16 +583,16 @@ class PresenceInvitesTestCase(PresenceTestCase):
 
         self.assertEquals(
             [],
-            (yield self.store.get_presence_list(self.u_apple.localpart))
+            (yield self.datastore.get_presence_list(self.u_apple.localpart))
         )
 
     @defer.inlineCallbacks
     def test_get_presence_list(self):
-        yield self.store.add_presence_list_pending(
+        yield self.datastore.add_presence_list_pending(
             observer_localpart=self.u_apple.localpart,
             observed_userid=self.u_banana.to_string(),
         )
-        yield self.store.set_presence_list_accepted(
+        yield self.datastore.set_presence_list_accepted(
             observer_localpart=self.u_apple.localpart,
             observed_userid=self.u_banana.to_string(),
         )
@@ -467,7 +607,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
         ], presence)
 
 
-class PresencePushTestCase(unittest.TestCase):
+class PresencePushTestCase(MockedDatastorePresenceTestCase):
     """ Tests steady-state presence status updates.
 
     They assert that presence state update messages are pushed around the place
@@ -477,139 +617,9 @@ class PresencePushTestCase(unittest.TestCase):
     presence handler; namely the _local_pushmap and _remote_recvmap.
     BE WARNED...
     """
-    def setUp(self):
-        self.clock = MockClock()
-
-        self.mock_http_client = Mock(spec=[])
-        self.mock_http_client.put_json = DeferredMockCallable()
-
-        self.mock_federation_resource = MockHttpResource()
-
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer("test",
-                clock=self.clock,
-                db_pool=None,
-                datastore=Mock(spec=[
-                    "set_presence_state",
-                    "get_joined_hosts_for_room",
-
-                    # Bits that Federation needs
-                    "prep_send_transaction",
-                    "delivered_txn",
-                    "get_received_txn_response",
-                    "set_received_txn_response",
-                    "get_destination_retry_timings",
-                ]),
-                handlers=None,
-                resource_for_client=Mock(),
-                resource_for_federation=self.mock_federation_resource,
-                http_client=self.mock_http_client,
-                config=self.mock_config,
-                keyring=Mock(),
-            )
-        hs.handlers = JustPresenceHandlers(hs)
-
-        self.datastore = hs.get_datastore()
-        self.datastore.get_destination_retry_timings.return_value = (
-            defer.succeed(DestinationsTable.EntryType("", 0, 0))
-        )
-
-        def get_received_txn_response(*args):
-            return defer.succeed(None)
-        self.datastore.get_received_txn_response = get_received_txn_response
-
-        self.handler = hs.get_handlers().presence_handler
-        self.event_source = hs.get_event_sources().sources["presence"]
-
-        # Mock the RoomMemberHandler
-        hs.handlers.room_member_handler = Mock(spec=[
-            "get_rooms_for_user",
-            "get_room_members",
-        ])
-        self.room_member_handler = hs.handlers.room_member_handler
-
-        self.room_id = "a-room"
-        self.room_members = []
-
-        def get_rooms_for_user(user):
-            if user in self.room_members:
-                return defer.succeed([self.room_id])
-            else:
-                return defer.succeed([])
-        self.room_member_handler.get_rooms_for_user = get_rooms_for_user
-
-        def get_room_members(room_id):
-            if room_id == self.room_id:
-                return defer.succeed(self.room_members)
-            else:
-                return defer.succeed([])
-        self.room_member_handler.get_room_members = get_room_members
-
-        def get_room_hosts(room_id):
-            if room_id == self.room_id:
-                hosts = set([u.domain for u in self.room_members])
-                return defer.succeed(hosts)
-            else:
-                return defer.succeed([])
-        self.datastore.get_joined_hosts_for_room = get_room_hosts
-
-        def user_rooms_intersect(userlist):
-            room_member_ids = map(lambda u: u.to_string(), self.room_members)
-
-            shared = all(map(lambda i: i in room_member_ids, userlist))
-            return defer.succeed(shared)
-        self.datastore.user_rooms_intersect = user_rooms_intersect
-
-        @defer.inlineCallbacks
-        def fetch_room_distributions_into(room_id, localusers=None,
-                remotedomains=None, ignore_user=None):
-
-            members = yield get_room_members(room_id)
-            for member in members:
-                if ignore_user is not None and member == ignore_user:
-                    continue
-
-                if member.is_mine:
-                    if localusers is not None:
-                        localusers.add(member)
-                else:
-                    if remotedomains is not None:
-                        remotedomains.add(member.domain)
-        self.room_member_handler.fetch_room_distributions_into = (
-                fetch_room_distributions_into)
-
-        def get_presence_list(user_localpart, accepted=None):
-            if user_localpart == "apple":
-                return defer.succeed([
-                    {"observed_user_id": "@banana:test"},
-                    {"observed_user_id": "@clementine:test"},
-                ])
-            else:
-                return defer.succeed([])
-        self.datastore.get_presence_list = get_presence_list
-
-        def is_presence_visible(observer_userid, observed_localpart):
-            if (observed_localpart == "clementine" and
-                observer_userid == "@banana:test"):
-                return False
-            return False
-        self.datastore.is_presence_visible = is_presence_visible
-
-        self.distributor = hs.get_distributor()
-        self.distributor.declare("user_joined_room")
-
-        # Some local users to test with
-        self.u_apple = hs.parse_userid("@apple:test")
-        self.u_banana = hs.parse_userid("@banana:test")
-        self.u_clementine = hs.parse_userid("@clementine:test")
-        self.u_durian = hs.parse_userid("@durian:test")
-        self.u_elderberry = hs.parse_userid("@elderberry:test")
-
-        # Remote user
-        self.u_onion = hs.parse_userid("@onion:farm")
-        self.u_potato = hs.parse_userid("@potato:remote")
+    PRESENCE_LIST = {
+            'apple': [ "@banana:test", "@clementine:test" ],
+    }
 
     @defer.inlineCallbacks
     def test_push_local(self):
@@ -982,7 +992,7 @@ class PresencePushTestCase(unittest.TestCase):
         put_json.await_calls()
 
 
-class PresencePollingTestCase(unittest.TestCase):
+class PresencePollingTestCase(MockedDatastorePresenceTestCase):
     """ Tests presence status polling. """
 
     # For this test, we have three local users; apple is watching and is
@@ -995,106 +1005,18 @@ class PresencePollingTestCase(unittest.TestCase):
             'fig': [ "@potato:remote" ],
     }
 
-
+    @defer.inlineCallbacks
     def setUp(self):
-        self.mock_http_client = Mock(spec=[])
-        self.mock_http_client.put_json = DeferredMockCallable()
-
-        self.mock_federation_resource = MockHttpResource()
-
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer("test",
-                clock=MockClock(),
-                db_pool=None,
-                datastore=Mock(spec=[
-                    # Bits that Federation needs
-                    "prep_send_transaction",
-                    "delivered_txn",
-                    "get_received_txn_response",
-                    "set_received_txn_response",
-                    "get_destination_retry_timings",
-                ]),
-                handlers=None,
-                resource_for_client=Mock(),
-                resource_for_federation=self.mock_federation_resource,
-                http_client=self.mock_http_client,
-                config=self.mock_config,
-                keyring=Mock(),
-            )
-        hs.handlers = JustPresenceHandlers(hs)
-
-        self.datastore = hs.get_datastore()
-        self.datastore.get_destination_retry_timings.return_value = (
-            defer.succeed(DestinationsTable.EntryType("", 0, 0))
-        )
-
-        def get_received_txn_response(*args):
-            return defer.succeed(None)
-        self.datastore.get_received_txn_response = get_received_txn_response
+        yield super(PresencePollingTestCase, self).setUp()
 
         self.mock_update_client = Mock()
 
         def update(*args,**kwargs):
-            # print "mock_update_client: Args=%s, kwargs=%s" %(args, kwargs,)
             return defer.succeed(None)
-
         self.mock_update_client.side_effect = update
 
-        self.handler = hs.get_handlers().presence_handler
         self.handler.push_update_to_clients = self.mock_update_client
 
-        hs.handlers.room_member_handler = Mock(spec=[
-            "get_rooms_for_user",
-        ])
-        # For this test no users are ever in rooms
-        def get_rooms_for_user(user):
-            return defer.succeed([])
-        hs.handlers.room_member_handler.get_rooms_for_user = get_rooms_for_user
-
-        # Mocked database state
-        # Local users always start offline
-        self.current_user_state = {
-            "apple": OFFLINE,
-            "banana": OFFLINE,
-            "clementine": OFFLINE,
-            "fig": OFFLINE,
-        }
-
-        def get_presence_state(user_localpart):
-            return defer.succeed(
-                    {"state": self.current_user_state[user_localpart],
-                     "status_msg": None,
-                     "mtime": 123456000}
-            )
-        self.datastore.get_presence_state = get_presence_state
-
-        def set_presence_state(user_localpart, new_state):
-            was = self.current_user_state[user_localpart]
-            self.current_user_state[user_localpart] = new_state["state"]
-            return defer.succeed({"state": was})
-        self.datastore.set_presence_state = set_presence_state
-
-        def get_presence_list(user_localpart, accepted):
-            return defer.succeed([
-                {"observed_user_id": u} for u in
-                self.PRESENCE_LIST[user_localpart]])
-        self.datastore.get_presence_list = get_presence_list
-
-        def is_presence_visible(observed_localpart, observer_userid):
-            return True
-        self.datastore.is_presence_visible = is_presence_visible
-
-        # Local users
-        self.u_apple = hs.parse_userid("@apple:test")
-        self.u_banana = hs.parse_userid("@banana:test")
-        self.u_clementine = hs.parse_userid("@clementine:test")
-        self.u_fig = hs.parse_userid("@fig:test")
-
-        # Remote users
-        self.u_potato = hs.parse_userid("@potato:remote")
-
     @defer.inlineCallbacks
     def test_push_local(self):
         # apple goes online
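The new `MockedDatastorePresenceTestCase` turns the shared fixture into a template: subclasses supply a class-level `PRESENCE_LIST` and, via the `make_datastore_mock` hook, get a datastore whose `get_presence_list` is derived from it. A stripped-down version of the same pattern:

```python
from mock import Mock
from twisted.internet import defer

class PresenceFixture(object):
    PRESENCE_LIST = {}          # subclasses override this class attribute

    def make_datastore_mock(self):
        datastore = Mock(spec_set=["get_presence_list"])

        def get_presence_list(user_localpart, accepted=None):
            observed = self.PRESENCE_LIST.get(user_localpart, [])
            return defer.succeed([{"observed_user_id": u} for u in observed])
        datastore.get_presence_list = get_presence_list
        return datastore

class PushFixture(PresenceFixture):
    PRESENCE_LIST = {"apple": ["@banana:test", "@clementine:test"]}

store = PushFixture().make_datastore_mock()
results = []
store.get_presence_list("apple").addCallback(results.append)
assert results == [[{"observed_user_id": "@banana:test"},
                    {"observed_user_id": "@clementine:test"}]]
```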
diff --git a/tests/handlers/test_presencelike.py b/tests/handlers/test_presencelike.py
index 0584e4c8b9..18cac9a846 100644
--- a/tests/handlers/test_presencelike.py
+++ b/tests/handlers/test_presencelike.py
@@ -21,12 +21,12 @@ from twisted.internet import defer
 
 from mock import Mock, call, ANY, NonCallableMock
 
-from ..utils import MockClock, MockKey
+from ..utils import MockClock, setup_test_homeserver
 
-from synapse.server import HomeServer
 from synapse.api.constants import PresenceState
 from synapse.handlers.presence import PresenceHandler
 from synapse.handlers.profile import ProfileHandler
+from synapse.types import UserID
 
 
 OFFLINE = PresenceState.OFFLINE
@@ -56,29 +56,23 @@ class PresenceAndProfileHandlers(object):
 
 class PresenceProfilelikeDataTestCase(unittest.TestCase):
 
+    @defer.inlineCallbacks
     def setUp(self):
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer("test",
-                clock=MockClock(),
-                db_pool=None,
-                datastore=Mock(spec=[
-                    "set_presence_state",
-                    "is_presence_visible",
-
-                    "set_profile_displayname",
-
-                    "get_rooms_for_user_where_membership_is",
-                ]),
-                handlers=None,
-                resource_for_federation=Mock(),
-                http_client=None,
-                replication_layer=MockReplication(),
-                ratelimiter=NonCallableMock(spec_set=[
+        hs = yield setup_test_homeserver(
+            clock=MockClock(),
+            datastore=Mock(spec=[
+                "set_presence_state",
+                "is_presence_visible",
+                "set_profile_displayname",
+                "get_rooms_for_user_where_membership_is",
+            ]),
+            handlers=None,
+            resource_for_federation=Mock(),
+            http_client=None,
+            replication_layer=MockReplication(),
+            ratelimiter=NonCallableMock(spec_set=[
                 "send_message",
-                ]),
-                config=self.mock_config
+            ]),
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -136,12 +130,12 @@ class PresenceProfilelikeDataTestCase(unittest.TestCase):
                 lambda u: defer.succeed([]))
 
         # Some local users to test with
-        self.u_apple = hs.parse_userid("@apple:test")
-        self.u_banana = hs.parse_userid("@banana:test")
-        self.u_clementine = hs.parse_userid("@clementine:test")
+        self.u_apple = UserID.from_string("@apple:test")
+        self.u_banana = UserID.from_string("@banana:test")
+        self.u_clementine = UserID.from_string("@clementine:test")
 
         # Remote user
-        self.u_potato = hs.parse_userid("@potato:remote")
+        self.u_potato = UserID.from_string("@potato:remote")
 
         self.mock_get_joined = (
             self.datastore.get_rooms_for_user_where_membership_is
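The ratelimiter double used in these setUps, `NonCallableMock(spec_set=["send_message"])` with `send_message.return_value = (True, 0)`, is deliberately narrow: `spec_set` permits exactly the listed attributes and turns any other access, including typos, into an `AttributeError`. (Reading the tuple as `(allowed, retry_after)` is an assumption; the tests only unpack it.) For example:

```python
from mock import NonCallableMock

ratelimiter = NonCallableMock(spec_set=["send_message"])
ratelimiter.send_message.return_value = (True, 0)   # assumed (allowed, retry_after)

allowed, retry_after = ratelimiter.send_message()
assert allowed is True

try:
    ratelimiter.send_messages()    # misspelled method
except AttributeError:
    pass                           # spec_set turns the typo into an error
```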
diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py
index 25b172aa5e..31f03d73df 100644
--- a/tests/handlers/test_profile.py
+++ b/tests/handlers/test_profile.py
@@ -20,11 +20,10 @@ from twisted.internet import defer
 from mock import Mock, NonCallableMock
 
 from synapse.api.errors import AuthError
-from synapse.server import HomeServer
 from synapse.handlers.profile import ProfileHandler
-from synapse.api.constants import Membership
+from synapse.types import UserID
 
-from tests.utils import SQLiteMemoryDbPool, MockKey
+from tests.utils import setup_test_homeserver
 
 
 class ProfileHandlers(object):
@@ -46,23 +45,15 @@ class ProfileTestCase(unittest.TestCase):
             self.query_handlers[query_type] = handler
         self.mock_federation.register_query_handler = register_query_handler
 
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer("test",
-                db_pool=db_pool,
-                http_client=None,
-                handlers=None,
-                resource_for_federation=Mock(),
-                replication_layer=self.mock_federation,
-                config=self.mock_config,
-                ratelimiter=NonCallableMock(spec_set=[
-                    "send_message",
-                ])
-            )
+        hs = yield setup_test_homeserver(
+            http_client=None,
+            handlers=None,
+            resource_for_federation=Mock(),
+            replication_layer=self.mock_federation,
+            ratelimiter=NonCallableMock(spec_set=[
+                "send_message",
+            ])
+        )
 
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -71,9 +62,9 @@ class ProfileTestCase(unittest.TestCase):
 
         self.store = hs.get_datastore()
 
-        self.frank = hs.parse_userid("@1234ABCD:test")
-        self.bob   = hs.parse_userid("@4567:test")
-        self.alice = hs.parse_userid("@alice:remote")
+        self.frank = UserID.from_string("@1234ABCD:test")
+        self.bob   = UserID.from_string("@4567:test")
+        self.alice = UserID.from_string("@alice:remote")
 
         yield self.store.create_profile(self.frank.localpart)
 
diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py
index d3253b48b8..6417f73309 100644
--- a/tests/handlers/test_room.py
+++ b/tests/handlers/test_room.py
@@ -15,27 +15,24 @@
 
 
 from twisted.internet import defer
-from tests import unittest
+from .. import unittest
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.handlers.room import RoomMemberHandler, RoomCreationHandler
 from synapse.handlers.profile import ProfileHandler
-from synapse.server import HomeServer
-from ..utils import MockKey
+from synapse.types import UserID
+from ..utils import setup_test_homeserver
 
 from mock import Mock, NonCallableMock
 
 
 class RoomMemberHandlerTestCase(unittest.TestCase):
 
+    @defer.inlineCallbacks
     def setUp(self):
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
         self.hostname = "red"
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             self.hostname,
-            db_pool=None,
             ratelimiter=NonCallableMock(spec_set=[
                 "send_message",
             ]),
@@ -63,7 +60,6 @@ class RoomMemberHandlerTestCase(unittest.TestCase):
                 "compute_event_context",
                 "get_current_state",
             ]),
-            config=self.mock_config,
         )
 
         self.federation = NonCallableMock(spec_set=[
@@ -164,7 +160,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase):
             event, context=context,
         )
         self.notifier.on_new_room_event.assert_called_once_with(
-            event, extra_users=[self.hs.parse_userid(target_user_id)]
+            event, extra_users=[UserID.from_string(target_user_id)]
         )
         self.assertFalse(self.datastore.get_room.called)
         self.assertFalse(self.datastore.store_room.called)
@@ -174,7 +170,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase):
     def test_simple_join(self):
         room_id = "!foo:red"
         user_id = "@bob:red"
-        user = self.hs.parse_userid(user_id)
+        user = UserID.from_string(user_id)
 
         join_signal_observer = Mock()
         self.distributor.observe("user_joined_room", join_signal_observer)
@@ -252,7 +248,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase):
     def test_simple_leave(self):
         room_id = "!foo:red"
         user_id = "@bob:red"
-        user = self.hs.parse_userid(user_id)
+        user = UserID.from_string(user_id)
 
         builder = self.hs.get_event_builder_factory().new({
             "type": EventTypes.Member,
@@ -318,15 +314,12 @@ class RoomMemberHandlerTestCase(unittest.TestCase):
 
 class RoomCreationTest(unittest.TestCase):
 
+    @defer.inlineCallbacks
     def setUp(self):
         self.hostname = "red"
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             self.hostname,
-            db_pool=None,
             datastore=NonCallableMock(spec_set=[
                 "store_room",
                 "snapshot_room",
@@ -343,7 +336,6 @@ class RoomCreationTest(unittest.TestCase):
             ratelimiter=NonCallableMock(spec_set=[
                 "send_message",
             ]),
-            config=self.mock_config,
         )
 
         self.federation = NonCallableMock(spec_set=[
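`RoomMemberHandlerTestCase` above observes the `user_joined_room` signal through the distributor. Synapse's real `Distributor` is richer (deferred lists, error handling), but the observer pattern the tests depend on reduces to something like:

```python
class MiniDistributor(object):
    """Illustrative observer registry, not Synapse's Distributor."""

    def __init__(self):
        self.signals = {}

    def declare(self, name):
        self.signals.setdefault(name, [])

    def observe(self, name, observer):
        self.signals[name].append(observer)

    def fire(self, name, *args, **kwargs):
        for observer in self.signals[name]:
            observer(*args, **kwargs)

d = MiniDistributor()
d.declare("user_joined_room")
seen = []
d.observe("user_joined_room", lambda user, room_id: seen.append((user, room_id)))
d.fire("user_joined_room", "@bob:red", "!foo:red")
assert seen == [("@bob:red", "!foo:red")]
```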
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py
index 6a498b23a4..bf34b7ccbd 100644
--- a/tests/handlers/test_typing.py
+++ b/tests/handlers/test_typing.py
@@ -20,13 +20,15 @@ from twisted.internet import defer
 from mock import Mock, call, ANY
 import json
 
-from ..utils import MockHttpResource, MockClock, DeferredMockCallable, MockKey
+from ..utils import (
+    MockHttpResource, MockClock, DeferredMockCallable, setup_test_homeserver
+)
 
 from synapse.api.errors import AuthError
-from synapse.server import HomeServer
 from synapse.handlers.typing import TypingNotificationHandler
 
 from synapse.storage.transactions import DestinationsTable
+from synapse.types import UserID
 
 
 def _expect_edu(destination, edu_type, content, origin="test"):
@@ -55,6 +57,7 @@ class JustTypingNotificationHandlers(object):
 
 class TypingNotificationsTestCase(unittest.TestCase):
     """Tests typing notifications to rooms."""
+    @defer.inlineCallbacks
     def setUp(self):
         self.clock = MockClock()
 
@@ -63,34 +66,29 @@ class TypingNotificationsTestCase(unittest.TestCase):
 
         self.mock_federation_resource = MockHttpResource()
 
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
-
         mock_notifier = Mock(spec=["on_new_user_event"])
         self.on_new_user_event = mock_notifier.on_new_user_event
 
         self.auth = Mock(spec=[])
 
-        hs = HomeServer("test",
-                auth=self.auth,
-                clock=self.clock,
-                db_pool=None,
-                datastore=Mock(spec=[
-                    # Bits that Federation needs
-                    "prep_send_transaction",
-                    "delivered_txn",
-                    "get_received_txn_response",
-                    "set_received_txn_response",
-                    "get_destination_retry_timings",
-                ]),
-                handlers=None,
-                notifier=mock_notifier,
-                resource_for_client=Mock(),
-                resource_for_federation=self.mock_federation_resource,
-                http_client=self.mock_http_client,
-                config=self.mock_config,
-                keyring=Mock(),
-            )
+        hs = yield setup_test_homeserver(
+            auth=self.auth,
+            clock=self.clock,
+            datastore=Mock(spec=[
+                # Bits that Federation needs
+                "prep_send_transaction",
+                "delivered_txn",
+                "get_received_txn_response",
+                "set_received_txn_response",
+                "get_destination_retry_timings",
+            ]),
+            handlers=None,
+            notifier=mock_notifier,
+            resource_for_client=Mock(),
+            resource_for_federation=self.mock_federation_resource,
+            http_client=self.mock_http_client,
+            keyring=Mock(),
+        )
         hs.handlers = JustTypingNotificationHandlers(hs)
 
         self.handler = hs.get_handlers().typing_notification_handler
@@ -153,11 +151,11 @@ class TypingNotificationsTestCase(unittest.TestCase):
         self.auth.check_joined_room = check_joined_room
 
         # Some local users to test with
-        self.u_apple = hs.parse_userid("@apple:test")
-        self.u_banana = hs.parse_userid("@banana:test")
+        self.u_apple = UserID.from_string("@apple:test")
+        self.u_banana = UserID.from_string("@banana:test")
 
         # Remote user
-        self.u_onion = hs.parse_userid("@onion:farm")
+        self.u_onion = UserID.from_string("@onion:farm")
 
     @defer.inlineCallbacks
     def test_started_typing_local(self):
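These typing tests, like the presence tests earlier, build expected federation payloads with `_expect_edu` / `_make_edu_json`. Their bodies fall outside this excerpt; a plausible reconstruction (the exact field set is an assumption, and `destination` appears only for call-site symmetry):

```python
import json

def _expect_edu(destination, edu_type, content, origin="test"):
    # A federation transaction body carrying a single EDU; the value
    # 1000000 matches the MockClock time used by these tests.
    return {
        "origin": origin,
        "origin_server_ts": 1000000,
        "pdus": [],
        "edus": [
            {"edu_type": edu_type, "content": content},
        ],
    }

def _make_edu_json(origin, edu_type, content):
    # Serialised form, as delivered to the federation /send/ endpoint.
    return json.dumps(_expect_edu("test", edu_type, content, origin=origin))
```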
diff --git a/tests/rest/__init__.py b/tests/rest/__init__.py
index 9bff9ec169..1a84d94cd9 100644
--- a/tests/rest/__init__.py
+++ b/tests/rest/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,4 +12,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
diff --git a/tests/rest/client/__init__.py b/tests/rest/client/__init__.py
new file mode 100644
index 0000000000..1a84d94cd9
--- /dev/null
+++ b/tests/rest/client/__init__.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/synapse/http/agent_name.py b/tests/rest/client/v1/__init__.py
index d761890863..9bff9ec169 100644
--- a/synapse/http/agent_name.py
+++ b/tests/rest/client/v1/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014, 2015 OpenMarket Ltd
+# Copyright 2014 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,6 +13,3 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse import __version__
-
-AGENT_NAME = ("Synapse/%s" % (__version__,)).encode("ascii")
diff --git a/tests/rest/test_events.py b/tests/rest/client/v1/test_events.py
index d3159e2cf4..36b0f2ff6d 100644
--- a/tests/rest/test_events.py
+++ b/tests/rest/client/v1/test_events.py
@@ -19,13 +19,12 @@ from tests import unittest
 # twisted imports
 from twisted.internet import defer
 
-import synapse.rest.events
-import synapse.rest.register
-import synapse.rest.room
+import synapse.rest.client.v1.events
+import synapse.rest.client.v1.register
+import synapse.rest.client.v1.room
 
-from synapse.server import HomeServer
 
-from ..utils import MockHttpResource, SQLiteMemoryDbPool, MockKey
+from ....utils import MockHttpResource, setup_test_homeserver
 from .utils import RestTestCase
 
 from mock import Mock, NonCallableMock
@@ -113,15 +112,7 @@ class EventStreamPermissionsTestCase(RestTestCase):
     def setUp(self):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
-            "test",
-            db_pool=db_pool,
+        hs = yield setup_test_homeserver(
             http_client=None,
             replication_layer=Mock(),
             clock=Mock(spec=[
@@ -133,20 +124,20 @@ class EventStreamPermissionsTestCase(RestTestCase):
             ratelimiter=NonCallableMock(spec_set=[
                 "send_message",
             ]),
-            config=self.mock_config,
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
         hs.config.enable_registration_captcha = False
+        hs.config.disable_registration = False
 
         hs.get_handlers().federation_handler = Mock()
 
         hs.get_clock().time_msec.return_value = 1000000
         hs.get_clock().time.return_value = 1000
 
-        synapse.rest.register.register_servlets(hs, self.mock_resource)
-        synapse.rest.events.register_servlets(hs, self.mock_resource)
-        synapse.rest.room.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.register.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.events.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
 
         # register an account
         self.user_id = "sid1"
diff --git a/tests/rest/test_presence.py b/tests/rest/client/v1/test_presence.py
index 769c7824bc..5f2ef64efc 100644
--- a/tests/rest/test_presence.py
+++ b/tests/rest/client/v1/test_presence.py
@@ -20,11 +20,13 @@ from twisted.internet import defer
 
 from mock import Mock
 
-from ..utils import MockHttpResource, MockKey
+from ....utils import MockHttpResource, setup_test_homeserver
 
 from synapse.api.constants import PresenceState
 from synapse.handlers.presence import PresenceHandler
-from synapse.server import HomeServer
+from synapse.rest.client.v1 import presence
+from synapse.rest.client.v1 import events
+from synapse.types import UserID
 
 
 OFFLINE = PresenceState.OFFLINE
@@ -43,12 +45,10 @@ class JustPresenceHandlers(object):
 
 class PresenceStateTestCase(unittest.TestCase):
 
+    @defer.inlineCallbacks
     def setUp(self):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
-        hs = HomeServer("test",
-            db_pool=None,
+        hs = yield setup_test_homeserver(
             datastore=Mock(spec=[
                 "get_presence_state",
                 "set_presence_state",
@@ -57,11 +57,11 @@ class PresenceStateTestCase(unittest.TestCase):
             http_client=None,
             resource_for_client=self.mock_resource,
             resource_for_federation=self.mock_resource,
-            config=self.mock_config,
         )
         hs.handlers = JustPresenceHandlers(hs)
 
         self.datastore = hs.get_datastore()
+        self.datastore.get_app_service_by_token = Mock(return_value=None)
 
         def get_presence_list(*a, **kw):
             return defer.succeed([])
@@ -69,9 +69,10 @@ class PresenceStateTestCase(unittest.TestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(myid),
+                "user": UserID.from_string(myid),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
 
         hs.get_auth().get_user_by_token = _get_user_by_token
@@ -86,9 +87,9 @@ class PresenceStateTestCase(unittest.TestCase):
             return defer.succeed([])
         room_member_handler.get_rooms_for_user = get_rooms_for_user
 
-        hs.register_servlets()
+        presence.register_servlets(hs, self.mock_resource)
 
-        self.u_apple = hs.parse_userid(myid)
+        self.u_apple = UserID.from_string(myid)
 
     @defer.inlineCallbacks
     def test_get_my_status(self):
@@ -124,13 +125,11 @@ class PresenceStateTestCase(unittest.TestCase):
 
 class PresenceListTestCase(unittest.TestCase):
 
+    @defer.inlineCallbacks
     def setUp(self):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
 
-        hs = HomeServer("test",
-            db_pool=None,
+        hs = yield setup_test_homeserver(
             datastore=Mock(spec=[
                 "has_presence_state",
                 "get_presence_state",
@@ -145,11 +144,11 @@ class PresenceListTestCase(unittest.TestCase):
             http_client=None,
             resource_for_client=self.mock_resource,
             resource_for_federation=self.mock_resource,
-            config=self.mock_config,
         )
         hs.handlers = JustPresenceHandlers(hs)
 
         self.datastore = hs.get_datastore()
+        self.datastore.get_app_service_by_token = Mock(return_value=None)
 
         def has_presence_state(user_localpart):
             return defer.succeed(
@@ -159,12 +158,13 @@ class PresenceListTestCase(unittest.TestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(myid),
+                "user": UserID.from_string(myid),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
 
-        room_member_handler = hs.handlers.room_member_handler = Mock(
+        hs.handlers.room_member_handler = Mock(
             spec=[
                 "get_rooms_for_user",
             ]
@@ -172,10 +172,10 @@ class PresenceListTestCase(unittest.TestCase):
 
         hs.get_auth().get_user_by_token = _get_user_by_token
 
-        hs.register_servlets()
+        presence.register_servlets(hs, self.mock_resource)
 
-        self.u_apple = hs.parse_userid("@apple:test")
-        self.u_banana = hs.parse_userid("@banana:test")
+        self.u_apple = UserID.from_string("@apple:test")
+        self.u_banana = UserID.from_string("@banana:test")
 
     @defer.inlineCallbacks
     def test_get_my_list(self):
@@ -237,12 +237,10 @@ class PresenceListTestCase(unittest.TestCase):
 
 
 class PresenceEventStreamTestCase(unittest.TestCase):
+    @defer.inlineCallbacks
     def setUp(self):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
 
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
-
         # HIDEOUS HACKERY
         # TODO(paul): This should be injected in via the HomeServer DI system
         from synapse.streams.events import (
@@ -259,8 +257,7 @@ class PresenceEventStreamTestCase(unittest.TestCase):
         }
         EventSources.SOURCE_TYPES["presence"] = PresenceEventSource
 
-        hs = HomeServer("test",
-            db_pool=None,
+        hs = yield setup_test_homeserver(
             http_client=None,
             resource_for_client=self.mock_resource,
             resource_for_federation=self.mock_resource,
@@ -273,17 +270,17 @@ class PresenceEventStreamTestCase(unittest.TestCase):
                 "cancel_call_later",
                 "time_msec",
             ]),
-            config=self.mock_config,
         )
 
         hs.get_clock().time_msec.return_value = 1000000
 
         def _get_user_by_req(req=None):
-            return hs.parse_userid(myid)
+            return (UserID.from_string(myid), "")
 
         hs.get_auth().get_user_by_req = _get_user_by_req
 
-        hs.register_servlets()
+        presence.register_servlets(hs, self.mock_resource)
+        events.register_servlets(hs, self.mock_resource)
 
         hs.handlers.room_member_handler = Mock(spec=[])
 
@@ -297,6 +294,10 @@ class PresenceEventStreamTestCase(unittest.TestCase):
         hs.handlers.room_member_handler.get_rooms_for_user = get_rooms_for_user
 
         self.mock_datastore = hs.get_datastore()
+        self.mock_datastore.get_app_service_by_token = Mock(return_value=None)
+        self.mock_datastore.get_app_service_by_user_id = Mock(
+            return_value=defer.succeed(None)
+        )
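+        # Stub both app-service lookups so the event stream treats the test
+        # user as an ordinary (non-application-service) user.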
 
         def get_profile_displayname(user_id):
             return defer.succeed("Frank")
@@ -319,8 +320,8 @@ class PresenceEventStreamTestCase(unittest.TestCase):
 
         self.presence = hs.get_handlers().presence_handler
 
-        self.u_apple = hs.parse_userid("@apple:test")
-        self.u_banana = hs.parse_userid("@banana:test")
+        self.u_apple = UserID.from_string("@apple:test")
+        self.u_banana = UserID.from_string("@banana:test")
 
     @defer.inlineCallbacks
     def test_shortpoll(self):
diff --git a/tests/rest/test_profile.py b/tests/rest/client/v1/test_profile.py
index 3a0d1e700a..5cd5767f2e 100644
--- a/tests/rest/test_profile.py
+++ b/tests/rest/client/v1/test_profile.py
@@ -20,10 +20,12 @@ from twisted.internet import defer
 
 from mock import Mock, NonCallableMock
 
-from ..utils import MockHttpResource, MockKey
+from ....utils import MockHttpResource, setup_test_homeserver
 
 from synapse.api.errors import SynapseError, AuthError
-from synapse.server import HomeServer
+from synapse.types import UserID
+
+from synapse.rest.client.v1 import profile
 
 myid = "@1234ABCD:test"
 PATH_PREFIX = "/_matrix/client/api/v1"
@@ -32,6 +34,7 @@ PATH_PREFIX = "/_matrix/client/api/v1"
 class ProfileTestCase(unittest.TestCase):
     """ Tests profile management. """
 
+    @defer.inlineCallbacks
     def setUp(self):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
         self.mock_handler = Mock(spec=[
@@ -41,27 +44,22 @@ class ProfileTestCase(unittest.TestCase):
             "set_avatar_url",
         ])
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer("test",
-            db_pool=None,
+        hs = yield setup_test_homeserver(
+            "test",
             http_client=None,
             resource_for_client=self.mock_resource,
             federation=Mock(),
             replication_layer=Mock(),
-            datastore=None,
-            config=self.mock_config,
         )
 
         def _get_user_by_req(request=None):
-            return hs.parse_userid(myid)
+            return (UserID.from_string(myid), "")
 
         hs.get_auth().get_user_by_req = _get_user_by_req
 
         hs.get_handlers().profile_handler = self.mock_handler
 
-        hs.register_servlets()
+        profile.register_servlets(hs, self.mock_resource)
 
     @defer.inlineCallbacks
     def test_get_my_name(self):
diff --git a/tests/rest/test_rooms.py b/tests/rest/client/v1/test_rooms.py
index 8e65ff9a1c..72fb4576b1 100644
--- a/tests/rest/test_rooms.py
+++ b/tests/rest/client/v1/test_rooms.py
@@ -18,19 +18,15 @@
 # twisted imports
 from twisted.internet import defer
 
-import synapse.rest.room
+import synapse.rest.client.v1.room
 from synapse.api.constants import Membership
 
-from synapse.server import HomeServer
+from synapse.types import UserID
 
-from tests import unittest
-
-# python imports
 import json
 import urllib
-import types
 
-from ..utils import MockHttpResource, SQLiteMemoryDbPool, MockKey
+from ....utils import MockHttpResource, setup_test_homeserver
 from .utils import RestTestCase
 
 from mock import Mock, NonCallableMock
@@ -47,21 +43,11 @@ class RoomPermissionsTestCase(RestTestCase):
     def setUp(self):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             "red",
-            db_pool=db_pool,
             http_client=None,
             replication_layer=Mock(),
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ]),
-            config=self.mock_config,
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -70,9 +56,10 @@ class RoomPermissionsTestCase(RestTestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(self.auth_user_id),
+                "user": UserID.from_string(self.auth_user_id),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
         hs.get_auth().get_user_by_token = _get_user_by_token
 
@@ -82,7 +69,7 @@ class RoomPermissionsTestCase(RestTestCase):
 
         self.auth_user_id = self.rmcreator_id
 
-        synapse.rest.room.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
 
         self.auth = hs.get_auth()
 
@@ -441,21 +428,11 @@ class RoomsMemberListTestCase(RestTestCase):
     def setUp(self):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             "red",
-            db_pool=db_pool,
             http_client=None,
             replication_layer=Mock(),
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ]),
-            config=self.mock_config,
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -466,9 +443,10 @@ class RoomsMemberListTestCase(RestTestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(self.auth_user_id),
+                "user": UserID.from_string(self.auth_user_id),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
         hs.get_auth().get_user_by_token = _get_user_by_token
 
@@ -476,7 +454,7 @@ class RoomsMemberListTestCase(RestTestCase):
             return defer.succeed(None)
         hs.get_datastore().insert_client_ip = _insert_client_ip
 
-        synapse.rest.room.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
 
     def tearDown(self):
         pass
@@ -532,21 +510,11 @@ class RoomsCreateTestCase(RestTestCase):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
         self.auth_user_id = self.user_id
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             "red",
-            db_pool=db_pool,
             http_client=None,
             replication_layer=Mock(),
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ]),
-            config=self.mock_config,
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -555,9 +523,10 @@ class RoomsCreateTestCase(RestTestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(self.auth_user_id),
+                "user": UserID.from_string(self.auth_user_id),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
         hs.get_auth().get_user_by_token = _get_user_by_token
 
@@ -565,7 +534,7 @@ class RoomsCreateTestCase(RestTestCase):
             return defer.succeed(None)
         hs.get_datastore().insert_client_ip = _insert_client_ip
 
-        synapse.rest.room.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
 
     def tearDown(self):
         pass
@@ -634,21 +603,11 @@ class RoomTopicTestCase(RestTestCase):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
         self.auth_user_id = self.user_id
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             "red",
-            db_pool=db_pool,
             http_client=None,
             replication_layer=Mock(),
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ]),
-            config=self.mock_config,
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -657,9 +616,10 @@ class RoomTopicTestCase(RestTestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(self.auth_user_id),
+                "user": UserID.from_string(self.auth_user_id),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
 
         hs.get_auth().get_user_by_token = _get_user_by_token
@@ -668,7 +628,7 @@ class RoomTopicTestCase(RestTestCase):
             return defer.succeed(None)
         hs.get_datastore().insert_client_ip = _insert_client_ip
 
-        synapse.rest.room.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
 
         # create the room
         self.room_id = yield self.create_room_as(self.user_id)
@@ -750,21 +710,11 @@ class RoomMemberStateTestCase(RestTestCase):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
         self.auth_user_id = self.user_id
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             "red",
-            db_pool=db_pool,
             http_client=None,
             replication_layer=Mock(),
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ]),
-            config=self.mock_config,
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -773,9 +723,10 @@ class RoomMemberStateTestCase(RestTestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(self.auth_user_id),
+                "user": UserID.from_string(self.auth_user_id),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
         hs.get_auth().get_user_by_token = _get_user_by_token
 
@@ -783,7 +734,7 @@ class RoomMemberStateTestCase(RestTestCase):
             return defer.succeed(None)
         hs.get_datastore().insert_client_ip = _insert_client_ip
 
-        synapse.rest.room.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
 
         self.room_id = yield self.create_room_as(self.user_id)
 
@@ -886,21 +837,11 @@ class RoomMessagesTestCase(RestTestCase):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
         self.auth_user_id = self.user_id
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             "red",
-            db_pool=db_pool,
             http_client=None,
             replication_layer=Mock(),
-            ratelimiter=NonCallableMock(spec_set=[
-                "send_message",
-            ]),
-            config=self.mock_config,
+            ratelimiter=NonCallableMock(spec_set=["send_message"]),
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -909,9 +850,10 @@ class RoomMessagesTestCase(RestTestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(self.auth_user_id),
+                "user": UserID.from_string(self.auth_user_id),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
         hs.get_auth().get_user_by_token = _get_user_by_token
 
@@ -919,7 +861,7 @@ class RoomMessagesTestCase(RestTestCase):
             return defer.succeed(None)
         hs.get_datastore().insert_client_ip = _insert_client_ip
 
-        synapse.rest.room.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
 
         self.room_id = yield self.create_room_as(self.user_id)
 
@@ -990,21 +932,13 @@ class RoomInitialSyncTestCase(RestTestCase):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
         self.auth_user_id = self.user_id
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             "red",
-            db_pool=db_pool,
             http_client=None,
             replication_layer=Mock(),
             ratelimiter=NonCallableMock(spec_set=[
                 "send_message",
             ]),
-            config=self.mock_config,
         )
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
@@ -1013,9 +947,10 @@ class RoomInitialSyncTestCase(RestTestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(self.auth_user_id),
+                "user": UserID.from_string(self.auth_user_id),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
         hs.get_auth().get_user_by_token = _get_user_by_token
 
@@ -1023,12 +958,12 @@ class RoomInitialSyncTestCase(RestTestCase):
             return defer.succeed(None)
         hs.get_datastore().insert_client_ip = _insert_client_ip
 
-        synapse.rest.room.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
 
         # Since I'm getting my own presence I need to exist as far as presence
         # is concerned.
         hs.get_handlers().presence_handler.registered_user(
-            hs.parse_userid(self.user_id)
+            UserID.from_string(self.user_id)
         )
 
         # create the room
diff --git a/tests/rest/test_typing.py b/tests/rest/client/v1/test_typing.py
index 18138af1b5..80f2ec9ddf 100644
--- a/tests/rest/test_typing.py
+++ b/tests/rest/client/v1/test_typing.py
@@ -18,10 +18,10 @@
 # twisted imports
 from twisted.internet import defer
 
-import synapse.rest.room
-from synapse.server import HomeServer
+import synapse.rest.client.v1.room
+from synapse.types import UserID
 
-from ..utils import MockHttpResource, MockClock, SQLiteMemoryDbPool, MockKey
+from ....utils import MockHttpResource, MockClock, setup_test_homeserver
 from .utils import RestTestCase
 
 from mock import Mock, NonCallableMock
@@ -41,22 +41,14 @@ class RoomTypingTestCase(RestTestCase):
         self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
         self.auth_user_id = self.user_id
 
-        self.mock_config = NonCallableMock()
-        self.mock_config.signing_key = [MockKey()]
-
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
+        hs = yield setup_test_homeserver(
             "red",
             clock=self.clock,
-            db_pool=db_pool,
             http_client=None,
             replication_layer=Mock(),
             ratelimiter=NonCallableMock(spec_set=[
                 "send_message",
             ]),
-            config=self.mock_config,
         )
         self.hs = hs
 
@@ -69,9 +61,10 @@ class RoomTypingTestCase(RestTestCase):
 
         def _get_user_by_token(token=None):
             return {
-                "user": hs.parse_userid(self.auth_user_id),
+                "user": UserID.from_string(self.auth_user_id),
                 "admin": False,
                 "device_id": None,
+                "token_id": 1,
             }
 
         hs.get_auth().get_user_by_token = _get_user_by_token
@@ -82,7 +75,7 @@ class RoomTypingTestCase(RestTestCase):
 
         def get_room_members(room_id):
             if room_id == self.room_id:
-                return defer.succeed([hs.parse_userid(self.user_id)])
+                return defer.succeed([UserID.from_string(self.user_id)])
             else:
                 return defer.succeed([])
 
@@ -104,7 +97,7 @@ class RoomTypingTestCase(RestTestCase):
         hs.get_handlers().room_member_handler.fetch_room_distributions_into = (
                 fetch_room_distributions_into)
 
-        synapse.rest.room.register_servlets(hs, self.mock_resource)
+        synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
 
         self.room_id = yield self.create_room_as(self.user_id)
         # Need another user to make notifications actually work
diff --git a/tests/rest/utils.py b/tests/rest/client/v1/utils.py
index 579441fb4a..579441fb4a 100644
--- a/tests/rest/utils.py
+++ b/tests/rest/client/v1/utils.py
diff --git a/tests/rest/client/v2_alpha/__init__.py b/tests/rest/client/v2_alpha/__init__.py
new file mode 100644
index 0000000000..de5a917e6a
--- /dev/null
+++ b/tests/rest/client/v2_alpha/__init__.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from tests import unittest
+
+from mock import Mock
+
+from ....utils import MockHttpResource, setup_test_homeserver
+
+from synapse.types import UserID
+
+from twisted.internet import defer
+
+
+PATH_PREFIX = "/_matrix/client/v2_alpha"
+
+
+class V2AlphaRestTestCase(unittest.TestCase):
+    # Consumer must define
+    #   USER_ID = <some string>
+    #   TO_REGISTER = [<list of REST servlets to register>]
+
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
+
+        hs = yield setup_test_homeserver(
+            datastore=self.make_datastore_mock(),
+            http_client=None,
+            resource_for_client=self.mock_resource,
+            resource_for_federation=self.mock_resource,
+        )
+
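+        # Bypass real token auth: every request resolves to the test USER_ID.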
+        def _get_user_by_token(token=None):
+            return {
+                "user": UserID.from_string(self.USER_ID),
+                "admin": False,
+                "device_id": None,
+                "token_id": 1,
+            }
+        hs.get_auth().get_user_by_token = _get_user_by_token
+
+        for r in self.TO_REGISTER:
+            r.register_servlets(hs, self.mock_resource)
+
+    def make_datastore_mock(self):
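+        # Auth checks for an application service token first, so the stub
+        # returns None to fall through to ordinary user-token handling.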
+        store = Mock(spec=[
+            "insert_client_ip",
+        ])
+        store.get_app_service_by_token = Mock(return_value=None)
+        return store
diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py
new file mode 100644
index 0000000000..80ddabf818
--- /dev/null
+++ b/tests/rest/client/v2_alpha/test_filter.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from mock import Mock
+
+from . import V2AlphaRestTestCase
+
+from synapse.rest.client.v2_alpha import filter
+
+from synapse.api.errors import StoreError
+
+
+class FilterTestCase(V2AlphaRestTestCase):
+    USER_ID = "@apple:test"
+    TO_REGISTER = [filter]
+
+    def make_datastore_mock(self):
+        datastore = super(FilterTestCase, self).make_datastore_mock()
+
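+        # Keep filters in a plain dict keyed by user localpart, standing in
+        # for the real filter storage.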
+        self._user_filters = {}
+
+        def add_user_filter(user_localpart, definition):
+            filters = self._user_filters.setdefault(user_localpart, [])
+            filter_id = len(filters)
+            filters.append(definition)
+            return defer.succeed(filter_id)
+        datastore.add_user_filter = add_user_filter
+
+        def get_user_filter(user_localpart, filter_id):
+            if user_localpart not in self._user_filters:
+                raise StoreError(404, "No user")
+            filters = self._user_filters[user_localpart]
+            if filter_id >= len(filters):
+                raise StoreError(404, "No filter")
+            return defer.succeed(filters[filter_id])
+        datastore.get_user_filter = get_user_filter
+
+        return datastore
+
+    @defer.inlineCallbacks
+    def test_add_filter(self):
+        (code, response) = yield self.mock_resource.trigger("POST",
+            "/user/%s/filter" % (self.USER_ID),
+            '{"type": ["m.*"]}'
+        )
+        self.assertEquals(200, code)
+        self.assertEquals({"filter_id": "0"}, response)
+
+        self.assertIn("apple", self._user_filters)
+        self.assertEquals(len(self._user_filters["apple"]), 1)
+        self.assertEquals({"type": ["m.*"]}, self._user_filters["apple"][0])
+
+    @defer.inlineCallbacks
+    def test_get_filter(self):
+        self._user_filters["apple"] = [
+            {"type": ["m.*"]}
+        ]
+
+        (code, response) = yield self.mock_resource.trigger("GET",
+            "/user/%s/filter/0" % (self.USER_ID), None
+        )
+        self.assertEquals(200, code)
+        self.assertEquals({"type": ["m.*"]}, response)
+
+    @defer.inlineCallbacks
+    def test_get_filter_no_id(self):
+        self._user_filters["apple"] = [
+            {"type": ["m.*"]}
+        ]
+
+        (code, response) = yield self.mock_resource.trigger("GET",
+            "/user/%s/filter/2" % (self.USER_ID), None
+        )
+        self.assertEquals(404, code)
+
+    @defer.inlineCallbacks
+    def test_get_filter_no_user(self):
+        (code, response) = yield self.mock_resource.trigger("GET",
+            "/user/%s/filter/0" % (self.USER_ID), None
+        )
+        self.assertEquals(404, code)
diff --git a/tests/storage/TESTS_NEEDED_FOR b/tests/storage/TESTS_NEEDED_FOR
deleted file mode 100644
index 8e5d0cbdc4..0000000000
--- a/tests/storage/TESTS_NEEDED_FOR
+++ /dev/null
@@ -1,5 +0,0 @@
-synapse/storage/feedback.py
-synapse/storage/keys.py
-synapse/storage/pdu.py
-synapse/storage/stream.py
-synapse/storage/transactions.py
diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py
new file mode 100644
index 0000000000..55d22f665a
--- /dev/null
+++ b/tests/storage/test__base.py
@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from tests import unittest
+from twisted.internet import defer
+
+from synapse.storage._base import cached
+
+
+class CacheDecoratorTestCase(unittest.TestCase):
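+    # Exercises the @cached decorator: pass-through, cache hits, explicit
+    # invalidation, max_entries eviction and prefill.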
+
+    @defer.inlineCallbacks
+    def test_passthrough(self):
+        @cached()
+        def func(self, key):
+            return key
+
+        self.assertEquals((yield func(self, "foo")), "foo")
+        self.assertEquals((yield func(self, "bar")), "bar")
+
+    @defer.inlineCallbacks
+    def test_hit(self):
+        callcount = [0]
+
+        @cached()
+        def func(self, key):
+            callcount[0] += 1
+            return key
+
+        yield func(self, "foo")
+
+        self.assertEquals(callcount[0], 1)
+
+        self.assertEquals((yield func(self, "foo")), "foo")
+        self.assertEquals(callcount[0], 1)
+
+    @defer.inlineCallbacks
+    def test_invalidate(self):
+        callcount = [0]
+
+        @cached()
+        def func(self, key):
+            callcount[0] += 1
+            return key
+
+        yield func(self, "foo")
+
+        self.assertEquals(callcount[0], 1)
+
+        func.invalidate("foo")
+
+        yield func(self, "foo")
+
+        self.assertEquals(callcount[0], 2)
+
+    def test_invalidate_missing(self):
+        @cached()
+        def func(self, key):
+            return key
+
+        func.invalidate("what")
+
+    @defer.inlineCallbacks
+    def test_max_entries(self):
+        callcount = [0]
+
+        @cached(max_entries=10)
+        def func(self, key):
+            callcount[0] += 1
+            return key
+
+        for k in range(0, 12):
+            yield func(self, k)
+
+        self.assertEquals(callcount[0], 12)
+
+        # There must have been at least 2 evictions, meaning if we calculate
+        # all 12 values again, we must get called at least 2 more times
+        for k in range(0, 12):
+            yield func(self, k)
+
+        self.assertTrue(callcount[0] >= 14,
+            msg="Expected callcount >= 14, got %d" % (callcount[0]))
+
+    @defer.inlineCallbacks
+    def test_prefill(self):
+        callcount = [0]
+
+        @cached()
+        def func(self, key):
+            callcount[0] += 1
+            return key
+
+        func.prefill("foo", 123)
+
+        self.assertEquals((yield func(self, "foo")), 123)
+        self.assertEquals(callcount[0], 0)
diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py
new file mode 100644
index 0000000000..ca5b92ec85
--- /dev/null
+++ b/tests/storage/test_appservice.py
@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from tests import unittest
+from twisted.internet import defer
+
+from synapse.appservice import ApplicationService
+from synapse.server import HomeServer
+from synapse.storage.appservice import ApplicationServiceStore
+
+from mock import Mock
+from tests.utils import SQLiteMemoryDbPool, MockClock
+
+
+class ApplicationServiceStoreTestCase(unittest.TestCase):
+
+    @defer.inlineCallbacks
+    def setUp(self):
+        db_pool = SQLiteMemoryDbPool()
+        yield db_pool.prepare()
+        hs = HomeServer(
+            "test", db_pool=db_pool, clock=MockClock(), config=Mock()
+        )
+        self.as_token = "token1"
+        yield db_pool.runQuery(
+            "INSERT INTO application_services(token) VALUES(?)",
+            (self.as_token,)
+        )
+        yield db_pool.runQuery(
+            "INSERT INTO application_services(token) VALUES(?)", ("token2",)
+        )
+        yield db_pool.runQuery(
+            "INSERT INTO application_services(token) VALUES(?)", ("token3",)
+        )
+        # must be done after inserts
+        self.store = ApplicationServiceStore(hs)
+
+    @defer.inlineCallbacks
+    def test_update_and_retrieval_of_service(self):
+        url = "https://matrix.org/appservices/foobar"
+        hs_token = "hstok"
+        user_regex = [
+            {"regex": "@foobar_.*:matrix.org", "exclusive": True}
+        ]
+        alias_regex = [
+            {"regex": "#foobar_.*:matrix.org", "exclusive": False}
+        ]
+        room_regex = []
+        service = ApplicationService(
+            url=url, hs_token=hs_token, token=self.as_token, namespaces={
+                ApplicationService.NS_USERS: user_regex,
+                ApplicationService.NS_ALIASES: alias_regex,
+                ApplicationService.NS_ROOMS: room_regex,
+            }
+        )
+        yield self.store.update_app_service(service)
+
+        stored_service = yield self.store.get_app_service_by_token(
+            self.as_token
+        )
+        self.assertEquals(stored_service.token, self.as_token)
+        self.assertEquals(stored_service.url, url)
+        self.assertEquals(
+            stored_service.namespaces[ApplicationService.NS_ALIASES],
+            alias_regex
+        )
+        self.assertEquals(
+            stored_service.namespaces[ApplicationService.NS_ROOMS],
+            room_regex
+        )
+        self.assertEquals(
+            stored_service.namespaces[ApplicationService.NS_USERS],
+            user_regex
+        )
+
+    @defer.inlineCallbacks
+    def test_retrieve_unknown_service_token(self):
+        service = yield self.store.get_app_service_by_token("invalid_token")
+        self.assertEquals(service, None)
+
+    @defer.inlineCallbacks
+    def test_retrieval_of_service(self):
+        stored_service = yield self.store.get_app_service_by_token(
+            self.as_token
+        )
+        self.assertEquals(stored_service.token, self.as_token)
+        self.assertEquals(stored_service.url, None)
+        self.assertEquals(
+            stored_service.namespaces[ApplicationService.NS_ALIASES],
+            []
+        )
+        self.assertEquals(
+            stored_service.namespaces[ApplicationService.NS_ROOMS],
+            []
+        )
+        self.assertEquals(
+            stored_service.namespaces[ApplicationService.NS_USERS],
+            []
+        )
+
+    @defer.inlineCallbacks
+    def test_retrieval_of_all_services(self):
+        services = yield self.store.get_app_services()
+        self.assertEquals(len(services), 3)
diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py
index a6f1d6a333..55fbffa7a2 100644
--- a/tests/storage/test_base.py
+++ b/tests/storage/test_base.py
@@ -22,6 +22,7 @@ from mock import Mock, call
 from collections import OrderedDict
 
 from synapse.server import HomeServer
+
 from synapse.storage._base import SQLBaseStore
 
 
@@ -37,8 +38,9 @@ class SQLBaseStoreTestCase(unittest.TestCase):
             return defer.succeed(func(self.mock_txn, *args, **kwargs))
         self.db_pool.runInteraction = runInteraction
 
-        hs = HomeServer("test",
-                db_pool=self.db_pool)
+        config = Mock()
+        config.event_cache_size = 1
+        hs = HomeServer("test", db_pool=self.db_pool, config=config)
 
         self.datastore = SQLBaseStore(hs)
 
diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py
index e9c242cc07..b9bfbc00e2 100644
--- a/tests/storage/test_directory.py
+++ b/tests/storage/test_directory.py
@@ -17,28 +17,22 @@
 from tests import unittest
 from twisted.internet import defer
 
-from synapse.server import HomeServer
 from synapse.storage.directory import DirectoryStore
+from synapse.types import RoomID, RoomAlias
 
-from tests.utils import SQLiteMemoryDbPool
+from tests.utils import setup_test_homeserver
 
 
 class DirectoryStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer(
-            "test",
-            db_pool=db_pool,
-        )
+        hs = yield setup_test_homeserver()
 
         self.store = DirectoryStore(hs)
 
-        self.room = hs.parse_roomid("!abcde:test")
-        self.alias = hs.parse_roomalias("#my-room:test")
+        self.room = RoomID.from_string("!abcde:test")
+        self.alias = RoomAlias.from_string("#my-room:test")
 
     @defer.inlineCallbacks
     def test_room_to_alias(self):
diff --git a/tests/storage/test_presence.py b/tests/storage/test_presence.py
index 9655d3cf42..065eebdbcf 100644
--- a/tests/storage/test_presence.py
+++ b/tests/storage/test_presence.py
@@ -17,28 +17,22 @@
 from tests import unittest
 from twisted.internet import defer
 
-from synapse.server import HomeServer
 from synapse.storage.presence import PresenceStore
+from synapse.types import UserID
 
-from tests.utils import SQLiteMemoryDbPool, MockClock
+from tests.utils import setup_test_homeserver, MockClock
 
 
 class PresenceStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer("test",
-            clock=MockClock(),
-            db_pool=db_pool,
-        )
+        hs = yield setup_test_homeserver(clock=MockClock())
 
         self.store = PresenceStore(hs)
 
-        self.u_apple = hs.parse_userid("@apple:test")
-        self.u_banana = hs.parse_userid("@banana:test")
+        self.u_apple = UserID.from_string("@apple:test")
+        self.u_banana = UserID.from_string("@banana:test")
 
     @defer.inlineCallbacks
     def test_state(self):
diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py
index 5d36723c28..1fa783f313 100644
--- a/tests/storage/test_profile.py
+++ b/tests/storage/test_profile.py
@@ -17,26 +17,21 @@
 from tests import unittest
 from twisted.internet import defer
 
-from synapse.server import HomeServer
 from synapse.storage.profile import ProfileStore
+from synapse.types import UserID
 
-from tests.utils import SQLiteMemoryDbPool
+from tests.utils import setup_test_homeserver
 
 
 class ProfileStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer("test",
-            db_pool=db_pool,
-        )
+        hs = yield setup_test_homeserver()
 
         self.store = ProfileStore(hs)
 
-        self.u_frank = hs.parse_userid("@frank:test")
+        self.u_frank = UserID.from_string("@frank:test")
 
     @defer.inlineCallbacks
     def test_displayname(self):
diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py
index 9806fbc69b..b57006fcb4 100644
--- a/tests/storage/test_redaction.py
+++ b/tests/storage/test_redaction.py
@@ -17,10 +17,10 @@
 from tests import unittest
 from twisted.internet import defer
 
-from synapse.server import HomeServer
 from synapse.api.constants import EventTypes, Membership
+from synapse.types import UserID, RoomID
 
-from tests.utils import SQLiteMemoryDbPool, MockKey
+from tests.utils import setup_test_homeserver
 
 from mock import Mock
 
@@ -29,16 +29,7 @@ class RedactionTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer(
-            "test",
-            db_pool=db_pool,
-            config=self.mock_config,
+        hs = yield setup_test_homeserver(
             resource_for_federation=Mock(),
             http_client=None,
         )
@@ -48,10 +39,10 @@ class RedactionTestCase(unittest.TestCase):
         self.handlers = hs.get_handlers()
         self.message_handler = self.handlers.message_handler
 
-        self.u_alice = hs.parse_userid("@alice:test")
-        self.u_bob = hs.parse_userid("@bob:test")
+        self.u_alice = UserID.from_string("@alice:test")
+        self.u_bob = UserID.from_string("@bob:test")
 
-        self.room1 = hs.parse_roomid("!abc123:test")
+        self.room1 = RoomID.from_string("!abc123:test")
 
         self.depth = 1
 
diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py
index 84bfde7568..e0b81f2b57 100644
--- a/tests/storage/test_registration.py
+++ b/tests/storage/test_registration.py
@@ -17,22 +17,16 @@
 from tests import unittest
 from twisted.internet import defer
 
-from synapse.server import HomeServer
 from synapse.storage.registration import RegistrationStore
 
-from tests.utils import SQLiteMemoryDbPool
+from tests.utils import setup_test_homeserver
 
 
 class RegistrationStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer("test",
-            db_pool=db_pool,
-        )
+        hs = yield setup_test_homeserver()
 
         self.store = RegistrationStore(hs)
 
@@ -53,7 +47,10 @@ class RegistrationStoreTestCase(unittest.TestCase):
         )
 
         self.assertEquals(
-            {"admin": 0, "device_id": None, "name": self.user_id},
+            {"admin": 0,
+             "device_id": None,
+             "name": self.user_id,
+             "token_id": 1},
             (yield self.store.get_user_by_token(self.tokens[0]))
         )
 
@@ -63,7 +60,10 @@ class RegistrationStoreTestCase(unittest.TestCase):
         yield self.store.add_access_token_to_user(self.user_id, self.tokens[1])
 
         self.assertEquals(
-            {"admin": 0, "device_id": None, "name": self.user_id},
+            {"admin": 0,
+             "device_id": None,
+             "name": self.user_id,
+             "token_id": 2},
             (yield self.store.get_user_by_token(self.tokens[1]))
         )
 
diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py
index e7739776ec..c88dd446fb 100644
--- a/tests/storage/test_room.py
+++ b/tests/storage/test_room.py
@@ -17,30 +17,25 @@
 from tests import unittest
 from twisted.internet import defer
 
-from synapse.server import HomeServer
 from synapse.api.constants import EventTypes
+from synapse.types import UserID, RoomID, RoomAlias
 
-from tests.utils import SQLiteMemoryDbPool
+from tests.utils import setup_test_homeserver
 
 
 class RoomStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer("test",
-            db_pool=db_pool,
-        )
+        hs = yield setup_test_homeserver()
 
         # We can't test RoomStore on its own without the DirectoryStore, for
         # management of the 'room_aliases' table
         self.store = hs.get_datastore()
 
-        self.room = hs.parse_roomid("!abcde:test")
-        self.alias = hs.parse_roomalias("#a-room-name:test")
-        self.u_creator = hs.parse_userid("@creator:test")
+        self.room = RoomID.from_string("!abcde:test")
+        self.alias = RoomAlias.from_string("#a-room-name:test")
+        self.u_creator = UserID.from_string("@creator:test")
 
         yield self.store.store_room(self.room.to_string(),
             room_creator_user_id=self.u_creator.to_string(),
@@ -85,19 +80,14 @@ class RoomEventsStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        hs = HomeServer("test",
-            db_pool=db_pool,
-        )
+        hs = yield setup_test_homeserver()
 
         # Room events need the full datastore, for persist_event() and
         # get_room_state()
         self.store = hs.get_datastore()
         self.event_factory = hs.get_event_factory();
 
-        self.room = hs.parse_roomid("!abcde:test")
+        self.room = RoomID.from_string("!abcde:test")
 
         yield self.store.store_room(self.room.to_string(),
             room_creator_user_id="@creator:text",
diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py
index a23a8189df..811fea544b 100644
--- a/tests/storage/test_roommember.py
+++ b/tests/storage/test_roommember.py
@@ -17,10 +17,10 @@
 from tests import unittest
 from twisted.internet import defer
 
-from synapse.server import HomeServer
 from synapse.api.constants import EventTypes, Membership
+from synapse.types import UserID, RoomID
 
-from tests.utils import SQLiteMemoryDbPool, MockKey
+from tests.utils import setup_test_homeserver
 
 from mock import Mock
 
@@ -29,16 +29,7 @@ class RoomMemberStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer(
-            "test",
-            db_pool=db_pool,
-            config=self.mock_config,
+        hs = yield setup_test_homeserver(
             resource_for_federation=Mock(),
             http_client=None,
         )
@@ -49,13 +40,13 @@ class RoomMemberStoreTestCase(unittest.TestCase):
         self.handlers = hs.get_handlers()
         self.message_handler = self.handlers.message_handler
 
-        self.u_alice = hs.parse_userid("@alice:test")
-        self.u_bob = hs.parse_userid("@bob:test")
+        self.u_alice = UserID.from_string("@alice:test")
+        self.u_bob = UserID.from_string("@bob:test")
 
         # User elsewhere on another host
-        self.u_charlie = hs.parse_userid("@charlie:elsewhere")
+        self.u_charlie = UserID.from_string("@charlie:elsewhere")
 
-        self.room = hs.parse_roomid("!abc123:test")
+        self.room = RoomID.from_string("!abc123:test")
 
     @defer.inlineCallbacks
     def inject_room_member(self, room, user, membership, replaces_state=None):
diff --git a/tests/storage/test_stream.py b/tests/storage/test_stream.py
index 9247fc579e..0c9b89d765 100644
--- a/tests/storage/test_stream.py
+++ b/tests/storage/test_stream.py
@@ -17,10 +17,10 @@
 from tests import unittest
 from twisted.internet import defer
 
-from synapse.server import HomeServer
 from synapse.api.constants import EventTypes, Membership
+from synapse.types import UserID, RoomID
 
-from tests.utils import SQLiteMemoryDbPool, MockKey
+from tests.utils import setup_test_homeserver
 
 from mock import Mock
 
@@ -29,16 +29,7 @@ class StreamStoreTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def setUp(self):
-        db_pool = SQLiteMemoryDbPool()
-        yield db_pool.prepare()
-
-        self.mock_config = Mock()
-        self.mock_config.signing_key = [MockKey()]
-
-        hs = HomeServer(
-            "test",
-            db_pool=db_pool,
-            config=self.mock_config,
+        hs = yield setup_test_homeserver(
             resource_for_federation=Mock(),
             http_client=None,
         )
@@ -48,11 +39,11 @@ class StreamStoreTestCase(unittest.TestCase):
         self.handlers = hs.get_handlers()
         self.message_handler = self.handlers.message_handler
 
-        self.u_alice = hs.parse_userid("@alice:test")
-        self.u_bob = hs.parse_userid("@bob:test")
+        self.u_alice = UserID.from_string("@alice:test")
+        self.u_bob = UserID.from_string("@bob:test")
 
-        self.room1 = hs.parse_roomid("!abc123:test")
-        self.room2 = hs.parse_roomid("!xyx987:test")
+        self.room1 = RoomID.from_string("!abc123:test")
+        self.room2 = RoomID.from_string("!xyx987:test")
 
         self.depth = 1
 
diff --git a/tests/test_state.py b/tests/test_state.py
index 98ad9e54cd..fea25f7021 100644
--- a/tests/test_state.py
+++ b/tests/test_state.py
@@ -16,11 +16,122 @@
 from tests import unittest
 from twisted.internet import defer
 
+from synapse.events import FrozenEvent
+from synapse.api.auth import Auth
+from synapse.api.constants import EventTypes, Membership
 from synapse.state import StateHandler
 
+from .utils import MockClock
+
 from mock import Mock
 
 
+_next_event_id = 1000
+
+
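+# Fabricates a minimal FrozenEvent for these tests; a module-level counter
+# keeps generated event IDs unique.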
+def create_event(name=None, type=None, state_key=None, depth=2, event_id=None,
+                 prev_events=[], **kwargs):
+    global _next_event_id
+
+    if not event_id:
+        _next_event_id += 1
+        event_id = str(_next_event_id)
+
+    if not name:
+        if state_key is not None:
+            name = "<%s-%s, %s>" % (type, state_key, event_id,)
+        else:
+            name = "<%s, %s>" % (type, event_id,)
+
+    d = {
+        "event_id": event_id,
+        "type": type,
+        "sender": "@user_id:example.com",
+        "room_id": "!room_id:example.com",
+        "depth": depth,
+        "prev_events": prev_events,
+    }
+
+    if state_key is not None:
+        d["state_key"] = state_key
+
+    d.update(kwargs)
+
+    event = FrozenEvent(d)
+
+    return event
+
+
+class StateGroupStore(object):
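+    # In-memory substitute for the datastore's state-group bookkeeping: each
+    # event maps to a group, and each group to the state events it contains.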
+    def __init__(self):
+        self._event_to_state_group = {}
+        self._group_to_state = {}
+
+        self._next_group = 1
+
+    def get_state_groups(self, event_ids):
+        groups = {}
+        for event_id in event_ids:
+            group = self._event_to_state_group.get(event_id)
+            if group:
+                groups[group] = self._group_to_state[group]
+
+        return defer.succeed(groups)
+
+    def store_state_groups(self, event, context):
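+        # Record the state computed for this event's context, roughly what
+        # persisting the event would do.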
+        if context.current_state is None:
+            return
+
+        state_events = context.current_state
+
+        if event.is_state():
+            state_events[(event.type, event.state_key)] = event
+
+        state_group = context.state_group
+        if not state_group:
+            state_group = self._next_group
+            self._next_group += 1
+
+            self._group_to_state[state_group] = state_events.values()
+
+        self._event_to_state_group[event.event_id] = state_group
+
+
+class DictObj(dict):
+    def __init__(self, **kwargs):
+        super(DictObj, self).__init__(kwargs)
+        self.__dict__ = self
+
+
+class Graph(object):
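+    # Builds a DAG of synthetic events from node definitions plus prev-event
+    # edges; walk() yields the events in depth order.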
+    def __init__(self, nodes, edges):
+        events = {}
+        clobbered = set(nodes.keys())  # nodes never referenced as a prev_event are leaves
+
+        for event_id, fields in nodes.items():
+            refs = edges.get(event_id)
+            if refs:
+                clobbered.difference_update(refs)
+                prev_events = [(r, {}) for r in refs]
+            else:
+                prev_events = []
+
+            events[event_id] = create_event(
+                event_id=event_id,
+                prev_events=prev_events,
+                **fields
+            )
+
+        self._leaves = clobbered
+        self._events = sorted(events.values(), key=lambda e: e.depth)
+
+    def walk(self):
+        return iter(self._events)
+
+    def get_leaves(self):
+        return (e for e in self._events if e.event_id in self._leaves)
+
+
 class StateTestCase(unittest.TestCase):
     def setUp(self):
         self.store = Mock(
@@ -29,20 +140,191 @@ class StateTestCase(unittest.TestCase):
                 "add_event_hashes",
             ]
         )
-        hs = Mock(spec=["get_datastore"])
+        hs = Mock(spec=[
+            "get_datastore", "get_auth", "get_state_handler", "get_clock",
+        ])
         hs.get_datastore.return_value = self.store
+        hs.get_state_handler.return_value = None
+        hs.get_auth.return_value = Auth(hs)
+        hs.get_clock.return_value = MockClock()
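+        # State resolution needs a real Auth instance and a clock; the rest
+        # of the homeserver stays mocked.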
 
         self.state = StateHandler(hs)
         self.event_id = 0
 
     @defer.inlineCallbacks
+    def test_branch_no_conflict(self):
+        graph = Graph(
+            nodes={
+                "START": DictObj(
+                    type=EventTypes.Create,
+                    state_key="",
+                    depth=1,
+                ),
+                "A": DictObj(
+                    type=EventTypes.Message,
+                    depth=2,
+                ),
+                "B": DictObj(
+                    type=EventTypes.Message,
+                    depth=3,
+                ),
+                "C": DictObj(
+                    type=EventTypes.Name,
+                    state_key="",
+                    depth=3,
+                ),
+                "D": DictObj(
+                    type=EventTypes.Message,
+                    depth=4,
+                ),
+            },
+            edges={
+                "A": ["START"],
+                "B": ["A"],
+                "C": ["A"],
+                "D": ["B", "C"]
+            }
+        )
+
+        store = StateGroupStore()
+        self.store.get_state_groups.side_effect = store.get_state_groups
+
+        context_store = {}
+
+        for event in graph.walk():
+            context = yield self.state.compute_event_context(event)
+            store.store_state_groups(event, context)
+            context_store[event.event_id] = context
+
+        self.assertEqual(2, len(context_store["D"].current_state))
+
+    @defer.inlineCallbacks
+    def test_branch_basic_conflict(self):
+        graph = Graph(
+            nodes={
+                "START": DictObj(
+                    type=EventTypes.Create,
+                    state_key="creator",
+                    content={"membership": "@user_id:example.com"},
+                    depth=1,
+                ),
+                "A": DictObj(
+                    type=EventTypes.Member,
+                    state_key="@user_id:example.com",
+                    content={"membership": Membership.JOIN},
+                    membership=Membership.JOIN,
+                    depth=2,
+                ),
+                "B": DictObj(
+                    type=EventTypes.Name,
+                    state_key="",
+                    depth=3,
+                ),
+                "C": DictObj(
+                    type=EventTypes.Name,
+                    state_key="",
+                    depth=4,
+                ),
+                "D": DictObj(
+                    type=EventTypes.Message,
+                    depth=5,
+                ),
+            },
+            edges={
+                "A": ["START"],
+                "B": ["A"],
+                "C": ["A"],
+                "D": ["B", "C"]
+            }
+        )
+
+        store = StateGroupStore()
+        self.store.get_state_groups.side_effect = store.get_state_groups
+
+        context_store = {}
+
+        for event in graph.walk():
+            context = yield self.state.compute_event_context(event)
+            store.store_state_groups(event, context)
+            context_store[event.event_id] = context
+
+        self.assertSetEqual(
+            {"START", "A", "C"},
+            {e.event_id for e in context_store["D"].current_state.values()}
+        )
+
+    @defer.inlineCallbacks
+    def test_branch_have_banned_conflict(self):
+        graph = Graph(
+            nodes={
+                "START": DictObj(
+                    type=EventTypes.Create,
+                    state_key="creator",
+                    content={"membership": "@user_id:example.com"},
+                    depth=1,
+                ),
+                "A": DictObj(
+                    type=EventTypes.Member,
+                    state_key="@user_id:example.com",
+                    content={"membership": Membership.JOIN},
+                    membership=Membership.JOIN,
+                    depth=2,
+                ),
+                "B": DictObj(
+                    type=EventTypes.Name,
+                    state_key="",
+                    depth=3,
+                ),
+                "C": DictObj(
+                    type=EventTypes.Member,
+                    state_key="@user_id_2:example.com",
+                    content={"membership": Membership.BAN},
+                    membership=Membership.BAN,
+                    depth=4,
+                ),
+                "D": DictObj(
+                    type=EventTypes.Name,
+                    state_key="",
+                    depth=4,
+                    sender="@user_id_2:example.com",
+                ),
+                "E": DictObj(
+                    type=EventTypes.Message,
+                    depth=5,
+                ),
+            },
+            edges={
+                "A": ["START"],
+                "B": ["A"],
+                "C": ["B"],
+                "D": ["B"],
+                "E": ["C", "D"]
+            }
+        )
+
+        store = StateGroupStore()
+        self.store.get_state_groups.side_effect = store.get_state_groups
+
+        context_store = {}
+
+        for event in graph.walk():
+            context = yield self.state.compute_event_context(event)
+            store.store_state_groups(event, context)
+            context_store[event.event_id] = context
+
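+        # D was sent by a user who is banned on the C branch, so D is
+        # rejected during resolution and B's name event survives in E's
+        # state.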
+        self.assertSetEqual(
+            {"START", "A", "B", "C"},
+            {e.event_id for e in context_store["E"].current_state.values()}
+        )
+
+    @defer.inlineCallbacks
     def test_annotate_with_old_message(self):
-        event = self.create_event(type="test_message", name="event")
+        event = create_event(type="test_message", name="event")
 
         old_state = [
-            self.create_event(type="test1", state_key="1"),
-            self.create_event(type="test1", state_key="2"),
-            self.create_event(type="test2", state_key=""),
+            create_event(type="test1", state_key="1"),
+            create_event(type="test1", state_key="2"),
+            create_event(type="test2", state_key=""),
         ]
 
         context = yield self.state.compute_event_context(
@@ -62,12 +344,12 @@ class StateTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_annotate_with_old_state(self):
-        event = self.create_event(type="state", state_key="", name="event")
+        event = create_event(type="state", state_key="", name="event")
 
         old_state = [
-            self.create_event(type="test1", state_key="1"),
-            self.create_event(type="test1", state_key="2"),
-            self.create_event(type="test2", state_key=""),
+            create_event(type="test1", state_key="1"),
+            create_event(type="test1", state_key="2"),
+            create_event(type="test2", state_key=""),
         ]
 
         context = yield self.state.compute_event_context(
@@ -88,13 +370,12 @@ class StateTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_trivial_annotate_message(self):
-        event = self.create_event(type="test_message", name="event")
-        event.prev_events = []
+        event = create_event(type="test_message", name="event")
 
         old_state = [
-            self.create_event(type="test1", state_key="1"),
-            self.create_event(type="test1", state_key="2"),
-            self.create_event(type="test2", state_key=""),
+            create_event(type="test1", state_key="1"),
+            create_event(type="test1", state_key="2"),
+            create_event(type="test2", state_key=""),
         ]
 
         group_name = "group_name_1"
@@ -119,13 +400,12 @@ class StateTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_trivial_annotate_state(self):
-        event = self.create_event(type="state", state_key="", name="event")
-        event.prev_events = []
+        event = create_event(type="state", state_key="", name="event")
 
         old_state = [
-            self.create_event(type="test1", state_key="1"),
-            self.create_event(type="test1", state_key="2"),
-            self.create_event(type="test2", state_key=""),
+            create_event(type="test1", state_key="1"),
+            create_event(type="test1", state_key="2"),
+            create_event(type="test2", state_key=""),
         ]
 
         group_name = "group_name_1"
@@ -150,30 +430,21 @@ class StateTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_resolve_message_conflict(self):
-        event = self.create_event(type="test_message", name="event")
-        event.prev_events = []
+        event = create_event(type="test_message", name="event")
 
         old_state_1 = [
-            self.create_event(type="test1", state_key="1"),
-            self.create_event(type="test1", state_key="2"),
-            self.create_event(type="test2", state_key=""),
+            create_event(type="test1", state_key="1"),
+            create_event(type="test1", state_key="2"),
+            create_event(type="test2", state_key=""),
         ]
 
         old_state_2 = [
-            self.create_event(type="test1", state_key="1"),
-            self.create_event(type="test3", state_key="2"),
-            self.create_event(type="test4", state_key=""),
+            create_event(type="test1", state_key="1"),
+            create_event(type="test3", state_key="2"),
+            create_event(type="test4", state_key=""),
         ]
 
-        group_name_1 = "group_name_1"
-        group_name_2 = "group_name_2"
-
-        self.store.get_state_groups.return_value = {
-            group_name_1: old_state_1,
-            group_name_2: old_state_2,
-        }
-
-        context = yield self.state.compute_event_context(event)
+        context = yield self._get_context(event, old_state_1, old_state_2)
 
         self.assertEqual(len(context.current_state), 5)
 
@@ -181,56 +452,76 @@ class StateTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_resolve_state_conflict(self):
-        event = self.create_event(type="test4", state_key="", name="event")
-        event.prev_events = []
+        event = create_event(type="test4", state_key="", name="event")
 
         old_state_1 = [
-            self.create_event(type="test1", state_key="1"),
-            self.create_event(type="test1", state_key="2"),
-            self.create_event(type="test2", state_key=""),
+            create_event(type="test1", state_key="1"),
+            create_event(type="test1", state_key="2"),
+            create_event(type="test2", state_key=""),
         ]
 
         old_state_2 = [
-            self.create_event(type="test1", state_key="1"),
-            self.create_event(type="test3", state_key="2"),
-            self.create_event(type="test4", state_key=""),
+            create_event(type="test1", state_key="1"),
+            create_event(type="test3", state_key="2"),
+            create_event(type="test4", state_key=""),
         ]
 
-        group_name_1 = "group_name_1"
-        group_name_2 = "group_name_2"
-
-        self.store.get_state_groups.return_value = {
-            group_name_1: old_state_1,
-            group_name_2: old_state_2,
-        }
-
-        context = yield self.state.compute_event_context(event)
+        context = yield self._get_context(event, old_state_1, old_state_2)
 
         self.assertEqual(len(context.current_state), 5)
 
         self.assertIsNone(context.state_group)
 
-    def create_event(self, name=None, type=None, state_key=None):
-        self.event_id += 1
-        event_id = str(self.event_id)
+    @defer.inlineCallbacks
+    def test_standard_depth_conflict(self):
+        event = create_event(type="test4", name="event")
+
+        member_event = create_event(
+            type=EventTypes.Member,
+            state_key="@user_id:example.com",
+            content={
+                "membership": Membership.JOIN,
+            }
+        )
 
-        if not name:
-            if state_key is not None:
-                name = "<%s-%s>" % (type, state_key)
-            else:
-                name = "<%s>" % (type, )
+        old_state_1 = [
+            member_event,
+            create_event(type="test1", state_key="1", depth=1),
+        ]
+
+        old_state_2 = [
+            member_event,
+            create_event(type="test1", state_key="1", depth=2),
+        ]
 
-        event = Mock(name=name, spec=[])
-        event.type = type
+        context = yield self._get_context(event, old_state_1, old_state_2)
 
-        if state_key is not None:
-            event.state_key = state_key
-        event.event_id = event_id
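+        # The event at depth 2 is the more recent one and should win.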
+        self.assertEqual(old_state_2[1], context.current_state[("test1", "1")])
+
+        # Reverse the depth to make sure we are actually using the depths
+        # during state resolution.
+
+        old_state_1 = [
+            member_event,
+            create_event(type="test1", state_key="1", depth=2),
+        ]
+
+        old_state_2 = [
+            member_event,
+            create_event(type="test1", state_key="1", depth=1),
+        ]
+
+        context = yield self._get_context(event, old_state_1, old_state_2)
+
+        self.assertEqual(old_state_1[1], context.current_state[("test1", "1")])
 
-        event.is_state = lambda: (state_key is not None)
-        event.unsigned = {}
+    def _get_context(self, event, old_state_1, old_state_2):
+        group_name_1 = "group_name_1"
+        group_name_2 = "group_name_2"
 
-        event.user_id = "@user_id:example.com"
-        event.room_id = "!room_id:example.com"
+        self.store.get_state_groups.return_value = {
+            group_name_1: old_state_1,
+            group_name_2: old_state_2,
+        }
 
-        return event
+        return self.state.compute_event_context(event)
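
The tests above pin down how compute_event_context resolves conflicting state
across branches of the event graph. As a rough illustration of the property
test_standard_depth_conflict asserts (a sketch only, not the actual resolver
in synapse/state.py, which also applies auth and power-level rules), a
depth-preferring merge of two state sets looks like:

    # Sketch: for each (type, state_key) pair, keep the deepest event.
    def resolve_by_depth(state_sets):
        resolved = {}
        for state in state_sets:
            for event in state:
                key = (event.type, event.state_key)
                current = resolved.get(key)
                if current is None or event.depth > current.depth:
                    resolved[key] = event
        return resolved
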
diff --git a/tests/test_types.py b/tests/test_types.py
index bfb9e6f548..b29a8415b1 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -42,12 +42,6 @@ class UserIDTestCase(unittest.TestCase):
         self.assertTrue(userA == userAagain)
         self.assertTrue(userA != userB)
 
-    def test_via_homeserver(self):
-        user = mock_homeserver.parse_userid("@3456ijkl:my.domain")
-
-        self.assertEquals("3456ijkl", user.localpart)
-        self.assertEquals("my.domain", user.domain)
-
 
 class RoomAliasTestCase(unittest.TestCase):
 
@@ -62,9 +56,3 @@ class RoomAliasTestCase(unittest.TestCase):
         room = RoomAlias("channel", "my.domain")
 
         self.assertEquals(room.to_string(), "#channel:my.domain")
-
-    def test_via_homeserver(self):
-        room = mock_homeserver.parse_roomalias("#elsewhere:my.domain")
-
-        self.assertEquals("elsewhere", room.localpart)
-        self.assertEquals("my.domain", room.domain)
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py
new file mode 100644
index 0000000000..ab934bf928
--- /dev/null
+++ b/tests/util/test_lrucache.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .. import unittest
+
+from synapse.util.lrucache import LruCache
+
+
+class LruCacheTestCase(unittest.TestCase):
+
+    def test_get_set(self):
+        cache = LruCache(1)
+        cache["key"] = "value"
+        self.assertEquals(cache.get("key"), "value")
+        self.assertEquals(cache["key"], "value")
+
+    def test_eviction(self):
+        cache = LruCache(2)
+        cache[1] = 1
+        cache[2] = 2
+
+        self.assertEquals(cache.get(1), 1)
+        self.assertEquals(cache.get(2), 2)
+
+        cache[3] = 3
+
+        self.assertEquals(cache.get(1), None)
+        self.assertEquals(cache.get(2), 2)
+        self.assertEquals(cache.get(3), 3)
+
+    def test_setdefault(self):
+        cache = LruCache(1)
+        self.assertEquals(cache.setdefault("key", 1), 1)
+        self.assertEquals(cache.get("key"), 1)
+        self.assertEquals(cache.setdefault("key", 2), 1)
+        self.assertEquals(cache.get("key"), 1)
+
+    def test_pop(self):
+        cache = LruCache(1)
+        cache["key"] = 1
+        self.assertEquals(cache.pop("key"), 1)
+        self.assertEquals(cache.pop("key"), None)
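
Taken together, these tests describe the cache's observable contract: a fixed
capacity, dict-style access, least-recently-used eviction, and a None default
from get() and pop() on a miss. A minimal OrderedDict-based sketch with the
same observable behaviour (an illustration only, not how synapse.util.lrucache
is actually implemented):

    from collections import OrderedDict

    class SimpleLruCache(object):
        def __init__(self, max_size):
            self.max_size = max_size
            self.data = OrderedDict()

        def __setitem__(self, key, value):
            self.data.pop(key, None)
            if len(self.data) >= self.max_size:
                # Evict the least recently used entry.
                self.data.popitem(last=False)
            self.data[key] = value

        def __getitem__(self, key):
            # Re-insert to mark the entry as most recently used.
            value = self.data.pop(key)
            self.data[key] = value
            return value

        def get(self, key, default=None):
            if key in self.data:
                return self[key]
            return default

        def setdefault(self, key, value):
            if key not in self.data:
                self[key] = value
            return self[key]

        def pop(self, key, default=None):
            return self.data.pop(key, default)
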
diff --git a/tests/utils.py b/tests/utils.py
index 97fa8d8181..81e82a80df 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -17,6 +17,7 @@ from synapse.http.server import HttpServer
 from synapse.api.errors import cs_error, CodeMessageException, StoreError
 from synapse.api.constants import EventTypes
 from synapse.storage import prepare_database
+from synapse.server import HomeServer
 
 from synapse.util.logcontext import LoggingContext
 
@@ -31,6 +32,36 @@ import urlparse
 from inspect import getcallargs
 
 
+@defer.inlineCallbacks
+def setup_test_homeserver(name="test", datastore=None, config=None, **kwargs):
+    """Setup a homeserver suitable for running tests against. Keyword arguments
+    are passed to the Homeserver constructor. If no datastore is supplied a
+    datastore backed by an in-memory sqlite db will be given to the HS.
+    """
+    if config is None:
+        config = Mock()
+        config.signing_key = [MockKey()]
+        config.event_cache_size = 1
+        config.disable_registration = False
+
+    if datastore is None:
+        db_pool = SQLiteMemoryDbPool()
+        yield db_pool.prepare()
+        hs = HomeServer(
+            name, db_pool=db_pool, config=config,
+            version_string="Synapse/tests",
+            **kwargs
+        )
+    else:
+        hs = HomeServer(
+            name, db_pool=None, datastore=datastore, config=config,
+            version_string="Synapse/tests",
+            **kwargs
+        )
+
+    defer.returnValue(hs)
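+
+
+# A sketch of typical use from a test case (the keyword arguments, e.g.
+# handlers=None, are illustrative and depend on what the test needs):
+#
+#     @defer.inlineCallbacks
+#     def setUp(self):
+#         hs = yield setup_test_homeserver(handlers=None)
+#         self.store = hs.get_datastore()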
+
+
 def get_mock_call_args(pattern_func, mock_func):
     """ Return the arguments the mock function was called with interpreted
     by the pattern functions argument list.