-rw-r--r--  CHANGES.rst | 50
-rw-r--r--  docs/user_directory.md | 17
-rwxr-xr-x  scripts/synapse_port_db | 7
-rw-r--r--  synapse/__init__.py | 2
-rw-r--r--  synapse/federation/transport/client.py | 23
-rw-r--r--  synapse/federation/transport/server.py | 29
-rw-r--r--  synapse/groups/groups_server.py | 58
-rw-r--r--  synapse/handlers/appservice.py | 6
-rw-r--r--  synapse/handlers/federation.py | 66
-rw-r--r--  synapse/handlers/groups_local.py | 5
-rw-r--r--  synapse/handlers/profile.py | 21
-rw-r--r--  synapse/handlers/room_list.py | 50
-rw-r--r--  synapse/handlers/user_directory.py | 4
-rw-r--r--  synapse/http/servlet.py | 3
-rw-r--r--  synapse/replication/slave/storage/_base.py | 2
-rw-r--r--  synapse/rest/client/v1/register.py | 2
-rw-r--r--  synapse/rest/client/v2_alpha/account.py | 15
-rw-r--r--  synapse/rest/client/v2_alpha/groups.py | 32
-rw-r--r--  synapse/rest/client/v2_alpha/register.py | 29
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py | 88
-rw-r--r--  synapse/storage/__init__.py | 2
-rw-r--r--  synapse/storage/_base.py | 2
-rw-r--r--  synapse/storage/account_data.py | 2
-rw-r--r--  synapse/storage/appservice.py | 8
-rw-r--r--  synapse/storage/background_updates.py | 4
-rw-r--r--  synapse/storage/client_ips.py | 4
-rw-r--r--  synapse/storage/deviceinbox.py | 4
-rw-r--r--  synapse/storage/devices.py | 4
-rw-r--r--  synapse/storage/event_federation.py | 4
-rw-r--r--  synapse/storage/event_push_actions.py | 4
-rw-r--r--  synapse/storage/events.py | 4
-rw-r--r--  synapse/storage/group_server.py | 27
-rw-r--r--  synapse/storage/media_repository.py | 6
-rw-r--r--  synapse/storage/receipts.py | 4
-rw-r--r--  synapse/storage/registration.py | 7
-rw-r--r--  synapse/storage/roommember.py | 4
-rw-r--r--  synapse/storage/schema/delta/43/user_share.sql | 2
-rw-r--r--  synapse/storage/schema/delta/46/group_server.sql | 2
-rw-r--r--  synapse/storage/schema/delta/46/user_dir_typos.sql | 24
-rw-r--r--  synapse/storage/search.py | 4
-rw-r--r--  synapse/storage/state.py | 4
-rw-r--r--  synapse/storage/transactions.py | 4
-rw-r--r--  synapse/storage/user_directory.py | 20
-rw-r--r--  tests/handlers/test_appservice.py | 15
-rw-r--r--  tests/storage/test_appservice.py | 14
-rw-r--r--  tests/storage/test_base.py | 2
-rw-r--r--  tests/storage/test_directory.py | 2
-rw-r--r--  tests/storage/test_presence.py | 2
-rw-r--r--  tests/storage/test_profile.py | 2
49 files changed, 493 insertions(+), 203 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 4911cfa284..8e84323079 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,53 @@
+Changes in synapse v0.25.0-rc1 (2017-11-14)
+===========================================
+
+Features:
+
+* Add is_public to groups table to allow for private groups (PR #2582)
+* Add a route for determining who you are (PR #2668) Thanks to @turt2live!
+* Add more features to the password providers (PR #2608, #2610, #2620, #2622,
+  #2623, #2624, #2626, #2628, #2629)
+* Add a hook for custom rest endpoints (PR #2627)
+* Add API to update group room visibility (PR #2651)
+
+
+Changes:
+
+* Ignore <noscript> tags when generating URL preview descriptions (PR #2576)
+  Thanks to @maximevaillancourt!
+* Register some /unstable endpoints in /r0 as well (PR #2579) Thanks to
+  @krombel!
+* Support /keys/upload on /r0 as well as /unstable (PR #2585)
+* Front-end proxy: pass through auth header (PR #2586)
+* Allow ASes to deactivate their own users (PR #2589)
+* Remove refresh tokens (PR #2613)
+* Automatically set default displayname on register (PR #2617)
+* Log login requests (PR #2618)
+* Always return `is_public` in the `/groups/:group_id/rooms` API (PR #2630)
+* Avoid no-op media deletes (PR #2637) Thanks to @spantaleev!
+* Fix various embarrassing typos around user_directory and add some doc. (PR
+  #2643)
+* Return whether a user is an admin within a group (PR #2647)
+* Namespace visibility options for groups (PR #2657)
+* Downcase UserIDs on registration (PR #2662)
+* Cache failures when fetching URL previews (PR #2669)
+
+
+Bug fixes:
+
+* Fix port script (PR #2577)
+* Fix error when running synapse with no logfile (PR #2581)
+* Fix UI auth when deleting devices (PR #2591)
+* Fix typo when checking if user is invited to group (PR #2599)
+* Fix the port script to drop NUL values in all tables (PR #2611)
+* Fix appservices being backlogged and not receiving new events due to a bug in
+  notify_interested_services (PR #2631) Thanks to @xyzz!
+* Fix updating rooms avatar/display name when modified by admin (PR #2636)
+  Thanks to @farialima!
+* Fix bug in state group storage (PR #2649)
+* Fix 500 on invalid utf-8 in request (PR #2663)
+
+
 Changes in synapse v0.24.1 (2017-10-24)
 =======================================
 
diff --git a/docs/user_directory.md b/docs/user_directory.md
new file mode 100644
index 0000000000..4c8ee44f37
--- /dev/null
+++ b/docs/user_directory.md
@@ -0,0 +1,17 @@
+User Directory API Implementation
+=================================
+
+The user directory is currently maintained based on the 'visible' users
+on this particular server - i.e. ones which your account shares a room with, or
+who are present in a publicly viewable room hosted on the server.
+
+The directory info is stored in various tables, which can (typically after
+DB corruption) get stale or out of sync.  If this happens, for now the
+quickest solution to fix it is:
+
+```
+UPDATE user_directory_stream_pos SET stream_id = NULL;
+```
+
+and restart synapse, which should then start a background task to
+flush the current tables and regenerate the directory.
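
For a PostgreSQL-backed homeserver the same reset can be scripted; the following is a minimal sketch assuming psycopg2, with an illustrative database name and role, and is not part of the patch above.

```python
# A minimal sketch of the reset described in docs/user_directory.md, assuming
# a PostgreSQL-backed homeserver and psycopg2; the database name and user are
# placeholders for your own deployment.
import psycopg2

conn = psycopg2.connect(dbname="synapse", user="synapse_user")
with conn:
    with conn.cursor() as cur:
        # Nulling the stream position makes synapse rebuild the user
        # directory tables via a background task on the next restart.
        cur.execute("UPDATE user_directory_stream_pos SET stream_id = NULL;")
conn.close()
```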
diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db
index 3a8972efc3..d46581e4e1 100755
--- a/scripts/synapse_port_db
+++ b/scripts/synapse_port_db
@@ -43,6 +43,13 @@ BOOLEAN_COLUMNS = {
     "device_lists_outbound_pokes": ["sent"],
     "users_who_share_rooms": ["share_private"],
     "groups": ["is_public"],
+    "group_rooms": ["is_public"],
+    "group_users": ["is_public", "is_admin"],
+    "group_summary_rooms": ["is_public"],
+    "group_room_categories": ["is_public"],
+    "group_summary_users": ["is_public"],
+    "group_roles": ["is_public"],
+    "local_group_membership": ["is_publicised", "is_admin"],
 }
 
 
diff --git a/synapse/__init__.py b/synapse/__init__.py
index e74abe0130..e04208961f 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.24.1"
+__version__ = "0.25.0"
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index ed41dfc7ee..1f3ce238f6 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -531,9 +531,9 @@ class TransportLayerClient(object):
             ignore_backoff=True,
         )
 
-    def update_room_group_association(self, destination, group_id, requester_user_id,
-                                      room_id, content):
-        """Add or update an association between room and group
+    def add_room_to_group(self, destination, group_id, requester_user_id, room_id,
+                          content):
+        """Add a room to a group
         """
         path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,)
 
@@ -545,8 +545,21 @@ class TransportLayerClient(object):
             ignore_backoff=True,
         )
 
-    def delete_room_group_association(self, destination, group_id, requester_user_id,
-                                      room_id):
+    def update_room_in_group(self, destination, group_id, requester_user_id, room_id,
+                             config_key, content):
+        """Update room in group
+        """
+        path = PREFIX + "/groups/%s/room/%s/config/%s" % (group_id, room_id, config_key,)
+
+        return self.client.post_json(
+            destination=destination,
+            path=path,
+            args={"requester_user_id": requester_user_id},
+            data=content,
+            ignore_backoff=True,
+        )
+
+    def remove_room_from_group(self, destination, group_id, requester_user_id, room_id):
         """Remove a room from a group
         """
         path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,)
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index ded6d4edc9..2b02b021ec 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -676,7 +676,7 @@ class FederationGroupsRoomsServlet(BaseFederationServlet):
 class FederationGroupsAddRoomsServlet(BaseFederationServlet):
     """Add/remove room from group
     """
-    PATH = "/groups/(?P<group_id>[^/]*)/room/(?<room_id>)$"
+    PATH = "/groups/(?P<group_id>[^/]*)/room/(?P<room_id>[^/]*)$"
 
     @defer.inlineCallbacks
     def on_POST(self, origin, content, query, group_id, room_id):
@@ -684,7 +684,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet):
         if get_domain_from_id(requester_user_id) != origin:
             raise SynapseError(403, "requester_user_id doesn't match origin")
 
-        new_content = yield self.handler.update_room_group_association(
+        new_content = yield self.handler.add_room_to_group(
             group_id, requester_user_id, room_id, content
         )
 
@@ -696,13 +696,34 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet):
         if get_domain_from_id(requester_user_id) != origin:
             raise SynapseError(403, "requester_user_id doesn't match origin")
 
-        new_content = yield self.handler.delete_room_group_association(
+        new_content = yield self.handler.remove_room_from_group(
             group_id, requester_user_id, room_id,
         )
 
         defer.returnValue((200, new_content))
 
 
+class FederationGroupsAddRoomsConfigServlet(BaseFederationServlet):
+    """Update room config in group
+    """
+    PATH = (
+        "/groups/(?P<group_id>[^/]*)/room/(?P<room_id>[^/]*)"
+        "/config/(?P<config_key>[^/]*)$"
+    )
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id, room_id, config_key):
+        requester_user_id = parse_string_from_args(query, "requester_user_id")
+        if get_domain_from_id(requester_user_id) != origin:
+            raise SynapseError(403, "requester_user_id doesn't match origin")
+
+        result = yield self.handler.update_room_in_group(
+            group_id, requester_user_id, room_id, config_key, content,
+        )
+
+        defer.returnValue((200, result))
+
+
 class FederationGroupsUsersServlet(BaseFederationServlet):
     """Get the users in a group on behalf of a user
     """
@@ -1142,6 +1163,8 @@ GROUP_SERVER_SERVLET_CLASSES = (
     FederationGroupsRolesServlet,
     FederationGroupsRoleServlet,
     FederationGroupsSummaryUsersServlet,
+    FederationGroupsAddRoomsServlet,
+    FederationGroupsAddRoomsConfigServlet,
 )
 
 
diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index 11199dd215..0b995aed70 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -530,9 +530,8 @@ class GroupsServerHandler(object):
         })
 
     @defer.inlineCallbacks
-    def update_room_group_association(self, group_id, requester_user_id, room_id,
-                                      content):
-        """Add or update an association between room and group
+    def add_room_to_group(self, group_id, requester_user_id, room_id, content):
+        """Add room to group
         """
         RoomID.from_string(room_id)  # Ensure valid room id
 
@@ -542,21 +541,42 @@ class GroupsServerHandler(object):
 
         is_public = _parse_visibility_from_contents(content)
 
-        yield self.store.update_room_group_association(
-            group_id, room_id, is_public=is_public
+        yield self.store.add_room_to_group(group_id, room_id, is_public=is_public)
+
+        defer.returnValue({})
+
+    @defer.inlineCallbacks
+    def update_room_in_group(self, group_id, requester_user_id, room_id, config_key,
+                             content):
+        """Update room in group
+        """
+        RoomID.from_string(room_id)  # Ensure valid room id
+
+        yield self.check_group_is_ours(
+            group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
         )
 
+        if config_key == "m.visibility":
+            is_public = _parse_visibility_dict(content)
+
+            yield self.store.update_room_in_group_visibility(
+                group_id, room_id,
+                is_public=is_public,
+            )
+        else:
+            raise SynapseError(400, "Uknown config option")
+
         defer.returnValue({})
 
     @defer.inlineCallbacks
-    def delete_room_group_association(self, group_id, requester_user_id, room_id):
+    def remove_room_from_group(self, group_id, requester_user_id, room_id):
         """Remove room from group
         """
         yield self.check_group_is_ours(
             group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
         )
 
-        yield self.store.delete_room_group_association(group_id, room_id)
+        yield self.store.remove_room_from_group(group_id, room_id)
 
         defer.returnValue({})
 
@@ -820,15 +840,25 @@ def _parse_visibility_from_contents(content):
     public or not
     """
 
-    visibility = content.get("visibility")
+    visibility = content.get("m.visibility")
     if visibility:
-        vis_type = visibility["type"]
-        if vis_type not in ("public", "private"):
-            raise SynapseError(
-                400, "Synapse only supports 'public'/'private' visibility"
-            )
-        is_public = vis_type == "public"
+        return _parse_visibility_dict(visibility)
     else:
         is_public = True
 
     return is_public
+
+
+def _parse_visibility_dict(visibility):
+    """Given a dict for the "m.visibility" config return if the entity should
+    be public or not
+    """
+    vis_type = visibility.get("type")
+    if not vis_type:
+        return True
+
+    if vis_type not in ("public", "private"):
+        raise SynapseError(
+            400, "Synapse only supports 'public'/'private' visibility"
+        )
+    return vis_type == "public"
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index 05af54d31b..543bf28aec 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -70,11 +70,10 @@ class ApplicationServicesHandler(object):
         with Measure(self.clock, "notify_interested_services"):
             self.is_processing = True
             try:
-                upper_bound = self.current_max
                 limit = 100
                 while True:
                     upper_bound, events = yield self.store.get_new_events_for_appservice(
-                        upper_bound, limit
+                        self.current_max, limit
                     )
 
                     if not events:
@@ -105,9 +104,6 @@ class ApplicationServicesHandler(object):
                             )
 
                     yield self.store.set_appservice_last_pos(upper_bound)
-
-                    if len(events) < limit:
-                        break
             finally:
                 self.is_processing = False
 
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 8b1e606754..ac70730885 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1706,6 +1706,17 @@ class FederationHandler(BaseHandler):
     @defer.inlineCallbacks
     @log_function
     def do_auth(self, origin, event, context, auth_events):
+        """
+
+        Args:
+            origin (str):
+            event (synapse.events.FrozenEvent):
+            context (synapse.events.snapshot.EventContext):
+            auth_events (dict[(str, str)->str]):
+
+        Returns:
+            defer.Deferred[None]
+        """
         # Check if we have all the auth events.
         current_state = set(e.event_id for e in auth_events.values())
         event_auth_events = set(e_id for e_id, _ in event.auth_events)
@@ -1817,16 +1828,9 @@ class FederationHandler(BaseHandler):
                 current_state = set(e.event_id for e in auth_events.values())
                 different_auth = event_auth_events - current_state
 
-                context.current_state_ids = dict(context.current_state_ids)
-                context.current_state_ids.update({
-                    k: a.event_id for k, a in auth_events.items()
-                    if k != event_key
-                })
-                context.prev_state_ids = dict(context.prev_state_ids)
-                context.prev_state_ids.update({
-                    k: a.event_id for k, a in auth_events.items()
-                })
-                context.state_group = self.store.get_next_state_group()
+                self._update_context_for_auth_events(
+                    context, auth_events, event_key,
+                )
 
         if different_auth and not event.internal_metadata.is_outlier():
             logger.info("Different auth after resolution: %s", different_auth)
@@ -1906,16 +1910,9 @@ class FederationHandler(BaseHandler):
                 # 4. Look at rejects and their proofs.
                 # TODO.
 
-                context.current_state_ids = dict(context.current_state_ids)
-                context.current_state_ids.update({
-                    k: a.event_id for k, a in auth_events.items()
-                    if k != event_key
-                })
-                context.prev_state_ids = dict(context.prev_state_ids)
-                context.prev_state_ids.update({
-                    k: a.event_id for k, a in auth_events.items()
-                })
-                context.state_group = self.store.get_next_state_group()
+                self._update_context_for_auth_events(
+                    context, auth_events, event_key,
+                )
 
         try:
             self.auth.check(event, auth_events=auth_events)
@@ -1923,6 +1920,35 @@ class FederationHandler(BaseHandler):
             logger.warn("Failed auth resolution for %r because %s", event, e)
             raise e
 
+    def _update_context_for_auth_events(self, context, auth_events,
+                                        event_key):
+        """Update the state_ids in an event context after auth event resolution
+
+        Args:
+            context (synapse.events.snapshot.EventContext): event context
+                to be updated
+
+            auth_events (dict[(str, str)->str]): Events to update in the event
+                context.
+
+            event_key ((str, str)): (type, state_key) for the current event.
+                this will not be included in the current_state in the context.
+        """
+        state_updates = {
+            k: a.event_id for k, a in auth_events.iteritems()
+            if k != event_key
+        }
+        context.current_state_ids = dict(context.current_state_ids)
+        context.current_state_ids.update(state_updates)
+        if context.delta_ids is not None:
+            context.delta_ids = dict(context.delta_ids)
+            context.delta_ids.update(state_updates)
+        context.prev_state_ids = dict(context.prev_state_ids)
+        context.prev_state_ids.update({
+            k: a.event_id for k, a in auth_events.iteritems()
+        })
+        context.state_group = self.store.get_next_state_group()
+
     @defer.inlineCallbacks
     def construct_auth_difference(self, local_auth, remote_auth):
         """ Given a local and remote auth chain, find the differences. This
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index dabc2a3fbb..da00aeb0f4 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -70,8 +70,9 @@ class GroupsLocalHandler(object):
 
     get_invited_users_in_group = _create_rerouter("get_invited_users_in_group")
 
-    update_room_group_association = _create_rerouter("update_room_group_association")
-    delete_room_group_association = _create_rerouter("delete_room_group_association")
+    add_room_to_group = _create_rerouter("add_room_to_group")
+    update_room_in_group = _create_rerouter("update_room_in_group")
+    remove_room_from_group = _create_rerouter("remove_room_from_group")
 
     update_group_summary_room = _create_rerouter("update_group_summary_room")
     delete_group_summary_room = _create_rerouter("delete_group_summary_room")
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 62b9bd503e..5e5b1952dd 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -17,7 +17,6 @@ import logging
 
 from twisted.internet import defer
 
-import synapse.types
 from synapse.api.errors import SynapseError, AuthError, CodeMessageException
 from synapse.types import UserID, get_domain_from_id
 from ._base import BaseHandler
@@ -140,7 +139,7 @@ class ProfileHandler(BaseHandler):
             target_user.localpart, new_displayname
         )
 
-        yield self._update_join_states(requester)
+        yield self._update_join_states(requester, target_user)
 
     @defer.inlineCallbacks
     def get_avatar_url(self, target_user):
@@ -184,7 +183,7 @@ class ProfileHandler(BaseHandler):
             target_user.localpart, new_avatar_url
         )
 
-        yield self._update_join_states(requester)
+        yield self._update_join_states(requester, target_user)
 
     @defer.inlineCallbacks
     def on_profile_query(self, args):
@@ -209,28 +208,24 @@ class ProfileHandler(BaseHandler):
         defer.returnValue(response)
 
     @defer.inlineCallbacks
-    def _update_join_states(self, requester):
-        user = requester.user
-        if not self.hs.is_mine(user):
+    def _update_join_states(self, requester, target_user):
+        if not self.hs.is_mine(target_user):
             return
 
         yield self.ratelimit(requester)
 
         room_ids = yield self.store.get_rooms_for_user(
-            user.to_string(),
+            target_user.to_string(),
         )
 
         for room_id in room_ids:
             handler = self.hs.get_handlers().room_member_handler
             try:
-                # Assume the user isn't a guest because we don't let guests set
-                # profile or avatar data.
-                # XXX why are we recreating `requester` here for each room?
-                # what was wrong with the `requester` we were passed?
-                requester = synapse.types.create_requester(user)
+                # Assume the target_user isn't a guest,
+                # because we don't let guests set profile or avatar data.
                 yield handler.update_membership(
                     requester,
-                    user,
+                    target_user,
                     room_id,
                     "join",  # We treat a profile update like a join.
                     ratelimit=False,  # Try to hide that these events aren't atomic.
diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py
index 2cf34e51cb..bb40075387 100644
--- a/synapse/handlers/room_list.py
+++ b/synapse/handlers/room_list.py
@@ -154,6 +154,8 @@ class RoomListHandler(BaseHandler):
             # We want larger rooms to be first, hence negating num_joined_users
             rooms_to_order_value[room_id] = (-num_joined_users, room_id)
 
+        logger.info("Getting ordering for %i rooms since %s",
+                    len(room_ids), stream_token)
         yield concurrently_execute(get_order_for_room, room_ids, 10)
 
         sorted_entries = sorted(rooms_to_order_value.items(), key=lambda e: e[1])
@@ -181,34 +183,42 @@ class RoomListHandler(BaseHandler):
                 rooms_to_scan = rooms_to_scan[:since_token.current_limit]
                 rooms_to_scan.reverse()
 
-        # Actually generate the entries. _append_room_entry_to_chunk will append to
-        # chunk but will stop if len(chunk) > limit
-        chunk = []
-        if limit and not search_filter:
+        logger.info("After sorting and filtering, %i rooms remain",
+                    len(rooms_to_scan))
+
+        # _append_room_entry_to_chunk will append to chunk but will stop if
+        # len(chunk) > limit
+        #
+        # Normally we will generate enough results on the first iteration here,
+        #  but if there is a search filter, _append_room_entry_to_chunk may
+        # filter some results out, in which case we loop again.
+        #
+        # We don't want to scan over the entire range either as that
+        # would potentially waste a lot of work.
+        #
+        # XXX if there is no limit, we may end up DoSing the server with
+        # calls to get_current_state_ids for every single room on the
+        # server. Surely we should cap this somehow?
+        #
+        if limit:
             step = limit + 1
-            for i in xrange(0, len(rooms_to_scan), step):
-                # We iterate here because the vast majority of cases we'll stop
-                # at first iteration, but occaisonally _append_room_entry_to_chunk
-                # won't append to the chunk and so we need to loop again.
-                # We don't want to scan over the entire range either as that
-                # would potentially waste a lot of work.
-                yield concurrently_execute(
-                    lambda r: self._append_room_entry_to_chunk(
-                        r, rooms_to_num_joined[r],
-                        chunk, limit, search_filter
-                    ),
-                    rooms_to_scan[i:i + step], 10
-                )
-                if len(chunk) >= limit + 1:
-                    break
         else:
+            step = len(rooms_to_scan)
+
+        chunk = []
+        for i in xrange(0, len(rooms_to_scan), step):
+            batch = rooms_to_scan[i:i + step]
+            logger.info("Processing %i rooms for result", len(batch))
             yield concurrently_execute(
                 lambda r: self._append_room_entry_to_chunk(
                     r, rooms_to_num_joined[r],
                     chunk, limit, search_filter
                 ),
-                rooms_to_scan, 5
+                batch, 5,
             )
+            logger.info("Now %i rooms in result", len(chunk))
+            if len(chunk) >= limit + 1:
+                break
 
         chunk.sort(key=lambda e: (-e["num_joined_members"], e["room_id"]))
 
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index 2a49456bfc..b5be5d9623 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -152,7 +152,7 @@ class UserDirectoyHandler(object):
 
         for room_id in room_ids:
             logger.info("Handling room %d/%d", num_processed_rooms, len(room_ids))
-            yield self._handle_intial_room(room_id)
+            yield self._handle_initial_room(room_id)
             num_processed_rooms += 1
             yield sleep(self.INITIAL_SLEEP_MS / 1000.)
 
@@ -166,7 +166,7 @@ class UserDirectoyHandler(object):
         yield self.store.update_user_directory_stream_pos(new_pos)
 
     @defer.inlineCallbacks
-    def _handle_intial_room(self, room_id):
+    def _handle_initial_room(self, room_id):
         """Called when we initially fill out user_directory one room at a time
         """
         is_in_room = yield self.store.is_host_joined(room_id, self.server_name)
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
index 8118ee7cc2..71420e54db 100644
--- a/synapse/http/servlet.py
+++ b/synapse/http/servlet.py
@@ -167,7 +167,8 @@ def parse_json_value_from_request(request):
 
     try:
         content = simplejson.loads(content_bytes)
-    except simplejson.JSONDecodeError:
+    except Exception as e:
+        logger.warn("Unable to parse JSON: %s", e)
         raise SynapseError(400, "Content not JSON.", errcode=Codes.NOT_JSON)
 
     return content
diff --git a/synapse/replication/slave/storage/_base.py b/synapse/replication/slave/storage/_base.py
index b962641166..61f5590c53 100644
--- a/synapse/replication/slave/storage/_base.py
+++ b/synapse/replication/slave/storage/_base.py
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
 
 class BaseSlavedStore(SQLBaseStore):
     def __init__(self, db_conn, hs):
-        super(BaseSlavedStore, self).__init__(hs)
+        super(BaseSlavedStore, self).__init__(db_conn, hs)
         if isinstance(self.database_engine, PostgresEngine):
             self._cache_id_gen = SlavedIdTracker(
                 db_conn, "cache_invalidation_stream", "stream_id",
diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py
index ecf7e311a9..32ed1d3ab2 100644
--- a/synapse/rest/client/v1/register.py
+++ b/synapse/rest/client/v1/register.py
@@ -359,7 +359,7 @@ class RegisterRestServlet(ClientV1RestServlet):
         if compare_digest(want_mac, got_mac):
             handler = self.handlers.registration_handler
             user_id, token = yield handler.register(
-                localpart=user,
+                localpart=user.lower(),
                 password=password,
                 admin=bool(admin),
             )
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 3062e04c59..726e0a2826 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -382,6 +382,20 @@ class ThreepidDeleteRestServlet(RestServlet):
         defer.returnValue((200, {}))
 
 
+class WhoamiRestServlet(RestServlet):
+    PATTERNS = client_v2_patterns("/account/whoami$")
+
+    def __init__(self, hs):
+        super(WhoamiRestServlet, self).__init__()
+        self.auth = hs.get_auth()
+
+    @defer.inlineCallbacks
+    def on_GET(self, request):
+        requester = yield self.auth.get_user_by_req(request)
+
+        defer.returnValue((200, {'user_id': requester.user.to_string()}))
+
+
 def register_servlets(hs, http_server):
     EmailPasswordRequestTokenRestServlet(hs).register(http_server)
     MsisdnPasswordRequestTokenRestServlet(hs).register(http_server)
@@ -391,3 +405,4 @@ def register_servlets(hs, http_server):
     MsisdnThreepidRequestTokenRestServlet(hs).register(http_server)
     ThreepidRestServlet(hs).register(http_server)
     ThreepidDeleteRestServlet(hs).register(http_server)
+    WhoamiRestServlet(hs).register(http_server)
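
For reference, the new whoami route can be exercised roughly as follows. This is an illustrative sketch only: the homeserver URL and access token are placeholders, and the /r0 prefix assumes client_v2_patterns registers the route under the stable prefix as well as /unstable.

```python
# A quick illustration of the new /account/whoami endpoint added above;
# URL and token are placeholders, not values from the patch.
import requests

resp = requests.get(
    "https://matrix.example.com/_matrix/client/r0/account/whoami",
    headers={"Authorization": "Bearer <access_token>"},
)
resp.raise_for_status()
print(resp.json())  # e.g. {"user_id": "@alice:matrix.example.com"}
```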
diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py
index 792608cd48..089ec71c81 100644
--- a/synapse/rest/client/v2_alpha/groups.py
+++ b/synapse/rest/client/v2_alpha/groups.py
@@ -451,7 +451,7 @@ class GroupAdminRoomsServlet(RestServlet):
         requester_user_id = requester.user.to_string()
 
         content = parse_json_object_from_request(request)
-        result = yield self.groups_handler.update_room_group_association(
+        result = yield self.groups_handler.add_room_to_group(
             group_id, requester_user_id, room_id, content,
         )
 
@@ -462,13 +462,40 @@ class GroupAdminRoomsServlet(RestServlet):
         requester = yield self.auth.get_user_by_req(request)
         requester_user_id = requester.user.to_string()
 
-        result = yield self.groups_handler.delete_room_group_association(
+        result = yield self.groups_handler.remove_room_from_group(
             group_id, requester_user_id, room_id,
         )
 
         defer.returnValue((200, result))
 
 
+class GroupAdminRoomsConfigServlet(RestServlet):
+    """Update the config of a room in a group
+    """
+    PATTERNS = client_v2_patterns(
+        "/groups/(?P<group_id>[^/]*)/admin/rooms/(?P<room_id>[^/]*)"
+        "/config/(?P<config_key>[^/]*)$"
+    )
+
+    def __init__(self, hs):
+        super(GroupAdminRoomsConfigServlet, self).__init__()
+        self.auth = hs.get_auth()
+        self.clock = hs.get_clock()
+        self.groups_handler = hs.get_groups_local_handler()
+
+    @defer.inlineCallbacks
+    def on_PUT(self, request, group_id, room_id, config_key):
+        requester = yield self.auth.get_user_by_req(request)
+        requester_user_id = requester.user.to_string()
+
+        content = parse_json_object_from_request(request)
+        result = yield self.groups_handler.update_room_in_group(
+            group_id, requester_user_id, room_id, config_key, content,
+        )
+
+        defer.returnValue((200, result))
+
+
 class GroupAdminUsersInviteServlet(RestServlet):
     """Invite a user to the group
     """
@@ -713,6 +740,7 @@ def register_servlets(hs, http_server):
     GroupRoomServlet(hs).register(http_server)
     GroupCreateServlet(hs).register(http_server)
     GroupAdminRoomsServlet(hs).register(http_server)
+    GroupAdminRoomsConfigServlet(hs).register(http_server)
     GroupAdminUsersInviteServlet(hs).register(http_server)
     GroupAdminUsersKickServlet(hs).register(http_server)
     GroupSelfLeaveServlet(hs).register(http_server)
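
As a sketch of how a client would drive the new room-config route: the body format follows _parse_visibility_dict in groups_server.py ({"type": "public"} or {"type": "private"}), while the URL prefix, group and room IDs, and access token below are illustrative placeholders.

```python
# A sketch of updating a room's visibility within a group via the new
# GroupAdminRoomsConfigServlet; IDs, homeserver URL and token are placeholders.
import requests

url = (
    "https://matrix.example.com/_matrix/client/unstable"
    "/groups/+mygroup:example.com/admin/rooms/!someroom:example.com"
    "/config/m.visibility"
)
resp = requests.put(
    url,
    headers={"Authorization": "Bearer <access_token>"},
    json={"type": "private"},  # hide the room from non-members of the group
)
resp.raise_for_status()
```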
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index eebd071e59..9e2f7308ce 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -224,6 +224,12 @@ class RegisterRestServlet(RestServlet):
             # 'user' key not 'username'). Since this is a new addition, we'll
             # fallback to 'username' if they gave one.
             desired_username = body.get("user", desired_username)
+
+            # XXX we should check that desired_username is valid. Currently
+            # we give appservices carte blanche for any insanity in mxids,
+            # because the IRC bridges rely on being able to register stupid
+            # IDs.
+
             access_token = get_access_token_from_request(request)
 
             if isinstance(desired_username, basestring):
@@ -233,6 +239,15 @@ class RegisterRestServlet(RestServlet):
             defer.returnValue((200, result))  # we throw for non 200 responses
             return
 
+        # for either shared secret or regular registration, downcase the
+        # provided username before attempting to register it. This should mean
+        # that people who try to register with upper-case in their usernames
+        # don't get a nasty surprise. (Note that we treat username
+        # case-insensitively in login, so they are free to carry on imagining
+        # that their username is CrAzYh4cKeR if that keeps them happy)
+        if desired_username is not None:
+            desired_username = desired_username.lower()
+
         # == Shared Secret Registration == (e.g. create new user scripts)
         if 'mac' in body:
             # FIXME: Should we really be determining if this is shared secret
@@ -336,6 +351,9 @@ class RegisterRestServlet(RestServlet):
             new_password = params.get("password", None)
             guest_access_token = params.get("guest_access_token", None)
 
+            if desired_username is not None:
+                desired_username = desired_username.lower()
+
             (registered_user_id, _) = yield self.registration_handler.register(
                 localpart=desired_username,
                 password=new_password,
@@ -417,13 +435,22 @@ class RegisterRestServlet(RestServlet):
     def _do_shared_secret_registration(self, username, password, body):
         if not self.hs.config.registration_shared_secret:
             raise SynapseError(400, "Shared secret registration is not enabled")
+        if not username:
+            raise SynapseError(
+                400, "username must be specified", errcode=Codes.BAD_JSON,
+            )
 
-        user = username.encode("utf-8")
+        # use the username from the original request rather than the
+        # downcased one in `username` for the mac calculation
+        user = body["username"].encode("utf-8")
 
         # str() because otherwise hmac complains that 'unicode' does not
         # have the buffer interface
         got_mac = str(body["mac"])
 
+        # FIXME this is different to the /v1/register endpoint, which
+        # includes the password and admin flag in the hashed text. Why are
+        # these different?
         want_mac = hmac.new(
             key=self.hs.config.registration_shared_secret,
             msg=user,
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index 7907a9d17a..723f7043f4 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -20,6 +20,7 @@ from twisted.web.resource import Resource
 from synapse.api.errors import (
     SynapseError, Codes,
 )
+from synapse.util.logcontext import preserve_fn, make_deferred_yieldable
 from synapse.util.stringutils import random_string
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.http.client import SpiderHttpClient
@@ -63,16 +64,15 @@ class PreviewUrlResource(Resource):
 
         self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist
 
-        # simple memory cache mapping urls to OG metadata
-        self.cache = ExpiringCache(
+        # memory cache mapping urls to an ObservableDeferred returning
+        # JSON-encoded OG metadata
+        self._cache = ExpiringCache(
             cache_name="url_previews",
             clock=self.clock,
             # don't spider URLs more often than once an hour
             expiry_ms=60 * 60 * 1000,
         )
-        self.cache.start()
-
-        self.downloads = {}
+        self._cache.start()
 
         self._cleaner_loop = self.clock.looping_call(
             self._expire_url_cache_data, 10 * 1000
@@ -94,6 +94,7 @@ class PreviewUrlResource(Resource):
         else:
             ts = self.clock.time_msec()
 
+        # XXX: we could move this into _do_preview if we wanted.
         url_tuple = urlparse.urlsplit(url)
         for entry in self.url_preview_url_blacklist:
             match = True
@@ -126,14 +127,42 @@ class PreviewUrlResource(Resource):
                     Codes.UNKNOWN
                 )
 
-        # first check the memory cache - good to handle all the clients on this
-        # HS thundering away to preview the same URL at the same time.
-        og = self.cache.get(url)
-        if og:
-            respond_with_json_bytes(request, 200, json.dumps(og), send_cors=True)
-            return
+        # the in-memory cache:
+        # * ensures that only one request is active at a time
+        # * takes load off the DB for the thundering herds
+        # * also caches any failures (unlike the DB) so we don't keep
+        #    requesting the same endpoint
+
+        observable = self._cache.get(url)
+
+        if not observable:
+            download = preserve_fn(self._do_preview)(
+                url, requester.user, ts,
+            )
+            observable = ObservableDeferred(
+                download,
+                consumeErrors=True
+            )
+            self._cache[url] = observable
+        else:
+            logger.info("Returning cached response")
+
+        og = yield make_deferred_yieldable(observable.observe())
+        respond_with_json_bytes(request, 200, og, send_cors=True)
 
-        # then check the URL cache in the DB (which will also provide us with
+    @defer.inlineCallbacks
+    def _do_preview(self, url, user, ts):
+        """Check the db, and download the URL and build a preview
+
+        Args:
+            url (str):
+            user (str):
+            ts (int):
+
+        Returns:
+            Deferred[str]: json-encoded og data
+        """
+        # check the URL cache in the DB (which will also provide us with
         # historical previews, if we have any)
         cache_result = yield self.store.get_url_cache(url, ts)
         if (
@@ -141,32 +170,10 @@ class PreviewUrlResource(Resource):
             cache_result["expires_ts"] > ts and
             cache_result["response_code"] / 100 == 2
         ):
-            respond_with_json_bytes(
-                request, 200, cache_result["og"].encode('utf-8'),
-                send_cors=True
-            )
+            defer.returnValue(cache_result["og"])
             return
 
-        # Ensure only one download for a given URL is active at a time
-        download = self.downloads.get(url)
-        if download is None:
-            download = self._download_url(url, requester.user)
-            download = ObservableDeferred(
-                download,
-                consumeErrors=True
-            )
-            self.downloads[url] = download
-
-            @download.addBoth
-            def callback(media_info):
-                del self.downloads[url]
-                return media_info
-        media_info = yield download.observe()
-
-        # FIXME: we should probably update our cache now anyway, so that
-        # even if the OG calculation raises, we don't keep hammering on the
-        # remote server.  For now, leave it uncached to aid debugging OG
-        # calculation problems
+        media_info = yield self._download_url(url, user)
 
         logger.debug("got media_info of '%s'" % media_info)
 
@@ -212,7 +219,7 @@ class PreviewUrlResource(Resource):
             # just rely on the caching on the master request to speed things up.
             if 'og:image' in og and og['og:image']:
                 image_info = yield self._download_url(
-                    _rebase_url(og['og:image'], media_info['uri']), requester.user
+                    _rebase_url(og['og:image'], media_info['uri']), user
                 )
 
                 if _is_media(image_info['media_type']):
@@ -239,8 +246,7 @@ class PreviewUrlResource(Resource):
 
         logger.debug("Calculated OG for %s as %s" % (url, og))
 
-        # store OG in ephemeral in-memory cache
-        self.cache[url] = og
+        jsonog = json.dumps(og)
 
         # store OG in history-aware DB cache
         yield self.store.store_url_cache(
@@ -248,12 +254,12 @@ class PreviewUrlResource(Resource):
             media_info["response_code"],
             media_info["etag"],
             media_info["expires"] + media_info["created_ts"],
-            json.dumps(og),
+            jsonog,
             media_info["filesystem_id"],
             media_info["created_ts"],
         )
 
-        respond_with_json_bytes(request, 200, json.dumps(og), send_cors=True)
+        defer.returnValue(jsonog)
 
     @defer.inlineCallbacks
     def _download_url(self, url, user):
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index 594566eb38..d01d46338a 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -268,7 +268,7 @@ class DataStore(RoomMemberStore, RoomStore,
         self._stream_order_on_start = self.get_room_max_stream_ordering()
         self._min_stream_order_on_start = self.get_room_min_stream_ordering()
 
-        super(DataStore, self).__init__(hs)
+        super(DataStore, self).__init__(db_conn, hs)
 
     def take_presence_startup_info(self):
         active_on_startup = self._presence_on_startup
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 6caf7b3356..e94917d9cd 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -162,7 +162,7 @@ class PerformanceCounters(object):
 class SQLBaseStore(object):
     _TXN_ID = 0
 
-    def __init__(self, hs):
+    def __init__(self, db_conn, hs):
         self.hs = hs
         self._clock = hs.get_clock()
         self._db_pool = hs.get_db_pool()
diff --git a/synapse/storage/account_data.py b/synapse/storage/account_data.py
index ff14e54c11..c8a1eb016b 100644
--- a/synapse/storage/account_data.py
+++ b/synapse/storage/account_data.py
@@ -63,7 +63,7 @@ class AccountDataStore(SQLBaseStore):
             "get_account_data_for_user", get_account_data_for_user_txn
         )
 
-    @cachedInlineCallbacks(num_args=2)
+    @cachedInlineCallbacks(num_args=2, max_entries=5000)
     def get_global_account_data_by_type_for_user(self, data_type, user_id):
         """
         Returns:
diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py
index c63935cb07..d8c84b7141 100644
--- a/synapse/storage/appservice.py
+++ b/synapse/storage/appservice.py
@@ -48,8 +48,8 @@ def _make_exclusive_regex(services_cache):
 
 class ApplicationServiceStore(SQLBaseStore):
 
-    def __init__(self, hs):
-        super(ApplicationServiceStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(ApplicationServiceStore, self).__init__(db_conn, hs)
         self.hostname = hs.hostname
         self.services_cache = load_appservices(
             hs.hostname,
@@ -173,8 +173,8 @@ class ApplicationServiceStore(SQLBaseStore):
 
 class ApplicationServiceTransactionStore(SQLBaseStore):
 
-    def __init__(self, hs):
-        super(ApplicationServiceTransactionStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(ApplicationServiceTransactionStore, self).__init__(db_conn, hs)
 
     @defer.inlineCallbacks
     def get_appservices_by_state(self, state):
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
index a6e6f52a6a..6f235ac051 100644
--- a/synapse/storage/background_updates.py
+++ b/synapse/storage/background_updates.py
@@ -80,8 +80,8 @@ class BackgroundUpdateStore(SQLBaseStore):
     BACKGROUND_UPDATE_INTERVAL_MS = 1000
     BACKGROUND_UPDATE_DURATION_MS = 100
 
-    def __init__(self, hs):
-        super(BackgroundUpdateStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(BackgroundUpdateStore, self).__init__(db_conn, hs)
         self._background_update_performance = {}
         self._background_update_queue = []
         self._background_update_handlers = {}
diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py
index 3c95e90eca..a03d1d6104 100644
--- a/synapse/storage/client_ips.py
+++ b/synapse/storage/client_ips.py
@@ -32,14 +32,14 @@ LAST_SEEN_GRANULARITY = 120 * 1000
 
 
 class ClientIpStore(background_updates.BackgroundUpdateStore):
-    def __init__(self, hs):
+    def __init__(self, db_conn, hs):
         self.client_ip_last_seen = Cache(
             name="client_ip_last_seen",
             keylen=4,
             max_entries=50000 * CACHE_SIZE_FACTOR,
         )
 
-        super(ClientIpStore, self).__init__(hs)
+        super(ClientIpStore, self).__init__(db_conn, hs)
 
         self.register_background_index_update(
             "user_ips_device_index",
diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py
index 0b62b493d5..548e795daf 100644
--- a/synapse/storage/deviceinbox.py
+++ b/synapse/storage/deviceinbox.py
@@ -29,8 +29,8 @@ logger = logging.getLogger(__name__)
 class DeviceInboxStore(BackgroundUpdateStore):
     DEVICE_INBOX_STREAM_ID = "device_inbox_stream_drop"
 
-    def __init__(self, hs):
-        super(DeviceInboxStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(DeviceInboxStore, self).__init__(db_conn, hs)
 
         self.register_background_index_update(
             "device_inbox_stream_index",
diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py
index bb27fd1f70..bd2effdf34 100644
--- a/synapse/storage/devices.py
+++ b/synapse/storage/devices.py
@@ -26,8 +26,8 @@ logger = logging.getLogger(__name__)
 
 
 class DeviceStore(SQLBaseStore):
-    def __init__(self, hs):
-        super(DeviceStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(DeviceStore, self).__init__(db_conn, hs)
 
         # Map of (user_id, device_id) -> bool. If there is an entry that implies
         # the device exists.
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index e8133de2fa..55a05c59d5 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -39,8 +39,8 @@ class EventFederationStore(SQLBaseStore):
 
     EVENT_AUTH_STATE_ONLY = "event_auth_state_only"
 
-    def __init__(self, hs):
-        super(EventFederationStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(EventFederationStore, self).__init__(db_conn, hs)
 
         self.register_background_update_handler(
             self.EVENT_AUTH_STATE_ONLY,
diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index d6d8723b4a..8efe2fd4bb 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -65,8 +65,8 @@ def _deserialize_action(actions, is_highlight):
 class EventPushActionsStore(SQLBaseStore):
     EPA_HIGHLIGHT_INDEX = "epa_highlight_index"
 
-    def __init__(self, hs):
-        super(EventPushActionsStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(EventPushActionsStore, self).__init__(db_conn, hs)
 
         self.register_background_index_update(
             self.EPA_HIGHLIGHT_INDEX,
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 4298d8baf1..d08f7571d7 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -197,8 +197,8 @@ class EventsStore(SQLBaseStore):
     EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts"
     EVENT_FIELDS_SENDER_URL_UPDATE_NAME = "event_fields_sender_url"
 
-    def __init__(self, hs):
-        super(EventsStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(EventsStore, self).__init__(db_conn, hs)
         self._clock = hs.get_clock()
         self.register_background_update_handler(
             self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts
diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py
index 6b261dcc0f..8fde1aab8e 100644
--- a/synapse/storage/group_server.py
+++ b/synapse/storage/group_server.py
@@ -846,25 +846,32 @@ class GroupServerStore(SQLBaseStore):
             )
         return self.runInteraction("remove_user_from_group", _remove_user_from_group_txn)
 
-    def update_room_group_association(self, group_id, room_id, is_public):
-        return self._simple_upsert(
+    def add_room_to_group(self, group_id, room_id, is_public):
+        return self._simple_insert(
             table="group_rooms",
-            keyvalues={
+            values={
                 "group_id": group_id,
                 "room_id": room_id,
-            },
-            values={
                 "is_public": is_public,
             },
-            insertion_values={
+            desc="add_room_to_group",
+        )
+
+    def update_room_in_group_visibility(self, group_id, room_id, is_public):
+        return self._simple_update(
+            table="group_rooms",
+            keyvalues={
                 "group_id": group_id,
                 "room_id": room_id,
             },
-            desc="update_room_group_association",
+            updatevalues={
+                "is_public": is_public,
+            },
+            desc="update_room_in_group_visibility",
         )
 
-    def delete_room_group_association(self, group_id, room_id):
-        def _delete_room_group_association_txn(txn):
+    def remove_room_from_group(self, group_id, room_id):
+        def _remove_room_from_group_txn(txn):
             self._simple_delete_txn(
                 txn,
                 table="group_rooms",
@@ -883,7 +890,7 @@ class GroupServerStore(SQLBaseStore):
                 },
             )
         return self.runInteraction(
-            "delete_room_group_association", _delete_room_group_association_txn,
+            "remove_room_from_group", _remove_room_from_group_txn,
         )
 
     def get_publicised_groups_for_user(self, user_id):
diff --git a/synapse/storage/media_repository.py b/synapse/storage/media_repository.py
index 7110a71279..52e5cdad70 100644
--- a/synapse/storage/media_repository.py
+++ b/synapse/storage/media_repository.py
@@ -254,6 +254,9 @@ class MediaRepositoryStore(SQLBaseStore):
         return self.runInteraction("get_expired_url_cache", _get_expired_url_cache_txn)
 
     def delete_url_cache(self, media_ids):
+        if len(media_ids) == 0:
+            return
+
         sql = (
             "DELETE FROM local_media_repository_url_cache"
             " WHERE media_id = ?"
@@ -281,6 +284,9 @@ class MediaRepositoryStore(SQLBaseStore):
         )
 
     def delete_url_cache_media(self, media_ids):
+        if len(media_ids) == 0:
+            return
+
         def _delete_url_cache_media_txn(txn):
             sql = (
                 "DELETE FROM local_media_repository"
diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py
index f42b8014c7..12b3cc7f5f 100644
--- a/synapse/storage/receipts.py
+++ b/synapse/storage/receipts.py
@@ -27,8 +27,8 @@ logger = logging.getLogger(__name__)
 
 
 class ReceiptsStore(SQLBaseStore):
-    def __init__(self, hs):
-        super(ReceiptsStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(ReceiptsStore, self).__init__(db_conn, hs)
 
         self._receipts_stream_cache = StreamChangeCache(
             "ReceiptsRoomChangeCache", self._receipts_id_gen.get_current_token()
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 9c4f61da76..8b9544c209 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -24,8 +24,8 @@ from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 
 class RegistrationStore(background_updates.BackgroundUpdateStore):
 
-    def __init__(self, hs):
-        super(RegistrationStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(RegistrationStore, self).__init__(db_conn, hs)
 
         self.clock = hs.get_clock()
 
@@ -241,7 +241,6 @@ class RegistrationStore(background_updates.BackgroundUpdateStore):
             "user_set_password_hash", user_set_password_hash_txn
         )
 
-    @defer.inlineCallbacks
     def user_delete_access_tokens(self, user_id, except_token_id=None,
                                   device_id=None):
         """
@@ -290,7 +289,7 @@ class RegistrationStore(background_updates.BackgroundUpdateStore):
 
             return tokens_and_devices
 
-        yield self.runInteraction(
+        return self.runInteraction(
             "user_delete_access_tokens", f,
         )
 
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index 3fa8019eb7..3e77fd3901 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -49,8 +49,8 @@ _MEMBERSHIP_PROFILE_UPDATE_NAME = "room_membership_profile_update"
 
 
 class RoomMemberStore(SQLBaseStore):
-    def __init__(self, hs):
-        super(RoomMemberStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(RoomMemberStore, self).__init__(db_conn, hs)
         self.register_background_update_handler(
             _MEMBERSHIP_PROFILE_UPDATE_NAME, self._background_add_membership_profile
         )
diff --git a/synapse/storage/schema/delta/43/user_share.sql b/synapse/storage/schema/delta/43/user_share.sql
index 4501d90cbb..ee7062abe4 100644
--- a/synapse/storage/schema/delta/43/user_share.sql
+++ b/synapse/storage/schema/delta/43/user_share.sql
@@ -29,5 +29,5 @@ CREATE INDEX users_who_share_rooms_r_idx ON users_who_share_rooms(room_id);
 CREATE INDEX users_who_share_rooms_o_idx ON users_who_share_rooms(other_user_id);
 
 
--- Make sure that we popualte the table initially
+-- Make sure that we populate the table initially
 UPDATE user_directory_stream_pos SET stream_id = NULL;
diff --git a/synapse/storage/schema/delta/46/group_server.sql b/synapse/storage/schema/delta/46/group_server.sql
index e754b554f8..097679bc9a 100644
--- a/synapse/storage/schema/delta/46/group_server.sql
+++ b/synapse/storage/schema/delta/46/group_server.sql
@@ -1,4 +1,4 @@
-/* Copyright 2017 Vector Creations Ltd
+/* Copyright 2017 New Vector Ltd
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/synapse/storage/schema/delta/46/user_dir_typos.sql b/synapse/storage/schema/delta/46/user_dir_typos.sql
new file mode 100644
index 0000000000..d9505f8da1
--- /dev/null
+++ b/synapse/storage/schema/delta/46/user_dir_typos.sql
@@ -0,0 +1,24 @@
+/* Copyright 2017 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- this is just embarrassing :|
+ALTER TABLE users_in_pubic_room RENAME TO users_in_public_rooms;
+
+-- this is only 300K rows on matrix.org and takes ~3s to generate the index,
+-- so is hopefully not going to block anyone else for that long...
+CREATE INDEX users_in_public_rooms_room_idx ON users_in_public_rooms(room_id);
+CREATE UNIQUE INDEX users_in_public_rooms_user_idx ON users_in_public_rooms(user_id);
+DROP INDEX users_in_pubic_room_room_idx;
+DROP INDEX users_in_pubic_room_user_idx;
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index 05d4ef586e..479b04c636 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -33,8 +33,8 @@ class SearchStore(BackgroundUpdateStore):
     EVENT_SEARCH_ORDER_UPDATE_NAME = "event_search_order"
     EVENT_SEARCH_USE_GIST_POSTGRES_NAME = "event_search_postgres_gist"
 
-    def __init__(self, hs):
-        super(SearchStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(SearchStore, self).__init__(db_conn, hs)
         self.register_background_update_handler(
             self.EVENT_SEARCH_UPDATE_NAME, self._background_reindex_search
         )
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index 5673e4aa96..dd01b68762 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -63,8 +63,8 @@ class StateStore(SQLBaseStore):
     STATE_GROUP_INDEX_UPDATE_NAME = "state_group_state_type_index"
     CURRENT_STATE_INDEX_UPDATE_NAME = "current_state_members_idx"
 
-    def __init__(self, hs):
-        super(StateStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(StateStore, self).__init__(db_conn, hs)
         self.register_background_update_handler(
             self.STATE_GROUP_DEDUPLICATION_UPDATE_NAME,
             self._background_deduplicate_state,
diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py
index 809fdd311f..8f61f7ffae 100644
--- a/synapse/storage/transactions.py
+++ b/synapse/storage/transactions.py
@@ -46,8 +46,8 @@ class TransactionStore(SQLBaseStore):
     """A collection of queries for handling PDUs.
     """
 
-    def __init__(self, hs):
-        super(TransactionStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(TransactionStore, self).__init__(db_conn, hs)
 
         self._clock.looping_call(self._cleanup_transactions, 30 * 60 * 1000)
 
diff --git a/synapse/storage/user_directory.py b/synapse/storage/user_directory.py
index 2a4db3f03c..5dc5b9582a 100644
--- a/synapse/storage/user_directory.py
+++ b/synapse/storage/user_directory.py
@@ -63,7 +63,7 @@ class UserDirectoryStore(SQLBaseStore):
             user_ids (list(str)): Users to add
         """
         yield self._simple_insert_many(
-            table="users_in_pubic_room",
+            table="users_in_public_rooms",
             values=[
                 {
                     "user_id": user_id,
@@ -219,7 +219,7 @@ class UserDirectoryStore(SQLBaseStore):
     @defer.inlineCallbacks
     def update_user_in_public_user_list(self, user_id, room_id):
         yield self._simple_update_one(
-            table="users_in_pubic_room",
+            table="users_in_public_rooms",
             keyvalues={"user_id": user_id},
             updatevalues={"room_id": room_id},
             desc="update_user_in_public_user_list",
@@ -240,7 +240,7 @@ class UserDirectoryStore(SQLBaseStore):
             )
             self._simple_delete_txn(
                 txn,
-                table="users_in_pubic_room",
+                table="users_in_public_rooms",
                 keyvalues={"user_id": user_id},
             )
             txn.call_after(
@@ -256,7 +256,7 @@ class UserDirectoryStore(SQLBaseStore):
     @defer.inlineCallbacks
     def remove_from_user_in_public_room(self, user_id):
         yield self._simple_delete(
-            table="users_in_pubic_room",
+            table="users_in_public_rooms",
             keyvalues={"user_id": user_id},
             desc="remove_from_user_in_public_room",
         )
@@ -267,7 +267,7 @@ class UserDirectoryStore(SQLBaseStore):
         in the given room_id
         """
         return self._simple_select_onecol(
-            table="users_in_pubic_room",
+            table="users_in_public_rooms",
             keyvalues={"room_id": room_id},
             retcol="user_id",
             desc="get_users_in_public_due_to_room",
@@ -286,7 +286,7 @@ class UserDirectoryStore(SQLBaseStore):
         )
 
         user_ids_pub = yield self._simple_select_onecol(
-            table="users_in_pubic_room",
+            table="users_in_public_rooms",
             keyvalues={"room_id": room_id},
             retcol="user_id",
             desc="get_users_in_dir_due_to_room",
@@ -514,7 +514,7 @@ class UserDirectoryStore(SQLBaseStore):
         def _delete_all_from_user_dir_txn(txn):
             txn.execute("DELETE FROM user_directory")
             txn.execute("DELETE FROM user_directory_search")
-            txn.execute("DELETE FROM users_in_pubic_room")
+            txn.execute("DELETE FROM users_in_public_rooms")
             txn.execute("DELETE FROM users_who_share_rooms")
             txn.call_after(self.get_user_in_directory.invalidate_all)
             txn.call_after(self.get_user_in_public_room.invalidate_all)
@@ -537,7 +537,7 @@ class UserDirectoryStore(SQLBaseStore):
     @cached()
     def get_user_in_public_room(self, user_id):
         return self._simple_select_one(
-            table="users_in_pubic_room",
+            table="users_in_public_rooms",
             keyvalues={"user_id": user_id},
             retcols=("room_id",),
             allow_none=True,
@@ -641,7 +641,7 @@ class UserDirectoryStore(SQLBaseStore):
                 SELECT d.user_id, display_name, avatar_url
                 FROM user_directory_search
                 INNER JOIN user_directory AS d USING (user_id)
-                LEFT JOIN users_in_pubic_room AS p USING (user_id)
+                LEFT JOIN users_in_public_rooms AS p USING (user_id)
                 LEFT JOIN (
                     SELECT other_user_id AS user_id FROM users_who_share_rooms
                     WHERE user_id = ? AND share_private
@@ -680,7 +680,7 @@ class UserDirectoryStore(SQLBaseStore):
                 SELECT d.user_id, display_name, avatar_url
                 FROM user_directory_search
                 INNER JOIN user_directory AS d USING (user_id)
-                LEFT JOIN users_in_pubic_room AS p USING (user_id)
+                LEFT JOIN users_in_public_rooms AS p USING (user_id)
                 LEFT JOIN (
                     SELECT other_user_id AS user_id FROM users_who_share_rooms
                     WHERE user_id = ? AND share_private
diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py
index 7fe88172c0..a667fb6f0e 100644
--- a/tests/handlers/test_appservice.py
+++ b/tests/handlers/test_appservice.py
@@ -53,7 +53,10 @@ class AppServiceHandlerTestCase(unittest.TestCase):
             type="m.room.message",
             room_id="!foo:bar"
         )
-        self.mock_store.get_new_events_for_appservice.return_value = (0, [event])
+        self.mock_store.get_new_events_for_appservice.side_effect = [
+            (0, [event]),
+            (0, [])
+        ]
         self.mock_as_api.push = Mock()
         yield self.handler.notify_interested_services(0)
         self.mock_scheduler.submit_event_for_as.assert_called_once_with(
@@ -75,7 +78,10 @@ class AppServiceHandlerTestCase(unittest.TestCase):
         )
         self.mock_as_api.push = Mock()
         self.mock_as_api.query_user = Mock()
-        self.mock_store.get_new_events_for_appservice.return_value = (0, [event])
+        self.mock_store.get_new_events_for_appservice.side_effect = [
+            (0, [event]),
+            (0, [])
+        ]
         yield self.handler.notify_interested_services(0)
         self.mock_as_api.query_user.assert_called_once_with(
             services[0], user_id
@@ -98,7 +104,10 @@ class AppServiceHandlerTestCase(unittest.TestCase):
         )
         self.mock_as_api.push = Mock()
         self.mock_as_api.query_user = Mock()
-        self.mock_store.get_new_events_for_appservice.return_value = (0, [event])
+        self.mock_store.get_new_events_for_appservice.side_effect = [
+            (0, [event]),
+            (0, [])
+        ]
         yield self.handler.notify_interested_services(0)
         self.assertFalse(
             self.mock_as_api.query_user.called,
diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py
index 79f569e787..13d81f972b 100644
--- a/tests/storage/test_appservice.py
+++ b/tests/storage/test_appservice.py
@@ -58,7 +58,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
         self._add_appservice("token2", "as2", "some_url", "some_hs_token", "bob")
         self._add_appservice("token3", "as3", "some_url", "some_hs_token", "bob")
         # must be done after inserts
-        self.store = ApplicationServiceStore(hs)
+        self.store = ApplicationServiceStore(None, hs)
 
     def tearDown(self):
         # TODO: suboptimal that we need to create files for tests!
@@ -150,7 +150,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
 
         self.as_yaml_files = []
 
-        self.store = TestTransactionStore(hs)
+        self.store = TestTransactionStore(None, hs)
 
     def _add_service(self, url, as_token, id):
         as_yaml = dict(url=url, as_token=as_token, hs_token="something",
@@ -420,8 +420,8 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
 class TestTransactionStore(ApplicationServiceTransactionStore,
                            ApplicationServiceStore):
 
-    def __init__(self, hs):
-        super(TestTransactionStore, self).__init__(hs)
+    def __init__(self, db_conn, hs):
+        super(TestTransactionStore, self).__init__(db_conn, hs)
 
 
 class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
@@ -458,7 +458,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
             replication_layer=Mock(),
         )
 
-        ApplicationServiceStore(hs)
+        ApplicationServiceStore(None, hs)
 
     @defer.inlineCallbacks
     def test_duplicate_ids(self):
@@ -477,7 +477,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         )
 
         with self.assertRaises(ConfigError) as cm:
-            ApplicationServiceStore(hs)
+            ApplicationServiceStore(None, hs)
 
         e = cm.exception
         self.assertIn(f1, e.message)
@@ -501,7 +501,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         )
 
         with self.assertRaises(ConfigError) as cm:
-            ApplicationServiceStore(hs)
+            ApplicationServiceStore(None, hs)
 
         e = cm.exception
         self.assertIn(f1, e.message)
diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py
index 91e971190c..0ac910e76f 100644
--- a/tests/storage/test_base.py
+++ b/tests/storage/test_base.py
@@ -56,7 +56,7 @@ class SQLBaseStoreTestCase(unittest.TestCase):
             database_engine=create_engine(config.database_config),
         )
 
-        self.datastore = SQLBaseStore(hs)
+        self.datastore = SQLBaseStore(None, hs)
 
     @defer.inlineCallbacks
     def test_insert_1col(self):
diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py
index b087892e0b..95709cd50a 100644
--- a/tests/storage/test_directory.py
+++ b/tests/storage/test_directory.py
@@ -29,7 +29,7 @@ class DirectoryStoreTestCase(unittest.TestCase):
     def setUp(self):
         hs = yield setup_test_homeserver()
 
-        self.store = DirectoryStore(hs)
+        self.store = DirectoryStore(None, hs)
 
         self.room = RoomID.from_string("!abcde:test")
         self.alias = RoomAlias.from_string("#my-room:test")
diff --git a/tests/storage/test_presence.py b/tests/storage/test_presence.py
index 63203cea35..f5fcb611d4 100644
--- a/tests/storage/test_presence.py
+++ b/tests/storage/test_presence.py
@@ -29,7 +29,7 @@ class PresenceStoreTestCase(unittest.TestCase):
     def setUp(self):
         hs = yield setup_test_homeserver(clock=MockClock())
 
-        self.store = PresenceStore(hs)
+        self.store = PresenceStore(None, hs)
 
         self.u_apple = UserID.from_string("@apple:test")
         self.u_banana = UserID.from_string("@banana:test")
diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py
index 24118bbc86..423710c9c1 100644
--- a/tests/storage/test_profile.py
+++ b/tests/storage/test_profile.py
@@ -29,7 +29,7 @@ class ProfileStoreTestCase(unittest.TestCase):
     def setUp(self):
         hs = yield setup_test_homeserver()
 
-        self.store = ProfileStore(hs)
+        self.store = ProfileStore(None, hs)
 
         self.u_frank = UserID.from_string("@frank:test")