author    Hubert Chathi <hubert@uhoreg.ca>  2019-07-19 17:13:22 -0400
committer Hubert Chathi <hubert@uhoreg.ca>  2019-07-19 17:14:40 -0400
commit    0a8d70eff292bb9f08d082d718f1cd77906ed685 (patch)
tree      56c2b61a6084e93d3f148b33c7967b8bcdc93e77 /synapse/handlers
parent    make hidden field nullable (diff)
parent    Revert "Remove deprecated 'verbose' cli arg" (diff)
download  synapse-0a8d70eff292bb9f08d082d718f1cd77906ed685.tar.xz
Merge branch 'develop' into cross-signing
Diffstat (limited to 'synapse/handlers')
-rw-r--r--  synapse/handlers/admin.py        183
-rw-r--r--  synapse/handlers/auth.py          34
-rw-r--r--  synapse/handlers/e2e_keys.py      10
-rw-r--r--  synapse/handlers/identity.py       2
-rw-r--r--  synapse/handlers/message.py       33
-rw-r--r--  synapse/handlers/profile.py        4
-rw-r--r--  synapse/handlers/register.py      37
-rw-r--r--  synapse/handlers/room_member.py   22
8 files changed, 281 insertions, 44 deletions
diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py
index 941ebfa107..e8a651e231 100644
--- a/synapse/handlers/admin.py
+++ b/synapse/handlers/admin.py
@@ -17,6 +17,10 @@ import logging
 
 from twisted.internet import defer
 
+from synapse.api.constants import Membership
+from synapse.types import RoomStreamToken
+from synapse.visibility import filter_events_for_client
+
 from ._base import BaseHandler
 
 logger = logging.getLogger(__name__)
@@ -89,3 +93,182 @@ class AdminHandler(BaseHandler):
         ret = yield self.store.search_users(term)
 
         defer.returnValue(ret)
+
+    @defer.inlineCallbacks
+    def export_user_data(self, user_id, writer):
+        """Write all data we have on the user to the given writer.
+
+        Args:
+            user_id (str)
+            writer (ExfiltrationWriter)
+
+        Returns:
+            defer.Deferred: Resolves when all data for a user has been written.
+            The returned value is that returned by `writer.finished()`.
+        """
+        # Get all rooms the user is in or has been in
+        rooms = yield self.store.get_rooms_for_user_where_membership_is(
+            user_id,
+            membership_list=(
+                Membership.JOIN,
+                Membership.LEAVE,
+                Membership.BAN,
+                Membership.INVITE,
+            ),
+        )
+
+        # We only try to fetch events for rooms the user has been in. If
+        # they've been e.g. invited to a room without joining then we handle
+        # those separately.
+        rooms_user_has_been_in = yield self.store.get_rooms_user_has_been_in(user_id)
+
+        for index, room in enumerate(rooms):
+            room_id = room.room_id
+
+            logger.info(
+                "[%s] Handling room %s, %d/%d", user_id, room_id, index + 1, len(rooms)
+            )
+
+            forgotten = yield self.store.did_forget(user_id, room_id)
+            if forgotten:
+                logger.info("[%s] User forgot room %s, ignoring", user_id, room_id)
+                continue
+
+            if room_id not in rooms_user_has_been_in:
+                # If we haven't been in the rooms then the filtering code below
+                # won't return anything, so we need to handle these cases
+                # explicitly.
+
+                if room.membership == Membership.INVITE:
+                    event_id = room.event_id
+                    invite = yield self.store.get_event(event_id, allow_none=True)
+                    if invite:
+                        invited_state = invite.unsigned["invite_room_state"]
+                        writer.write_invite(room_id, invite, invited_state)
+
+                continue
+
+            # We only want to bother fetching events up to the last time they
+            # were joined. We estimate that point by looking at the
+            # stream_ordering of the last membership if it wasn't a join.
+            if room.membership == Membership.JOIN:
+                stream_ordering = yield self.store.get_room_max_stream_ordering()
+            else:
+                stream_ordering = room.stream_ordering
+
+            from_key = str(RoomStreamToken(0, 0))
+            to_key = str(RoomStreamToken(None, stream_ordering))
+
+            written_events = set()  # Events that we've processed in this room
+
+            # We need to track gaps in the events stream so that we can then
+            # write out the state at those events. We do this by keeping track
+            # of events whose prev events we haven't seen.
+
+            # Map from event ID to prev events that haven't been processed,
+            # dict[str, set[str]].
+            event_to_unseen_prevs = {}
+
+            # The reverse mapping to above, i.e. map from unseen event to events
+            # that have the unseen event in their prev_events, i.e. the unseen
+            # event's "children". dict[str, set[str]]
+            unseen_to_child_events = {}
+
+            # We fetch events in the room the user could see by fetching *all*
+            # events that we have and then filtering. This isn't the most
+            # efficient method perhaps, but it does guarantee we get everything.
+            while True:
+                events, _ = yield self.store.paginate_room_events(
+                    room_id, from_key, to_key, limit=100, direction="f"
+                )
+                if not events:
+                    break
+
+                from_key = events[-1].internal_metadata.after
+
+                events = yield filter_events_for_client(self.store, user_id, events)
+
+                writer.write_events(room_id, events)
+
+                # Update the extremity tracking dicts
+                for event in events:
+                    # Check if we have any prev events that haven't been
+                    # processed yet, and add those to the appropriate dicts.
+                    unseen_events = set(event.prev_event_ids()) - written_events
+                    if unseen_events:
+                        event_to_unseen_prevs[event.event_id] = unseen_events
+                        for unseen in unseen_events:
+                            unseen_to_child_events.setdefault(unseen, set()).add(
+                                event.event_id
+                            )
+
+                    # Now check if this event is an unseen prev event, if so
+                    # then we remove this event from the appropriate dicts.
+                    for child_id in unseen_to_child_events.pop(event.event_id, []):
+                        event_to_unseen_prevs[child_id].discard(event.event_id)
+
+                    written_events.add(event.event_id)
+
+                logger.info(
+                    "Written %d events in room %s", len(written_events), room_id
+                )
+
+            # Extremities are the events that have at least one unseen prev event.
+            extremities = (
+                event_id
+                for event_id, unseen_prevs in event_to_unseen_prevs.items()
+                if unseen_prevs
+            )
+            for event_id in extremities:
+                if not event_to_unseen_prevs[event_id]:
+                    continue
+                state = yield self.store.get_state_for_event(event_id)
+                writer.write_state(room_id, event_id, state)
+
+        defer.returnValue(writer.finished())
+
+
+class ExfiltrationWriter(object):
+    """Interface used to specify how to write exported data.
+    """
+
+    def write_events(self, room_id, events):
+        """Write a batch of events for a room.
+
+        Args:
+            room_id (str)
+            events (list[FrozenEvent])
+        """
+        pass
+
+    def write_state(self, room_id, event_id, state):
+        """Write the state at the given event in the room.
+
+        This only gets called for backward extremities rather than for each
+        event.
+
+        Args:
+            room_id (str)
+            event_id (str)
+            state (dict[tuple[str, str], FrozenEvent])
+        """
+        pass
+
+    def write_invite(self, room_id, event, state):
+        """Write an invite for the room, with associated invite state.
+
+        Args:
+            room_id (str)
+            event (FrozenEvent)
+            state (dict[tuple[str, str], dict]): A subset of the state at the
+                invite, with a subset of the event keys (type, state_key,
+                content and sender).
+        """
+
+    def finished(self):
+        """Called when all data has successfully been exported and written.
+
+        This function's return value is passed to the caller of
+        `export_user_data`.
+        """
+        pass
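
For illustration only (not part of this commit): a minimal sketch of an ExfiltrationWriter implementation that dumps the export to JSON files on disk. The class name, file layout and sanitisation are assumptions made for the example; it relies only on the interface defined above and on events exposing get_pdu_json().

import json
import os

from synapse.handlers.admin import ExfiltrationWriter  # defined in this commit


def _flatten_state(state):
    # Accepts either {(type, state_key): event_or_dict} or an already-stripped
    # list of state dicts, and returns a JSON-friendly list either way.
    if isinstance(state, dict):
        return [
            {
                "type": key[0],
                "state_key": key[1],
                "event": val.get_pdu_json() if hasattr(val, "get_pdu_json") else val,
            }
            for key, val in state.items()
        ]
    return list(state)


class JsonDirectoryWriter(ExfiltrationWriter):
    """Hypothetical writer: one subdirectory per room under `base_dir`."""

    def __init__(self, base_dir):
        self.base_dir = base_dir

    def _room_dir(self, room_id):
        path = os.path.join(self.base_dir, room_id.replace("!", "").replace(":", "_"))
        os.makedirs(path, exist_ok=True)
        return path

    def write_events(self, room_id, events):
        # Appends, since export_user_data calls this once per page of events.
        with open(os.path.join(self._room_dir(room_id), "events.jsonl"), "a") as f:
            for event in events:
                f.write(json.dumps(event.get_pdu_json()) + "\n")

    def write_state(self, room_id, event_id, state):
        name = "state_%s.json" % (event_id.replace("/", "_"),)
        with open(os.path.join(self._room_dir(room_id), name), "w") as f:
            json.dump(_flatten_state(state), f)

    def write_invite(self, room_id, event, state):
        with open(os.path.join(self._room_dir(room_id), "invite.json"), "w") as f:
            json.dump(
                {"event": event.get_pdu_json(), "invite_state": _flatten_state(state)},
                f,
            )

    def finished(self):
        # export_user_data resolves to whatever this returns.
        return self.base_dir

A caller (e.g. an admin script holding a handler reference; the names here are hypothetical) would then run result = yield admin_handler.export_user_data(user_id, JsonDirectoryWriter("/tmp/export")) inside an inlineCallbacks function and get the output directory back from finished().
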
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index ef5585aa99..d4d6574975 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import logging
+import time
 import unicodedata
 
 import attr
@@ -34,6 +35,7 @@ from synapse.api.errors import (
     LoginError,
     StoreError,
     SynapseError,
+    UserDeactivatedError,
 )
 from synapse.api.ratelimiting import Ratelimiter
 from synapse.logging.context import defer_to_thread
@@ -558,7 +560,7 @@ class AuthHandler(BaseHandler):
         return self.sessions[session_id]
 
     @defer.inlineCallbacks
-    def get_access_token_for_user_id(self, user_id, device_id=None):
+    def get_access_token_for_user_id(self, user_id, device_id, valid_until_ms):
         """
         Creates a new access token for the user with the given user ID.
 
@@ -572,15 +574,27 @@ class AuthHandler(BaseHandler):
             device_id (str|None): the device ID to associate with the tokens.
                None to leave the tokens unassociated with a device (deprecated:
                we should always have a device ID)
+            valid_until_ms (int|None): when the token is valid until. None for
+                no expiry.
         Returns:
               The access token for the user's session.
         Raises:
             StoreError if there was a problem storing the token.
         """
-        logger.info("Logging in user %s on device %s", user_id, device_id)
-        access_token = yield self.issue_access_token(user_id, device_id)
+        fmt_expiry = ""
+        if valid_until_ms is not None:
+            fmt_expiry = time.strftime(
+                " until %Y-%m-%d %H:%M:%S", time.localtime(valid_until_ms / 1000.0)
+            )
+        logger.info("Logging in user %s on device %s%s", user_id, device_id, fmt_expiry)
+
         yield self.auth.check_auth_blocking(user_id)
 
+        access_token = self.macaroon_gen.generate_access_token(user_id)
+        yield self.store.add_access_token_to_user(
+            user_id, access_token, device_id, valid_until_ms
+        )
+
         # the device *should* have been registered before we got here; however,
         # it's possible we raced against a DELETE operation. The thing we
         # really don't want is active access_tokens without a record of the
@@ -610,6 +624,7 @@ class AuthHandler(BaseHandler):
         Raises:
             LimitExceededError if the ratelimiter's login requests count for this
                 user is too high to proceed.
+            UserDeactivatedError if a user is found but is deactivated.
         """
         self.ratelimit_login_per_account(user_id)
         res = yield self._find_user_id_and_pwd_hash(user_id)
@@ -825,6 +840,13 @@ class AuthHandler(BaseHandler):
         if not lookupres:
             defer.returnValue(None)
         (user_id, password_hash) = lookupres
+
+        # If the password hash is None, the account has likely been deactivated
+        if not password_hash:
+            deactivated = yield self.store.get_user_deactivated_status(user_id)
+            if deactivated:
+                raise UserDeactivatedError("This account has been deactivated")
+
         result = yield self.validate_hash(password, password_hash)
         if not result:
             logger.warn("Failed password login for user %s", user_id)
@@ -832,12 +854,6 @@ class AuthHandler(BaseHandler):
         defer.returnValue(user_id)
 
     @defer.inlineCallbacks
-    def issue_access_token(self, user_id, device_id=None):
-        access_token = self.macaroon_gen.generate_access_token(user_id)
-        yield self.store.add_access_token_to_user(user_id, access_token, device_id)
-        defer.returnValue(access_token)
-
-    @defer.inlineCallbacks
     def validate_short_term_login_token_and_get_user_id(self, login_token):
         auth_api = self.hs.get_auth()
         user_id = None
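
As an aside on the logging change above: the expiry suffix is built with time.strftime from the millisecond timestamp. A standalone sketch of that formatting follows (the user, device and timestamp are made-up example values). Note that device_id and valid_until_ms are now required arguments, so callers such as register_device (further down) must pass both.

import time

# Example values only: a user, a device and a millisecond expiry timestamp.
user_id = "@alice:example.com"
device_id = "ABCDEF"
valid_until_ms = 1563600000000

fmt_expiry = ""
if valid_until_ms is not None:
    fmt_expiry = time.strftime(
        " until %Y-%m-%d %H:%M:%S", time.localtime(valid_until_ms / 1000.0)
    )

# Prints something like "Logging in user @alice:example.com on device ABCDEF
# until ..." (the exact date/time shown depends on the local timezone).
print("Logging in user %s on device %s%s" % (user_id, device_id, fmt_expiry))
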
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 1997df556e..454f99524b 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -24,12 +24,7 @@ from signedjson.sign import SignatureVerifyException, verify_signed_json
 
 from twisted.internet import defer
 
-from synapse.api.errors import (
-    CodeMessageException,
-    Codes,
-    FederationDeniedError,
-    SynapseError,
-)
+from synapse.api.errors import CodeMessageException, Codes, SynapseError
 from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.types import (
     UserID,
@@ -554,9 +549,6 @@ def _exception_to_failure(e):
     if isinstance(e, NotRetryingDestination):
         return {"status": 503, "message": "Not ready for retry"}
 
-    if isinstance(e, FederationDeniedError):
-        return {"status": 403, "message": "Federation Denied"}
-
     # include ConnectionRefused and other errors
     #
     # Note that some Exceptions (notably twisted's ResponseFailed etc) don't
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index c82b1933f2..546d6169e9 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -118,7 +118,7 @@ class IdentityHandler(BaseHandler):
             raise SynapseError(400, "No client_secret in creds")
 
         try:
-            data = yield self.http_client.post_urlencoded_get_json(
+            data = yield self.http_client.post_json_get_json(
                 "https://%s%s" % (id_server, "/_matrix/identity/api/v1/3pid/bind"),
                 {"sid": creds["sid"], "client_secret": client_secret, "mxid": mxid},
             )
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index eaeda7a5cb..6d7a987f13 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -23,6 +23,7 @@ from canonicaljson import encode_canonical_json, json
 from twisted.internet import defer
 from twisted.internet.defer import succeed
 
+from synapse import event_auth
 from synapse.api.constants import EventTypes, Membership, RelationTypes
 from synapse.api.errors import (
     AuthError,
@@ -784,6 +785,20 @@ class EventCreationHandler(object):
                     event.signatures.update(returned_invite.signatures)
 
         if event.type == EventTypes.Redaction:
+            original_event = yield self.store.get_event(
+                event.redacts,
+                check_redacted=False,
+                get_prev_content=False,
+                allow_rejected=False,
+                allow_none=True,
+                check_room_id=event.room_id,
+            )
+
+            # we can make some additional checks now if we have the original event.
+            if original_event:
+                if original_event.type == EventTypes.Create:
+                    raise AuthError(403, "Redacting create events is not permitted")
+
             prev_state_ids = yield context.get_prev_state_ids(self.store)
             auth_events_ids = yield self.auth.compute_auth_events(
                 event, prev_state_ids, for_verification=True
@@ -791,18 +806,18 @@ class EventCreationHandler(object):
             auth_events = yield self.store.get_events(auth_events_ids)
             auth_events = {(e.type, e.state_key): e for e in auth_events.values()}
             room_version = yield self.store.get_room_version(event.room_id)
-            if self.auth.check_redaction(room_version, event, auth_events=auth_events):
-                original_event = yield self.store.get_event(
-                    event.redacts,
-                    check_redacted=False,
-                    get_prev_content=False,
-                    allow_rejected=False,
-                    allow_none=False,
-                )
+
+            if event_auth.check_redaction(room_version, event, auth_events=auth_events):
+                # this user doesn't have 'redact' rights, so we need to do some more
+                # checks on the original event. Let's start by checking the original
+                # event exists.
+                if not original_event:
+                    raise NotFoundError("Could not find event %s" % (event.redacts,))
+
                 if event.user_id != original_event.user_id:
                     raise AuthError(403, "You don't have permission to redact events")
 
-                # We've already checked.
+                # all the checks are done.
                 event.internal_metadata.recheck_redaction = False
 
         if event.type == EventTypes.Create:
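
A rough standalone restatement of the permission logic the hunk above arrives at; this is not Synapse's event_auth.check_redaction, and the boolean parameter is a simplification for illustration: redacting m.room.create is always rejected, and a sender without the room's 'redact' power level may only redact their own events.

from collections import namedtuple


class RedactionError(Exception):
    pass


def may_redact(redaction_event, original_event, sender_has_redact_power):
    # `redaction_event` and `original_event` are duck-typed stand-ins for
    # FrozenEvent; only .type, .user_id and .redacts are used here.
    if original_event is not None and original_event.type == "m.room.create":
        raise RedactionError("Redacting create events is not permitted")

    if not sender_has_redact_power:
        # Without the 'redact' power level, the only thing a user may redact
        # is their own events, so the original event must exist and match.
        if original_event is None:
            raise RedactionError("Could not find event %s" % (redaction_event.redacts,))
        if redaction_event.user_id != original_event.user_id:
            raise RedactionError("You don't have permission to redact events")

    return True


Ev = namedtuple("Ev", ["type", "user_id", "redacts"])

# Example: a user without redact power trying to redact someone else's event.
redaction = Ev("m.room.redaction", "@alice:example.com", "$abc:example.com")
original = Ev("m.room.message", "@bob:example.com", None)
try:
    may_redact(redaction, original, sender_has_redact_power=False)
except RedactionError as e:
    print(e)  # You don't have permission to redact events
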
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index d8462b75ec..a2388a7091 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -303,6 +303,10 @@ class BaseProfileHandler(BaseHandler):
         if not self.hs.config.require_auth_for_profile_requests or not requester:
             return
 
+        # Always allow the user to query their own profile.
+        if target_user.to_string() == requester.to_string():
+            return
+
         try:
             requester_rooms = yield self.store.get_rooms_for_user(requester.to_string())
             target_user_rooms = yield self.store.get_rooms_for_user(
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index a3e553d5f5..bb7cfd71b9 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -84,6 +84,8 @@ class RegistrationHandler(BaseHandler):
             self.device_handler = hs.get_device_handler()
             self.pusher_pool = hs.get_pusherpool()
 
+        self.session_lifetime = hs.config.session_lifetime
+
     @defer.inlineCallbacks
     def check_username(self, localpart, guest_access_token=None, assigned_user_id=None):
         if types.contains_invalid_mxid_characters(localpart):
@@ -584,7 +586,7 @@ class RegistrationHandler(BaseHandler):
                 address=address,
             )
         else:
-            return self.store.register(
+            return self.store.register_user(
                 user_id=user_id,
                 password_hash=password_hash,
                 was_guest=was_guest,
@@ -599,6 +601,8 @@ class RegistrationHandler(BaseHandler):
     def register_device(self, user_id, device_id, initial_display_name, is_guest=False):
         """Register a device for a user and generate an access token.
 
+        The access token will be limited by the homeserver's session_lifetime config.
+
         Args:
             user_id (str): full canonical @user:id
             device_id (str|None): The device ID to check, or None to generate
@@ -619,20 +623,29 @@ class RegistrationHandler(BaseHandler):
                 is_guest=is_guest,
             )
             defer.returnValue((r["device_id"], r["access_token"]))
-        else:
-            device_id = yield self.device_handler.check_device_registered(
-                user_id, device_id, initial_display_name
-            )
+
+        valid_until_ms = None
+        if self.session_lifetime is not None:
             if is_guest:
-                access_token = self.macaroon_gen.generate_access_token(
-                    user_id, ["guest = true"]
-                )
-            else:
-                access_token = yield self._auth_handler.get_access_token_for_user_id(
-                    user_id, device_id=device_id
+                raise Exception(
+                    "session_lifetime is not currently implemented for guest access"
                 )
+            valid_until_ms = self.clock.time_msec() + self.session_lifetime
+
+        device_id = yield self.device_handler.check_device_registered(
+            user_id, device_id, initial_display_name
+        )
+        if is_guest:
+            assert valid_until_ms is None
+            access_token = self.macaroon_gen.generate_access_token(
+                user_id, ["guest = true"]
+            )
+        else:
+            access_token = yield self._auth_handler.get_access_token_for_user_id(
+                user_id, device_id=device_id, valid_until_ms=valid_until_ms
+            )
 
-            defer.returnValue((device_id, access_token))
+        defer.returnValue((device_id, access_token))
 
     @defer.inlineCallbacks
     def post_registration_actions(
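
The expiry arithmetic above is straightforward: valid_until_ms is the current time in milliseconds plus the configured session_lifetime (also in milliseconds), and stays None when no lifetime is configured. A standalone sketch with a hypothetical 24-hour lifetime:

import time


def time_msec():
    # Stand-in for Synapse's Clock.time_msec().
    return int(time.time() * 1000)


session_lifetime = 24 * 60 * 60 * 1000  # hypothetical value: 24 hours, in ms

valid_until_ms = None
if session_lifetime is not None:
    valid_until_ms = time_msec() + session_lifetime

# valid_until_ms is then passed through to
# AuthHandler.get_access_token_for_user_id(..., valid_until_ms=valid_until_ms),
# which stores it alongside the access token.
print(valid_until_ms)
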
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 679daaa074..e0196ef83e 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -29,7 +29,7 @@ from twisted.internet import defer
 import synapse.server
 import synapse.types
 from synapse.api.constants import EventTypes, Membership
-from synapse.api.errors import AuthError, Codes, SynapseError
+from synapse.api.errors import AuthError, Codes, HttpResponseException, SynapseError
 from synapse.types import RoomID, UserID
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room, user_left_room
@@ -872,9 +872,23 @@ class RoomMemberHandler(object):
             "sender_avatar_url": inviter_avatar_url,
         }
 
-        data = yield self.simple_http_client.post_urlencoded_get_json(
-            is_url, invite_config
-        )
+        try:
+            data = yield self.simple_http_client.post_json_get_json(
+                is_url, invite_config
+            )
+        except HttpResponseException as e:
+            # Some identity servers may only support application/x-www-form-urlencoded
+            # types. This is especially true with old instances of Sydent, see
+            # https://github.com/matrix-org/sydent/pull/170
+            logger.info(
+                "Failed to POST %s with JSON, falling back to urlencoded form: %s",
+                is_url,
+                e,
+            )
+            data = yield self.simple_http_client.post_urlencoded_get_json(
+                is_url, invite_config
+            )
+
         # TODO: Check for success
         token = data["token"]
         public_keys = data.get("public_keys", [])
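
For comparison, the same try-JSON-then-fall-back-to-form-encoding pattern as a standalone sketch using the requests library rather than Synapse's SimpleHttpClient (the URL and payload are whatever the caller supplies):

import requests


def bind_threepid(is_url, invite_config):
    # Prefer a JSON body; some older identity servers (e.g. old Sydent) only
    # accept application/x-www-form-urlencoded, so retry as a form when the
    # JSON request comes back with an HTTP error status.
    resp = requests.post(is_url, json=invite_config)
    if resp.status_code >= 400:
        resp = requests.post(is_url, data=invite_config)
        resp.raise_for_status()
    return resp.json()
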