diff --git a/CHANGES.rst b/CHANGES.rst
index da118b7ceb..cf5e33c4f9 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,19 @@
+Changes in synapse v0.11.0-rc1 (2015-11-11)
+===========================================
+
+* Add Search API (PR #307, #324, #327, #336, #350, #359)
+* Add 'archived' state to v2 /sync API (PR #316)
+* Add ability to reject invites (PR #317)
+* Add config option to disable password login (PR #322)
+* Add the login fallback API (PR #330)
+* Add room context API (PR #334)
+* Add room tagging support (PR #335)
+* Update v2 /sync API to match spec (PR #305, #316, #321, #332, #337, #341)
+* Change retry schedule for application services (PR #320)
+* Change retry schedule for remote servers (PR #340)
+* Fix bug where we hosted static content in the incorrect place (PR #329)
+* Fix bug where we didn't increment retry interval for remote servers (PR #343)
+
Changes in synapse v0.10.1-rc1 (2015-10-15)
===========================================
diff --git a/MANIFEST.in b/MANIFEST.in
index 621e34cb76..573e21f41d 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -15,8 +15,9 @@ recursive-include scripts *
recursive-include scripts-dev *
recursive-include tests *.py
-recursive-include static *.css
-recursive-include static *.html
-recursive-include static *.js
+recursive-include synapse/static *.css
+recursive-include synapse/static *.gif
+recursive-include synapse/static *.html
+recursive-include synapse/static *.js
prune demo/etc
diff --git a/README.rst b/README.rst
index 6c8431aa86..a8f0c62158 100644
--- a/README.rst
+++ b/README.rst
@@ -20,8 +20,8 @@ The overall architecture is::
https://somewhere.org/_matrix https://elsewhere.net/_matrix
``#matrix:matrix.org`` is the official support room for Matrix, and can be
-accessed by the web client at http://matrix.org/beta or via an IRC bridge at
-irc://irc.freenode.net/matrix.
+accessed by any client from https://matrix.org/blog/try-matrix-now or via an
+IRC bridge at irc://irc.freenode.net/matrix.
Synapse is currently in rapid development, but as of version 0.5 we believe it
is sufficiently stable to be run as an internet-facing service for real usage!
@@ -77,14 +77,14 @@ Meanwhile, iOS and Android SDKs and clients are available from:
- https://github.com/matrix-org/matrix-android-sdk
We'd like to invite you to join #matrix:matrix.org (via
-https://matrix.org/beta), run a homeserver, take a look at the Matrix spec at
-https://matrix.org/docs/spec and API docs at https://matrix.org/docs/api,
-experiment with the APIs and the demo clients, and report any bugs via
-https://matrix.org/jira.
+https://matrix.org/blog/try-matrix-now), run a homeserver, take a look at the
+Matrix spec at https://matrix.org/docs/spec and API docs at
+https://matrix.org/docs/api, experiment with the APIs and the demo clients, and
+report any bugs via https://matrix.org/jira.
Thanks for using Matrix!
-[1] End-to-end encryption is currently in development
+[1] End-to-end encryption is currently in development - see https://matrix.org/git/olm
Synapse Installation
====================
diff --git a/synapse/__init__.py b/synapse/__init__.py
index e9ce0412ed..f68a15bb85 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
""" This is a reference implementation of a Matrix home server.
"""
-__version__ = "0.10.1-rc1"
+__version__ = "0.11.0-rc1"
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 88445fe999..3e891a6193 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -24,7 +24,6 @@ from synapse.api.constants import EventTypes, Membership, JoinRules
from synapse.api.errors import AuthError, Codes, SynapseError, EventSizeError
from synapse.types import RoomID, UserID, EventID
from synapse.util.logutils import log_function
-from synapse.util import third_party_invites
from unpaddedbase64 import decode_base64
import logging
@@ -49,6 +48,7 @@ class Auth(object):
self.TOKEN_NOT_FOUND_HTTP_STATUS = 401
self._KNOWN_CAVEAT_PREFIXES = set([
"gen = ",
+ "guest = ",
"type = ",
"time < ",
"user_id = ",
@@ -183,15 +183,11 @@ class Auth(object):
defer.returnValue(member)
@defer.inlineCallbacks
- def check_user_was_in_room(self, room_id, user_id, current_state=None):
+ def check_user_was_in_room(self, room_id, user_id):
"""Check if the user was in the room at some point.
Args:
room_id(str): The room to check.
user_id(str): The user to check.
- current_state(dict): Optional map of the current state of the room.
- If provided then that map is used to check whether they are a
- member of the room. Otherwise the current membership is
- loaded from the database.
Raises:
AuthError if the user was never in the room.
Returns:
@@ -199,17 +195,11 @@ class Auth(object):
room. This will be the join event if they are currently joined to
the room. This will be the leave event if they have left the room.
"""
- if current_state:
- member = current_state.get(
- (EventTypes.Member, user_id),
- None
- )
- else:
- member = yield self.state.get_current_state(
- room_id=room_id,
- event_type=EventTypes.Member,
- state_key=user_id
- )
+ member = yield self.state.get_current_state(
+ room_id=room_id,
+ event_type=EventTypes.Member,
+ state_key=user_id
+ )
membership = member.membership if member else None
if membership not in (Membership.JOIN, Membership.LEAVE):
@@ -327,6 +317,11 @@ class Auth(object):
}
)
+ if Membership.INVITE == membership and "third_party_invite" in event.content:
+ if not self._verify_third_party_invite(event, auth_events):
+ raise AuthError(403, "You are not invited to this room.")
+ return True
+
if Membership.JOIN != membership:
if (caller_invited
and Membership.LEAVE == membership
@@ -370,8 +365,7 @@ class Auth(object):
pass
elif join_rule == JoinRules.INVITE:
if not caller_in_room and not caller_invited:
- if not self._verify_third_party_invite(event, auth_events):
- raise AuthError(403, "You are not invited to this room.")
+ raise AuthError(403, "You are not invited to this room.")
else:
# TODO (erikj): may_join list
# TODO (erikj): private rooms
@@ -399,10 +393,10 @@ class Auth(object):
def _verify_third_party_invite(self, event, auth_events):
"""
- Validates that the join event is authorized by a previous third-party invite.
+ Validates that the invite event is authorized by a previous third-party invite.
- Checks that the public key, and keyserver, match those in the invite,
- and that the join event has a signature issued using that public key.
+ Checks that the public key, and keyserver, match those in the third party invite,
+ and that the invite event has a signature issued using that public key.
Args:
event: The m.room.member invite event being validated.
@@ -413,35 +407,28 @@ class Auth(object):
True if the event fulfills the expectations of a previous third party
invite event.
"""
- if not third_party_invites.join_has_third_party_invite(event.content):
+ if "third_party_invite" not in event.content:
return False
- join_third_party_invite = event.content["third_party_invite"]
- token = join_third_party_invite["token"]
+ if "signed" not in event.content["third_party_invite"]:
+ return False
+ signed = event.content["third_party_invite"]["signed"]
+ for key in {"mxid", "token"}:
+ if key not in signed:
+ return False
+
+ token = signed["token"]
+
invite_event = auth_events.get(
(EventTypes.ThirdPartyInvite, token,)
)
if not invite_event:
- logger.info("Failing 3pid invite because no invite found for token %s", token)
+ return False
+
+ if event.user_id != invite_event.user_id:
return False
try:
- public_key = join_third_party_invite["public_key"]
- key_validity_url = join_third_party_invite["key_validity_url"]
- if invite_event.content["public_key"] != public_key:
- logger.info(
- "Failing 3pid invite because public key invite: %s != join: %s",
- invite_event.content["public_key"],
- public_key
- )
- return False
- if invite_event.content["key_validity_url"] != key_validity_url:
- logger.info(
- "Failing 3pid invite because key_validity_url invite: %s != join: %s",
- invite_event.content["key_validity_url"],
- key_validity_url
- )
- return False
- signed = join_third_party_invite["signed"]
- if signed["mxid"] != event.user_id:
+ public_key = invite_event.content["public_key"]
+ if signed["mxid"] != event.state_key:
return False
if signed["token"] != token:
return False
@@ -454,6 +441,11 @@ class Auth(object):
decode_base64(public_key)
)
verify_signed_json(signed, server, verify_key)
+
+ # We got the public key from the invite, so we know that the
+ # correct server signed the signed bundle.
+ # The caller is responsible for checking that the signing
+ # server has not revoked that public key.
return True
return False
except (KeyError, SignatureVerifyException,):
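For reference, a hypothetical ``m.room.member`` invite content carrying the fields this check reads; the token must correspond to an ``m.room.third_party_invite`` state event sent by the same user, and ``mxid`` must equal the event's ``state_key`` (identifiers and signature values below are made up)::

    content = {
        "membership": "invite",
        "third_party_invite": {
            "signed": {
                "mxid": "@invitee:example.org",   # must equal the event's state_key
                "token": "sometoken",             # keys an m.room.third_party_invite state event
                "signatures": {
                    "identity.example.org": {
                        "ed25519:0": "<unpadded base64 signature over the signed block>",
                    },
                },
            },
        },
    }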
@@ -497,7 +489,7 @@ class Auth(object):
return default
@defer.inlineCallbacks
- def get_user_by_req(self, request):
+ def get_user_by_req(self, request, allow_guest=False):
""" Get a registered user's ID.
Args:
@@ -535,7 +527,7 @@ class Auth(object):
request.authenticated_entity = user_id
- defer.returnValue((UserID.from_string(user_id), ""))
+ defer.returnValue((UserID.from_string(user_id), "", False))
return
except KeyError:
pass # normal users won't have the user_id query parameter set.
@@ -543,6 +535,7 @@ class Auth(object):
user_info = yield self._get_user_by_access_token(access_token)
user = user_info["user"]
token_id = user_info["token_id"]
+ is_guest = user_info["is_guest"]
ip_addr = self.hs.get_ip_from_request(request)
user_agent = request.requestHeaders.getRawHeaders(
@@ -557,9 +550,14 @@ class Auth(object):
user_agent=user_agent
)
+ if is_guest and not allow_guest:
+ raise AuthError(
+ 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
+ )
+
request.authenticated_entity = user.to_string()
- defer.returnValue((user, token_id,))
+ defer.returnValue((user, token_id, is_guest,))
except KeyError:
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
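Callers now receive a three-tuple and can opt in to guest access per endpoint. A minimal sketch of the new calling convention inside a REST servlet (the servlet and handler attribute names are assumptions, not part of this patch)::

    @defer.inlineCallbacks
    def on_GET(self, request, room_id):
        # Guests are rejected with M_GUEST_ACCESS_FORBIDDEN unless the
        # servlet explicitly passes allow_guest=True.
        user, token_id, is_guest = yield self.auth.get_user_by_req(
            request, allow_guest=True
        )
        events = yield self.handlers.message_handler.get_state_events(
            user.to_string(), room_id, is_guest=is_guest
        )
        defer.returnValue((200, events))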
@@ -592,31 +590,45 @@ class Auth(object):
self._validate_macaroon(macaroon)
user_prefix = "user_id = "
+ user = None
+ guest = False
for caveat in macaroon.caveats:
if caveat.caveat_id.startswith(user_prefix):
user = UserID.from_string(caveat.caveat_id[len(user_prefix):])
- # This codepath exists so that we can actually return a
- # token ID, because we use token IDs in place of device
- # identifiers throughout the codebase.
- # TODO(daniel): Remove this fallback when device IDs are
- # properly implemented.
- ret = yield self._look_up_user_by_access_token(macaroon_str)
- if ret["user"] != user:
- logger.error(
- "Macaroon user (%s) != DB user (%s)",
- user,
- ret["user"]
- )
- raise AuthError(
- self.TOKEN_NOT_FOUND_HTTP_STATUS,
- "User mismatch in macaroon",
- errcode=Codes.UNKNOWN_TOKEN
- )
- defer.returnValue(ret)
- raise AuthError(
- self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
- errcode=Codes.UNKNOWN_TOKEN
- )
+ elif caveat.caveat_id == "guest = true":
+ guest = True
+
+ if user is None:
+ raise AuthError(
+ self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
+ errcode=Codes.UNKNOWN_TOKEN
+ )
+
+ if guest:
+ ret = {
+ "user": user,
+ "is_guest": True,
+ "token_id": None,
+ }
+ else:
+ # This codepath exists so that we can actually return a
+ # token ID, because we use token IDs in place of device
+ # identifiers throughout the codebase.
+ # TODO(daniel): Remove this fallback when device IDs are
+ # properly implemented.
+ ret = yield self._look_up_user_by_access_token(macaroon_str)
+ if ret["user"] != user:
+ logger.error(
+ "Macaroon user (%s) != DB user (%s)",
+ user,
+ ret["user"]
+ )
+ raise AuthError(
+ self.TOKEN_NOT_FOUND_HTTP_STATUS,
+ "User mismatch in macaroon",
+ errcode=Codes.UNKNOWN_TOKEN
+ )
+ defer.returnValue(ret)
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
@@ -629,6 +641,7 @@ class Auth(object):
v.satisfy_exact("type = access")
v.satisfy_general(lambda c: c.startswith("user_id = "))
v.satisfy_general(self._verify_expiry)
+ v.satisfy_exact("guest = true")
v.verify(macaroon, self.hs.config.macaroon_secret_key)
v = pymacaroons.Verifier()
@@ -666,6 +679,7 @@ class Auth(object):
user_info = {
"user": UserID.from_string(ret.get("name")),
"token_id": ret.get("token_id", None),
+ "is_guest": False,
}
defer.returnValue(user_info)
@@ -738,17 +752,19 @@ class Auth(object):
if e_type == Membership.JOIN:
if member_event and not is_public:
auth_ids.append(member_event.event_id)
- if third_party_invites.join_has_third_party_invite(event.content):
+ else:
+ if member_event:
+ auth_ids.append(member_event.event_id)
+
+ if e_type == Membership.INVITE:
+ if "third_party_invite" in event.content:
key = (
EventTypes.ThirdPartyInvite,
event.content["third_party_invite"]["token"]
)
- invite = current_state.get(key)
- if invite:
- auth_ids.append(invite.event_id)
- else:
- if member_event:
- auth_ids.append(member_event.event_id)
+ third_party_invite = current_state.get(key)
+ if third_party_invite:
+ auth_ids.append(third_party_invite.event_id)
elif member_event:
if member_event.content["membership"] == Membership.JOIN:
auth_ids.append(member_event.event_id)
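Guest access tokens are ordinary access-token macaroons with an extra ``guest = true`` caveat. A minimal sketch of minting and checking one with pymacaroons, mirroring the caveats and satisfiers used above (the secret key, server name and user ID are placeholders)::

    import pymacaroons

    macaroon = pymacaroons.Macaroon(
        location="example.org",
        identifier="key",
        key="macaroon_secret_key",
    )
    macaroon.add_first_party_caveat("gen = 1")
    macaroon.add_first_party_caveat("type = access")
    macaroon.add_first_party_caveat("user_id = @guest123:example.org")
    macaroon.add_first_party_caveat("time < 2000000000000")
    macaroon.add_first_party_caveat("guest = true")
    token = macaroon.serialize()

    v = pymacaroons.Verifier()
    v.satisfy_exact("gen = 1")
    v.satisfy_exact("type = access")
    v.satisfy_general(lambda c: c.startswith("user_id = "))
    v.satisfy_general(lambda c: c.startswith("time < "))
    v.satisfy_exact("guest = true")
    assert v.verify(pymacaroons.Macaroon.deserialize(token), "macaroon_secret_key")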
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 41125e8719..c2450b771a 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -68,6 +68,7 @@ class EventTypes(object):
RoomHistoryVisibility = "m.room.history_visibility"
CanonicalAlias = "m.room.canonical_alias"
RoomAvatar = "m.room.avatar"
+ GuestAccess = "m.room.guest_access"
# These are used for validation
Message = "m.room.message"
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index b3fea27d0e..d4037b3d55 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -33,6 +33,7 @@ class Codes(object):
NOT_FOUND = "M_NOT_FOUND"
MISSING_TOKEN = "M_MISSING_TOKEN"
UNKNOWN_TOKEN = "M_UNKNOWN_TOKEN"
+ GUEST_ACCESS_FORBIDDEN = "M_GUEST_ACCESS_FORBIDDEN"
LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
index eb15d8c54a..aaa2433cae 100644
--- a/synapse/api/filtering.py
+++ b/synapse/api/filtering.py
@@ -50,11 +50,11 @@ class Filtering(object):
# many definitions.
top_level_definitions = [
- "public_user_data", "private_user_data", "server_data"
+ "presence"
]
room_level_definitions = [
- "state", "timeline", "ephemeral"
+ "state", "timeline", "ephemeral", "private_user_data"
]
for key in top_level_definitions:
@@ -114,22 +114,6 @@ class Filtering(object):
if not isinstance(event_type, basestring):
raise SynapseError(400, "Event type should be a string")
- if "format" in definition:
- event_format = definition["format"]
- if event_format not in ["federation", "events"]:
- raise SynapseError(400, "Invalid format: %s" % (event_format,))
-
- if "select" in definition:
- event_select_list = definition["select"]
- for select_key in event_select_list:
- if select_key not in ["event_id", "origin_server_ts",
- "thread_id", "content", "content.body"]:
- raise SynapseError(400, "Bad select: %s" % (select_key,))
-
- if ("bundle_updates" in definition and
- type(definition["bundle_updates"]) != bool):
- raise SynapseError(400, "Bad bundle_updates: expected bool.")
-
class FilterCollection(object):
def __init__(self, filter_json):
@@ -147,6 +131,10 @@ class FilterCollection(object):
self.filter_json.get("room", {}).get("ephemeral", {})
)
+ self.room_private_user_data = Filter(
+ self.filter_json.get("room", {}).get("private_user_data", {})
+ )
+
self.presence_filter = Filter(
self.filter_json.get("presence", {})
)
@@ -172,6 +160,9 @@ class FilterCollection(object):
def filter_room_ephemeral(self, events):
return self.room_ephemeral_filter.filter(events)
+ def filter_room_private_user_data(self, events):
+ return self.room_private_user_data.filter(events)
+
class Filter(object):
def __init__(self, filter_json):
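The filter definitions accepted after this change look roughly like the following; the event types and limit are illustrative, only the section names come from the validation above::

    filter_json = {
        "presence": {"types": ["m.presence"]},
        "room": {
            "state": {"types": ["m.room.*"]},
            "timeline": {"limit": 10},
            "ephemeral": {"types": ["m.receipt", "m.typing"]},
            "private_user_data": {"types": ["m.tag"]},
        },
    }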
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index cf2fa221dc..cd7a52ec07 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -132,7 +132,9 @@ class SynapseHomeServer(HomeServer):
def build_resource_for_static_content(self):
# This is old and should go away: not going to bother adding gzip
- return File("static")
+ return File(
+ os.path.join(os.path.dirname(synapse.__file__), "static")
+ )
def build_resource_for_content_repo(self):
return ContentRepoResource(
@@ -437,6 +439,7 @@ def setup(config_options):
hs.get_pusherpool().start()
hs.get_state_handler().start_caching()
hs.get_datastore().start_profiling()
+ hs.get_datastore().start_doing_background_updates()
hs.get_replication_layer().start_get_pdu_cache()
return hs
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index ceef309afc..c18e0bdbb8 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -14,6 +14,7 @@
# limitations under the License.
import argparse
+import errno
import os
import yaml
import sys
@@ -91,8 +92,11 @@ class Config(object):
@classmethod
def ensure_directory(cls, dir_path):
dir_path = cls.abspath(dir_path)
- if not os.path.exists(dir_path):
+ try:
os.makedirs(dir_path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
if not os.path.isdir(dir_path):
raise ConfigError(
"%s is not a directory" % (dir_path,)
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index f5ef36a9f4..dca391f7af 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -34,6 +34,7 @@ class RegistrationConfig(Config):
self.registration_shared_secret = config.get("registration_shared_secret")
self.macaroon_secret_key = config.get("macaroon_secret_key")
self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
+ self.allow_guest_access = config.get("allow_guest_access", False)
def default_config(self, **kwargs):
registration_shared_secret = random_string_with_symbols(50)
@@ -54,6 +55,11 @@ class RegistrationConfig(Config):
# Larger numbers increase the work factor needed to generate the hash.
# The default number of rounds is 12.
bcrypt_rounds: 12
+
+ # Allows users to register as guests without a password/email/etc, and
+ # participate in rooms hosted on this server which have been made
+ # accessible to anonymous users.
+ allow_guest_access: False
""" % locals()
def add_arguments(self, parser):
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 723f571284..d4f586fae7 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -26,7 +26,6 @@ from synapse.api.errors import (
from synapse.util import unwrapFirstError
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.logutils import log_function
-from synapse.util import third_party_invites
from synapse.events import FrozenEvent
import synapse.metrics
@@ -358,7 +357,8 @@ class FederationClient(FederationBase):
defer.returnValue(signed_auth)
@defer.inlineCallbacks
- def make_membership_event(self, destinations, room_id, user_id, membership, content):
+ def make_membership_event(self, destinations, room_id, user_id, membership,
+ content={},):
"""
Creates an m.room.member event, with context, without participating in the room.
@@ -390,20 +390,17 @@ class FederationClient(FederationBase):
if destination == self.server_name:
continue
- args = {}
- if third_party_invites.join_has_third_party_invite(content):
- args = third_party_invites.extract_join_keys(
- content["third_party_invite"]
- )
try:
ret = yield self.transport_layer.make_membership_event(
- destination, room_id, user_id, membership, args
+ destination, room_id, user_id, membership
)
pdu_dict = ret["event"]
logger.debug("Got response to make_%s: %s", membership, pdu_dict)
+ pdu_dict["content"].update(content)
+
defer.returnValue(
(destination, self.event_from_pdu_json(pdu_dict))
)
@@ -704,3 +701,26 @@ class FederationClient(FederationBase):
event.internal_metadata.outlier = outlier
return event
+
+ @defer.inlineCallbacks
+ def forward_third_party_invite(self, destinations, room_id, event_dict):
+ for destination in destinations:
+ if destination == self.server_name:
+ continue
+
+ try:
+ yield self.transport_layer.exchange_third_party_invite(
+ destination=destination,
+ room_id=room_id,
+ event_dict=event_dict,
+ )
+ defer.returnValue(None)
+ except CodeMessageException:
+ raise
+ except Exception as e:
+ logger.exception(
+ "Failed to send_third_party_invite via %s: %s",
+ destination, e.message
+ )
+
+ raise RuntimeError("Failed to send to any server.")
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 9e2d9ee74c..7a59436a91 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -23,12 +23,10 @@ from synapse.util.logutils import log_function
from synapse.events import FrozenEvent
import synapse.metrics
-from synapse.api.errors import FederationError, SynapseError, Codes
+from synapse.api.errors import FederationError, SynapseError
from synapse.crypto.event_signing import compute_event_signature
-from synapse.util import third_party_invites
-
import simplejson as json
import logging
@@ -230,19 +228,8 @@ class FederationServer(FederationBase):
)
@defer.inlineCallbacks
- def on_make_join_request(self, room_id, user_id, query):
- threepid_details = {}
- if third_party_invites.has_join_keys(query):
- for k in third_party_invites.JOIN_KEYS:
- if not isinstance(query[k], list) or len(query[k]) != 1:
- raise FederationError(
- "FATAL",
- Codes.MISSING_PARAM,
- "key %s value %s" % (k, query[k],),
- None
- )
- threepid_details[k] = query[k][0]
- pdu = yield self.handler.on_make_join_request(room_id, user_id, threepid_details)
+ def on_make_join_request(self, room_id, user_id):
+ pdu = yield self.handler.on_make_join_request(room_id, user_id)
time_now = self._clock.time_msec()
defer.returnValue({"event": pdu.get_pdu_json(time_now)})
@@ -556,3 +543,15 @@ class FederationServer(FederationBase):
event.internal_metadata.outlier = outlier
return event
+
+ @defer.inlineCallbacks
+ def exchange_third_party_invite(self, invite):
+ ret = yield self.handler.exchange_third_party_invite(invite)
+ defer.returnValue(ret)
+
+ @defer.inlineCallbacks
+ def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
+ ret = yield self.handler.on_exchange_third_party_invite_request(
+ origin, room_id, event_dict
+ )
+ defer.returnValue(ret)
diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py
index 32fa5e8c15..aac6f1c167 100644
--- a/synapse/federation/transaction_queue.py
+++ b/synapse/federation/transaction_queue.py
@@ -202,6 +202,7 @@ class TransactionQueue(object):
@defer.inlineCallbacks
@log_function
def _attempt_new_transaction(self, destination):
+ # list of (pending_pdu, deferred, order)
if destination in self.pending_transactions:
# XXX: pending_transactions can get stuck on by a never-ending
# request at which point pending_pdus_by_dest just keeps growing.
@@ -213,9 +214,6 @@ class TransactionQueue(object):
)
return
- logger.debug("TX [%s] _attempt_new_transaction", destination)
-
- # list of (pending_pdu, deferred, order)
pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
pending_edus = self.pending_edus_by_dest.pop(destination, [])
pending_failures = self.pending_failures_by_dest.pop(destination, [])
@@ -228,20 +226,22 @@ class TransactionQueue(object):
logger.debug("TX [%s] Nothing to send", destination)
return
- # Sort based on the order field
- pending_pdus.sort(key=lambda t: t[2])
-
- pdus = [x[0] for x in pending_pdus]
- edus = [x[0] for x in pending_edus]
- failures = [x[0].get_dict() for x in pending_failures]
- deferreds = [
- x[1]
- for x in pending_pdus + pending_edus + pending_failures
- ]
-
try:
self.pending_transactions[destination] = 1
+ logger.debug("TX [%s] _attempt_new_transaction", destination)
+
+ # Sort based on the order field
+ pending_pdus.sort(key=lambda t: t[2])
+
+ pdus = [x[0] for x in pending_pdus]
+ edus = [x[0] for x in pending_edus]
+ failures = [x[0].get_dict() for x in pending_failures]
+ deferreds = [
+ x[1]
+ for x in pending_pdus + pending_edus + pending_failures
+ ]
+
txn_id = str(self._next_txn_id)
limiter = yield get_retry_limiter(
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index a81b3c4345..3d59e1c650 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -161,7 +161,7 @@ class TransportLayerClient(object):
@defer.inlineCallbacks
@log_function
- def make_membership_event(self, destination, room_id, user_id, membership, args={}):
+ def make_membership_event(self, destination, room_id, user_id, membership):
valid_memberships = {Membership.JOIN, Membership.LEAVE}
if membership not in valid_memberships:
raise RuntimeError(
@@ -173,7 +173,6 @@ class TransportLayerClient(object):
content = yield self.client.get_json(
destination=destination,
path=path,
- args=args,
retry_on_dns_fail=True,
)
@@ -220,6 +219,19 @@ class TransportLayerClient(object):
@defer.inlineCallbacks
@log_function
+ def exchange_third_party_invite(self, destination, room_id, event_dict):
+ path = PREFIX + "/exchange_third_party_invite/%s" % (room_id,)
+
+ response = yield self.client.put_json(
+ destination=destination,
+ path=path,
+ data=event_dict,
+ )
+
+ defer.returnValue(response)
+
+ @defer.inlineCallbacks
+ @log_function
def get_event_auth(self, destination, room_id, event_id):
path = PREFIX + "/event_auth/%s/%s" % (room_id, event_id)
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 8184159210..127b4da4f8 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -292,7 +292,7 @@ class FederationMakeJoinServlet(BaseFederationServlet):
@defer.inlineCallbacks
def on_GET(self, origin, content, query, context, user_id):
- content = yield self.handler.on_make_join_request(context, user_id, query)
+ content = yield self.handler.on_make_join_request(context, user_id)
defer.returnValue((200, content))
@@ -343,6 +343,17 @@ class FederationInviteServlet(BaseFederationServlet):
defer.returnValue((200, content))
+class FederationThirdPartyInviteExchangeServlet(BaseFederationServlet):
+ PATH = "/exchange_third_party_invite/([^/]*)"
+
+ @defer.inlineCallbacks
+ def on_PUT(self, origin, content, query, room_id):
+ content = yield self.handler.on_exchange_third_party_invite_request(
+ origin, room_id, content
+ )
+ defer.returnValue((200, content))
+
+
class FederationClientKeysQueryServlet(BaseFederationServlet):
PATH = "/user/keys/query"
@@ -396,6 +407,30 @@ class FederationGetMissingEventsServlet(BaseFederationServlet):
defer.returnValue((200, content))
+class On3pidBindServlet(BaseFederationServlet):
+ PATH = "/3pid/onbind"
+
+ @defer.inlineCallbacks
+ def on_POST(self, request):
+ content_bytes = request.content.read()
+ content = json.loads(content_bytes)
+ if "invites" in content:
+ last_exception = None
+ for invite in content["invites"]:
+ try:
+ yield self.handler.exchange_third_party_invite(invite)
+ except Exception as e:
+ last_exception = e
+ if last_exception:
+ raise last_exception
+ defer.returnValue((200, {}))
+
+ # Avoid doing remote HS authorization checks which are done by default by
+ # BaseFederationServlet.
+ def _wrap(self, code):
+ return code
+
+
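A hypothetical body for the new ``/3pid/onbind`` endpoint, showing only the fields this code path reads (``invites``, and per-invite ``sender``, ``room_id``, ``mxid`` and ``signed`` via the federation handler); identity servers may include further fields not shown here::

    {
        "invites": [
            {
                "sender": "@inviter:example.org",
                "room_id": "!abc123:example.org",
                "mxid": "@invitee:example.org",
                "signed": {
                    "mxid": "@invitee:example.org",
                    "token": "sometoken",
                    "signatures": {
                        "identity.example.org": {"ed25519:0": "<sig>"},
                    },
                },
            }
        ]
    }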
SERVLET_CLASSES = (
FederationPullServlet,
FederationEventServlet,
@@ -413,4 +448,6 @@ SERVLET_CLASSES = (
FederationEventAuthServlet,
FederationClientKeysQueryServlet,
FederationClientKeysClaimServlet,
+ FederationThirdPartyInviteExchangeServlet,
+ On3pidBindServlet,
)
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index 6a26cb1879..6519f183df 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -21,7 +21,6 @@ from synapse.api.constants import Membership, EventTypes
from synapse.types import UserID, RoomAlias
from synapse.util.logcontext import PreserveLoggingContext
-from synapse.util import third_party_invites
import logging
@@ -30,6 +29,12 @@ logger = logging.getLogger(__name__)
class BaseHandler(object):
+ """
+ Common base class for the event handlers.
+
+ :type store: synapse.storage.events.StateStore
+ :type state_handler: synapse.state.StateHandler
+ """
def __init__(self, hs):
self.store = hs.get_datastore()
@@ -47,37 +52,24 @@ class BaseHandler(object):
self.event_builder_factory = hs.get_event_builder_factory()
@defer.inlineCallbacks
- def _filter_events_for_client(self, user_id, events):
- event_id_to_state = yield self.store.get_state_for_events(
- frozenset(e.event_id for e in events),
- types=(
- (EventTypes.RoomHistoryVisibility, ""),
- (EventTypes.Member, user_id),
- )
- )
+ def _filter_events_for_client(self, user_id, events, is_guest=False,
+ require_all_visible_for_guests=True):
+ # Assumes that the user has at some point joined the room if not is_guest.
- def allowed(event, state):
- if event.type == EventTypes.RoomHistoryVisibility:
+ def allowed(event, membership, visibility):
+ if visibility == "world_readable":
return True
- membership_ev = state.get((EventTypes.Member, user_id), None)
- if membership_ev:
- membership = membership_ev.membership
- else:
- membership = Membership.LEAVE
+ if is_guest:
+ return False
if membership == Membership.JOIN:
return True
- history = state.get((EventTypes.RoomHistoryVisibility, ''), None)
- if history:
- visibility = history.content.get("history_visibility", "shared")
- else:
- visibility = "shared"
+ if event.type == EventTypes.RoomHistoryVisibility:
+ return not is_guest
- if visibility == "public":
- return True
- elif visibility == "shared":
+ if visibility == "shared":
return True
elif visibility == "joined":
return membership == Membership.JOIN
@@ -86,11 +78,46 @@ class BaseHandler(object):
return True
- defer.returnValue([
- event
- for event in events
- if allowed(event, event_id_to_state[event.event_id])
- ])
+ event_id_to_state = yield self.store.get_state_for_events(
+ frozenset(e.event_id for e in events),
+ types=(
+ (EventTypes.RoomHistoryVisibility, ""),
+ (EventTypes.Member, user_id),
+ )
+ )
+
+ events_to_return = []
+ for event in events:
+ state = event_id_to_state[event.event_id]
+
+ membership_event = state.get((EventTypes.Member, user_id), None)
+ if membership_event:
+ membership = membership_event.membership
+ else:
+ membership = None
+
+ visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None)
+ if visibility_event:
+ visibility = visibility_event.content.get("history_visibility", "shared")
+ else:
+ visibility = "shared"
+
+ should_include = allowed(event, membership, visibility)
+ if should_include:
+ events_to_return.append(event)
+
+ if (require_all_visible_for_guests
+ and is_guest
+ and len(events_to_return) < len(events)):
+ # This indicates that some events in the requested range were not
+ # visible to guest users. To be safe, we reject the entire request,
+ # so that we don't have to worry about interpreting visibility
+ # boundaries.
+ raise AuthError(403, "User %s does not have permission" % (
+ user_id
+ ))
+
+ defer.returnValue(events_to_return)
def ratelimit(self, user_id):
time_now = self.clock.time()
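A simplified restatement of the visibility decision above, usable as a quick sanity check; it covers only the branches visible in this hunk and omits the ``m.room.history_visibility`` event special case and the elided "invited" branch::

    def visible(visibility, membership, is_guest):
        if visibility == "world_readable":
            return True
        if is_guest:
            return False
        if membership == "join":
            return True
        if visibility == "shared":
            return True
        if visibility == "joined":
            return membership == "join"
        return True

    assert visible("shared", None, is_guest=True) is False
    assert visible("world_readable", None, is_guest=True) is True
    assert visible("joined", None, is_guest=False) is False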
@@ -154,6 +181,8 @@ class BaseHandler(object):
if not suppress_auth:
self.auth.check(event, auth_events=context.current_state)
+ yield self.maybe_kick_guest_users(event, context.current_state.values())
+
if event.type == EventTypes.CanonicalAlias:
# Check the alias is actually valid (at this time at least)
room_alias_str = event.content.get("alias", None)
@@ -170,16 +199,6 @@ class BaseHandler(object):
)
)
- if (
- event.type == EventTypes.Member and
- event.content["membership"] == Membership.JOIN and
- third_party_invites.join_has_third_party_invite(event.content)
- ):
- yield third_party_invites.check_key_valid(
- self.hs.get_simple_http_client(),
- event
- )
-
federation_handler = self.hs.get_handlers().federation_handler
if event.type == EventTypes.Member:
@@ -271,3 +290,58 @@ class BaseHandler(object):
federation_handler.handle_new_event(
event, destinations=destinations,
)
+
+ @defer.inlineCallbacks
+ def maybe_kick_guest_users(self, event, current_state):
+ # Technically this function invalidates current_state by changing it.
+ # Hopefully this isn't that important to the caller.
+ if event.type == EventTypes.GuestAccess:
+ guest_access = event.content.get("guest_access", "forbidden")
+ if guest_access != "can_join":
+ yield self.kick_guest_users(current_state)
+
+ @defer.inlineCallbacks
+ def kick_guest_users(self, current_state):
+ for member_event in current_state:
+ try:
+ if member_event.type != EventTypes.Member:
+ continue
+
+ if not self.hs.is_mine(UserID.from_string(member_event.state_key)):
+ continue
+
+ if member_event.content["membership"] not in {
+ Membership.JOIN,
+ Membership.INVITE
+ }:
+ continue
+
+ if (
+ "kind" not in member_event.content
+ or member_event.content["kind"] != "guest"
+ ):
+ continue
+
+ # We make the user choose to leave, rather than have the
+ # event-sender kick them. This is partially because we don't
+ # need to worry about power levels, and partially because guest
+ # users are a concept which doesn't hugely work over federation,
+ # and having homeservers have their own users leave keeps more
+ # of that decision-making and control local to the guest-having
+ # homeserver.
+ message_handler = self.hs.get_handlers().message_handler
+ yield message_handler.create_and_send_event(
+ {
+ "type": EventTypes.Member,
+ "state_key": member_event.state_key,
+ "content": {
+ "membership": Membership.LEAVE,
+ "kind": "guest"
+ },
+ "room_id": member_event.room_id,
+ "sender": member_event.state_key
+ },
+ ratelimit=False,
+ )
+ except Exception as e:
+ logger.warn("Error kicking guest user: %s" % (e,))
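For reference, the state event that triggers the kick above is an ``m.room.guest_access`` event whose ``guest_access`` is anything other than ``can_join`` (identifiers below are hypothetical)::

    {
        "type": "m.room.guest_access",
        "state_key": "",
        "sender": "@op:example.org",
        "room_id": "!abc123:example.org",
        "content": {"guest_access": "forbidden"},
    }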
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 055d395b20..1b11dbdffd 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -372,12 +372,15 @@ class AuthHandler(BaseHandler):
yield self.store.add_refresh_token_to_user(user_id, refresh_token)
defer.returnValue(refresh_token)
- def generate_access_token(self, user_id):
+ def generate_access_token(self, user_id, extra_caveats=None):
+ extra_caveats = extra_caveats or []
macaroon = self._generate_base_macaroon(user_id)
macaroon.add_first_party_caveat("type = access")
now = self.hs.get_clock().time_msec()
expiry = now + (60 * 60 * 1000)
macaroon.add_first_party_caveat("time < %d" % (expiry,))
+ for caveat in extra_caveats:
+ macaroon.add_first_party_caveat(caveat)
return macaroon.serialize()
def generate_refresh_token(self, user_id):
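Guest registration can then mint its token by passing the new parameter, e.g. (hypothetical call site; the caveat string matches what synapse.api.auth.Auth now accepts)::

    access_token = self.auth_handler().generate_access_token(
        user_id, extra_caveats=["guest = true"]
    )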
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 92afa35d57..0e4c0d4d06 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -100,7 +100,7 @@ class EventStreamHandler(BaseHandler):
@log_function
def get_stream(self, auth_user_id, pagin_config, timeout=0,
as_client_event=True, affect_presence=True,
- only_room_events=False):
+ only_room_events=False, room_id=None, is_guest=False):
"""Fetches the events stream for a given user.
If `only_room_events` is `True` only room events will be returned.
@@ -111,17 +111,6 @@ class EventStreamHandler(BaseHandler):
if affect_presence:
yield self.started_stream(auth_user)
- rm_handler = self.hs.get_handlers().room_member_handler
-
- app_service = yield self.store.get_app_service_by_user_id(
- auth_user.to_string()
- )
- if app_service:
- rooms = yield self.store.get_app_service_rooms(app_service)
- room_ids = set(r.room_id for r in rooms)
- else:
- room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user)
-
if timeout:
# If they've set a timeout set a minimum limit.
timeout = max(timeout, 500)
@@ -130,9 +119,15 @@ class EventStreamHandler(BaseHandler):
# thundering herds on restart.
timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
+ if is_guest:
+ yield self.distributor.fire(
+ "user_joined_room", user=auth_user, room_id=room_id
+ )
+
events, tokens = yield self.notifier.get_events_for(
- auth_user, room_ids, pagin_config, timeout,
- only_room_events=only_room_events
+ auth_user, pagin_config, timeout,
+ only_room_events=only_room_events,
+ is_guest=is_guest, guest_room_id=room_id
)
time_now = self.clock.time_msec()
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index ae9d227586..c1bce07e31 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -21,6 +21,7 @@ from synapse.api.errors import (
AuthError, FederationError, StoreError, CodeMessageException, SynapseError,
)
from synapse.api.constants import EventTypes, Membership, RejectedReason
+from synapse.events.validator import EventValidator
from synapse.util import unwrapFirstError
from synapse.util.logcontext import PreserveLoggingContext
from synapse.util.logutils import log_function
@@ -39,7 +40,6 @@ from twisted.internet import defer
import itertools
import logging
-from synapse.util import third_party_invites
logger = logging.getLogger(__name__)
@@ -58,6 +58,8 @@ class FederationHandler(BaseHandler):
def __init__(self, hs):
super(FederationHandler, self).__init__(hs)
+ self.hs = hs
+
self.distributor.observe(
"user_joined_room",
self._on_user_joined
@@ -68,12 +70,9 @@ class FederationHandler(BaseHandler):
self.store = hs.get_datastore()
self.replication_layer = hs.get_replication_layer()
self.state_handler = hs.get_state_handler()
- # self.auth_handler = gs.get_auth_handler()
self.server_name = hs.hostname
self.keyring = hs.get_keyring()
- self.lock_manager = hs.get_room_lock_manager()
-
self.replication_layer.set_handler(self)
# When joining a room we need to queue any events for that room up
@@ -586,7 +585,7 @@ class FederationHandler(BaseHandler):
room_id,
joinee,
"join",
- content
+ content,
)
self.room_queues[room_id] = []
@@ -663,16 +662,12 @@ class FederationHandler(BaseHandler):
@defer.inlineCallbacks
@log_function
- def on_make_join_request(self, room_id, user_id, query):
+ def on_make_join_request(self, room_id, user_id):
""" We've received a /make_join/ request, so we create a partial
join event for the room and return that. We do *not* persist or
process it until the other server has signed it and sent it back.
"""
event_content = {"membership": Membership.JOIN}
- if third_party_invites.has_join_keys(query):
- event_content["third_party_invite"] = (
- third_party_invites.extract_join_keys(query)
- )
builder = self.event_builder_factory.new({
"type": EventTypes.Member,
@@ -688,9 +683,6 @@ class FederationHandler(BaseHandler):
self.auth.check(event, auth_events=context.current_state)
- if third_party_invites.join_has_third_party_invite(event.content):
- third_party_invites.check_key_valid(self.hs.get_simple_http_client(), event)
-
defer.returnValue(event)
@defer.inlineCallbacks
@@ -830,8 +822,7 @@ class FederationHandler(BaseHandler):
target_hosts,
room_id,
user_id,
- "leave",
- {}
+ "leave"
)
signed_event = self._sign_event(event)
@@ -850,13 +841,14 @@ class FederationHandler(BaseHandler):
defer.returnValue(None)
@defer.inlineCallbacks
- def _make_and_verify_event(self, target_hosts, room_id, user_id, membership, content):
+ def _make_and_verify_event(self, target_hosts, room_id, user_id, membership,
+ content={},):
origin, pdu = yield self.replication_layer.make_membership_event(
target_hosts,
room_id,
user_id,
membership,
- content
+ content,
)
logger.debug("Got response to make_%s: %s", membership, pdu)
@@ -1108,8 +1100,6 @@ class FederationHandler(BaseHandler):
context = yield self._prep_event(
origin, event,
state=state,
- backfilled=backfilled,
- current_state=current_state,
auth_events=auth_events,
)
@@ -1132,7 +1122,6 @@ class FederationHandler(BaseHandler):
origin,
ev_info["event"],
state=ev_info.get("state"),
- backfilled=backfilled,
auth_events=ev_info.get("auth_events"),
)
for ev_info in event_infos
@@ -1219,8 +1208,7 @@ class FederationHandler(BaseHandler):
defer.returnValue((event_stream_id, max_stream_id))
@defer.inlineCallbacks
- def _prep_event(self, origin, event, state=None, backfilled=False,
- current_state=None, auth_events=None):
+ def _prep_event(self, origin, event, state=None, auth_events=None):
outlier = event.internal_metadata.is_outlier()
context = yield self.state_handler.compute_event_context(
@@ -1253,6 +1241,10 @@ class FederationHandler(BaseHandler):
context.rejected = RejectedReason.AUTH_ERROR
+ if event.type == EventTypes.GuestAccess:
+ full_context = yield self.store.get_current_state(room_id=event.room_id)
+ yield self.maybe_kick_guest_users(event, full_context)
+
defer.returnValue(context)
@defer.inlineCallbacks
@@ -1649,3 +1641,75 @@ class FederationHandler(BaseHandler):
},
"missing": [e.event_id for e in missing_locals],
})
+
+ @defer.inlineCallbacks
+ @log_function
+ def exchange_third_party_invite(self, invite):
+ sender = invite["sender"]
+ room_id = invite["room_id"]
+
+ event_dict = {
+ "type": EventTypes.Member,
+ "content": {
+ "membership": Membership.INVITE,
+ "third_party_invite": invite,
+ },
+ "room_id": room_id,
+ "sender": sender,
+ "state_key": invite["mxid"],
+ }
+
+ if (yield self.auth.check_host_in_room(room_id, self.hs.hostname)):
+ builder = self.event_builder_factory.new(event_dict)
+ EventValidator().validate_new(builder)
+ event, context = yield self._create_new_client_event(builder=builder)
+ self.auth.check(event, context.current_state)
+ yield self._validate_keyserver(event, auth_events=context.current_state)
+ member_handler = self.hs.get_handlers().room_member_handler
+ yield member_handler.change_membership(event, context)
+ else:
+ destinations = set([x.split(":", 1)[-1] for x in (sender, room_id)])
+ yield self.replication_layer.forward_third_party_invite(
+ destinations,
+ room_id,
+ event_dict,
+ )
+
+ @defer.inlineCallbacks
+ @log_function
+ def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
+ builder = self.event_builder_factory.new(event_dict)
+
+ event, context = yield self._create_new_client_event(
+ builder=builder,
+ )
+
+ self.auth.check(event, auth_events=context.current_state)
+ yield self._validate_keyserver(event, auth_events=context.current_state)
+
+ returned_invite = yield self.send_invite(origin, event)
+ # TODO: Make sure the signatures actually are correct.
+ event.signatures.update(returned_invite.signatures)
+ member_handler = self.hs.get_handlers().room_member_handler
+ yield member_handler.change_membership(event, context)
+
+ @defer.inlineCallbacks
+ def _validate_keyserver(self, event, auth_events):
+ token = event.content["third_party_invite"]["signed"]["token"]
+
+ invite_event = auth_events.get(
+ (EventTypes.ThirdPartyInvite, token,)
+ )
+
+ try:
+ response = yield self.hs.get_simple_http_client().get_json(
+ invite_event.content["key_validity_url"],
+ {"public_key": invite_event.content["public_key"]}
+ )
+ except Exception:
+ raise SynapseError(
+ 502,
+ "Third party certificate could not be checked"
+ )
+ if "valid" not in response or not response["valid"]:
+ raise AuthError(403, "Third party certificate was invalid")
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 024474d5fe..14051aee99 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -16,7 +16,7 @@
from twisted.internet import defer
from synapse.api.constants import EventTypes, Membership
-from synapse.api.errors import SynapseError
+from synapse.api.errors import SynapseError, AuthError, Codes
from synapse.streams.config import PaginationConfig
from synapse.events.utils import serialize_event
from synapse.events.validator import EventValidator
@@ -71,20 +71,20 @@ class MessageHandler(BaseHandler):
@defer.inlineCallbacks
def get_messages(self, user_id=None, room_id=None, pagin_config=None,
- as_client_event=True):
+ as_client_event=True, is_guest=False):
"""Get messages in a room.
Args:
user_id (str): The user requesting messages.
room_id (str): The room they want messages from.
pagin_config (synapse.api.streams.PaginationConfig): The pagination
- config rules to apply, if any.
+ config rules to apply, if any.
as_client_event (bool): True to get events in client-server format.
+ is_guest (bool): Whether the requesting user is a guest (as opposed
+ to a fully registered user).
Returns:
dict: Pagination API results
"""
- member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
-
data_source = self.hs.get_event_sources().sources["room"]
if pagin_config.from_token:
@@ -107,23 +107,27 @@ class MessageHandler(BaseHandler):
source_config = pagin_config.get_source_config("room")
- if member_event.membership == Membership.LEAVE:
- # If they have left the room then clamp the token to be before
- # they left the room
- leave_token = yield self.store.get_topological_token_for_event(
- member_event.event_id
- )
- leave_token = RoomStreamToken.parse(leave_token)
- if leave_token.topological < room_token.topological:
- source_config.from_key = str(leave_token)
-
- if source_config.direction == "f":
- if source_config.to_key is None:
- source_config.to_key = str(leave_token)
- else:
- to_token = RoomStreamToken.parse(source_config.to_key)
- if leave_token.topological < to_token.topological:
+ if not is_guest:
+ member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+ if member_event.membership == Membership.LEAVE:
+ # If they have left the room then clamp the token to be before
+ # they left the room.
+ # If they're a guest, we'll just 403 them if they're asking for
+ # events they can't see.
+ leave_token = yield self.store.get_topological_token_for_event(
+ member_event.event_id
+ )
+ leave_token = RoomStreamToken.parse(leave_token)
+ if leave_token.topological < room_token.topological:
+ source_config.from_key = str(leave_token)
+
+ if source_config.direction == "f":
+ if source_config.to_key is None:
source_config.to_key = str(leave_token)
+ else:
+ to_token = RoomStreamToken.parse(source_config.to_key)
+ if leave_token.topological < to_token.topological:
+ source_config.to_key = str(leave_token)
yield self.hs.get_handlers().federation_handler.maybe_backfill(
room_id, room_token.topological
@@ -146,7 +150,7 @@ class MessageHandler(BaseHandler):
"end": next_token.to_string(),
})
- events = yield self._filter_events_for_client(user_id, events)
+ events = yield self._filter_events_for_client(user_id, events, is_guest=is_guest)
time_now = self.clock.time_msec()
@@ -163,7 +167,7 @@ class MessageHandler(BaseHandler):
@defer.inlineCallbacks
def create_and_send_event(self, event_dict, ratelimit=True,
- token_id=None, txn_id=None):
+ token_id=None, txn_id=None, is_guest=False):
""" Given a dict from a client, create and handle a new event.
Creates an FrozenEvent object, filling out auth_events, prev_events,
@@ -209,7 +213,7 @@ class MessageHandler(BaseHandler):
if event.type == EventTypes.Member:
member_handler = self.hs.get_handlers().room_member_handler
- yield member_handler.change_membership(event, context)
+ yield member_handler.change_membership(event, context, is_guest=is_guest)
else:
yield self.handle_new_client_event(
event=event,
@@ -225,7 +229,7 @@ class MessageHandler(BaseHandler):
@defer.inlineCallbacks
def get_room_data(self, user_id=None, room_id=None,
- event_type=None, state_key=""):
+ event_type=None, state_key="", is_guest=False):
""" Get data from a room.
Args:
@@ -235,23 +239,52 @@ class MessageHandler(BaseHandler):
Raises:
SynapseError if something went wrong.
"""
- member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+ membership, membership_event_id = yield self._check_in_room_or_world_readable(
+ room_id, user_id, is_guest
+ )
- if member_event.membership == Membership.JOIN:
+ if membership == Membership.JOIN:
data = yield self.state_handler.get_current_state(
room_id, event_type, state_key
)
- elif member_event.membership == Membership.LEAVE:
+ elif membership == Membership.LEAVE:
key = (event_type, state_key)
room_state = yield self.store.get_state_for_events(
- [member_event.event_id], [key]
+ [membership_event_id], [key]
)
- data = room_state[member_event.event_id].get(key)
+ data = room_state[membership_event_id].get(key)
defer.returnValue(data)
@defer.inlineCallbacks
- def get_state_events(self, user_id, room_id):
+ def _check_in_room_or_world_readable(self, room_id, user_id, is_guest):
+ try:
+ # check_user_was_in_room will return the most recent membership
+ # event for the user if:
+ # * The user is a non-guest user, and was ever in the room
+ # * The user is a guest user, and has joined the room
+ # else it will throw.
+ member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+ defer.returnValue((member_event.membership, member_event.event_id))
+ return
+ except AuthError as auth_error:
+ visibility = yield self.state_handler.get_current_state(
+ room_id, EventTypes.RoomHistoryVisibility, ""
+ )
+ if (
+ visibility and
+ visibility.content["history_visibility"] == "world_readable"
+ ):
+ defer.returnValue((Membership.JOIN, None))
+ return
+ if not is_guest:
+ raise auth_error
+ raise AuthError(
+ 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
+ )
+
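The world-readable escape hatch is driven by the room's history-visibility state; a hypothetical state event that makes the check above succeed for guests and other non-members::

    {
        "type": "m.room.history_visibility",
        "state_key": "",
        "content": {"history_visibility": "world_readable"},
    }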
+ @defer.inlineCallbacks
+ def get_state_events(self, user_id, room_id, is_guest=False):
"""Retrieve all state events for a given room. If the user is
joined to the room then return the current state. If the user has
left the room return the state events from when they left.
@@ -262,15 +295,17 @@ class MessageHandler(BaseHandler):
Returns:
A list of dicts representing state events. [{}, {}, {}]
"""
- member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+ membership, membership_event_id = yield self._check_in_room_or_world_readable(
+ room_id, user_id, is_guest
+ )
- if member_event.membership == Membership.JOIN:
+ if membership == Membership.JOIN:
room_state = yield self.state_handler.get_current_state(room_id)
- elif member_event.membership == Membership.LEAVE:
+ elif membership == Membership.LEAVE:
room_state = yield self.store.get_state_for_events(
- [member_event.event_id], None
+ [membership_event_id], None
)
- room_state = room_state[member_event.event_id]
+ room_state = room_state[membership_event_id]
now = self.clock.time_msec()
defer.returnValue(
@@ -322,6 +357,8 @@ class MessageHandler(BaseHandler):
user, pagination_config.get_source_config("receipt"), None
)
+ tags_by_room = yield self.store.get_tags_for_user(user_id)
+
public_room_ids = yield self.store.get_public_room_ids()
limit = pagin_config.limit
@@ -398,6 +435,15 @@ class MessageHandler(BaseHandler):
serialize_event(c, time_now, as_client_event)
for c in current_state.values()
]
+
+ private_user_data = []
+ tags = tags_by_room.get(event.room_id)
+ if tags:
+ private_user_data.append({
+ "type": "m.tag",
+ "content": {"tags": tags},
+ })
+ d["private_user_data"] = private_user_data
except:
logger.exception("Failed to get snapshot")
@@ -420,7 +466,7 @@ class MessageHandler(BaseHandler):
defer.returnValue(ret)
@defer.inlineCallbacks
- def room_initial_sync(self, user_id, room_id, pagin_config=None):
+ def room_initial_sync(self, user_id, room_id, pagin_config=None, is_guest=False):
"""Capture the a snapshot of a room. If user is currently a member of
the room this will be what is currently in the room. If the user left
the room this will be what was in the room when they left.
@@ -437,33 +483,47 @@ class MessageHandler(BaseHandler):
A JSON serialisable dict with the snapshot of the room.
"""
- member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+ membership, member_event_id = yield self._check_in_room_or_world_readable(
+ room_id,
+ user_id,
+ is_guest
+ )
- if member_event.membership == Membership.JOIN:
+ if membership == Membership.JOIN:
result = yield self._room_initial_sync_joined(
- user_id, room_id, pagin_config, member_event
+ user_id, room_id, pagin_config, membership, is_guest
)
- elif member_event.membership == Membership.LEAVE:
+ elif membership == Membership.LEAVE:
result = yield self._room_initial_sync_parted(
- user_id, room_id, pagin_config, member_event
+ user_id, room_id, pagin_config, membership, member_event_id, is_guest
)
+
+ private_user_data = []
+ tags = yield self.store.get_tags_for_room(user_id, room_id)
+ if tags:
+ private_user_data.append({
+ "type": "m.tag",
+ "content": {"tags": tags},
+ })
+ result["private_user_data"] = private_user_data
+
defer.returnValue(result)
@defer.inlineCallbacks
def _room_initial_sync_parted(self, user_id, room_id, pagin_config,
- member_event):
+ membership, member_event_id, is_guest):
room_state = yield self.store.get_state_for_events(
- [member_event.event_id], None
+ [member_event_id], None
)
- room_state = room_state[member_event.event_id]
+ room_state = room_state[member_event_id]
limit = pagin_config.limit if pagin_config else None
if limit is None:
limit = 10
stream_token = yield self.store.get_stream_token_for_event(
- member_event.event_id
+ member_event_id
)
messages, token = yield self.store.get_recent_events_for_room(
@@ -473,16 +533,16 @@ class MessageHandler(BaseHandler):
)
messages = yield self._filter_events_for_client(
- user_id, messages
+ user_id, messages, is_guest=is_guest
)
- start_token = StreamToken(token[0], 0, 0, 0)
- end_token = StreamToken(token[1], 0, 0, 0)
+ start_token = StreamToken(token[0], 0, 0, 0, 0)
+ end_token = StreamToken(token[1], 0, 0, 0, 0)
time_now = self.clock.time_msec()
defer.returnValue({
- "membership": member_event.membership,
+ "membership": membership,
"room_id": room_id,
"messages": {
"chunk": [serialize_event(m, time_now) for m in messages],
@@ -496,7 +556,7 @@ class MessageHandler(BaseHandler):
@defer.inlineCallbacks
def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
- member_event):
+ membership, is_guest):
current_state = yield self.state.get_current_state(
room_id=room_id,
)
@@ -528,12 +588,14 @@ class MessageHandler(BaseHandler):
@defer.inlineCallbacks
def get_presence():
- states = yield presence_handler.get_states(
- target_users=[UserID.from_string(m.user_id) for m in room_members],
- auth_user=auth_user,
- as_event=True,
- check_auth=False,
- )
+ states = {}
+ if not is_guest:
+ states = yield presence_handler.get_states(
+ target_users=[UserID.from_string(m.user_id) for m in room_members],
+ auth_user=auth_user,
+ as_event=True,
+ check_auth=False,
+ )
defer.returnValue(states.values())
@@ -553,7 +615,7 @@ class MessageHandler(BaseHandler):
).addErrback(unwrapFirstError)
messages = yield self._filter_events_for_client(
- user_id, messages
+ user_id, messages, is_guest=is_guest, require_all_visible_for_guests=False
)
start_token = now_token.copy_and_replace("room_key", token[0])
@@ -561,8 +623,7 @@ class MessageHandler(BaseHandler):
time_now = self.clock.time_msec()
- defer.returnValue({
- "membership": member_event.membership,
+ ret = {
"room_id": room_id,
"messages": {
"chunk": [serialize_event(m, time_now) for m in messages],
@@ -572,4 +633,8 @@ class MessageHandler(BaseHandler):
"state": state,
"presence": presence,
"receipts": receipts,
- })
+ }
+ if not is_guest:
+ ret["membership"] = membership
+
+ defer.returnValue(ret)
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index ce60642127..aca65096fc 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -950,7 +950,8 @@ class PresenceHandler(BaseHandler):
)
while len(self._remote_offline_serials) > MAX_OFFLINE_SERIALS:
self._remote_offline_serials.pop() # remove the oldest
- del self._user_cachemap[user]
+ if user in self._user_cachemap:
+ del self._user_cachemap[user]
else:
# Remove the user from remote_offline_serials now that they're
# no longer offline
@@ -1142,8 +1143,9 @@ class PresenceEventSource(object):
@defer.inlineCallbacks
@log_function
- def get_new_events_for_user(self, user, from_key, limit):
+ def get_new_events(self, user, from_key, room_ids=None, **kwargs):
from_key = int(from_key)
+ room_ids = room_ids or []
presence = self.hs.get_handlers().presence_handler
cachemap = presence._user_cachemap
@@ -1161,7 +1163,6 @@ class PresenceEventSource(object):
user_ids_to_check |= set(
UserID.from_string(p["observed_user_id"]) for p in presence_list
)
- room_ids = yield presence.get_joined_rooms_for_user(user)
for room_id in set(room_ids) & set(presence._room_serials):
if presence._room_serials[room_id] > from_key:
joined = yield presence.get_joined_users_for_room_id(room_id)
diff --git a/synapse/handlers/private_user_data.py b/synapse/handlers/private_user_data.py
new file mode 100644
index 0000000000..1abe45ed7b
--- /dev/null
+++ b/synapse/handlers/private_user_data.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+
+class PrivateUserDataEventSource(object):
+ def __init__(self, hs):
+ self.store = hs.get_datastore()
+
+ def get_current_key(self, direction='f'):
+ return self.store.get_max_private_user_data_stream_id()
+
+ @defer.inlineCallbacks
+ def get_new_events(self, user, from_key, **kwargs):
+ user_id = user.to_string()
+ last_stream_id = from_key
+
+ current_stream_id = yield self.store.get_max_private_user_data_stream_id()
+ tags = yield self.store.get_updated_tags(user_id, last_stream_id)
+
+ results = []
+ for room_id, room_tags in tags.items():
+ results.append({
+ "type": "m.tag",
+ "content": {"tags": room_tags},
+ "room_id": room_id,
+ })
+
+ defer.returnValue((results, current_stream_id))
+
+ @defer.inlineCallbacks
+ def get_pagination_rows(self, user, config, key):
+ defer.returnValue(([], config.to_id))
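The new event source turns per-room tag updates read from the store into "m.tag" stream events. A self-contained sketch of that shaping step (the input dict mirrors what get_updated_tags is assumed to return)::

    def tags_to_events(updated_tags):
        """updated_tags: dict of room_id -> {tag_name: tag_content}."""
        events = []
        for room_id, room_tags in updated_tags.items():
            events.append({
                "type": "m.tag",
                "content": {"tags": room_tags},
                "room_id": room_id,
            })
        return events

    print(tags_to_events({"!a:example.org": {"m.favourite": {"order": 0.1}}}))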
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index a47ae3df42..973f4d5cae 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -164,17 +164,15 @@ class ReceiptEventSource(object):
self.store = hs.get_datastore()
@defer.inlineCallbacks
- def get_new_events_for_user(self, user, from_key, limit):
+ def get_new_events(self, from_key, room_ids, **kwargs):
from_key = int(from_key)
to_key = yield self.get_current_key()
if from_key == to_key:
defer.returnValue(([], to_key))
- rooms = yield self.store.get_rooms_for_user(user.to_string())
- rooms = [room.room_id for room in rooms]
events = yield self.store.get_linearized_receipts_for_rooms(
- rooms,
+ room_ids,
from_key=from_key,
to_key=to_key,
)
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index ef4081e3fe..493a087031 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -64,7 +64,7 @@ class RegistrationHandler(BaseHandler):
)
@defer.inlineCallbacks
- def register(self, localpart=None, password=None):
+ def register(self, localpart=None, password=None, generate_token=True):
"""Registers a new client on the server.
Args:
@@ -89,7 +89,9 @@ class RegistrationHandler(BaseHandler):
user = UserID(localpart, self.hs.hostname)
user_id = user.to_string()
- token = self.auth_handler().generate_access_token(user_id)
+ token = None
+ if generate_token:
+ token = self.auth_handler().generate_access_token(user_id)
yield self.store.register(
user_id=user_id,
token=token,
@@ -102,14 +104,14 @@ class RegistrationHandler(BaseHandler):
attempts = 0
user_id = None
token = None
- while not user_id and not token:
+ while not user_id:
try:
localpart = self._generate_user_id()
user = UserID(localpart, self.hs.hostname)
user_id = user.to_string()
yield self.check_user_id_is_valid(user_id)
-
- token = self.auth_handler().generate_access_token(user_id)
+ if generate_token:
+ token = self.auth_handler().generate_access_token(user_id)
yield self.store.register(
user_id=user_id,
token=token,
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 36878a6c20..3f04752581 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -38,6 +38,8 @@ import string
logger = logging.getLogger(__name__)
+id_server_scheme = "https://"
+
class RoomCreationHandler(BaseHandler):
@@ -367,7 +369,7 @@ class RoomMemberHandler(BaseHandler):
remotedomains.add(member.domain)
@defer.inlineCallbacks
- def change_membership(self, event, context, do_auth=True):
+ def change_membership(self, event, context, do_auth=True, is_guest=False):
""" Change the membership status of a user in a room.
Args:
@@ -388,6 +390,20 @@ class RoomMemberHandler(BaseHandler):
# if this HS is not currently in the room, i.e. we have to do the
# invite/join dance.
if event.membership == Membership.JOIN:
+ if is_guest:
+ guest_access = context.current_state.get(
+ (EventTypes.GuestAccess, ""),
+ None
+ )
+ is_guest_access_allowed = (
+ guest_access
+ and guest_access.content
+ and "guest_access" in guest_access.content
+ and guest_access.content["guest_access"] == "can_join"
+ )
+ if not is_guest_access_allowed:
+ raise AuthError(403, "Guest access not allowed")
+
yield self._do_join(event, context, do_auth=do_auth)
else:
if event.membership == Membership.LEAVE:
@@ -489,7 +505,7 @@ class RoomMemberHandler(BaseHandler):
room_hosts,
room_id,
event.user_id,
- event.content # FIXME To get a non-frozen dict
+ event.content,
)
else:
logger.debug("Doing normal join")
@@ -581,7 +597,6 @@ class RoomMemberHandler(BaseHandler):
medium,
address,
id_server,
- display_name,
token_id,
txn_id
):
@@ -608,7 +623,6 @@ class RoomMemberHandler(BaseHandler):
else:
yield self._make_and_store_3pid_invite(
id_server,
- display_name,
medium,
address,
room_id,
@@ -632,7 +646,7 @@ class RoomMemberHandler(BaseHandler):
"""
try:
data = yield self.hs.get_simple_http_client().get_json(
- "https://%s/_matrix/identity/api/v1/lookup" % (id_server,),
+ "%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server,),
{
"medium": medium,
"address": address,
@@ -655,8 +669,8 @@ class RoomMemberHandler(BaseHandler):
raise AuthError(401, "No signature from server %s" % (server_hostname,))
for key_name, signature in data["signatures"][server_hostname].items():
key_data = yield self.hs.get_simple_http_client().get_json(
- "https://%s/_matrix/identity/api/v1/pubkey/%s" %
- (server_hostname, key_name,),
+ "%s%s/_matrix/identity/api/v1/pubkey/%s" %
+ (id_server_scheme, server_hostname, key_name,),
)
if "public_key" not in key_data:
raise AuthError(401, "No public key named %s from %s" %
@@ -672,7 +686,6 @@ class RoomMemberHandler(BaseHandler):
def _make_and_store_3pid_invite(
self,
id_server,
- display_name,
medium,
address,
room_id,
@@ -680,7 +693,7 @@ class RoomMemberHandler(BaseHandler):
token_id,
txn_id
):
- token, public_key, key_validity_url = (
+ token, public_key, key_validity_url, display_name = (
yield self._ask_id_server_for_third_party_invite(
id_server,
medium,
@@ -709,7 +722,9 @@ class RoomMemberHandler(BaseHandler):
@defer.inlineCallbacks
def _ask_id_server_for_third_party_invite(
self, id_server, medium, address, room_id, sender):
- is_url = "https://%s/_matrix/identity/api/v1/store-invite" % (id_server,)
+ is_url = "%s%s/_matrix/identity/api/v1/store-invite" % (
+ id_server_scheme, id_server,
+ )
data = yield self.hs.get_simple_http_client().post_urlencoded_get_json(
is_url,
{
@@ -722,10 +737,11 @@ class RoomMemberHandler(BaseHandler):
# TODO: Check for success
token = data["token"]
public_key = data["public_key"]
- key_validity_url = "https://%s/_matrix/identity/api/v1/pubkey/isvalid" % (
- id_server,
+ display_name = data["display_name"]
+ key_validity_url = "%s%s/_matrix/identity/api/v1/pubkey/isvalid" % (
+ id_server_scheme, id_server,
)
- defer.returnValue((token, public_key, key_validity_url))
+ defer.returnValue((token, public_key, key_validity_url, display_name))
class RoomListHandler(BaseHandler):
@@ -750,7 +766,7 @@ class RoomListHandler(BaseHandler):
class RoomContextHandler(BaseHandler):
@defer.inlineCallbacks
- def get_event_context(self, user, room_id, event_id, limit):
+ def get_event_context(self, user, room_id, event_id, limit, is_guest):
"""Retrieves events, pagination tokens and state around a given event
in a room.
@@ -774,11 +790,17 @@ class RoomContextHandler(BaseHandler):
)
results["events_before"] = yield self._filter_events_for_client(
- user.to_string(), results["events_before"]
+ user.to_string(),
+ results["events_before"],
+ is_guest=is_guest,
+ require_all_visible_for_guests=False
)
results["events_after"] = yield self._filter_events_for_client(
- user.to_string(), results["events_after"]
+ user.to_string(),
+ results["events_after"],
+ is_guest=is_guest,
+ require_all_visible_for_guests=False
)
if results["events_after"]:
@@ -807,7 +829,14 @@ class RoomEventSource(object):
self.store = hs.get_datastore()
@defer.inlineCallbacks
- def get_new_events_for_user(self, user, from_key, limit):
+ def get_new_events(
+ self,
+ user,
+ from_key,
+ limit,
+ room_ids,
+ is_guest,
+ ):
# We just ignore the key for now.
to_key = yield self.get_current_key()
@@ -827,8 +856,9 @@ class RoomEventSource(object):
user_id=user.to_string(),
from_key=from_key,
to_key=to_key,
- room_id=None,
limit=limit,
+ room_ids=room_ids,
+ is_guest=is_guest,
)
defer.returnValue((events, end_key))
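The guest-join gate in change_membership only lets a guest in when the room's m.room.guest_access state event says "can_join". A stand-alone sketch of that predicate, using a toy event type in place of FrozenEvent::

    import collections

    Event = collections.namedtuple("Event", ["content"])

    def guest_access_allowed(current_state):
        # current_state maps (event_type, state_key) -> event with a .content dict.
        guest_access = current_state.get(("m.room.guest_access", ""), None)
        return bool(
            guest_access
            and guest_access.content
            and guest_access.content.get("guest_access") == "can_join"
        )

    print(guest_access_allowed(
        {("m.room.guest_access", ""): Event(content={"guest_access": "can_join"})}
    ))  # True
    print(guest_access_allowed({}))  # False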
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 2718e9482e..b7545c111f 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -22,6 +22,8 @@ from synapse.api.filtering import Filter
from synapse.api.errors import SynapseError
from synapse.events.utils import serialize_event
+from unpaddedbase64 import decode_base64, encode_base64
+
import logging
@@ -34,27 +36,59 @@ class SearchHandler(BaseHandler):
super(SearchHandler, self).__init__(hs)
@defer.inlineCallbacks
- def search(self, user, content):
+ def search(self, user, content, batch=None):
"""Performs a full text search for a user.
Args:
user (UserID)
content (dict): Search parameters
+ batch (str): The next_batch parameter. Used for pagination.
Returns:
dict to be returned to the client with results of search
"""
+ batch_group = None
+ batch_group_key = None
+ batch_token = None
+ if batch:
+ try:
+ b = decode_base64(batch)
+ batch_group, batch_group_key, batch_token = b.split("\n")
+
+ assert batch_group is not None
+ assert batch_group_key is not None
+ assert batch_token is not None
+ except:
+ raise SynapseError(400, "Invalid batch")
+
try:
- search_term = content["search_categories"]["room_events"]["search_term"]
- keys = content["search_categories"]["room_events"].get("keys", [
+ room_cat = content["search_categories"]["room_events"]
+
+ # The actual thing to query in FTS
+ search_term = room_cat["search_term"]
+
+ # Which "keys" to search over in FTS query
+ keys = room_cat.get("keys", [
"content.body", "content.name", "content.topic",
])
- filter_dict = content["search_categories"]["room_events"].get("filter", {})
- event_context = content["search_categories"]["room_events"].get(
+
+ # Filter to apply to results
+ filter_dict = room_cat.get("filter", {})
+
+ # What to order results by (impacts whether pagination can be done)
+ order_by = room_cat.get("order_by", "rank")
+
+ # Include context around each event?
+ event_context = room_cat.get(
"event_context", None
)
+ # Group results together? May allow clients to paginate within a
+ # group
+ group_by = room_cat.get("groupings", {}).get("group_by", {})
+ group_keys = [g["key"] for g in group_by]
+
if event_context is not None:
before_limit = int(event_context.get(
"before_limit", 5
@@ -65,6 +99,15 @@ class SearchHandler(BaseHandler):
except KeyError:
raise SynapseError(400, "Invalid search query")
+ if order_by not in ("rank", "recent"):
+ raise SynapseError(400, "Invalid order by: %r" % (order_by,))
+
+ if set(group_keys) - {"room_id", "sender"}:
+ raise SynapseError(
+ 400,
+ "Invalid group by keys: %r" % (set(group_keys) - {"room_id", "sender"},)
+ )
+
search_filter = Filter(filter_dict)
# TODO: Search through left rooms too
@@ -77,19 +120,130 @@ class SearchHandler(BaseHandler):
room_ids = search_filter.filter_rooms(room_ids)
- rank_map, event_map, _ = yield self.store.search_msgs(
- room_ids, search_term, keys
- )
+ if batch_group == "room_id":
+ room_ids.intersection_update({batch_group_key})
- filtered_events = search_filter.filter(event_map.values())
+ rank_map = {} # event_id -> rank of event
+ allowed_events = []
+ room_groups = {} # Holds result of grouping by room, if applicable
+ sender_group = {} # Holds result of grouping by sender, if applicable
- allowed_events = yield self._filter_events_for_client(
- user.to_string(), filtered_events
- )
+ # Holds the next_batch for the entire result set, if one exists
+ global_next_batch = None
- allowed_events.sort(key=lambda e: -rank_map[e.event_id])
- allowed_events = allowed_events[:search_filter.limit()]
+ if order_by == "rank":
+ results = yield self.store.search_msgs(
+ room_ids, search_term, keys
+ )
+
+ results_map = {r["event"].event_id: r for r in results}
+
+ rank_map.update({r["event"].event_id: r["rank"] for r in results})
+
+ filtered_events = search_filter.filter([r["event"] for r in results])
+
+ events = yield self._filter_events_for_client(
+ user.to_string(), filtered_events
+ )
+
+ events.sort(key=lambda e: -rank_map[e.event_id])
+ allowed_events = events[:search_filter.limit()]
+
+ for e in allowed_events:
+ rm = room_groups.setdefault(e.room_id, {
+ "results": [],
+ "order": rank_map[e.event_id],
+ })
+ rm["results"].append(e.event_id)
+
+ s = sender_group.setdefault(e.sender, {
+ "results": [],
+ "order": rank_map[e.event_id],
+ })
+ s["results"].append(e.event_id)
+
+ elif order_by == "recent":
+ # In this case we specifically loop through each room as the given
+ # limit applies to each room, rather than a global list.
+ # This is not necessarily a good idea.
+ for room_id in room_ids:
+ room_events = []
+ if batch_group == "room_id" and batch_group_key == room_id:
+ pagination_token = batch_token
+ else:
+ pagination_token = None
+ i = 0
+
+ # We keep looping and we keep filtering until we reach the limit
+ # or we run out of things.
+ # But only go around 5 times, so a single request can't do unbounded work.
+ while len(room_events) < search_filter.limit() and i < 5:
+ i += 1
+ results = yield self.store.search_room(
+ room_id, search_term, keys, search_filter.limit() * 2,
+ pagination_token=pagination_token,
+ )
+
+ results_map = {r["event"].event_id: r for r in results}
+
+ rank_map.update({r["event"].event_id: r["rank"] for r in results})
+
+ filtered_events = search_filter.filter([
+ r["event"] for r in results
+ ])
+
+ events = yield self._filter_events_for_client(
+ user.to_string(), filtered_events
+ )
+
+ room_events.extend(events)
+ room_events = room_events[:search_filter.limit()]
+
+ if len(results) < search_filter.limit() * 2:
+ pagination_token = None
+ break
+ else:
+ pagination_token = results[-1]["pagination_token"]
+
+ if room_events:
+ res = results_map[room_events[-1].event_id]
+ pagination_token = res["pagination_token"]
+
+ group = room_groups.setdefault(room_id, {})
+ if pagination_token:
+ next_batch = encode_base64("%s\n%s\n%s" % (
+ "room_id", room_id, pagination_token
+ ))
+ group["next_batch"] = next_batch
+
+ if batch_token:
+ global_next_batch = next_batch
+
+ group["results"] = [e.event_id for e in room_events]
+ group["order"] = max(
+ e.origin_server_ts/1000 for e in room_events
+ if hasattr(e, "origin_server_ts")
+ )
+
+ allowed_events.extend(room_events)
+
+ # Normalize the group orders
+ if room_groups:
+ if len(room_groups) > 1:
+ mx = max(g["order"] for g in room_groups.values())
+ mn = min(g["order"] for g in room_groups.values())
+
+ for g in room_groups.values():
+ g["order"] = (g["order"] - mn) * 1.0 / (mx - mn)
+ else:
+ room_groups.values()[0]["order"] = 1
+ else:
+ # We should never get here due to the guard earlier.
+ raise NotImplementedError()
+
+ # If client has asked for "context" for each event (i.e. some surrounding
+ # events and state), fetch that
if event_context is not None:
now_token = yield self.hs.get_event_sources().get_current_token()
@@ -144,11 +298,22 @@ class SearchHandler(BaseHandler):
logger.info("Found %d results", len(results))
+ rooms_cat_res = {
+ "results": results,
+ "count": len(results)
+ }
+
+ if room_groups and "room_id" in group_keys:
+ rooms_cat_res.setdefault("groups", {})["room_id"] = room_groups
+
+ if sender_group and "sender" in group_keys:
+ rooms_cat_res.setdefault("groups", {})["sender"] = sender_group
+
+ if global_next_batch:
+ rooms_cat_res["next_batch"] = global_next_batch
+
defer.returnValue({
"search_categories": {
- "room_events": {
- "results": results,
- "count": len(results)
- }
+ "room_events": rooms_cat_res
}
})
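The next_batch/batch token used for search pagination is just three newline-separated fields (group, group key, store pagination token) run through unpadded base64. A sketch of the encode/decode pair, assuming the same Python 2 string handling as the handler::

    from unpaddedbase64 import decode_base64, encode_base64

    def make_batch_token(group, group_key, pagination_token):
        # e.g. group="room_id", group_key="!abc:example.org"
        return encode_base64("%s\n%s\n%s" % (group, group_key, pagination_token))

    def parse_batch_token(batch):
        group, group_key, pagination_token = decode_base64(batch).split("\n")
        return group, group_key, pagination_token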
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 4c5a2353b2..6dc9d0fb92 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -47,10 +47,11 @@ class TimelineBatch(collections.namedtuple("TimelineBatch", [
class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [
- "room_id",
- "timeline",
- "state",
+ "room_id", # str
+ "timeline", # TimelineBatch
+ "state", # dict[(str, str), FrozenEvent]
"ephemeral",
+ "private_user_data",
])):
__slots__ = []
@@ -58,13 +59,19 @@ class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [
"""Make the result appear empty if there are no updates. This is used
to tell if room needs to be part of the sync result.
"""
- return bool(self.timeline or self.state or self.ephemeral)
+ return bool(
+ self.timeline
+ or self.state
+ or self.ephemeral
+ or self.private_user_data
+ )
class ArchivedSyncResult(collections.namedtuple("JoinedSyncResult", [
- "room_id",
- "timeline",
- "state",
+ "room_id", # str
+ "timeline", # TimelineBatch
+ "state", # dict[(str, str), FrozenEvent]
+ "private_user_data",
])):
__slots__ = []
@@ -72,12 +79,16 @@ class ArchivedSyncResult(collections.namedtuple("JoinedSyncResult", [
"""Make the result appear empty if there are no updates. This is used
to tell if room needs to be part of the sync result.
"""
- return bool(self.timeline or self.state)
+ return bool(
+ self.timeline
+ or self.state
+ or self.private_user_data
+ )
class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [
- "room_id",
- "invite",
+ "room_id", # str
+ "invite", # FrozenEvent: the invite event
])):
__slots__ = []
@@ -132,21 +143,8 @@ class SyncHandler(BaseHandler):
def current_sync_callback(before_token, after_token):
return self.current_sync_for_user(sync_config, since_token)
- rm_handler = self.hs.get_handlers().room_member_handler
-
- app_service = yield self.store.get_app_service_by_user_id(
- sync_config.user.to_string()
- )
- if app_service:
- rooms = yield self.store.get_app_service_rooms(app_service)
- room_ids = set(r.room_id for r in rooms)
- else:
- room_ids = yield rm_handler.get_joined_rooms_for_user(
- sync_config.user
- )
-
result = yield self.notifier.wait_for_events(
- sync_config.user, room_ids, timeout, current_sync_callback,
+ sync_config.user, timeout, current_sync_callback,
from_token=since_token
)
defer.returnValue(result)
@@ -174,7 +172,7 @@ class SyncHandler(BaseHandler):
"""
now_token = yield self.event_sources.get_current_token()
- now_token, typing_by_room = yield self.typing_by_room(
+ now_token, ephemeral_by_room = yield self.ephemeral_by_room(
sync_config, now_token
)
@@ -197,6 +195,10 @@ class SyncHandler(BaseHandler):
)
)
+ tags_by_room = yield self.store.get_tags_for_user(
+ sync_config.user.to_string()
+ )
+
joined = []
invited = []
archived = []
@@ -207,7 +209,8 @@ class SyncHandler(BaseHandler):
sync_config=sync_config,
now_token=now_token,
timeline_since_token=timeline_since_token,
- typing_by_room=typing_by_room
+ ephemeral_by_room=ephemeral_by_room,
+ tags_by_room=tags_by_room,
)
joined.append(room_sync)
elif event.membership == Membership.INVITE:
@@ -226,6 +229,7 @@ class SyncHandler(BaseHandler):
leave_event_id=event.event_id,
leave_token=leave_token,
timeline_since_token=timeline_since_token,
+ tags_by_room=tags_by_room,
)
archived.append(room_sync)
@@ -240,7 +244,7 @@ class SyncHandler(BaseHandler):
@defer.inlineCallbacks
def full_state_sync_for_joined_room(self, room_id, sync_config,
now_token, timeline_since_token,
- typing_by_room):
+ ephemeral_by_room, tags_by_room):
"""Sync a room for a client which is starting without any state
Returns:
A Deferred JoinedSyncResult.
@@ -250,21 +254,31 @@ class SyncHandler(BaseHandler):
room_id, sync_config, now_token, since_token=timeline_since_token
)
- current_state = yield self.state_handler.get_current_state(
- room_id
- )
- current_state_events = current_state.values()
+ current_state = yield self.get_state_at(room_id, now_token)
defer.returnValue(JoinedSyncResult(
room_id=room_id,
timeline=batch,
- state=current_state_events,
- ephemeral=typing_by_room.get(room_id, []),
+ state=current_state,
+ ephemeral=ephemeral_by_room.get(room_id, []),
+ private_user_data=self.private_user_data_for_room(
+ room_id, tags_by_room
+ ),
))
+ def private_user_data_for_room(self, room_id, tags_by_room):
+ private_user_data = []
+ tags = tags_by_room.get(room_id)
+ if tags is not None:
+ private_user_data.append({
+ "type": "m.tag",
+ "content": {"tags": tags},
+ })
+ return private_user_data
+
@defer.inlineCallbacks
- def typing_by_room(self, sync_config, now_token, since_token=None):
- """Get the typing events for each room the user is in
+ def ephemeral_by_room(self, sync_config, now_token, since_token=None):
+ """Get the ephemeral events for each room the user is in
Args:
sync_config (SyncConfig): The flags, filters and user for the sync.
now_token (StreamToken): Where the server is currently up to.
@@ -278,25 +292,56 @@ class SyncHandler(BaseHandler):
typing_key = since_token.typing_key if since_token else "0"
+ rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
+ room_ids = [room.room_id for room in rooms]
+
typing_source = self.event_sources.sources["typing"]
- typing, typing_key = yield typing_source.get_new_events_for_user(
+ typing, typing_key = yield typing_source.get_new_events(
user=sync_config.user,
from_key=typing_key,
limit=sync_config.filter.ephemeral_limit(),
+ room_ids=room_ids,
+ is_guest=False,
)
now_token = now_token.copy_and_replace("typing_key", typing_key)
- typing_by_room = {event["room_id"]: [event] for event in typing}
+ ephemeral_by_room = {}
+
for event in typing:
- event.pop("room_id")
- logger.debug("Typing %r", typing_by_room)
+ # we want to exclude the room_id from the event, but modifying the
+ # result returned by the event source is poor form (it might cache
+ # the object)
+ room_id = event["room_id"]
+ event_copy = {k: v for (k, v) in event.iteritems()
+ if k != "room_id"}
+ ephemeral_by_room.setdefault(room_id, []).append(event_copy)
+
+ receipt_key = since_token.receipt_key if since_token else "0"
+
+ receipt_source = self.event_sources.sources["receipt"]
+ receipts, receipt_key = yield receipt_source.get_new_events(
+ user=sync_config.user,
+ from_key=receipt_key,
+ limit=sync_config.filter.ephemeral_limit(),
+ room_ids=room_ids,
+ # /sync doesn't support guest access, so guests can't reach this code path
+ is_guest=False,
+ )
+ now_token = now_token.copy_and_replace("receipt_key", receipt_key)
- defer.returnValue((now_token, typing_by_room))
+ for event in receipts:
+ room_id = event["room_id"]
+ # exclude room id, as above
+ event_copy = {k: v for (k, v) in event.iteritems()
+ if k != "room_id"}
+ ephemeral_by_room.setdefault(room_id, []).append(event_copy)
+
+ defer.returnValue((now_token, ephemeral_by_room))
@defer.inlineCallbacks
def full_state_sync_for_archived_room(self, room_id, sync_config,
leave_event_id, leave_token,
- timeline_since_token):
+ timeline_since_token, tags_by_room):
"""Sync a room for a client which is starting without any state
Returns:
A Deferred JoinedSyncResult.
@@ -306,14 +351,15 @@ class SyncHandler(BaseHandler):
room_id, sync_config, leave_token, since_token=timeline_since_token
)
- leave_state = yield self.store.get_state_for_events(
- [leave_event_id], None
- )
+ leave_state = yield self.store.get_state_for_event(leave_event_id)
defer.returnValue(ArchivedSyncResult(
room_id=room_id,
timeline=batch,
- state=leave_state[leave_event_id].values(),
+ state=leave_state,
+ private_user_data=self.private_user_data_for_room(
+ room_id, tags_by_room
+ ),
))
@defer.inlineCallbacks
@@ -325,15 +371,21 @@ class SyncHandler(BaseHandler):
"""
now_token = yield self.event_sources.get_current_token()
+ rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
+ room_ids = [room.room_id for room in rooms]
+
presence_source = self.event_sources.sources["presence"]
- presence, presence_key = yield presence_source.get_new_events_for_user(
+ presence, presence_key = yield presence_source.get_new_events(
user=sync_config.user,
from_key=since_token.presence_key,
limit=sync_config.filter.presence_limit(),
+ room_ids=room_ids,
+ # /sync doesn't support guest access, so guests can't reach this code path
+ is_guest=False,
)
now_token = now_token.copy_and_replace("presence_key", presence_key)
- now_token, typing_by_room = yield self.typing_by_room(
+ now_token, ephemeral_by_room = yield self.ephemeral_by_room(
sync_config, now_token, since_token
)
@@ -355,15 +407,22 @@ class SyncHandler(BaseHandler):
sync_config.user.to_string(),
from_key=since_token.room_key,
to_key=now_token.room_key,
- room_id=None,
limit=timeline_limit + 1,
)
+ tags_by_room = yield self.store.get_updated_tags(
+ sync_config.user.to_string(),
+ since_token.private_user_data_key,
+ )
+
joined = []
archived = []
if len(room_events) <= timeline_limit:
# There is no gap in any of the rooms. Therefore we can just
# partition the new events by room and return them.
+ logger.debug("Got %i events for incremental sync - not limited",
+ len(room_events))
+
invite_events = []
leave_events = []
events_by_room_id = {}
@@ -379,7 +438,12 @@ class SyncHandler(BaseHandler):
for room_id in joined_room_ids:
recents = events_by_room_id.get(room_id, [])
- state = [event for event in recents if event.is_state()]
+ logger.debug("Events for room %s: %r", room_id, recents)
+ state = {
+ (event.type, event.state_key): event
+ for event in recents if event.is_state()}
+ limited = False
+
if recents:
prev_batch = now_token.copy_and_replace(
"room_key", recents[0].internal_metadata.before
@@ -387,9 +451,13 @@ class SyncHandler(BaseHandler):
else:
prev_batch = now_token
- state, limited = yield self.check_joined_room(
- sync_config, room_id, state
- )
+ just_joined = yield self.check_joined_room(sync_config, state)
+ if just_joined:
+ logger.debug("User has just joined %s: needs full state",
+ room_id)
+ state = yield self.get_state_at(room_id, now_token)
+ # the timeline is inherently limited if we've just joined
+ limited = True
room_sync = JoinedSyncResult(
room_id=room_id,
@@ -399,12 +467,20 @@ class SyncHandler(BaseHandler):
limited=limited,
),
state=state,
- ephemeral=typing_by_room.get(room_id, [])
+ ephemeral=ephemeral_by_room.get(room_id, []),
+ private_user_data=self.private_user_data_for_room(
+ room_id, tags_by_room
+ ),
)
+ logger.debug("Result for room %s: %r", room_id, room_sync)
+
if room_sync:
joined.append(room_sync)
else:
+ logger.debug("Got %i events for incremental sync - hit limit",
+ len(room_events))
+
invite_events = yield self.store.get_invites_for_user(
sync_config.user.to_string()
)
@@ -416,14 +492,14 @@ class SyncHandler(BaseHandler):
for room_id in joined_room_ids:
room_sync = yield self.incremental_sync_with_gap_for_room(
room_id, sync_config, since_token, now_token,
- typing_by_room
+ ephemeral_by_room, tags_by_room
)
if room_sync:
joined.append(room_sync)
for leave_event in leave_events:
room_sync = yield self.incremental_sync_for_archived_room(
- sync_config, leave_event, since_token
+ sync_config, leave_event, since_token, tags_by_room
)
archived.append(room_sync)
@@ -443,6 +519,9 @@ class SyncHandler(BaseHandler):
@defer.inlineCallbacks
def load_filtered_recents(self, room_id, sync_config, now_token,
since_token=None):
+ """
+ :returns: a Deferred TimelineBatch
+ """
limited = True
recents = []
filtering_factor = 2
@@ -487,13 +566,15 @@ class SyncHandler(BaseHandler):
@defer.inlineCallbacks
def incremental_sync_with_gap_for_room(self, room_id, sync_config,
since_token, now_token,
- typing_by_room):
+ ephemeral_by_room, tags_by_room):
""" Get the incremental delta needed to bring the client up to date for
the room. Gives the client the most recent events and the changes to
state.
Returns:
A Deferred JoinedSyncResult
"""
+ logger.debug("Doing incremental sync for room %s between %s and %s",
+ room_id, since_token, now_token)
# TODO(mjark): Check for redactions we might have missed.
@@ -503,32 +584,30 @@ class SyncHandler(BaseHandler):
logging.debug("Recents %r", batch)
- # TODO(mjark): This seems racy since this isn't being passed a
- # token to indicate what point in the stream this is
- current_state = yield self.state_handler.get_current_state(
- room_id
- )
- current_state_events = current_state.values()
+ current_state = yield self.get_state_at(room_id, now_token)
- state_at_previous_sync = yield self.get_state_at_previous_sync(
- room_id, since_token=since_token
+ state_at_previous_sync = yield self.get_state_at(
+ room_id, stream_position=since_token
)
- state_events_delta = yield self.compute_state_delta(
+ state = yield self.compute_state_delta(
since_token=since_token,
previous_state=state_at_previous_sync,
- current_state=current_state_events,
+ current_state=current_state,
)
- state_events_delta, _ = yield self.check_joined_room(
- sync_config, room_id, state_events_delta
- )
+ just_joined = yield self.check_joined_room(sync_config, state)
+ if just_joined:
+ state = yield self.get_state_at(room_id, now_token)
room_sync = JoinedSyncResult(
room_id=room_id,
timeline=batch,
- state=state_events_delta,
- ephemeral=typing_by_room.get(room_id, [])
+ state=state,
+ ephemeral=ephemeral_by_room.get(room_id, []),
+ private_user_data=self.private_user_data_for_room(
+ room_id, tags_by_room
+ ),
)
logging.debug("Room sync: %r", room_sync)
@@ -537,7 +616,7 @@ class SyncHandler(BaseHandler):
@defer.inlineCallbacks
def incremental_sync_for_archived_room(self, sync_config, leave_event,
- since_token):
+ since_token, tags_by_room):
""" Get the incremental delta needed to bring the client up to date for
the archived room.
Returns:
@@ -556,16 +635,12 @@ class SyncHandler(BaseHandler):
logging.debug("Recents %r", batch)
- # TODO(mjark): This seems racy since this isn't being passed a
- # token to indicate what point in the stream this is
- leave_state = yield self.store.get_state_for_events(
- [leave_event.event_id], None
+ state_events_at_leave = yield self.store.get_state_for_event(
+ leave_event.event_id
)
- state_events_at_leave = leave_state[leave_event.event_id].values()
-
- state_at_previous_sync = yield self.get_state_at_previous_sync(
- leave_event.room_id, since_token=since_token
+ state_at_previous_sync = yield self.get_state_at(
+ leave_event.room_id, stream_position=since_token
)
state_events_delta = yield self.compute_state_delta(
@@ -578,6 +653,9 @@ class SyncHandler(BaseHandler):
room_id=leave_event.room_id,
timeline=batch,
state=state_events_delta,
+ private_user_data=self.private_user_data_for_room(
+ leave_event.room_id, tags_by_room
+ ),
)
logging.debug("Room sync: %r", room_sync)
@@ -585,60 +663,77 @@ class SyncHandler(BaseHandler):
defer.returnValue(room_sync)
@defer.inlineCallbacks
- def get_state_at_previous_sync(self, room_id, since_token):
- """ Get the room state at the previous sync the client made.
- Returns:
- A Deferred list of Events.
+ def get_state_after_event(self, event):
+ """
+ Get the room state after the given event
+
+ :param synapse.events.EventBase event: event of interest
+ :return: A Deferred map from ((type, state_key)->Event)
+ """
+ state = yield self.store.get_state_for_event(event.event_id)
+ if event.is_state():
+ state = state.copy()
+ state[(event.type, event.state_key)] = event
+ defer.returnValue(state)
+
+ @defer.inlineCallbacks
+ def get_state_at(self, room_id, stream_position):
+ """ Get the room state at a particular stream position
+ :param str room_id: room for which to get state
+ :param StreamToken stream_position: point at which to get state
+ :returns: A Deferred map from ((type, state_key)->Event)
"""
last_events, token = yield self.store.get_recent_events_for_room(
- room_id, end_token=since_token.room_key, limit=1,
+ room_id, end_token=stream_position.room_key, limit=1,
)
if last_events:
- last_event = last_events[0]
- last_context = yield self.state_handler.compute_event_context(
- last_event
- )
- if last_event.is_state():
- state = [last_event] + last_context.current_state.values()
- else:
- state = last_context.current_state.values()
+ last_event = last_events[-1]
+ state = yield self.get_state_after_event(last_event)
+
else:
- state = ()
+ # no events in this room - so presumably no state
+ state = {}
defer.returnValue(state)
def compute_state_delta(self, since_token, previous_state, current_state):
""" Works out the differnce in state between the current state and the
state the client got when it last performed a sync.
- Returns:
- A list of events.
+
+ :param str since_token: the point we are comparing against
+ :param dict[(str,str), synapse.events.FrozenEvent] previous_state: the
+ state to compare to
+ :param dict[(str,str), synapse.events.FrozenEvent] current_state: the
+ new state
+
+ :returns: A new event dictionary
"""
# TODO(mjark) Check if the state events were received by the server
# after the previous sync, since we need to include those state
# updates even if they occured logically before the previous event.
# TODO(mjark) Check for new redactions in the state events.
- previous_dict = {event.event_id: event for event in previous_state}
- state_delta = []
- for event in current_state:
- if event.event_id not in previous_dict:
- state_delta.append(event)
+
+ state_delta = {}
+ for key, event in current_state.iteritems():
+ if (key not in previous_state or
+ previous_state[key].event_id != event.event_id):
+ state_delta[key] = event
return state_delta
- @defer.inlineCallbacks
- def check_joined_room(self, sync_config, room_id, state_delta):
- joined = False
- limited = False
- for event in state_delta:
- if (
- event.type == EventTypes.Member
- and event.state_key == sync_config.user.to_string()
- ):
- if event.content["membership"] == Membership.JOIN:
- joined = True
-
- if joined:
- res = yield self.state_handler.get_current_state(room_id)
- state_delta = res.values()
- limited = True
+ def check_joined_room(self, sync_config, state_delta):
+ """
+ Check if the user has just joined the given room (so should
+ be given the full state)
+
+ :param sync_config:
+ :param dict[(str,str), synapse.events.FrozenEvent] state_delta: the
+ difference in state since the last sync
- defer.returnValue((state_delta, limited))
+ :returns: A boolean indicating whether the user has just joined the room
+ """
+ join_event = state_delta.get((
+ EventTypes.Member, sync_config.user.to_string()), None)
+ if join_event is not None:
+ if join_event.content["membership"] == Membership.JOIN:
+ return True
+ return False
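compute_state_delta now compares state dictionaries keyed by (event type, state key) and keeps a key whenever the event at that key has changed. A simplified, self-contained version using event ids in place of FrozenEvent objects::

    def state_delta(previous_state, current_state):
        """Both arguments map (event_type, state_key) -> event id."""
        delta = {}
        for key, event_id in current_state.items():
            if key not in previous_state or previous_state[key] != event_id:
                delta[key] = event_id
        return delta

    print(state_delta(
        {("m.room.name", ""): "$old", ("m.room.topic", ""): "$t"},
        {("m.room.name", ""): "$new", ("m.room.topic", ""): "$t"},
    ))  # {('m.room.name', ''): '$new'}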
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index d7096aab8c..2846f3e6e8 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -246,17 +246,12 @@ class TypingNotificationEventSource(object):
},
}
- @defer.inlineCallbacks
- def get_new_events_for_user(self, user, from_key, limit):
+ def get_new_events(self, from_key, room_ids, **kwargs):
from_key = int(from_key)
handler = self.handler()
- joined_room_ids = (
- yield self.room_member_handler().get_joined_rooms_for_user(user)
- )
-
events = []
- for room_id in joined_room_ids:
+ for room_id in room_ids:
if room_id not in handler._room_serials:
continue
if handler._room_serials[room_id] <= from_key:
@@ -264,7 +259,7 @@ class TypingNotificationEventSource(object):
events.append(self._make_event_for(room_id))
- defer.returnValue((events, handler._latest_room_serial))
+ return events, handler._latest_room_serial
def get_current_key(self):
return self.handler()._latest_room_serial
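Presence, receipts, typing and room sources all move from get_new_events_for_user(user, from_key, limit) to a common get_new_events(...) signature, with the notifier supplying room_ids instead of each source looking up the user's rooms. A toy source illustrating the new shape (details such as caching and guest filtering are omitted)::

    class ExampleEventSource(object):
        def __init__(self):
            self._events_by_room = {}  # room_id -> list of (stream_id, event)

        def get_current_key(self):
            ids = [sid for evs in self._events_by_room.values() for sid, _ in evs]
            return max(ids) if ids else 0

        def get_new_events(self, user, from_key, room_ids=None, limit=None,
                           is_guest=False, **kwargs):
            room_ids = room_ids or []
            to_key = self.get_current_key()
            events = [
                ev
                for room_id in room_ids
                for sid, ev in self._events_by_room.get(room_id, [])
                if from_key < sid <= to_key
            ]
            if limit is not None:
                events = events[:limit]
            return events, to_key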
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index b50a0c445c..6e53538a52 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -35,6 +35,7 @@ from signedjson.sign import sign_json
import simplejson as json
import logging
+import random
import sys
import urllib
import urlparse
@@ -55,6 +56,9 @@ incoming_responses_counter = metrics.register_counter(
)
+MAX_RETRIES = 4
+
+
class MatrixFederationEndpointFactory(object):
def __init__(self, hs):
self.tls_server_context_factory = hs.tls_server_context_factory
@@ -119,7 +123,7 @@ class MatrixFederationHttpClient(object):
# XXX: Would be much nicer to retry only at the transaction-layer
# (once we have reliable transactions in place)
- retries_left = 5
+ retries_left = MAX_RETRIES
http_url_bytes = urlparse.urlunparse(
("", "", path_bytes, param_bytes, query_bytes, "")
@@ -180,7 +184,9 @@ class MatrixFederationHttpClient(object):
)
if retries_left and not timeout:
- yield sleep(2 ** (5 - retries_left))
+ delay = 5 ** (MAX_RETRIES + 1 - retries_left)
+ delay *= random.uniform(0.8, 1.4)
+ yield sleep(delay)
retries_left -= 1
else:
raise
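With MAX_RETRIES = 4, the base delay before each retry is 5 ** (MAX_RETRIES + 1 - retries_left), i.e. 5, 25, 125 and 625 seconds, each multiplied by a random factor in [0.8, 1.4]. A quick sketch that reproduces the schedule::

    import random

    MAX_RETRIES = 4

    def retry_delays(rng=random):
        delays = []
        retries_left = MAX_RETRIES
        while retries_left:
            base = 5 ** (MAX_RETRIES + 1 - retries_left)
            delays.append(base * rng.uniform(0.8, 1.4))
            retries_left -= 1
        return delays

    print(retry_delays())  # e.g. [~5, ~25, ~125, ~625] seconds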
diff --git a/synapse/notifier.py b/synapse/notifier.py
index f998fc83bf..e3b42e2331 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -14,6 +14,8 @@
# limitations under the License.
from twisted.internet import defer
+from synapse.api.constants import EventTypes
+from synapse.api.errors import AuthError
from synapse.util.logutils import log_function
from synapse.util.async import run_on_reactor, ObservableDeferred
@@ -269,8 +271,8 @@ class Notifier(object):
logger.exception("Failed to notify listener")
@defer.inlineCallbacks
- def wait_for_events(self, user, rooms, timeout, callback,
- from_token=StreamToken("s0", "0", "0", "0")):
+ def wait_for_events(self, user, timeout, callback, room_ids=None,
+ from_token=StreamToken("s0", "0", "0", "0", "0")):
"""Wait until the callback returns a non empty response or the
timeout fires.
"""
@@ -279,11 +281,12 @@ class Notifier(object):
if user_stream is None:
appservice = yield self.store.get_app_service_by_user_id(user)
current_token = yield self.event_sources.get_current_token()
- rooms = yield self.store.get_rooms_for_user(user)
- rooms = [room.room_id for room in rooms]
+ if room_ids is None:
+ rooms = yield self.store.get_rooms_for_user(user)
+ room_ids = [room.room_id for room in rooms]
user_stream = _NotifierUserStream(
user=user,
- rooms=rooms,
+ rooms=room_ids,
appservice=appservice,
current_token=current_token,
time_now_ms=self.clock.time_msec(),
@@ -328,8 +331,9 @@ class Notifier(object):
defer.returnValue(result)
@defer.inlineCallbacks
- def get_events_for(self, user, rooms, pagination_config, timeout,
- only_room_events=False):
+ def get_events_for(self, user, pagination_config, timeout,
+ only_room_events=False,
+ is_guest=False, guest_room_id=None):
""" For the given user and rooms, return any new events for them. If
there are no new events wait for up to `timeout` milliseconds for any
new events to happen before returning.
@@ -342,6 +346,16 @@ class Notifier(object):
limit = pagination_config.limit
+ room_ids = []
+ if is_guest:
+ if guest_room_id:
+ if not self._is_world_readable(guest_room_id):
+ raise AuthError(403, "Guest access not allowed")
+ room_ids = [guest_room_id]
+ else:
+ rooms = yield self.store.get_rooms_for_user(user.to_string())
+ room_ids = [room.room_id for room in rooms]
+
@defer.inlineCallbacks
def check_for_updates(before_token, after_token):
if not after_token.is_after(before_token):
@@ -349,6 +363,7 @@ class Notifier(object):
events = []
end_token = from_token
+
for name, source in self.event_sources.sources.items():
keyname = "%s_key" % name
before_id = getattr(before_token, keyname)
@@ -357,9 +372,23 @@ class Notifier(object):
continue
if only_room_events and name != "room":
continue
- new_events, new_key = yield source.get_new_events_for_user(
- user, getattr(from_token, keyname), limit,
+ new_events, new_key = yield source.get_new_events(
+ user=user,
+ from_key=getattr(from_token, keyname),
+ limit=limit,
+ is_guest=is_guest,
+ room_ids=room_ids,
)
+
+ if name == "room":
+ room_member_handler = self.hs.get_handlers().room_member_handler
+ new_events = yield room_member_handler._filter_events_for_client(
+ user.to_string(),
+ new_events,
+ is_guest=is_guest,
+ require_all_visible_for_guests=False
+ )
+
events.extend(new_events)
end_token = end_token.copy_and_replace(keyname, new_key)
@@ -369,7 +398,7 @@ class Notifier(object):
defer.returnValue(None)
result = yield self.wait_for_events(
- user, rooms, timeout, check_for_updates, from_token=from_token
+ user, timeout, check_for_updates, room_ids=room_ids, from_token=from_token
)
if result is None:
@@ -377,6 +406,17 @@ class Notifier(object):
defer.returnValue(result)
+ @defer.inlineCallbacks
+ def _is_world_readable(self, room_id):
+ state = yield self.hs.get_state_handler().get_current_state(
+ room_id,
+ EventTypes.RoomHistoryVisibility
+ )
+ if state and "history_visibility" in state.content:
+ defer.returnValue(state.content["history_visibility"] == "world_readable")
+ else:
+ defer.returnValue(False)
+
@log_function
def remove_expired_streams(self):
time_now_ms = self.clock.time_msec()
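Guests using get_events_for are restricted to a single room, which must be world-readable. A stand-alone version of the _is_world_readable check (the argument stands in for the room's current m.room.history_visibility state event, or None if it has none)::

    class _Event(object):
        def __init__(self, content):
            self.content = content

    def is_world_readable(history_visibility_event):
        if history_visibility_event and \
                "history_visibility" in history_visibility_event.content:
            return history_visibility_event.content["history_visibility"] == "world_readable"
        return False

    print(is_world_readable(_Event({"history_visibility": "world_readable"})))  # True
    print(is_world_readable(None))                                              # False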
diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py
index 504b63eab4..bdde43864c 100644
--- a/synapse/rest/client/v1/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -31,7 +31,7 @@ class WhoisRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, user_id):
target_user = UserID.from_string(user_id)
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
is_admin = yield self.auth.is_server_admin(auth_user)
if not is_admin and target_user != auth_user:
diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py
index 4dcda57c1b..240eedac75 100644
--- a/synapse/rest/client/v1/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -69,7 +69,7 @@ class ClientDirectoryServer(ClientV1RestServlet):
try:
# try to auth as a user
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, _ = yield self.auth.get_user_by_req(request)
try:
user_id = user.to_string()
yield dir_handler.create_association(
@@ -116,7 +116,7 @@ class ClientDirectoryServer(ClientV1RestServlet):
# fallback to default user behaviour if they aren't an AS
pass
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, _ = yield self.auth.get_user_by_req(request)
is_admin = yield self.auth.is_server_admin(user)
if not is_admin:
diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py
index 582148b659..3e1750d1a1 100644
--- a/synapse/rest/client/v1/events.py
+++ b/synapse/rest/client/v1/events.py
@@ -34,7 +34,15 @@ class EventStreamRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, is_guest = yield self.auth.get_user_by_req(
+ request,
+ allow_guest=True
+ )
+ room_id = None
+ if is_guest:
+ if "room_id" not in request.args:
+ raise SynapseError(400, "Guest users must specify room_id param")
+ room_id = request.args["room_id"][0]
try:
handler = self.handlers.event_stream_handler
pagin_config = PaginationConfig.from_request(request)
@@ -49,7 +57,8 @@ class EventStreamRestServlet(ClientV1RestServlet):
chunk = yield handler.get_stream(
auth_user.to_string(), pagin_config, timeout=timeout,
- as_client_event=as_client_event
+ as_client_event=as_client_event, affect_presence=(not is_guest),
+ room_id=room_id, is_guest=is_guest
)
except:
logger.exception("Event stream failed")
@@ -71,7 +80,7 @@ class EventRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, event_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
handler = self.handlers.event_handler
event = yield handler.get_event(auth_user, event_id)
diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py
index 52c7943400..856a70f297 100644
--- a/synapse/rest/client/v1/initial_sync.py
+++ b/synapse/rest/client/v1/initial_sync.py
@@ -25,7 +25,7 @@ class InitialSyncRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request):
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, _ = yield self.auth.get_user_by_req(request)
as_client_event = "raw" not in request.args
pagination_config = PaginationConfig.from_request(request)
handler = self.handlers.message_handler
diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py
index a770efd841..6fe5d19a22 100644
--- a/synapse/rest/client/v1/presence.py
+++ b/synapse/rest/client/v1/presence.py
@@ -32,7 +32,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, user_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)
state = yield self.handlers.presence_handler.get_state(
@@ -42,7 +42,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)
state = {}
@@ -77,7 +77,7 @@ class PresenceListRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, user_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)
if not self.hs.is_mine(user):
@@ -97,7 +97,7 @@ class PresenceListRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_POST(self, request, user_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)
if not self.hs.is_mine(user):
diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py
index fdde88a60d..3218e47025 100644
--- a/synapse/rest/client/v1/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -37,7 +37,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request, allow_guest=True)
user = UserID.from_string(user_id)
try:
@@ -70,7 +70,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)
try:
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index bd759a2589..b0870db1ac 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -43,7 +43,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
except InvalidRuleException as e:
raise SynapseError(400, e.message)
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, _ = yield self.auth.get_user_by_req(request)
if '/' in spec['rule_id'] or '\\' in spec['rule_id']:
raise SynapseError(400, "rule_id may not contain slashes")
@@ -92,7 +92,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
def on_DELETE(self, request):
spec = _rule_spec_from_path(request.postpath)
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, _ = yield self.auth.get_user_by_req(request)
namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
@@ -109,7 +109,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request):
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, _ = yield self.auth.get_user_by_req(request)
# we build up the full structure and then decide which bits of it
# to send which means doing unnecessary work sometimes but is
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index 3aabc93b8b..a110c0a4f0 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -27,7 +27,7 @@ class PusherRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_POST(self, request):
- user, token_id = yield self.auth.get_user_by_req(request)
+ user, token_id, _ = yield self.auth.get_user_by_req(request)
content = _parse_json(request)
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 2dcaee86cd..139dac1cc3 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -17,7 +17,7 @@
from twisted.internet import defer
from base import ClientV1RestServlet, client_path_pattern
-from synapse.api.errors import SynapseError, Codes
+from synapse.api.errors import SynapseError, Codes, AuthError
from synapse.streams.config import PaginationConfig
from synapse.api.constants import EventTypes, Membership
from synapse.types import UserID, RoomID, RoomAlias
@@ -26,7 +26,6 @@ from synapse.events.utils import serialize_event
import simplejson as json
import logging
import urllib
-from synapse.util import third_party_invites
logger = logging.getLogger(__name__)
@@ -62,7 +61,7 @@ class RoomCreateRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_POST(self, request):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
room_config = self.get_room_config(request)
info = yield self.make_room(room_config, auth_user, None)
@@ -125,7 +124,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, room_id, event_type, state_key):
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
msg_handler = self.handlers.message_handler
data = yield msg_handler.get_room_data(
@@ -133,6 +132,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
room_id=room_id,
event_type=event_type,
state_key=state_key,
+ is_guest=is_guest,
)
if not data:
@@ -143,7 +143,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_PUT(self, request, room_id, event_type, state_key, txn_id=None):
- user, token_id = yield self.auth.get_user_by_req(request)
+ user, token_id, _ = yield self.auth.get_user_by_req(request)
content = _parse_json(request)
@@ -175,7 +175,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_POST(self, request, room_id, event_type, txn_id=None):
- user, token_id = yield self.auth.get_user_by_req(request)
+ user, token_id, _ = yield self.auth.get_user_by_req(request, allow_guest=True)
content = _parse_json(request)
msg_handler = self.handlers.message_handler
@@ -220,7 +220,10 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_POST(self, request, room_identifier, txn_id=None):
- user, token_id = yield self.auth.get_user_by_req(request)
+ user, token_id, is_guest = yield self.auth.get_user_by_req(
+ request,
+ allow_guest=True
+ )
# the identifier could be a room alias or a room id. Try one then the
# other if it fails to parse, without swallowing other valid
@@ -242,16 +245,20 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
defer.returnValue((200, ret_dict))
else: # room id
msg_handler = self.handlers.message_handler
+ content = {"membership": Membership.JOIN}
+ if is_guest:
+ content["kind"] = "guest"
yield msg_handler.create_and_send_event(
{
"type": EventTypes.Member,
- "content": {"membership": Membership.JOIN},
+ "content": content,
"room_id": identifier.to_string(),
"sender": user.to_string(),
"state_key": user.to_string(),
},
token_id=token_id,
txn_id=txn_id,
+ is_guest=is_guest,
)
defer.returnValue((200, {"room_id": identifier.to_string()}))
@@ -289,7 +296,7 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, room_id):
# TODO support Pagination stream API (limit/tokens)
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, _ = yield self.auth.get_user_by_req(request)
handler = self.handlers.message_handler
events = yield handler.get_state_events(
room_id=room_id,
@@ -319,13 +326,13 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
}))
-# TODO: Needs unit testing
+# TODO: Needs better unit testing
class RoomMessageListRestServlet(ClientV1RestServlet):
PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/messages$")
@defer.inlineCallbacks
def on_GET(self, request, room_id):
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
pagination_config = PaginationConfig.from_request(
request, default_limit=10,
)
@@ -334,6 +341,7 @@ class RoomMessageListRestServlet(ClientV1RestServlet):
msgs = yield handler.get_messages(
room_id=room_id,
user_id=user.to_string(),
+ is_guest=is_guest,
pagin_config=pagination_config,
as_client_event=as_client_event
)
@@ -347,12 +355,13 @@ class RoomStateRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, room_id):
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
handler = self.handlers.message_handler
# Get all the current state for this room
events = yield handler.get_state_events(
room_id=room_id,
user_id=user.to_string(),
+ is_guest=is_guest,
)
defer.returnValue((200, events))
@@ -363,12 +372,13 @@ class RoomInitialSyncRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, room_id):
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
pagination_config = PaginationConfig.from_request(request)
content = yield self.handlers.message_handler.room_initial_sync(
room_id=room_id,
user_id=user.to_string(),
pagin_config=pagination_config,
+ is_guest=is_guest,
)
defer.returnValue((200, content))
@@ -408,12 +418,12 @@ class RoomEventContext(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, room_id, event_id):
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, is_guest = yield self.auth.get_user_by_req(request, allow_guest=True)
limit = int(request.args.get("limit", [10])[0])
results = yield self.handlers.room_context_handler.get_event_context(
- user, room_id, event_id, limit,
+ user, room_id, event_id, limit, is_guest
)
time_now = self.clock.time_msec()
@@ -443,21 +453,26 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_POST(self, request, room_id, membership_action, txn_id=None):
- user, token_id = yield self.auth.get_user_by_req(request)
+ user, token_id, is_guest = yield self.auth.get_user_by_req(
+ request,
+ allow_guest=True
+ )
+
+ if is_guest and membership_action not in {Membership.JOIN, Membership.LEAVE}:
+ raise AuthError(403, "Guest access not allowed")
content = _parse_json(request)
# target user is you unless it is an invite
state_key = user.to_string()
- if membership_action == "invite" and third_party_invites.has_invite_keys(content):
+ if membership_action == "invite" and self._has_3pid_invite_keys(content):
yield self.handlers.room_member_handler.do_3pid_invite(
room_id,
user,
content["medium"],
content["address"],
content["id_server"],
- content["display_name"],
token_id,
txn_id
)
@@ -477,29 +492,31 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
msg_handler = self.handlers.message_handler
- event_content = {
- "membership": unicode(membership_action),
- }
-
- if membership_action == "join" and third_party_invites.has_join_keys(content):
- event_content["third_party_invite"] = (
- third_party_invites.extract_join_keys(content)
- )
+ content = {"membership": unicode(membership_action)}
+ if is_guest:
+ content["kind"] = "guest"
yield msg_handler.create_and_send_event(
{
"type": EventTypes.Member,
- "content": event_content,
+ "content": content,
"room_id": room_id,
"sender": user.to_string(),
"state_key": state_key,
},
token_id=token_id,
txn_id=txn_id,
+ is_guest=is_guest,
)
defer.returnValue((200, {}))
+ def _has_3pid_invite_keys(self, content):
+ for key in {"id_server", "medium", "address"}:
+ if key not in content:
+ return False
+ return True
+
@defer.inlineCallbacks
def on_PUT(self, request, room_id, membership_action, txn_id):
try:
@@ -524,7 +541,7 @@ class RoomRedactEventRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_POST(self, request, room_id, event_id, txn_id=None):
- user, token_id = yield self.auth.get_user_by_req(request)
+ user, token_id, _ = yield self.auth.get_user_by_req(request)
content = _parse_json(request)
msg_handler = self.handlers.message_handler
@@ -564,7 +581,7 @@ class RoomTypingRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_PUT(self, request, room_id, user_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
room_id = urllib.unquote(room_id)
target_user = UserID.from_string(urllib.unquote(user_id))
@@ -597,11 +614,12 @@ class SearchRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_POST(self, request):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
content = _parse_json(request)
- results = yield self.handlers.search_handler.search(auth_user, content)
+ batch = request.args.get("next_batch", [None])[0]
+ results = yield self.handlers.search_handler.search(auth_user, content, batch)
defer.returnValue((200, results))
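
For illustration only (not part of this changeset): the hunk above threads an optional next_batch query parameter through to the search handler so clients can page through results. A minimal sketch, assuming the v1 URL prefix, a placeholder access token and the usual search body shape (all illustrative, not taken from this diff):

import requests

resp = requests.post(
    "https://my.homeserver/_matrix/client/api/v1/search",
    params={
        "access_token": "SOME_TOKEN",                  # placeholder
        "next_batch": "TOKEN_FROM_PREVIOUS_RESPONSE",  # omit on the first call
    },
    # the body shape here is an assumption for illustration
    json={"search_categories": {"room_events": {"search_term": "pizza"}}},
)
print(resp.json())
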
diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py
index 0a863e1c61..eb7c57cade 100644
--- a/synapse/rest/client/v1/voip.py
+++ b/synapse/rest/client/v1/voip.py
@@ -28,7 +28,7 @@ class VoipRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
turnUris = self.hs.config.turn_uris
turnSecret = self.hs.config.turn_shared_secret
diff --git a/synapse/rest/client/v2_alpha/__init__.py b/synapse/rest/client/v2_alpha/__init__.py
index 5831ff0e62..a108132346 100644
--- a/synapse/rest/client/v2_alpha/__init__.py
+++ b/synapse/rest/client/v2_alpha/__init__.py
@@ -22,6 +22,7 @@ from . import (
receipts,
keys,
tokenrefresh,
+ tags,
)
from synapse.http.server import JsonResource
@@ -44,3 +45,4 @@ class ClientV2AlphaRestResource(JsonResource):
receipts.register_servlets(hs, client_resource)
keys.register_servlets(hs, client_resource)
tokenrefresh.register_servlets(hs, client_resource)
+ tags.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 4692ba413c..1970ad3458 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -55,7 +55,7 @@ class PasswordRestServlet(RestServlet):
if LoginType.PASSWORD in result:
# if using password, they should also be logged in
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
if auth_user.to_string() != result[LoginType.PASSWORD]:
raise LoginError(400, "", Codes.UNKNOWN)
user_id = auth_user.to_string()
@@ -102,7 +102,7 @@ class ThreepidRestServlet(RestServlet):
def on_GET(self, request):
yield run_on_reactor()
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
threepids = yield self.hs.get_datastore().user_get_threepids(
auth_user.to_string()
@@ -120,7 +120,7 @@ class ThreepidRestServlet(RestServlet):
raise SynapseError(400, "Missing param", Codes.MISSING_PARAM)
threePidCreds = body['threePidCreds']
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
threepid = yield self.identity_handler.threepid_from_creds(threePidCreds)
diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py
index f8f91b63f5..97956a4b91 100644
--- a/synapse/rest/client/v2_alpha/filter.py
+++ b/synapse/rest/client/v2_alpha/filter.py
@@ -40,7 +40,7 @@ class GetFilterRestServlet(RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, user_id, filter_id):
target_user = UserID.from_string(user_id)
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
if target_user != auth_user:
raise AuthError(403, "Cannot get filters for other users")
@@ -76,7 +76,7 @@ class CreateFilterRestServlet(RestServlet):
@defer.inlineCallbacks
def on_POST(self, request, user_id):
target_user = UserID.from_string(user_id)
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
if target_user != auth_user:
raise AuthError(403, "Cannot create filters for other users")
diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py
index a1f4423101..820d33336f 100644
--- a/synapse/rest/client/v2_alpha/keys.py
+++ b/synapse/rest/client/v2_alpha/keys.py
@@ -64,7 +64,7 @@ class KeyUploadServlet(RestServlet):
@defer.inlineCallbacks
def on_POST(self, request, device_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
user_id = auth_user.to_string()
# TODO: Check that the device_id matches that in the authentication
# or derive the device_id from the authentication instead.
@@ -109,7 +109,7 @@ class KeyUploadServlet(RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, device_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
user_id = auth_user.to_string()
result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
@@ -181,7 +181,7 @@ class KeyQueryServlet(RestServlet):
@defer.inlineCallbacks
def on_GET(self, request, user_id, device_id):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
auth_user_id = auth_user.to_string()
user_id = user_id if user_id else auth_user_id
device_ids = [device_id] if device_id else []
diff --git a/synapse/rest/client/v2_alpha/receipts.py b/synapse/rest/client/v2_alpha/receipts.py
index b107b7ce17..788acd4adb 100644
--- a/synapse/rest/client/v2_alpha/receipts.py
+++ b/synapse/rest/client/v2_alpha/receipts.py
@@ -40,7 +40,7 @@ class ReceiptRestServlet(RestServlet):
@defer.inlineCallbacks
def on_POST(self, request, room_id, receipt_type, event_id):
- user, _ = yield self.auth.get_user_by_req(request)
+ user, _, _ = yield self.auth.get_user_by_req(request)
if receipt_type != "m.read":
raise SynapseError(400, "Receipt type must be 'm.read'")
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index 1ba2f29711..f899376311 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -16,7 +16,7 @@
from twisted.internet import defer
from synapse.api.constants import LoginType
-from synapse.api.errors import SynapseError, Codes
+from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError
from synapse.http.servlet import RestServlet
from ._base import client_v2_pattern, parse_json_dict_from_request
@@ -55,6 +55,19 @@ class RegisterRestServlet(RestServlet):
def on_POST(self, request):
yield run_on_reactor()
+ kind = "user"
+ if "kind" in request.args:
+ kind = request.args["kind"][0]
+
+ if kind == "guest":
+ ret = yield self._do_guest_registration()
+ defer.returnValue(ret)
+ return
+ elif kind != "user":
+ raise UnrecognizedRequestError(
+                "Do not understand registration kind: %s" % (kind,)
+ )
+
if '/register/email/requestToken' in request.path:
ret = yield self.onEmailTokenRequest(request)
defer.returnValue(ret)
@@ -236,6 +249,18 @@ class RegisterRestServlet(RestServlet):
ret = yield self.identity_handler.requestEmailToken(**body)
defer.returnValue((200, ret))
+ @defer.inlineCallbacks
+ def _do_guest_registration(self):
+ if not self.hs.config.allow_guest_access:
+ defer.returnValue((403, "Guest access is disabled"))
+ user_id, _ = yield self.registration_handler.register(generate_token=False)
+ access_token = self.auth_handler.generate_access_token(user_id, ["guest = true"])
+ defer.returnValue((200, {
+ "user_id": user_id,
+ "access_token": access_token,
+ "home_server": self.hs.hostname,
+ }))
+
def register_servlets(hs, http_server):
RegisterRestServlet(hs).register(http_server)
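
For illustration only (not part of this changeset): a minimal client-side sketch of the guest registration path added above. The v2_alpha URL prefix and homeserver address are assumptions; the response keys mirror the dict built in _do_guest_registration.

import requests

resp = requests.post(
    "https://my.homeserver/_matrix/client/v2_alpha/register",
    params={"kind": "guest"},   # any kind other than "user" or "guest" is rejected
    json={},
)
body = resp.json()
print(body["user_id"], body["access_token"], body["home_server"])
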
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 1840eef775..efd8281558 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -20,6 +20,7 @@ from synapse.http.servlet import (
)
from synapse.handlers.sync import SyncConfig
from synapse.types import StreamToken
+from synapse.events import FrozenEvent
from synapse.events.utils import (
serialize_event, format_event_for_client_v2_without_event_id,
)
@@ -81,7 +82,7 @@ class SyncRestServlet(RestServlet):
@defer.inlineCallbacks
def on_GET(self, request):
- user, token_id = yield self.auth.get_user_by_req(request)
+ user, token_id, _ = yield self.auth.get_user_by_req(request)
timeout = parse_integer(request, "timeout", default=0)
since = parse_string(request, "since")
@@ -165,6 +166,20 @@ class SyncRestServlet(RestServlet):
return {"events": filter.filter_presence(formatted)}
def encode_joined(self, rooms, filter, time_now, token_id):
+ """
+ Encode the joined rooms in a sync result
+
+ :param list[synapse.handlers.sync.JoinedSyncResult] rooms: list of sync
+ results for rooms this user is joined to
+ :param FilterCollection filter: filters to apply to the results
+ :param int time_now: current time - used as a baseline for age
+ calculations
+ :param int token_id: ID of the user's auth token - used for namespacing
+ of transaction IDs
+
+ :return: the joined rooms list, in our response format
+ :rtype: dict[str, dict[str, object]]
+ """
joined = {}
for room in rooms:
joined[room.room_id] = self.encode_room(
@@ -174,6 +189,20 @@ class SyncRestServlet(RestServlet):
return joined
def encode_invited(self, rooms, filter, time_now, token_id):
+ """
+ Encode the invited rooms in a sync result
+
+ :param list[synapse.handlers.sync.InvitedSyncResult] rooms: list of
+            sync results for rooms this user is invited to
+ :param FilterCollection filter: filters to apply to the results
+ :param int time_now: current time - used as a baseline for age
+ calculations
+ :param int token_id: ID of the user's auth token - used for namespacing
+ of transaction IDs
+
+ :return: the invited rooms list, in our response format
+ :rtype: dict[str, dict[str, object]]
+ """
invited = {}
for room in rooms:
invite = serialize_event(
@@ -189,6 +218,20 @@ class SyncRestServlet(RestServlet):
return invited
def encode_archived(self, rooms, filter, time_now, token_id):
+ """
+ Encode the archived rooms in a sync result
+
+ :param list[synapse.handlers.sync.ArchivedSyncResult] rooms: list of
+            sync results for rooms this user has left
+ :param FilterCollection filter: filters to apply to the results
+ :param int time_now: current time - used as a baseline for age
+ calculations
+ :param int token_id: ID of the user's auth token - used for namespacing
+ of transaction IDs
+
+        :return: the archived rooms list, in our response format
+ :rtype: dict[str, dict[str, object]]
+ """
joined = {}
for room in rooms:
joined[room.room_id] = self.encode_room(
@@ -199,8 +242,28 @@ class SyncRestServlet(RestServlet):
@staticmethod
def encode_room(room, filter, time_now, token_id, joined=True):
+ """
+ :param JoinedSyncResult|ArchivedSyncResult room: sync result for a
+ single room
+ :param FilterCollection filter: filters to apply to the results
+ :param int time_now: current time - used as a baseline for age
+ calculations
+ :param int token_id: ID of the user's auth token - used for namespacing
+ of transaction IDs
+ :param joined: True if the user is joined to this room - will mean
+ we handle ephemeral events
+
+ :return: the room, encoded in our response format
+ :rtype: dict[str, object]
+ """
event_map = {}
- state_events = filter.filter_room_state(room.state)
+ state_dict = room.state
+ timeline_events = filter.filter_room_timeline(room.timeline.events)
+
+ state_dict = SyncRestServlet._rollback_state_for_timeline(
+ state_dict, timeline_events)
+
+ state_events = filter.filter_room_state(state_dict.values())
state_event_ids = []
for event in state_events:
# TODO(mjark): Respect formatting requirements in the filter.
@@ -210,7 +273,6 @@ class SyncRestServlet(RestServlet):
)
state_event_ids.append(event.event_id)
- timeline_events = filter.filter_room_timeline(room.timeline.events)
timeline_event_ids = []
for event in timeline_events:
# TODO(mjark): Respect formatting requirements in the filter.
@@ -220,6 +282,10 @@ class SyncRestServlet(RestServlet):
)
timeline_event_ids.append(event.event_id)
+ private_user_data = filter.filter_room_private_user_data(
+ room.private_user_data
+ )
+
result = {
"event_map": event_map,
"timeline": {
@@ -228,6 +294,7 @@ class SyncRestServlet(RestServlet):
"limited": room.timeline.limited,
},
"state": {"events": state_event_ids},
+ "private_user_data": {"events": private_user_data},
}
if joined:
@@ -236,6 +303,63 @@ class SyncRestServlet(RestServlet):
return result
+ @staticmethod
+ def _rollback_state_for_timeline(state, timeline):
+ """
+ Wind the state dictionary backwards, so that it represents the
+ state at the start of the timeline, rather than at the end.
+
+ :param dict[(str, str), synapse.events.EventBase] state: the
+            state dictionary, as at the end of the timeline. Not modified by
+            this method.
+        :param list[synapse.events.EventBase] timeline: the event timeline
+        :return: a copy of the state dictionary, wound back to the start of
+            the timeline
+ """
+ logger.debug("Processing state dict %r; timeline %r", state,
+ [e.get_dict() for e in timeline])
+
+ result = state.copy()
+
+ for timeline_event in reversed(timeline):
+ if not timeline_event.is_state():
+ continue
+
+ event_key = (timeline_event.type, timeline_event.state_key)
+
+ logger.debug("Considering %s for removal", event_key)
+
+ state_event = result.get(event_key)
+ if (state_event is None or
+ state_event.event_id != timeline_event.event_id):
+ # the event in the timeline isn't present in the state
+ # dictionary.
+ #
+ # the most likely cause for this is that there was a fork in
+ # the event graph, and the state is no longer valid. Really,
+ # the event shouldn't be in the timeline. We're going to ignore
+ # it for now, however.
+ logger.warn("Found state event %r in timeline which doesn't "
+ "match state dictionary", timeline_event)
+ continue
+
+ prev_event_id = timeline_event.unsigned.get("replaces_state", None)
+ logger.debug("Replacing %s with %s in state dict",
+ timeline_event.event_id, prev_event_id)
+
+ if prev_event_id is None:
+ del result[event_key]
+ else:
+ result[event_key] = FrozenEvent({
+ "type": timeline_event.type,
+ "state_key": timeline_event.state_key,
+ "content": timeline_event.unsigned['prev_content'],
+ "sender": timeline_event.unsigned['prev_sender'],
+ "event_id": prev_event_id,
+ "room_id": timeline_event.room_id,
+ })
+ logger.debug("New value: %r", result.get(event_key))
+
+ return result
+
def register_servlets(hs, http_server):
SyncRestServlet(hs).register(http_server)
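
For illustration only (not part of this changeset): a simplified, self-contained sketch of the rollback performed by _rollback_state_for_timeline above, using plain dicts in place of FrozenEvent. The event contents are invented for the example.

def rollback_state_for_timeline(state, timeline):
    """Return a copy of `state` wound back to the start of `timeline`."""
    result = dict(state)
    for ev in reversed(timeline):
        if "state_key" not in ev:
            continue  # not a state event (simplified check)
        key = (ev["type"], ev["state_key"])
        current = result.get(key)
        if current is None or current["event_id"] != ev["event_id"]:
            continue  # timeline event is not what the state dict points at
        prev_id = ev.get("replaces_state")
        if prev_id is None:
            del result[key]  # the timeline event introduced this state entry
        else:
            result[key] = {  # revert to the event it replaced
                "type": ev["type"],
                "state_key": ev["state_key"],
                "content": ev["prev_content"],
                "event_id": prev_id,
            }
    return result

state = {("m.room.name", ""): {"type": "m.room.name", "state_key": "",
                               "content": {"name": "After"}, "event_id": "$2"}}
timeline = [{"type": "m.room.name", "state_key": "", "event_id": "$2",
             "replaces_state": "$1", "prev_content": {"name": "Before"}}]
print(rollback_state_for_timeline(state, timeline))
# the m.room.name entry is wound back to event $1 with content {"name": "Before"}
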
diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py
new file mode 100644
index 0000000000..35482ae6a6
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/tags.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import client_v2_pattern
+
+from synapse.http.servlet import RestServlet
+from synapse.api.errors import AuthError, SynapseError
+
+from twisted.internet import defer
+
+import logging
+
+import simplejson as json
+
+logger = logging.getLogger(__name__)
+
+
+class TagListServlet(RestServlet):
+ """
+ GET /user/{user_id}/rooms/{room_id}/tags HTTP/1.1
+ """
+ PATTERN = client_v2_pattern(
+ "/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags"
+ )
+
+ def __init__(self, hs):
+ super(TagListServlet, self).__init__()
+ self.auth = hs.get_auth()
+ self.store = hs.get_datastore()
+
+ @defer.inlineCallbacks
+ def on_GET(self, request, user_id, room_id):
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
+ if user_id != auth_user.to_string():
+ raise AuthError(403, "Cannot get tags for other users.")
+
+ tags = yield self.store.get_tags_for_room(user_id, room_id)
+
+ defer.returnValue((200, {"tags": tags}))
+
+
+class TagServlet(RestServlet):
+ """
+ PUT /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
+ DELETE /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
+ """
+ PATTERN = client_v2_pattern(
+ "/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags/(?P<tag>[^/]*)"
+ )
+
+ def __init__(self, hs):
+ super(TagServlet, self).__init__()
+ self.auth = hs.get_auth()
+ self.store = hs.get_datastore()
+ self.notifier = hs.get_notifier()
+
+ @defer.inlineCallbacks
+ def on_PUT(self, request, user_id, room_id, tag):
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
+ if user_id != auth_user.to_string():
+ raise AuthError(403, "Cannot add tags for other users.")
+
+ try:
+ content_bytes = request.content.read()
+ body = json.loads(content_bytes)
+        except Exception:
+ raise SynapseError(400, "Invalid tag JSON")
+
+ max_id = yield self.store.add_tag_to_room(user_id, room_id, tag, body)
+
+ yield self.notifier.on_new_event(
+ "private_user_data_key", max_id, users=[user_id]
+ )
+
+ defer.returnValue((200, {}))
+
+ @defer.inlineCallbacks
+ def on_DELETE(self, request, user_id, room_id, tag):
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
+ if user_id != auth_user.to_string():
+            raise AuthError(403, "Cannot remove tags for other users.")
+
+ max_id = yield self.store.remove_tag_from_room(user_id, room_id, tag)
+
+ yield self.notifier.on_new_event(
+ "private_user_data_key", max_id, users=[user_id]
+ )
+
+ defer.returnValue((200, {}))
+
+
+def register_servlets(hs, http_server):
+ TagListServlet(hs).register(http_server)
+ TagServlet(hs).register(http_server)
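
For illustration only (not part of this changeset): a sketch of exercising the new tag endpoints. The URL prefix, the access_token query parameter and the tag body are illustrative assumptions; the servlet above stores whatever JSON body it is given as the tag content.

import requests

BASE = "https://my.homeserver/_matrix/client/v2_alpha"
user, room, tag = "@alice:example.com", "!abc123:example.com", "u.work"
auth = {"access_token": "SOME_TOKEN"}  # placeholder

# attach a tag to the room (free-form JSON body)
requests.put("%s/user/%s/rooms/%s/tags/%s" % (BASE, user, room, tag),
             params=auth, json={"order": 0.5})

# list the tags on the room
print(requests.get("%s/user/%s/rooms/%s/tags" % (BASE, user, room),
                   params=auth).json())

# remove the tag again
requests.delete("%s/user/%s/rooms/%s/tags/%s" % (BASE, user, room, tag),
                params=auth)
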
diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py
index c28dc86cd7..e4fa8c4647 100644
--- a/synapse/rest/media/v0/content_repository.py
+++ b/synapse/rest/media/v0/content_repository.py
@@ -66,7 +66,7 @@ class ContentRepoResource(resource.Resource):
@defer.inlineCallbacks
def map_request_to_name(self, request):
# auth the user
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
# namespace all file uploads on the user
prefix = base64.urlsafe_b64encode(
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 6abaf56b25..7d61596082 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -70,7 +70,7 @@ class UploadResource(BaseMediaResource):
@request_handler
@defer.inlineCallbacks
def _async_render_POST(self, request):
- auth_user, _ = yield self.auth.get_user_by_req(request)
+ auth_user, _, _ = yield self.auth.get_user_by_req(request)
# TODO: The checks here are a bit late. The content will have
# already been uploaded to a tmp file at this point
content_length = request.getHeader("Content-Length")
diff --git a/synapse/server.py b/synapse/server.py
index 8424798b1b..f75d5358b2 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -29,7 +29,6 @@ from synapse.state import StateHandler
from synapse.storage import DataStore
from synapse.util import Clock
from synapse.util.distributor import Distributor
-from synapse.util.lockutils import LockManager
from synapse.streams.events import EventSources
from synapse.api.ratelimiting import Ratelimiter
from synapse.crypto.keyring import Keyring
@@ -70,7 +69,6 @@ class BaseHomeServer(object):
'auth',
'rest_servlet_factory',
'state_handler',
- 'room_lock_manager',
'notifier',
'distributor',
'resource_for_client',
@@ -201,9 +199,6 @@ class HomeServer(BaseHomeServer):
def build_state_handler(self):
return StateHandler(self)
- def build_room_lock_manager(self):
- return LockManager()
-
def build_distributor(self):
return Distributor()
diff --git a/synapse/state.py b/synapse/state.py
index bb225c39cf..8ea2cac5d6 100644
--- a/synapse/state.py
+++ b/synapse/state.py
@@ -71,7 +71,7 @@ class StateHandler(object):
@defer.inlineCallbacks
def get_current_state(self, room_id, event_type=None, state_key=""):
- """ Returns the current state for the room as a list. This is done by
+ """ Retrieves the current state for the room. This is done by
calling `get_latest_events_in_room` to get the leading edges of the
event graph and then resolving any of the state conflicts.
@@ -80,6 +80,8 @@ class StateHandler(object):
If `event_type` is specified, then the method returns only the one
event (or None) with that `event_type` and `state_key`.
+
+ :returns map from (type, state_key) to event
"""
event_ids = yield self.store.get_latest_event_ids_in_room(room_id)
@@ -177,9 +179,10 @@ class StateHandler(object):
""" Given a list of event_ids this method fetches the state at each
event, resolves conflicts between them and returns them.
- Return format is a tuple: (`state_group`, `state_events`), where the
- first is the name of a state group if one and only one is involved,
- otherwise `None`.
+ :returns a Deferred tuple of (`state_group`, `state`, `prev_state`).
+ `state_group` is the name of a state group if one and only one is
+ involved. `state` is a map from (type, state_key) to event, and
+ `prev_state` is a list of event ids.
"""
logger.debug("resolve_state_groups event_ids %s", event_ids)
@@ -255,6 +258,11 @@ class StateHandler(object):
return self._resolve_events(state_sets)
def _resolve_events(self, state_sets, event_type=None, state_key=""):
+ """
+ :returns a tuple (new_state, prev_states). new_state is a map
+ from (type, state_key) to event. prev_states is a list of event_ids.
+ :rtype: (dict[(str, str), synapse.events.FrozenEvent], list[str])
+ """
state = {}
for st in state_sets:
for e in st:
@@ -307,19 +315,23 @@ class StateHandler(object):
We resolve conflicts in the following order:
1. power levels
- 2. memberships
- 3. other events.
+ 2. join rules
+ 3. memberships
+ 4. other events.
"""
resolved_state = {}
power_key = (EventTypes.PowerLevels, "")
- if power_key in conflicted_state.items():
- power_levels = conflicted_state[power_key]
- resolved_state[power_key] = self._resolve_auth_events(power_levels)
+ if power_key in conflicted_state:
+ events = conflicted_state[power_key]
+ logger.debug("Resolving conflicted power levels %r", events)
+ resolved_state[power_key] = self._resolve_auth_events(
+ events, auth_events)
auth_events.update(resolved_state)
for key, events in conflicted_state.items():
if key[0] == EventTypes.JoinRules:
+ logger.debug("Resolving conflicted join rules %r", events)
resolved_state[key] = self._resolve_auth_events(
events,
auth_events
@@ -329,6 +341,7 @@ class StateHandler(object):
for key, events in conflicted_state.items():
if key[0] == EventTypes.Member:
+ logger.debug("Resolving conflicted member lists %r", events)
resolved_state[key] = self._resolve_auth_events(
events,
auth_events
@@ -338,6 +351,7 @@ class StateHandler(object):
for key, events in conflicted_state.items():
if key not in resolved_state:
+ logger.debug("Resolving conflicted state %r:%r", key, events)
resolved_state[key] = self._resolve_normal_events(
events, auth_events
)
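
For illustration only (not part of this changeset): the hunks above change the resolution order for conflicted state to power levels, then join rules, then memberships, then everything else. A minimal sketch of just that ordering, with the real _resolve_auth_events/_resolve_normal_events machinery elided:

def resolution_order(conflicted_state):
    """Yield conflicted (key, events) pairs in the order they get resolved."""
    power, join_rules, members, other = [], [], [], []
    for key, events in conflicted_state.items():
        if key == ("m.room.power_levels", ""):
            power.append((key, events))
        elif key[0] == "m.room.join_rules":
            join_rules.append((key, events))
        elif key[0] == "m.room.member":
            members.append((key, events))
        else:
            other.append((key, events))
    for bucket in (power, join_rules, members, other):
        for item in bucket:
            yield item

conflicted = {
    ("m.room.member", "@bob:example.com"): ["eventA", "eventB"],
    ("m.room.power_levels", ""): ["eventC", "eventD"],
}
print([key for key, _ in resolution_order(conflicted)])
# power levels come out first, memberships afterwards
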
diff --git a/synapse/static/client/login/index.html b/synapse/static/client/login/index.html
new file mode 100644
index 0000000000..96c8723cab
--- /dev/null
+++ b/synapse/static/client/login/index.html
@@ -0,0 +1,50 @@
+<html>
+<head>
+<title> Login </title>
+<meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0'>
+<link rel="stylesheet" href="style.css">
+<script src="js/jquery-2.1.3.min.js"></script>
+<script src="js/login.js"></script>
+</head>
+<body onload="matrixLogin.onLoad()">
+ <center>
+ <br/>
+ <h1>Log in with one of the following methods</h1>
+
+ <span id="feedback" style="color: #f00"></span>
+ <br/>
+ <br/>
+
+ <div id="loading">
+ <img src="spinner.gif" />
+ </div>
+
+ <div id="cas_flow" class="login_flow" style="display:none"
+ onclick="gotoCas(); return false;">
+ CAS Authentication: <button id="cas_button" style="margin: 10px">Log in</button>
+ </div>
+
+ <br/>
+
+ <form id="password_form" class="login_flow" style="display:none"
+ onsubmit="matrixLogin.password_login(); return false;">
+ <div>
+ Password Authentication:<br/>
+
+ <div style="text-align: center">
+ <input id="user_id" size="32" type="text" placeholder="Matrix ID (e.g. bob)" autocapitalize="off" autocorrect="off" />
+ <br/>
+ <input id="password" size="32" type="password" placeholder="Password"/>
+ <br/>
+
+ <button type="submit" style="margin: 10px">Log in</button>
+ </div>
+ </div>
+ </form>
+
+ <div id="no_login_types" type="button" class="login_flow" style="display:none">
+        Log in is currently unavailable.
+ </div>
+ </center>
+</body>
+</html>
diff --git a/static/client/register/js/jquery-2.1.3.min.js b/synapse/static/client/login/js/jquery-2.1.3.min.js
index 25714ed29a..25714ed29a 100644
--- a/static/client/register/js/jquery-2.1.3.min.js
+++ b/synapse/static/client/login/js/jquery-2.1.3.min.js
diff --git a/synapse/static/client/login/js/login.js b/synapse/static/client/login/js/login.js
new file mode 100644
index 0000000000..ab8b4d44ea
--- /dev/null
+++ b/synapse/static/client/login/js/login.js
@@ -0,0 +1,167 @@
+window.matrixLogin = {
+ endpoint: location.origin + "/_matrix/client/api/v1/login",
+ serverAcceptsPassword: false,
+ serverAcceptsCas: false
+};
+
+var submitPassword = function(user, pwd) {
+ console.log("Logging in with password...");
+ var data = {
+ type: "m.login.password",
+ user: user,
+ password: pwd,
+ };
+ $.post(matrixLogin.endpoint, JSON.stringify(data), function(response) {
+ show_login();
+ matrixLogin.onLogin(response);
+ }).error(errorFunc);
+};
+
+var submitCas = function(ticket, service) {
+ console.log("Logging in with cas...");
+ var data = {
+ type: "m.login.cas",
+ ticket: ticket,
+ service: service,
+ };
+ $.post(matrixLogin.endpoint, JSON.stringify(data), function(response) {
+ show_login();
+ matrixLogin.onLogin(response);
+ }).error(errorFunc);
+};
+
+var errorFunc = function(err) {
+ show_login();
+
+ if (err.responseJSON && err.responseJSON.error) {
+ setFeedbackString(err.responseJSON.error + " (" + err.responseJSON.errcode + ")");
+ }
+ else {
+ setFeedbackString("Request failed: " + err.status);
+ }
+};
+
+var getCasURL = function(cb) {
+ $.get(matrixLogin.endpoint + "/cas", function(response) {
+ var cas_url = response.serverUrl;
+
+ cb(cas_url);
+ }).error(errorFunc);
+};
+
+
+var gotoCas = function() {
+ getCasURL(function(cas_url) {
+ var this_page = window.location.origin + window.location.pathname;
+
+ var redirect_url = cas_url + "/login?service=" + encodeURIComponent(this_page);
+
+ window.location.replace(redirect_url);
+ });
+};
+
+var setFeedbackString = function(text) {
+ $("#feedback").text(text);
+};
+
+var show_login = function() {
+ $("#loading").hide();
+
+ if (matrixLogin.serverAcceptsPassword) {
+ $("#password_form").show();
+ }
+
+ if (matrixLogin.serverAcceptsCas) {
+ $("#cas_flow").show();
+ }
+
+ if (!matrixLogin.serverAcceptsPassword && !matrixLogin.serverAcceptsCas) {
+ $("#no_login_types").show();
+ }
+};
+
+var show_spinner = function() {
+ $("#password_form").hide();
+ $("#cas_flow").hide();
+ $("#no_login_types").hide();
+ $("#loading").show();
+};
+
+
+var fetch_info = function(cb) {
+ $.get(matrixLogin.endpoint, function(response) {
+ var serverAcceptsPassword = false;
+ var serverAcceptsCas = false;
+ for (var i=0; i<response.flows.length; i++) {
+ var flow = response.flows[i];
+ if ("m.login.cas" === flow.type) {
+ matrixLogin.serverAcceptsCas = true;
+ console.log("Server accepts CAS");
+ }
+
+ if ("m.login.password" === flow.type) {
+ matrixLogin.serverAcceptsPassword = true;
+ console.log("Server accepts password");
+ }
+ }
+
+ cb();
+ }).error(errorFunc);
+};
+
+matrixLogin.onLoad = function() {
+ fetch_info(function() {
+ if (!try_cas()) {
+ show_login();
+ }
+ });
+};
+
+matrixLogin.password_login = function() {
+ var user = $("#user_id").val();
+ var pwd = $("#password").val();
+
+ setFeedbackString("");
+
+ show_spinner();
+ submitPassword(user, pwd);
+};
+
+matrixLogin.onLogin = function(response) {
+ // clobber this function
+ console.log("onLogin - This function should be replaced to proceed.");
+ console.log(response);
+};
+
+var parseQsFromUrl = function(query) {
+ var result = {};
+ query.split("&").forEach(function(part) {
+ var item = part.split("=");
+ var key = item[0];
+ var val = item[1];
+
+ if (val) {
+ val = decodeURIComponent(val);
+ }
+        result[key] = val;
+ });
+ return result;
+};
+
+var try_cas = function() {
+ var pos = window.location.href.indexOf("?");
+ if (pos == -1) {
+ return false;
+ }
+ var qs = parseQsFromUrl(window.location.href.substr(pos+1));
+
+ var ticket = qs.ticket;
+
+ if (!ticket) {
+ return false;
+ }
+
+ submitCas(ticket, location.origin);
+
+ return true;
+};
diff --git a/synapse/static/client/login/spinner.gif b/synapse/static/client/login/spinner.gif
new file mode 100644
index 0000000000..12c24df798
--- /dev/null
+++ b/synapse/static/client/login/spinner.gif
Binary files differ
diff --git a/synapse/static/client/login/style.css b/synapse/static/client/login/style.css
new file mode 100644
index 0000000000..73da0b5117
--- /dev/null
+++ b/synapse/static/client/login/style.css
@@ -0,0 +1,57 @@
+html {
+ height: 100%;
+}
+
+body {
+ height: 100%;
+ font-family: "Myriad Pro", "Myriad", Helvetica, Arial, sans-serif;
+ font-size: 12pt;
+ margin: 0px;
+}
+
+h1 {
+ font-size: 20pt;
+}
+
+a:link { color: #666; }
+a:visited { color: #666; }
+a:hover { color: #000; }
+a:active { color: #000; }
+
+input {
+    width: 90%;
+}
+
+textarea, input {
+ font-family: inherit;
+ font-size: inherit;
+ margin: 5px;
+}
+
+.smallPrint {
+ color: #888;
+ font-size: 9pt ! important;
+ font-style: italic ! important;
+}
+
+.g-recaptcha div {
+ margin: auto;
+}
+
+.login_flow {
+ text-align: left;
+ padding: 10px;
+ margin-bottom: 40px;
+ display: inline-block;
+
+ -webkit-border-radius: 10px;
+ -moz-border-radius: 10px;
+ border-radius: 10px;
+
+ -webkit-box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
+ -moz-box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
+ box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
+
+ background-color: #f8f8f8;
+ border: 1px #ccc solid;
+}
diff --git a/static/client/register/index.html b/synapse/static/client/register/index.html
index 600b3ee41e..600b3ee41e 100644
--- a/static/client/register/index.html
+++ b/synapse/static/client/register/index.html
diff --git a/synapse/static/client/register/js/jquery-2.1.3.min.js b/synapse/static/client/register/js/jquery-2.1.3.min.js
new file mode 100644
index 0000000000..25714ed29a
--- /dev/null
+++ b/synapse/static/client/register/js/jquery-2.1.3.min.js
@@ -0,0 +1,4 @@
+/*! jQuery v2.1.3 | (c) 2005, 2014 jQuery Foundation, Inc. | jquery.org/license */
+!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l=a.document,m="2.1.3",n=function(a,b){return new n.fn.init(a,b)},o=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,p=/^-ms-/,q=/-([\da-z])/gi,r=function(a,b){return b.toUpperCase()};n.fn=n.prototype={jquery:m,constructor:n,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=n.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return n.each(this,a,b)},map:function(a){return this.pushStack(n.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},n.extend=n.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||n.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(a=arguments[h]))for(b in a)c=g[b],d=a[b],g!==d&&(j&&d&&(n.isPlainObject(d)||(e=n.isArray(d)))?(e?(e=!1,f=c&&n.isArray(c)?c:[]):f=c&&n.isPlainObject(c)?c:{},g[b]=n.extend(j,f,d)):void 0!==d&&(g[b]=d));return g},n.extend({expando:"jQuery"+(m+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===n.type(a)},isArray:Array.isArray,isWindow:function(a){return null!=a&&a===a.window},isNumeric:function(a){return!n.isArray(a)&&a-parseFloat(a)+1>=0},isPlainObject:function(a){return"object"!==n.type(a)||a.nodeType||n.isWindow(a)?!1:a.constructor&&!j.call(a.constructor.prototype,"isPrototypeOf")?!1:!0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(a){var b,c=eval;a=n.trim(a),a&&(1===a.indexOf("use strict")?(b=l.createElement("script"),b.text=a,l.head.appendChild(b).parentNode.removeChild(b)):c(a))},camelCase:function(a){return a.replace(p,"ms-").replace(q,r)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=s(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(o,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(s(Object(a))?n.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){return null==b?-1:g.call(b,a,c)},merge:function(a,b){for(var c=+b.length,d=0,e=a.length;c>d;d++)a[e++]=b[d];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=s(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in 
a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(c=a[b],b=a,a=c),n.isFunction(a)?(e=d.call(arguments,2),f=function(){return a.apply(b||this,e.concat(d.call(arguments)))},f.guid=a.guid=a.guid||n.guid++,f):void 0},now:Date.now,support:k}),n.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function s(a){var b=a.length,c=n.type(a);return"function"===c||n.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var t=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=hb(),z=hb(),A=hb(),B=function(a,b){return a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N=M.replace("w","w#"),O="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+N+"))|)"+L+"*\\]",P=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+O+")*)|.*)\\)|)",Q=new RegExp(L+"+","g"),R=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),S=new RegExp("^"+L+"*,"+L+"*"),T=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),U=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),V=new RegExp(P),W=new RegExp("^"+N+"$"),X={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+O),PSEUDO:new RegExp("^"+P),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ab=/[+~]/,bb=/'|\\/g,cb=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),db=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},eb=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(fb){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function gb(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],k=b.nodeType,"string"!=typeof a||!a||1!==k&&9!==k&&11!==k)return d;if(!e&&p){if(11!==k&&(f=_.exec(a)))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return H.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName)return H.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=1!==k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(bb,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+rb(o[l]);w=ab.test(a)&&pb(b.parentNode)||b,x=o.join(",")}if(x)try{return 
H.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function hb(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ib(a){return a[u]=!0,a}function jb(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function kb(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function lb(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function mb(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function nb(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function ob(a){return ib(function(b){return b=+b,ib(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function pb(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=gb.support={},f=gb.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=gb.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=g.documentElement,e=g.defaultView,e&&e!==e.top&&(e.addEventListener?e.addEventListener("unload",eb,!1):e.attachEvent&&e.attachEvent("onunload",eb)),p=!f(g),c.attributes=jb(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=jb(function(a){return a.appendChild(g.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(g.getElementsByClassName),c.getById=jb(function(a){return o.appendChild(a).id=u,!g.getElementsByName||!g.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(g.querySelectorAll))&&(jb(function(a){o.appendChild(a).innerHTML="<a id='"+u+"'></a><select id='"+u+"-\f]' msallowcapture=''><option selected=''></option></select>",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),jb(function(a){var 
b=g.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&jb(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",P)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===g||a.ownerDocument===v&&t(v,a)?-1:b===g||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,h=[a],i=[b];if(!e||!f)return a===g?-1:b===g?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return lb(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?lb(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},g):n},gb.matches=function(a,b){return gb(a,null,null,b)},gb.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return gb(b,n,null,[a]).length>0},gb.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},gb.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},gb.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},gb.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=gb.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=gb.selectors={cacheLength:50,createPseudo:ib,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(cb,db),a[3]=(a[3]||a[4]||a[5]||"").replace(cb,db),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||gb.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&gb.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return 
X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(cb,db).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=gb.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(Q," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||gb.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ib(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ib(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?ib(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ib(function(a){return function(b){return gb(a,b).length>0}}),contains:ib(function(a){return a=a.replace(cb,db),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ib(function(a){return W.test(a||"")||gb.error("unsupported lang: "+a),a=a.replace(cb,db).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var 
b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:ob(function(){return[0]}),last:ob(function(a,b){return[b-1]}),eq:ob(function(a,b,c){return[0>c?c+b:c]}),even:ob(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:ob(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:ob(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:ob(function(a,b,c){for(var d=0>c?c+b:c;++d<b;)a.push(d);return a})}},d.pseudos.nth=d.pseudos.eq;for(b in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})d.pseudos[b]=mb(b);for(b in{submit:!0,reset:!0})d.pseudos[b]=nb(b);function qb(){}qb.prototype=d.filters=d.pseudos,d.setFilters=new qb,g=gb.tokenize=function(a,b){var c,e,f,g,h,i,j,k=z[a+" "];if(k)return b?0:k.slice(0);h=a,i=[],j=d.preFilter;while(h){(!c||(e=S.exec(h)))&&(e&&(h=h.slice(e[0].length)||h),i.push(f=[])),c=!1,(e=T.exec(h))&&(c=e.shift(),f.push({value:c,type:e[0].replace(R," ")}),h=h.slice(c.length));for(g in d.filter)!(e=X[g].exec(h))||j[g]&&!(e=j[g](e))||(c=e.shift(),f.push({value:c,type:g,matches:e}),h=h.slice(c.length));if(!c)break}return b?h.length:h?gb.error(a):z(a,i).slice(0)};function rb(a){for(var b=0,c=a.length,d="";c>b;b++)d+=a[b].value;return d}function sb(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function tb(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ub(a,b,c){for(var d=0,e=b.length;e>d;d++)gb(a,b[d],c);return c}function vb(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function wb(a,b,c,d,e,f){return d&&!d[u]&&(d=wb(d)),e&&!e[u]&&(e=wb(e,f)),ib(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ub(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:vb(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=vb(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=vb(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function xb(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=sb(function(a){return a===b},h,!0),l=sb(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[sb(tb(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return wb(i>1&&tb(m),i>1&&rb(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&xb(a.slice(i,e)),f>e&&xb(a=a.slice(e)),f>e&&rb(a))}m.push(c)}return tb(m)}function yb(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var 
l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=F.call(i));s=vb(s)}H.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&gb.uniqueSort(i)}return k&&(w=v,j=t),r};return c?ib(f):f}return h=gb.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=xb(b[c]),f[u]?d.push(f):e.push(f);f=A(a,yb(e,d)),f.selector=a}return f},i=gb.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(cb,db),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=X.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(cb,db),ab.test(j[0].type)&&pb(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&rb(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,ab.test(a)&&pb(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=jb(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),jb(function(a){return a.innerHTML="<a href='#'></a>","#"===a.firstChild.getAttribute("href")})||kb("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&jb(function(a){return a.innerHTML="<input/>",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||kb("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),jb(function(a){return null==a.getAttribute("disabled")})||kb(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),gb}(a);n.find=t,n.expr=t.selectors,n.expr[":"]=n.expr.pseudos,n.unique=t.uniqueSort,n.text=t.getText,n.isXMLDoc=t.isXML,n.contains=t.contains;var u=n.expr.match.needsContext,v=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,w=/^.[^:#\[\.,]*$/;function x(a,b,c){if(n.isFunction(b))return n.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return n.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(w.test(b))return n.filter(b,a,c);b=n.filter(b,a)}return n.grep(a,function(a){return g.call(b,a)>=0!==c})}n.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?n.find.matchesSelector(d,a)?[d]:[]:n.find.matches(a,n.grep(b,function(a){return 1===a.nodeType}))},n.fn.extend({find:function(a){var b,c=this.length,d=[],e=this;if("string"!=typeof a)return this.pushStack(n(a).filter(function(){for(b=0;c>b;b++)if(n.contains(e[b],this))return!0}));for(b=0;c>b;b++)n.find(a,e[b],d);return d=this.pushStack(c>1?n.unique(d):d),d.selector=this.selector?this.selector+" "+a:a,d},filter:function(a){return this.pushStack(x(this,a||[],!1))},not:function(a){return this.pushStack(x(this,a||[],!0))},is:function(a){return!!x(this,"string"==typeof a&&u.test(a)?n(a):a||[],!1).length}});var y,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=n.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof 
a){if(c="<"===a[0]&&">"===a[a.length-1]&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||y).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof n?b[0]:b,n.merge(this,n.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:l,!0)),v.test(c[1])&&n.isPlainObject(b))for(c in b)n.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}return d=l.getElementById(c[2]),d&&d.parentNode&&(this.length=1,this[0]=d),this.context=l,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):n.isFunction(a)?"undefined"!=typeof y.ready?y.ready(a):a(n):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),n.makeArray(a,this))};A.prototype=n.fn,y=n(l);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};n.extend({dir:function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&n(a).is(c))break;d.push(a)}return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),n.fn.extend({has:function(a){var b=n(a,this),c=b.length;return this.filter(function(){for(var a=0;c>a;a++)if(n.contains(this,b[a]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=u.test(a)||"string"!=typeof a?n(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&n.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?n.unique(f):f)},index:function(a){return a?"string"==typeof a?g.call(n(a),this[0]):g.call(this,a.jquery?a[0]:a):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(n.unique(n.merge(this.get(),n(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){while((a=a[b])&&1!==a.nodeType);return a}n.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return n.dir(a,"parentNode")},parentsUntil:function(a,b,c){return n.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return n.dir(a,"nextSibling")},prevAll:function(a){return n.dir(a,"previousSibling")},nextUntil:function(a,b,c){return n.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return n.dir(a,"previousSibling",c)},siblings:function(a){return n.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return n.sibling(a.firstChild)},contents:function(a){return a.contentDocument||n.merge([],a.childNodes)}},function(a,b){n.fn[a]=function(c,d){var e=n.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=n.filter(d,e)),this.length>1&&(C[a]||n.unique(e),B.test(a)&&e.reverse()),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return n.each(a.match(E)||[],function(a,c){b[c]=!0}),b}n.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):n.extend({},a);var b,c,d,e,f,g,h=[],i=!a.once&&[],j=function(l){for(b=a.memory&&l,c=!0,g=e||0,e=0,f=h.length,d=!0;h&&f>g;g++)if(h[g].apply(l[0],l[1])===!1&&a.stopOnFalse){b=!1;break}d=!1,h&&(i?i.length&&j(i.shift()):b?h=[]:k.disable())},k={add:function(){if(h){var c=h.length;!function g(b){n.each(b,function(b,c){var d=n.type(c);"function"===d?a.unique&&k.has(c)||h.push(c):c&&c.length&&"string"!==d&&g(c)})}(arguments),d?f=h.length:b&&(e=c,j(b))}return this},remove:function(){return h&&n.each(arguments,function(a,b){var 
c;while((c=n.inArray(b,h,c))>-1)h.splice(c,1),d&&(f>=c&&f--,g>=c&&g--)}),this},has:function(a){return a?n.inArray(a,h)>-1:!(!h||!h.length)},empty:function(){return h=[],f=0,this},disable:function(){return h=i=b=void 0,this},disabled:function(){return!h},lock:function(){return i=void 0,b||k.disable(),this},locked:function(){return!i},fireWith:function(a,b){return!h||c&&!i||(b=b||[],b=[a,b.slice?b.slice():b],d?i.push(b):j(b)),this},fire:function(){return k.fireWith(this,arguments),this},fired:function(){return!!c}};return k},n.extend({Deferred:function(a){var b=[["resolve","done",n.Callbacks("once memory"),"resolved"],["reject","fail",n.Callbacks("once memory"),"rejected"],["notify","progress",n.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return n.Deferred(function(c){n.each(b,function(b,f){var g=n.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&n.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?n.extend(a,d):d}},e={};return d.pipe=d.then,n.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=d.call(arguments),e=c.length,f=1!==e||a&&n.isFunction(a.promise)?e:0,g=1===f?a:n.Deferred(),h=function(a,b,c){return function(e){b[a]=this,c[a]=arguments.length>1?d.call(arguments):e,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(e>1)for(i=new Array(e),j=new Array(e),k=new Array(e);e>b;b++)c[b]&&n.isFunction(c[b].promise)?c[b].promise().done(h(b,k,c)).fail(g.reject).progress(h(b,j,i)):--f;return f||g.resolveWith(k,c),g.promise()}});var H;n.fn.ready=function(a){return n.ready.promise().done(a),this},n.extend({isReady:!1,readyWait:1,holdReady:function(a){a?n.readyWait++:n.ready(!0)},ready:function(a){(a===!0?--n.readyWait:n.isReady)||(n.isReady=!0,a!==!0&&--n.readyWait>0||(H.resolveWith(l,[n]),n.fn.triggerHandler&&(n(l).triggerHandler("ready"),n(l).off("ready"))))}});function I(){l.removeEventListener("DOMContentLoaded",I,!1),a.removeEventListener("load",I,!1),n.ready()}n.ready.promise=function(b){return H||(H=n.Deferred(),"complete"===l.readyState?setTimeout(n.ready):(l.addEventListener("DOMContentLoaded",I,!1),a.addEventListener("load",I,!1))),H.promise(b)},n.ready.promise();var J=n.access=function(a,b,c,d,e,f,g){var h=0,i=a.length,j=null==c;if("object"===n.type(c)){e=!0;for(h in c)n.access(a,b,h,c[h],!0,f,g)}else if(void 0!==d&&(e=!0,n.isFunction(d)||(g=!0),j&&(g?(b.call(a,d),b=null):(j=b,b=function(a,b,c){return j.call(n(a),c)})),b))for(;i>h;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f};n.acceptData=function(a){return 1===a.nodeType||9===a.nodeType||!+a.nodeType};function K(){Object.defineProperty(this.cache={},0,{get:function(){return{}}}),this.expando=n.expando+K.uid++}K.uid=1,K.accepts=n.acceptData,K.prototype={key:function(a){if(!K.accepts(a))return 0;var b={},c=a[this.expando];if(!c){c=K.uid++;try{b[this.expando]={value:c},Object.defineProperties(a,b)}catch(d){b[this.expando]=c,n.extend(a,b)}}return this.cache[c]||(this.cache[c]={}),c},set:function(a,b,c){var d,e=this.key(a),f=this.cache[e];if("string"==typeof b)f[b]=c;else 
if(n.isEmptyObject(f))n.extend(this.cache[e],b);else for(d in b)f[d]=b[d];return f},get:function(a,b){var c=this.cache[this.key(a)];return void 0===b?c:c[b]},access:function(a,b,c){var d;return void 0===b||b&&"string"==typeof b&&void 0===c?(d=this.get(a,b),void 0!==d?d:this.get(a,n.camelCase(b))):(this.set(a,b,c),void 0!==c?c:b)},remove:function(a,b){var c,d,e,f=this.key(a),g=this.cache[f];if(void 0===b)this.cache[f]={};else{n.isArray(b)?d=b.concat(b.map(n.camelCase)):(e=n.camelCase(b),b in g?d=[b,e]:(d=e,d=d in g?[d]:d.match(E)||[])),c=d.length;while(c--)delete g[d[c]]}},hasData:function(a){return!n.isEmptyObject(this.cache[a[this.expando]]||{})},discard:function(a){a[this.expando]&&delete this.cache[a[this.expando]]}};var L=new K,M=new K,N=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,O=/([A-Z])/g;function P(a,b,c){var d;if(void 0===c&&1===a.nodeType)if(d="data-"+b.replace(O,"-$1").toLowerCase(),c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:N.test(c)?n.parseJSON(c):c}catch(e){}M.set(a,b,c)}else c=void 0;return c}n.extend({hasData:function(a){return M.hasData(a)||L.hasData(a)},data:function(a,b,c){return M.access(a,b,c)
+},removeData:function(a,b){M.remove(a,b)},_data:function(a,b,c){return L.access(a,b,c)},_removeData:function(a,b){L.remove(a,b)}}),n.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=M.get(f),1===f.nodeType&&!L.get(f,"hasDataAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=n.camelCase(d.slice(5)),P(f,d,e[d])));L.set(f,"hasDataAttrs",!0)}return e}return"object"==typeof a?this.each(function(){M.set(this,a)}):J(this,function(b){var c,d=n.camelCase(a);if(f&&void 0===b){if(c=M.get(f,a),void 0!==c)return c;if(c=M.get(f,d),void 0!==c)return c;if(c=P(f,d,void 0),void 0!==c)return c}else this.each(function(){var c=M.get(this,d);M.set(this,d,b),-1!==a.indexOf("-")&&void 0!==c&&M.set(this,a,b)})},null,b,arguments.length>1,null,!0)},removeData:function(a){return this.each(function(){M.remove(this,a)})}}),n.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=L.get(a,b),c&&(!d||n.isArray(c)?d=L.access(a,b,n.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=n.queue(a,b),d=c.length,e=c.shift(),f=n._queueHooks(a,b),g=function(){n.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return L.get(a,c)||L.access(a,c,{empty:n.Callbacks("once memory").add(function(){L.remove(a,[b+"queue",c])})})}}),n.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.length<c?n.queue(this[0],a):void 0===b?this:this.each(function(){var c=n.queue(this,a,b);n._queueHooks(this,a),"fx"===a&&"inprogress"!==c[0]&&n.dequeue(this,a)})},dequeue:function(a){return this.each(function(){n.dequeue(this,a)})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,b){var c,d=1,e=n.Deferred(),f=this,g=this.length,h=function(){--d||e.resolveWith(f,[f])};"string"!=typeof a&&(b=a,a=void 0),a=a||"fx";while(g--)c=L.get(f[g],a+"queueHooks"),c&&c.empty&&(d++,c.empty.add(h));return h(),e.promise(b)}});var Q=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,R=["Top","Right","Bottom","Left"],S=function(a,b){return a=b||a,"none"===n.css(a,"display")||!n.contains(a.ownerDocument,a)},T=/^(?:checkbox|radio)$/i;!function(){var a=l.createDocumentFragment(),b=a.appendChild(l.createElement("div")),c=l.createElement("input");c.setAttribute("type","radio"),c.setAttribute("checked","checked"),c.setAttribute("name","t"),b.appendChild(c),k.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,b.innerHTML="<textarea>x</textarea>",k.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue}();var U="undefined";k.focusinBubbles="onfocusin"in a;var V=/^key/,W=/^(?:mouse|pointer|contextmenu)|click/,X=/^(?:focusinfocus|focusoutblur)$/,Y=/^([^.]*)(?:\.(.+)|)$/;function Z(){return!0}function $(){return!1}function _(){try{return l.activeElement}catch(a){}}n.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=L.get(a);if(r){c.handler&&(f=c,c=f.handler,e=f.selector),c.guid||(c.guid=n.guid++),(i=r.events)||(i=r.events={}),(g=r.handle)||(g=r.handle=function(b){return typeof n!==U&&n.event.triggered!==b.type?n.event.dispatch.apply(a,arguments):void 
0}),b=(b||"").match(E)||[""],j=b.length;while(j--)h=Y.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o&&(l=n.event.special[o]||{},o=(e?l.delegateType:l.bindType)||o,l=n.event.special[o]||{},k=n.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&n.expr.match.needsContext.test(e),namespace:p.join(".")},f),(m=i[o])||(m=i[o]=[],m.delegateCount=0,l.setup&&l.setup.call(a,d,p,g)!==!1||a.addEventListener&&a.addEventListener(o,g,!1)),l.add&&(l.add.call(a,k),k.handler.guid||(k.handler.guid=c.guid)),e?m.splice(m.delegateCount++,0,k):m.push(k),n.event.global[o]=!0)}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,m,o,p,q,r=L.hasData(a)&&L.get(a);if(r&&(i=r.events)){b=(b||"").match(E)||[""],j=b.length;while(j--)if(h=Y.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=n.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,m=i[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),g=f=m.length;while(f--)k=m[f],!e&&q!==k.origType||c&&c.guid!==k.guid||h&&!h.test(k.namespace)||d&&d!==k.selector&&("**"!==d||!k.selector)||(m.splice(f,1),k.selector&&m.delegateCount--,l.remove&&l.remove.call(a,k));g&&!m.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||n.removeEvent(a,o,r.handle),delete i[o])}else for(o in i)n.event.remove(a,o+b[j],c,d,!0);n.isEmptyObject(i)&&(delete r.handle,L.remove(a,"events"))}},trigger:function(b,c,d,e){var f,g,h,i,k,m,o,p=[d||l],q=j.call(b,"type")?b.type:b,r=j.call(b,"namespace")?b.namespace.split("."):[];if(g=h=d=d||l,3!==d.nodeType&&8!==d.nodeType&&!X.test(q+n.event.triggered)&&(q.indexOf(".")>=0&&(r=q.split("."),q=r.shift(),r.sort()),k=q.indexOf(":")<0&&"on"+q,b=b[n.expando]?b:new n.Event(q,"object"==typeof b&&b),b.isTrigger=e?2:3,b.namespace=r.join("."),b.namespace_re=b.namespace?new RegExp("(^|\\.)"+r.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 0,b.target||(b.target=d),c=null==c?[b]:n.makeArray(c,[b]),o=n.event.special[q]||{},e||!o.trigger||o.trigger.apply(d,c)!==!1)){if(!e&&!o.noBubble&&!n.isWindow(d)){for(i=o.delegateType||q,X.test(i+q)||(g=g.parentNode);g;g=g.parentNode)p.push(g),h=g;h===(d.ownerDocument||l)&&p.push(h.defaultView||h.parentWindow||a)}f=0;while((g=p[f++])&&!b.isPropagationStopped())b.type=f>1?i:o.bindType||q,m=(L.get(g,"events")||{})[b.type]&&L.get(g,"handle"),m&&m.apply(g,c),m=k&&g[k],m&&m.apply&&n.acceptData(g)&&(b.result=m.apply(g,c),b.result===!1&&b.preventDefault());return b.type=q,e||b.isDefaultPrevented()||o._default&&o._default.apply(p.pop(),c)!==!1||!n.acceptData(d)||k&&n.isFunction(d[q])&&!n.isWindow(d)&&(h=d[k],h&&(d[k]=null),n.event.triggered=q,d[q](),n.event.triggered=void 0,h&&(d[k]=h)),b.result}},dispatch:function(a){a=n.event.fix(a);var b,c,e,f,g,h=[],i=d.call(arguments),j=(L.get(this,"events")||{})[a.type]||[],k=n.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=n.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,c=0;while((g=f.handlers[c++])&&!a.isImmediatePropagationStopped())(!a.namespace_re||a.namespace_re.test(g.namespace))&&(a.handleObj=g,a.data=g.data,e=((n.event.special[g.origType]||{}).handle||g.handler).apply(f.elem,i),void 0!==e&&(a.result=e)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var 
c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&(!a.button||"click"!==a.type))for(;i!==this;i=i.parentNode||this)if(i.disabled!==!0||"click"!==a.type){for(d=[],c=0;h>c;c++)f=b[c],e=f.selector+" ",void 0===d[e]&&(d[e]=f.needsContext?n(e,this).index(i)>=0:n.find(e,this,null,[i]).length),d[e]&&d.push(f);d.length&&g.push({elem:i,handlers:d})}return h<b.length&&g.push({elem:this,handlers:b.slice(h)}),g},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){return null==a.which&&(a.which=null!=b.charCode?b.charCode:b.keyCode),a}},mouseHooks:{props:"button buttons clientX clientY offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,b){var c,d,e,f=b.button;return null==a.pageX&&null!=b.clientX&&(c=a.target.ownerDocument||l,d=c.documentElement,e=c.body,a.pageX=b.clientX+(d&&d.scrollLeft||e&&e.scrollLeft||0)-(d&&d.clientLeft||e&&e.clientLeft||0),a.pageY=b.clientY+(d&&d.scrollTop||e&&e.scrollTop||0)-(d&&d.clientTop||e&&e.clientTop||0)),a.which||void 0===f||(a.which=1&f?1:2&f?3:4&f?2:0),a}},fix:function(a){if(a[n.expando])return a;var b,c,d,e=a.type,f=a,g=this.fixHooks[e];g||(this.fixHooks[e]=g=W.test(e)?this.mouseHooks:V.test(e)?this.keyHooks:{}),d=g.props?this.props.concat(g.props):this.props,a=new n.Event(f),b=d.length;while(b--)c=d[b],a[c]=f[c];return a.target||(a.target=l),3===a.target.nodeType&&(a.target=a.target.parentNode),g.filter?g.filter(a,f):a},special:{load:{noBubble:!0},focus:{trigger:function(){return this!==_()&&this.focus?(this.focus(),!1):void 0},delegateType:"focusin"},blur:{trigger:function(){return this===_()&&this.blur?(this.blur(),!1):void 0},delegateType:"focusout"},click:{trigger:function(){return"checkbox"===this.type&&this.click&&n.nodeName(this,"input")?(this.click(),!1):void 0},_default:function(a){return n.nodeName(a.target,"a")}},beforeunload:{postDispatch:function(a){void 0!==a.result&&a.originalEvent&&(a.originalEvent.returnValue=a.result)}}},simulate:function(a,b,c,d){var e=n.extend(new n.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?n.event.trigger(e,null,b):n.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},n.removeEvent=function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)},n.Event=function(a,b){return this instanceof n.Event?(a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||void 0===a.defaultPrevented&&a.returnValue===!1?Z:$):this.type=a,b&&n.extend(this,b),this.timeStamp=a&&a.timeStamp||n.now(),void(this[n.expando]=!0)):new n.Event(a,b)},n.Event.prototype={isDefaultPrevented:$,isPropagationStopped:$,isImmediatePropagationStopped:$,preventDefault:function(){var a=this.originalEvent;this.isDefaultPrevented=Z,a&&a.preventDefault&&a.preventDefault()},stopPropagation:function(){var a=this.originalEvent;this.isPropagationStopped=Z,a&&a.stopPropagation&&a.stopPropagation()},stopImmediatePropagation:function(){var a=this.originalEvent;this.isImmediatePropagationStopped=Z,a&&a.stopImmediatePropagation&&a.stopImmediatePropagation(),this.stopPropagation()}},n.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(a,b){n.event.special[a]={delegateType:b,bindType:b,handle:function(a){var 
c,d=this,e=a.relatedTarget,f=a.handleObj;return(!e||e!==d&&!n.contains(d,e))&&(a.type=f.origType,c=f.handler.apply(this,arguments),a.type=b),c}}}),k.focusinBubbles||n.each({focus:"focusin",blur:"focusout"},function(a,b){var c=function(a){n.event.simulate(b,a.target,n.event.fix(a),!0)};n.event.special[b]={setup:function(){var d=this.ownerDocument||this,e=L.access(d,b);e||d.addEventListener(a,c,!0),L.access(d,b,(e||0)+1)},teardown:function(){var d=this.ownerDocument||this,e=L.access(d,b)-1;e?L.access(d,b,e):(d.removeEventListener(a,c,!0),L.remove(d,b))}}}),n.fn.extend({on:function(a,b,c,d,e){var f,g;if("object"==typeof a){"string"!=typeof b&&(c=c||b,b=void 0);for(g in a)this.on(g,b,c,a[g],e);return this}if(null==c&&null==d?(d=b,c=b=void 0):null==d&&("string"==typeof b?(d=c,c=void 0):(d=c,c=b,b=void 0)),d===!1)d=$;else if(!d)return this;return 1===e&&(f=d,d=function(a){return n().off(a),f.apply(this,arguments)},d.guid=f.guid||(f.guid=n.guid++)),this.each(function(){n.event.add(this,a,d,c,b)})},one:function(a,b,c,d){return this.on(a,b,c,d,1)},off:function(a,b,c){var d,e;if(a&&a.preventDefault&&a.handleObj)return d=a.handleObj,n(a.delegateTarget).off(d.namespace?d.origType+"."+d.namespace:d.origType,d.selector,d.handler),this;if("object"==typeof a){for(e in a)this.off(e,b,a[e]);return this}return(b===!1||"function"==typeof b)&&(c=b,b=void 0),c===!1&&(c=$),this.each(function(){n.event.remove(this,a,c,b)})},trigger:function(a,b){return this.each(function(){n.event.trigger(a,b,this)})},triggerHandler:function(a,b){var c=this[0];return c?n.event.trigger(a,b,c,!0):void 0}});var ab=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bb=/<([\w:]+)/,cb=/<|&#?\w+;/,db=/<(?:script|style|link)/i,eb=/checked\s*(?:[^=]|=\s*.checked.)/i,fb=/^$|\/(?:java|ecma)script/i,gb=/^true\/(.*)/,hb=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,ib={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};ib.optgroup=ib.option,ib.tbody=ib.tfoot=ib.colgroup=ib.caption=ib.thead,ib.th=ib.td;function jb(a,b){return n.nodeName(a,"table")&&n.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function kb(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}function lb(a){var b=gb.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function mb(a,b){for(var c=0,d=a.length;d>c;c++)L.set(a[c],"globalEval",!b||L.get(b[c],"globalEval"))}function nb(a,b){var c,d,e,f,g,h,i,j;if(1===b.nodeType){if(L.hasData(a)&&(f=L.access(a),g=L.set(b,f),j=f.events)){delete g.handle,g.events={};for(e in j)for(c=0,d=j[e].length;d>c;c++)n.event.add(b,e,j[e][c])}M.hasData(a)&&(h=M.access(a),i=n.extend({},h),M.set(b,i))}}function ob(a,b){var c=a.getElementsByTagName?a.getElementsByTagName(b||"*"):a.querySelectorAll?a.querySelectorAll(b||"*"):[];return void 0===b||b&&n.nodeName(a,b)?n.merge([a],c):c}function pb(a,b){var c=b.nodeName.toLowerCase();"input"===c&&T.test(a.type)?b.checked=a.checked:("input"===c||"textarea"===c)&&(b.defaultValue=a.defaultValue)}n.extend({clone:function(a,b,c){var 
d,e,f,g,h=a.cloneNode(!0),i=n.contains(a.ownerDocument,a);if(!(k.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||n.isXMLDoc(a)))for(g=ob(h),f=ob(a),d=0,e=f.length;e>d;d++)pb(f[d],g[d]);if(b)if(c)for(f=f||ob(a),g=g||ob(h),d=0,e=f.length;e>d;d++)nb(f[d],g[d]);else nb(a,h);return g=ob(h,"script"),g.length>0&&mb(g,!i&&ob(a,"script")),h},buildFragment:function(a,b,c,d){for(var e,f,g,h,i,j,k=b.createDocumentFragment(),l=[],m=0,o=a.length;o>m;m++)if(e=a[m],e||0===e)if("object"===n.type(e))n.merge(l,e.nodeType?[e]:e);else if(cb.test(e)){f=f||k.appendChild(b.createElement("div")),g=(bb.exec(e)||["",""])[1].toLowerCase(),h=ib[g]||ib._default,f.innerHTML=h[1]+e.replace(ab,"<$1></$2>")+h[2],j=h[0];while(j--)f=f.lastChild;n.merge(l,f.childNodes),f=k.firstChild,f.textContent=""}else l.push(b.createTextNode(e));k.textContent="",m=0;while(e=l[m++])if((!d||-1===n.inArray(e,d))&&(i=n.contains(e.ownerDocument,e),f=ob(k.appendChild(e),"script"),i&&mb(f),c)){j=0;while(e=f[j++])fb.test(e.type||"")&&c.push(e)}return k},cleanData:function(a){for(var b,c,d,e,f=n.event.special,g=0;void 0!==(c=a[g]);g++){if(n.acceptData(c)&&(e=c[L.expando],e&&(b=L.cache[e]))){if(b.events)for(d in b.events)f[d]?n.event.remove(c,d):n.removeEvent(c,d,b.handle);L.cache[e]&&delete L.cache[e]}delete M.cache[c[M.expando]]}}}),n.fn.extend({text:function(a){return J(this,function(a){return void 0===a?n.text(this):this.empty().each(function(){(1===this.nodeType||11===this.nodeType||9===this.nodeType)&&(this.textContent=a)})},null,a,arguments.length)},append:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=jb(this,a);b.appendChild(a)}})},prepend:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=jb(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},remove:function(a,b){for(var c,d=a?n.filter(a,this):this,e=0;null!=(c=d[e]);e++)b||1!==c.nodeType||n.cleanData(ob(c)),c.parentNode&&(b&&n.contains(c.ownerDocument,c)&&mb(ob(c,"script")),c.parentNode.removeChild(c));return this},empty:function(){for(var a,b=0;null!=(a=this[b]);b++)1===a.nodeType&&(n.cleanData(ob(a,!1)),a.textContent="");return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return n.clone(this,a,b)})},html:function(a){return J(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a&&1===b.nodeType)return b.innerHTML;if("string"==typeof a&&!db.test(a)&&!ib[(bb.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(ab,"<$1></$2>");try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(n.cleanData(ob(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=arguments[0];return this.domManip(arguments,function(b){a=this.parentNode,n.cleanData(ob(this)),a&&a.replaceChild(b,this)}),a&&(a.length||a.nodeType)?this:this.remove()},detach:function(a){return this.remove(a,!0)},domManip:function(a,b){a=e.apply([],a);var c,d,f,g,h,i,j=0,l=this.length,m=this,o=l-1,p=a[0],q=n.isFunction(p);if(q||l>1&&"string"==typeof p&&!k.checkClone&&eb.test(p))return this.each(function(c){var 
d=m.eq(c);q&&(a[0]=p.call(this,c,d.html())),d.domManip(a,b)});if(l&&(c=n.buildFragment(a,this[0].ownerDocument,!1,this),d=c.firstChild,1===c.childNodes.length&&(c=d),d)){for(f=n.map(ob(c,"script"),kb),g=f.length;l>j;j++)h=c,j!==o&&(h=n.clone(h,!0,!0),g&&n.merge(f,ob(h,"script"))),b.call(this[j],h,j);if(g)for(i=f[f.length-1].ownerDocument,n.map(f,lb),j=0;g>j;j++)h=f[j],fb.test(h.type||"")&&!L.access(h,"globalEval")&&n.contains(i,h)&&(h.src?n._evalUrl&&n._evalUrl(h.src):n.globalEval(h.textContent.replace(hb,"")))}return this}}),n.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){n.fn[a]=function(a){for(var c,d=[],e=n(a),g=e.length-1,h=0;g>=h;h++)c=h===g?this:this.clone(!0),n(e[h])[b](c),f.apply(d,c.get());return this.pushStack(d)}});var qb,rb={};function sb(b,c){var d,e=n(c.createElement(b)).appendTo(c.body),f=a.getDefaultComputedStyle&&(d=a.getDefaultComputedStyle(e[0]))?d.display:n.css(e[0],"display");return e.detach(),f}function tb(a){var b=l,c=rb[a];return c||(c=sb(a,b),"none"!==c&&c||(qb=(qb||n("<iframe frameborder='0' width='0' height='0'/>")).appendTo(b.documentElement),b=qb[0].contentDocument,b.write(),b.close(),c=sb(a,b),qb.detach()),rb[a]=c),c}var ub=/^margin/,vb=new RegExp("^("+Q+")(?!px)[a-z%]+$","i"),wb=function(b){return b.ownerDocument.defaultView.opener?b.ownerDocument.defaultView.getComputedStyle(b,null):a.getComputedStyle(b,null)};function xb(a,b,c){var d,e,f,g,h=a.style;return c=c||wb(a),c&&(g=c.getPropertyValue(b)||c[b]),c&&(""!==g||n.contains(a.ownerDocument,a)||(g=n.style(a,b)),vb.test(g)&&ub.test(b)&&(d=h.width,e=h.minWidth,f=h.maxWidth,h.minWidth=h.maxWidth=h.width=g,g=c.width,h.width=d,h.minWidth=e,h.maxWidth=f)),void 0!==g?g+"":g}function yb(a,b){return{get:function(){return a()?void delete this.get:(this.get=b).apply(this,arguments)}}}!function(){var b,c,d=l.documentElement,e=l.createElement("div"),f=l.createElement("div");if(f.style){f.style.backgroundClip="content-box",f.cloneNode(!0).style.backgroundClip="",k.clearCloneStyle="content-box"===f.style.backgroundClip,e.style.cssText="border:0;width:0;height:0;top:0;left:-9999px;margin-top:1px;position:absolute",e.appendChild(f);function g(){f.style.cssText="-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;display:block;margin-top:1%;top:1%;border:1px;padding:1px;width:4px;position:absolute",f.innerHTML="",d.appendChild(e);var g=a.getComputedStyle(f,null);b="1%"!==g.top,c="4px"===g.width,d.removeChild(e)}a.getComputedStyle&&n.extend(k,{pixelPosition:function(){return g(),b},boxSizingReliable:function(){return null==c&&g(),c},reliableMarginRight:function(){var b,c=f.appendChild(l.createElement("div"));return c.style.cssText=f.style.cssText="-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;display:block;margin:0;border:0;padding:0",c.style.marginRight=c.style.width="0",f.style.width="1px",d.appendChild(e),b=!parseFloat(a.getComputedStyle(c,null).marginRight),d.removeChild(e),f.removeChild(c),b}})}}(),n.swap=function(a,b,c,d){var e,f,g={};for(f in b)g[f]=a.style[f],a.style[f]=b[f];e=c.apply(a,d||[]);for(f in b)a.style[f]=g[f];return e};var zb=/^(none|table(?!-c[ea]).+)/,Ab=new RegExp("^("+Q+")(.*)$","i"),Bb=new RegExp("^([+-])=("+Q+")","i"),Cb={position:"absolute",visibility:"hidden",display:"block"},Db={letterSpacing:"0",fontWeight:"400"},Eb=["Webkit","O","Moz","ms"];function Fb(a,b){if(b in a)return b;var c=b[0].toUpperCase()+b.slice(1),d=b,e=Eb.length;while(e--)if(b=Eb[e]+c,b in 
a)return b;return d}function Gb(a,b,c){var d=Ab.exec(b);return d?Math.max(0,d[1]-(c||0))+(d[2]||"px"):b}function Hb(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===b?1:0,g=0;4>f;f+=2)"margin"===c&&(g+=n.css(a,c+R[f],!0,e)),d?("content"===c&&(g-=n.css(a,"padding"+R[f],!0,e)),"margin"!==c&&(g-=n.css(a,"border"+R[f]+"Width",!0,e))):(g+=n.css(a,"padding"+R[f],!0,e),"padding"!==c&&(g+=n.css(a,"border"+R[f]+"Width",!0,e)));return g}function Ib(a,b,c){var d=!0,e="width"===b?a.offsetWidth:a.offsetHeight,f=wb(a),g="border-box"===n.css(a,"boxSizing",!1,f);if(0>=e||null==e){if(e=xb(a,b,f),(0>e||null==e)&&(e=a.style[b]),vb.test(e))return e;d=g&&(k.boxSizingReliable()||e===a.style[b]),e=parseFloat(e)||0}return e+Hb(a,b,c||(g?"border":"content"),d,f)+"px"}function Jb(a,b){for(var c,d,e,f=[],g=0,h=a.length;h>g;g++)d=a[g],d.style&&(f[g]=L.get(d,"olddisplay"),c=d.style.display,b?(f[g]||"none"!==c||(d.style.display=""),""===d.style.display&&S(d)&&(f[g]=L.access(d,"olddisplay",tb(d.nodeName)))):(e=S(d),"none"===c&&e||L.set(d,"olddisplay",e?c:n.css(d,"display"))));for(g=0;h>g;g++)d=a[g],d.style&&(b&&"none"!==d.style.display&&""!==d.style.display||(d.style.display=b?f[g]||"":"none"));return a}n.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=xb(a,"opacity");return""===c?"1":c}}}},cssNumber:{columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":"cssFloat"},style:function(a,b,c,d){if(a&&3!==a.nodeType&&8!==a.nodeType&&a.style){var e,f,g,h=n.camelCase(b),i=a.style;return b=n.cssProps[h]||(n.cssProps[h]=Fb(i,h)),g=n.cssHooks[b]||n.cssHooks[h],void 0===c?g&&"get"in g&&void 0!==(e=g.get(a,!1,d))?e:i[b]:(f=typeof c,"string"===f&&(e=Bb.exec(c))&&(c=(e[1]+1)*e[2]+parseFloat(n.css(a,b)),f="number"),null!=c&&c===c&&("number"!==f||n.cssNumber[h]||(c+="px"),k.clearCloneStyle||""!==c||0!==b.indexOf("background")||(i[b]="inherit"),g&&"set"in g&&void 0===(c=g.set(a,c,d))||(i[b]=c)),void 0)}},css:function(a,b,c,d){var e,f,g,h=n.camelCase(b);return b=n.cssProps[h]||(n.cssProps[h]=Fb(a.style,h)),g=n.cssHooks[b]||n.cssHooks[h],g&&"get"in g&&(e=g.get(a,!0,c)),void 0===e&&(e=xb(a,b,d)),"normal"===e&&b in Db&&(e=Db[b]),""===c||c?(f=parseFloat(e),c===!0||n.isNumeric(f)?f||0:e):e}}),n.each(["height","width"],function(a,b){n.cssHooks[b]={get:function(a,c,d){return c?zb.test(n.css(a,"display"))&&0===a.offsetWidth?n.swap(a,Cb,function(){return Ib(a,b,d)}):Ib(a,b,d):void 0},set:function(a,c,d){var e=d&&wb(a);return Gb(a,c,d?Hb(a,b,d,"border-box"===n.css(a,"boxSizing",!1,e),e):0)}}}),n.cssHooks.marginRight=yb(k.reliableMarginRight,function(a,b){return b?n.swap(a,{display:"inline-block"},xb,[a,"marginRight"]):void 0}),n.each({margin:"",padding:"",border:"Width"},function(a,b){n.cssHooks[a+b]={expand:function(c){for(var d=0,e={},f="string"==typeof c?c.split(" "):[c];4>d;d++)e[a+R[d]+b]=f[d]||f[d-2]||f[0];return e}},ub.test(a)||(n.cssHooks[a+b].set=Gb)}),n.fn.extend({css:function(a,b){return J(this,function(a,b,c){var d,e,f={},g=0;if(n.isArray(b)){for(d=wb(a),e=b.length;e>g;g++)f[b[g]]=n.css(a,b[g],!1,d);return f}return void 0!==c?n.style(a,b,c):n.css(a,b)},a,b,arguments.length>1)},show:function(){return Jb(this,!0)},hide:function(){return Jb(this)},toggle:function(a){return"boolean"==typeof a?a?this.show():this.hide():this.each(function(){S(this)?n(this).show():n(this).hide()})}});function Kb(a,b,c,d,e){return new 
Kb.prototype.init(a,b,c,d,e)}n.Tween=Kb,Kb.prototype={constructor:Kb,init:function(a,b,c,d,e,f){this.elem=a,this.prop=c,this.easing=e||"swing",this.options=b,this.start=this.now=this.cur(),this.end=d,this.unit=f||(n.cssNumber[c]?"":"px")},cur:function(){var a=Kb.propHooks[this.prop];return a&&a.get?a.get(this):Kb.propHooks._default.get(this)},run:function(a){var b,c=Kb.propHooks[this.prop];return this.pos=b=this.options.duration?n.easing[this.easing](a,this.options.duration*a,0,1,this.options.duration):a,this.now=(this.end-this.start)*b+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),c&&c.set?c.set(this):Kb.propHooks._default.set(this),this}},Kb.prototype.init.prototype=Kb.prototype,Kb.propHooks={_default:{get:function(a){var b;return null==a.elem[a.prop]||a.elem.style&&null!=a.elem.style[a.prop]?(b=n.css(a.elem,a.prop,""),b&&"auto"!==b?b:0):a.elem[a.prop]},set:function(a){n.fx.step[a.prop]?n.fx.step[a.prop](a):a.elem.style&&(null!=a.elem.style[n.cssProps[a.prop]]||n.cssHooks[a.prop])?n.style(a.elem,a.prop,a.now+a.unit):a.elem[a.prop]=a.now}}},Kb.propHooks.scrollTop=Kb.propHooks.scrollLeft={set:function(a){a.elem.nodeType&&a.elem.parentNode&&(a.elem[a.prop]=a.now)}},n.easing={linear:function(a){return a},swing:function(a){return.5-Math.cos(a*Math.PI)/2}},n.fx=Kb.prototype.init,n.fx.step={};var Lb,Mb,Nb=/^(?:toggle|show|hide)$/,Ob=new RegExp("^(?:([+-])=|)("+Q+")([a-z%]*)$","i"),Pb=/queueHooks$/,Qb=[Vb],Rb={"*":[function(a,b){var c=this.createTween(a,b),d=c.cur(),e=Ob.exec(b),f=e&&e[3]||(n.cssNumber[a]?"":"px"),g=(n.cssNumber[a]||"px"!==f&&+d)&&Ob.exec(n.css(c.elem,a)),h=1,i=20;if(g&&g[3]!==f){f=f||g[3],e=e||[],g=+d||1;do h=h||".5",g/=h,n.style(c.elem,a,g+f);while(h!==(h=c.cur()/d)&&1!==h&&--i)}return e&&(g=c.start=+g||+d||0,c.unit=f,c.end=e[1]?g+(e[1]+1)*e[2]:+e[2]),c}]};function Sb(){return setTimeout(function(){Lb=void 0}),Lb=n.now()}function Tb(a,b){var c,d=0,e={height:a};for(b=b?1:0;4>d;d+=2-b)c=R[d],e["margin"+c]=e["padding"+c]=a;return b&&(e.opacity=e.width=a),e}function Ub(a,b,c){for(var d,e=(Rb[b]||[]).concat(Rb["*"]),f=0,g=e.length;g>f;f++)if(d=e[f].call(c,b,a))return d}function Vb(a,b,c){var d,e,f,g,h,i,j,k,l=this,m={},o=a.style,p=a.nodeType&&S(a),q=L.get(a,"fxshow");c.queue||(h=n._queueHooks(a,"fx"),null==h.unqueued&&(h.unqueued=0,i=h.empty.fire,h.empty.fire=function(){h.unqueued||i()}),h.unqueued++,l.always(function(){l.always(function(){h.unqueued--,n.queue(a,"fx").length||h.empty.fire()})})),1===a.nodeType&&("height"in b||"width"in b)&&(c.overflow=[o.overflow,o.overflowX,o.overflowY],j=n.css(a,"display"),k="none"===j?L.get(a,"olddisplay")||tb(a.nodeName):j,"inline"===k&&"none"===n.css(a,"float")&&(o.display="inline-block")),c.overflow&&(o.overflow="hidden",l.always(function(){o.overflow=c.overflow[0],o.overflowX=c.overflow[1],o.overflowY=c.overflow[2]}));for(d in b)if(e=b[d],Nb.exec(e)){if(delete b[d],f=f||"toggle"===e,e===(p?"hide":"show")){if("show"!==e||!q||void 0===q[d])continue;p=!0}m[d]=q&&q[d]||n.style(a,d)}else j=void 0;if(n.isEmptyObject(m))"inline"===("none"===j?tb(a.nodeName):j)&&(o.display=j);else{q?"hidden"in q&&(p=q.hidden):q=L.access(a,"fxshow",{}),f&&(q.hidden=!p),p?n(a).show():l.done(function(){n(a).hide()}),l.done(function(){var b;L.remove(a,"fxshow");for(b in m)n.style(a,b,m[b])});for(d in m)g=Ub(p?q[d]:0,d,l),d in q||(q[d]=g.start,p&&(g.end=g.start,g.start="width"===d||"height"===d?1:0))}}function Wb(a,b){var c,d,e,f,g;for(c in a)if(d=n.camelCase(c),e=b[d],f=a[c],n.isArray(f)&&(e=f[1],f=a[c]=f[0]),c!==d&&(a[d]=f,delete 
a[c]),g=n.cssHooks[d],g&&"expand"in g){f=g.expand(f),delete a[d];for(c in f)c in a||(a[c]=f[c],b[c]=e)}else b[d]=e}function Xb(a,b,c){var d,e,f=0,g=Qb.length,h=n.Deferred().always(function(){delete i.elem}),i=function(){if(e)return!1;for(var b=Lb||Sb(),c=Math.max(0,j.startTime+j.duration-b),d=c/j.duration||0,f=1-d,g=0,i=j.tweens.length;i>g;g++)j.tweens[g].run(f);return h.notifyWith(a,[j,f,c]),1>f&&i?c:(h.resolveWith(a,[j]),!1)},j=h.promise({elem:a,props:n.extend({},b),opts:n.extend(!0,{specialEasing:{}},c),originalProperties:b,originalOptions:c,startTime:Lb||Sb(),duration:c.duration,tweens:[],createTween:function(b,c){var d=n.Tween(a,j.opts,b,c,j.opts.specialEasing[b]||j.opts.easing);return j.tweens.push(d),d},stop:function(b){var c=0,d=b?j.tweens.length:0;if(e)return this;for(e=!0;d>c;c++)j.tweens[c].run(1);return b?h.resolveWith(a,[j,b]):h.rejectWith(a,[j,b]),this}}),k=j.props;for(Wb(k,j.opts.specialEasing);g>f;f++)if(d=Qb[f].call(j,a,k,j.opts))return d;return n.map(k,Ub,j),n.isFunction(j.opts.start)&&j.opts.start.call(a,j),n.fx.timer(n.extend(i,{elem:a,anim:j,queue:j.opts.queue})),j.progress(j.opts.progress).done(j.opts.done,j.opts.complete).fail(j.opts.fail).always(j.opts.always)}n.Animation=n.extend(Xb,{tweener:function(a,b){n.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");for(var c,d=0,e=a.length;e>d;d++)c=a[d],Rb[c]=Rb[c]||[],Rb[c].unshift(b)},prefilter:function(a,b){b?Qb.unshift(a):Qb.push(a)}}),n.speed=function(a,b,c){var d=a&&"object"==typeof a?n.extend({},a):{complete:c||!c&&b||n.isFunction(a)&&a,duration:a,easing:c&&b||b&&!n.isFunction(b)&&b};return d.duration=n.fx.off?0:"number"==typeof d.duration?d.duration:d.duration in n.fx.speeds?n.fx.speeds[d.duration]:n.fx.speeds._default,(null==d.queue||d.queue===!0)&&(d.queue="fx"),d.old=d.complete,d.complete=function(){n.isFunction(d.old)&&d.old.call(this),d.queue&&n.dequeue(this,d.queue)},d},n.fn.extend({fadeTo:function(a,b,c,d){return this.filter(S).css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){var e=n.isEmptyObject(a),f=n.speed(b,c,d),g=function(){var b=Xb(this,n.extend({},a),f);(e||L.get(this,"finish"))&&b.stop(!0)};return g.finish=g,e||f.queue===!1?this.each(g):this.queue(f.queue,g)},stop:function(a,b,c){var d=function(a){var b=a.stop;delete a.stop,b(c)};return"string"!=typeof a&&(c=b,b=a,a=void 0),b&&a!==!1&&this.queue(a||"fx",[]),this.each(function(){var b=!0,e=null!=a&&a+"queueHooks",f=n.timers,g=L.get(this);if(e)g[e]&&g[e].stop&&d(g[e]);else for(e in g)g[e]&&g[e].stop&&Pb.test(e)&&d(g[e]);for(e=f.length;e--;)f[e].elem!==this||null!=a&&f[e].queue!==a||(f[e].anim.stop(c),b=!1,f.splice(e,1));(b||!c)&&n.dequeue(this,a)})},finish:function(a){return a!==!1&&(a=a||"fx"),this.each(function(){var b,c=L.get(this),d=c[a+"queue"],e=c[a+"queueHooks"],f=n.timers,g=d?d.length:0;for(c.finish=!0,n.queue(this,a,[]),e&&e.stop&&e.stop.call(this,!0),b=f.length;b--;)f[b].elem===this&&f[b].queue===a&&(f[b].anim.stop(!0),f.splice(b,1));for(b=0;g>b;b++)d[b]&&d[b].finish&&d[b].finish.call(this);delete c.finish})}}),n.each(["toggle","show","hide"],function(a,b){var c=n.fn[b];n.fn[b]=function(a,d,e){return null==a||"boolean"==typeof a?c.apply(this,arguments):this.animate(Tb(b,!0),a,d,e)}}),n.each({slideDown:Tb("show"),slideUp:Tb("hide"),slideToggle:Tb("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){n.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),n.timers=[],n.fx.tick=function(){var 
a,b=0,c=n.timers;for(Lb=n.now();b<c.length;b++)a=c[b],a()||c[b]!==a||c.splice(b--,1);c.length||n.fx.stop(),Lb=void 0},n.fx.timer=function(a){n.timers.push(a),a()?n.fx.start():n.timers.pop()},n.fx.interval=13,n.fx.start=function(){Mb||(Mb=setInterval(n.fx.tick,n.fx.interval))},n.fx.stop=function(){clearInterval(Mb),Mb=null},n.fx.speeds={slow:600,fast:200,_default:400},n.fn.delay=function(a,b){return a=n.fx?n.fx.speeds[a]||a:a,b=b||"fx",this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},function(){var a=l.createElement("input"),b=l.createElement("select"),c=b.appendChild(l.createElement("option"));a.type="checkbox",k.checkOn=""!==a.value,k.optSelected=c.selected,b.disabled=!0,k.optDisabled=!c.disabled,a=l.createElement("input"),a.value="t",a.type="radio",k.radioValue="t"===a.value}();var Yb,Zb,$b=n.expr.attrHandle;n.fn.extend({attr:function(a,b){return J(this,n.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){n.removeAttr(this,a)})}}),n.extend({attr:function(a,b,c){var d,e,f=a.nodeType;if(a&&3!==f&&8!==f&&2!==f)return typeof a.getAttribute===U?n.prop(a,b,c):(1===f&&n.isXMLDoc(a)||(b=b.toLowerCase(),d=n.attrHooks[b]||(n.expr.match.bool.test(b)?Zb:Yb)),void 0===c?d&&"get"in d&&null!==(e=d.get(a,b))?e:(e=n.find.attr(a,b),null==e?void 0:e):null!==c?d&&"set"in d&&void 0!==(e=d.set(a,c,b))?e:(a.setAttribute(b,c+""),c):void n.removeAttr(a,b))
+},removeAttr:function(a,b){var c,d,e=0,f=b&&b.match(E);if(f&&1===a.nodeType)while(c=f[e++])d=n.propFix[c]||c,n.expr.match.bool.test(c)&&(a[d]=!1),a.removeAttribute(c)},attrHooks:{type:{set:function(a,b){if(!k.radioValue&&"radio"===b&&n.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}}}),Zb={set:function(a,b,c){return b===!1?n.removeAttr(a,c):a.setAttribute(c,c),c}},n.each(n.expr.match.bool.source.match(/\w+/g),function(a,b){var c=$b[b]||n.find.attr;$b[b]=function(a,b,d){var e,f;return d||(f=$b[b],$b[b]=e,e=null!=c(a,b,d)?b.toLowerCase():null,$b[b]=f),e}});var _b=/^(?:input|select|textarea|button)$/i;n.fn.extend({prop:function(a,b){return J(this,n.prop,a,b,arguments.length>1)},removeProp:function(a){return this.each(function(){delete this[n.propFix[a]||a]})}}),n.extend({propFix:{"for":"htmlFor","class":"className"},prop:function(a,b,c){var d,e,f,g=a.nodeType;if(a&&3!==g&&8!==g&&2!==g)return f=1!==g||!n.isXMLDoc(a),f&&(b=n.propFix[b]||b,e=n.propHooks[b]),void 0!==c?e&&"set"in e&&void 0!==(d=e.set(a,c,b))?d:a[b]=c:e&&"get"in e&&null!==(d=e.get(a,b))?d:a[b]},propHooks:{tabIndex:{get:function(a){return a.hasAttribute("tabindex")||_b.test(a.nodeName)||a.href?a.tabIndex:-1}}}}),k.optSelected||(n.propHooks.selected={get:function(a){var b=a.parentNode;return b&&b.parentNode&&b.parentNode.selectedIndex,null}}),n.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){n.propFix[this.toLowerCase()]=this});var ac=/[\t\r\n\f]/g;n.fn.extend({addClass:function(a){var b,c,d,e,f,g,h="string"==typeof a&&a,i=0,j=this.length;if(n.isFunction(a))return this.each(function(b){n(this).addClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(E)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(ac," "):" ")){f=0;while(e=b[f++])d.indexOf(" "+e+" ")<0&&(d+=e+" ");g=n.trim(d),c.className!==g&&(c.className=g)}return this},removeClass:function(a){var b,c,d,e,f,g,h=0===arguments.length||"string"==typeof a&&a,i=0,j=this.length;if(n.isFunction(a))return this.each(function(b){n(this).removeClass(a.call(this,b,this.className))});if(h)for(b=(a||"").match(E)||[];j>i;i++)if(c=this[i],d=1===c.nodeType&&(c.className?(" "+c.className+" ").replace(ac," "):"")){f=0;while(e=b[f++])while(d.indexOf(" "+e+" ")>=0)d=d.replace(" "+e+" "," ");g=a?n.trim(d):"",c.className!==g&&(c.className=g)}return this},toggleClass:function(a,b){var c=typeof a;return"boolean"==typeof b&&"string"===c?b?this.addClass(a):this.removeClass(a):this.each(n.isFunction(a)?function(c){n(this).toggleClass(a.call(this,c,this.className,b),b)}:function(){if("string"===c){var b,d=0,e=n(this),f=a.match(E)||[];while(b=f[d++])e.hasClass(b)?e.removeClass(b):e.addClass(b)}else(c===U||"boolean"===c)&&(this.className&&L.set(this,"__className__",this.className),this.className=this.className||a===!1?"":L.get(this,"__className__")||"")})},hasClass:function(a){for(var b=" "+a+" ",c=0,d=this.length;d>c;c++)if(1===this[c].nodeType&&(" "+this[c].className+" ").replace(ac," ").indexOf(b)>=0)return!0;return!1}});var bc=/\r/g;n.fn.extend({val:function(a){var b,c,d,e=this[0];{if(arguments.length)return d=n.isFunction(a),this.each(function(c){var e;1===this.nodeType&&(e=d?a.call(this,c,n(this).val()):a,null==e?e="":"number"==typeof e?e+="":n.isArray(e)&&(e=n.map(e,function(a){return null==a?"":a+""})),b=n.valHooks[this.type]||n.valHooks[this.nodeName.toLowerCase()],b&&"set"in b&&void 
0!==b.set(this,e,"value")||(this.value=e))});if(e)return b=n.valHooks[e.type]||n.valHooks[e.nodeName.toLowerCase()],b&&"get"in b&&void 0!==(c=b.get(e,"value"))?c:(c=e.value,"string"==typeof c?c.replace(bc,""):null==c?"":c)}}}),n.extend({valHooks:{option:{get:function(a){var b=n.find.attr(a,"value");return null!=b?b:n.trim(n.text(a))}},select:{get:function(a){for(var b,c,d=a.options,e=a.selectedIndex,f="select-one"===a.type||0>e,g=f?null:[],h=f?e+1:d.length,i=0>e?h:f?e:0;h>i;i++)if(c=d[i],!(!c.selected&&i!==e||(k.optDisabled?c.disabled:null!==c.getAttribute("disabled"))||c.parentNode.disabled&&n.nodeName(c.parentNode,"optgroup"))){if(b=n(c).val(),f)return b;g.push(b)}return g},set:function(a,b){var c,d,e=a.options,f=n.makeArray(b),g=e.length;while(g--)d=e[g],(d.selected=n.inArray(d.value,f)>=0)&&(c=!0);return c||(a.selectedIndex=-1),f}}}}),n.each(["radio","checkbox"],function(){n.valHooks[this]={set:function(a,b){return n.isArray(b)?a.checked=n.inArray(n(a).val(),b)>=0:void 0}},k.checkOn||(n.valHooks[this].get=function(a){return null===a.getAttribute("value")?"on":a.value})}),n.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){n.fn[b]=function(a,c){return arguments.length>0?this.on(b,null,a,c):this.trigger(b)}}),n.fn.extend({hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return 1===arguments.length?this.off(a,"**"):this.off(b,a||"**",c)}});var cc=n.now(),dc=/\?/;n.parseJSON=function(a){return JSON.parse(a+"")},n.parseXML=function(a){var b,c;if(!a||"string"!=typeof a)return null;try{c=new DOMParser,b=c.parseFromString(a,"text/xml")}catch(d){b=void 0}return(!b||b.getElementsByTagName("parsererror").length)&&n.error("Invalid XML: "+a),b};var ec=/#.*$/,fc=/([?&])_=[^&]*/,gc=/^(.*?):[ \t]*([^\r\n]*)$/gm,hc=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,ic=/^(?:GET|HEAD)$/,jc=/^\/\//,kc=/^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/,lc={},mc={},nc="*/".concat("*"),oc=a.location.href,pc=kc.exec(oc.toLowerCase())||[];function qc(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var d,e=0,f=b.toLowerCase().match(E)||[];if(n.isFunction(c))while(d=f[e++])"+"===d[0]?(d=d.slice(1)||"*",(a[d]=a[d]||[]).unshift(c)):(a[d]=a[d]||[]).push(c)}}function rc(a,b,c,d){var e={},f=a===mc;function g(h){var i;return e[h]=!0,n.each(a[h]||[],function(a,h){var j=h(b,c,d);return"string"!=typeof j||f||e[j]?f?!(i=j):void 0:(b.dataTypes.unshift(j),g(j),!1)}),i}return g(b.dataTypes[0])||!e["*"]&&g("*")}function sc(a,b){var c,d,e=n.ajaxSettings.flatOptions||{};for(c in b)void 0!==b[c]&&((e[c]?a:d||(d={}))[c]=b[c]);return d&&n.extend(!0,a,d),a}function tc(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[0])i.shift(),void 0===d&&(d=a.mimeType||b.getResponseHeader("Content-Type"));if(d)for(e in h)if(h[e]&&h[e].test(d)){i.unshift(e);break}if(i[0]in c)f=i[0];else{for(e in c){if(!i[0]||a.converters[e+" "+i[0]]){f=e;break}g||(g=e)}f=f||g}return f?(f!==i[0]&&i.unshift(f),c[f]):void 0}function uc(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])for(g in 
a.converters)j[g.toLowerCase()]=a.converters[g];f=k.shift();while(f)if(a.responseFields[f]&&(c[a.responseFields[f]]=b),!i&&d&&a.dataFilter&&(b=a.dataFilter(b,a.dataType)),i=f,f=k.shift())if("*"===f)f=i;else if("*"!==i&&i!==f){if(g=j[i+" "+f]||j["* "+f],!g)for(e in j)if(h=e.split(" "),h[1]===f&&(g=j[i+" "+h[0]]||j["* "+h[0]])){g===!0?g=j[e]:j[e]!==!0&&(f=h[0],k.unshift(h[1]));break}if(g!==!0)if(g&&a["throws"])b=g(b);else try{b=g(b)}catch(l){return{state:"parsererror",error:g?l:"No conversion from "+i+" to "+f}}}return{state:"success",data:b}}n.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:oc,type:"GET",isLocal:hc.test(pc[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":nc,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":n.parseJSON,"text xml":n.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(a,b){return b?sc(sc(a,n.ajaxSettings),b):sc(n.ajaxSettings,a)},ajaxPrefilter:qc(lc),ajaxTransport:qc(mc),ajax:function(a,b){"object"==typeof a&&(b=a,a=void 0),b=b||{};var c,d,e,f,g,h,i,j,k=n.ajaxSetup({},b),l=k.context||k,m=k.context&&(l.nodeType||l.jquery)?n(l):n.event,o=n.Deferred(),p=n.Callbacks("once memory"),q=k.statusCode||{},r={},s={},t=0,u="canceled",v={readyState:0,getResponseHeader:function(a){var b;if(2===t){if(!f){f={};while(b=gc.exec(e))f[b[1].toLowerCase()]=b[2]}b=f[a.toLowerCase()]}return null==b?null:b},getAllResponseHeaders:function(){return 2===t?e:null},setRequestHeader:function(a,b){var c=a.toLowerCase();return t||(a=s[c]=s[c]||a,r[a]=b),this},overrideMimeType:function(a){return t||(k.mimeType=a),this},statusCode:function(a){var b;if(a)if(2>t)for(b in a)q[b]=[q[b],a[b]];else v.always(a[v.status]);return this},abort:function(a){var b=a||u;return c&&c.abort(b),x(0,b),this}};if(o.promise(v).complete=p.add,v.success=v.done,v.error=v.fail,k.url=((a||k.url||oc)+"").replace(ec,"").replace(jc,pc[1]+"//"),k.type=b.method||b.type||k.method||k.type,k.dataTypes=n.trim(k.dataType||"*").toLowerCase().match(E)||[""],null==k.crossDomain&&(h=kc.exec(k.url.toLowerCase()),k.crossDomain=!(!h||h[1]===pc[1]&&h[2]===pc[2]&&(h[3]||("http:"===h[1]?"80":"443"))===(pc[3]||("http:"===pc[1]?"80":"443")))),k.data&&k.processData&&"string"!=typeof k.data&&(k.data=n.param(k.data,k.traditional)),rc(lc,k,b,v),2===t)return v;i=n.event&&k.global,i&&0===n.active++&&n.event.trigger("ajaxStart"),k.type=k.type.toUpperCase(),k.hasContent=!ic.test(k.type),d=k.url,k.hasContent||(k.data&&(d=k.url+=(dc.test(d)?"&":"?")+k.data,delete k.data),k.cache===!1&&(k.url=fc.test(d)?d.replace(fc,"$1_="+cc++):d+(dc.test(d)?"&":"?")+"_="+cc++)),k.ifModified&&(n.lastModified[d]&&v.setRequestHeader("If-Modified-Since",n.lastModified[d]),n.etag[d]&&v.setRequestHeader("If-None-Match",n.etag[d])),(k.data&&k.hasContent&&k.contentType!==!1||b.contentType)&&v.setRequestHeader("Content-Type",k.contentType),v.setRequestHeader("Accept",k.dataTypes[0]&&k.accepts[k.dataTypes[0]]?k.accepts[k.dataTypes[0]]+("*"!==k.dataTypes[0]?", "+nc+"; q=0.01":""):k.accepts["*"]);for(j in k.headers)v.setRequestHeader(j,k.headers[j]);if(k.beforeSend&&(k.beforeSend.call(l,v,k)===!1||2===t))return v.abort();u="abort";for(j 
in{success:1,error:1,complete:1})v[j](k[j]);if(c=rc(mc,k,b,v)){v.readyState=1,i&&m.trigger("ajaxSend",[v,k]),k.async&&k.timeout>0&&(g=setTimeout(function(){v.abort("timeout")},k.timeout));try{t=1,c.send(r,x)}catch(w){if(!(2>t))throw w;x(-1,w)}}else x(-1,"No Transport");function x(a,b,f,h){var j,r,s,u,w,x=b;2!==t&&(t=2,g&&clearTimeout(g),c=void 0,e=h||"",v.readyState=a>0?4:0,j=a>=200&&300>a||304===a,f&&(u=tc(k,v,f)),u=uc(k,u,v,j),j?(k.ifModified&&(w=v.getResponseHeader("Last-Modified"),w&&(n.lastModified[d]=w),w=v.getResponseHeader("etag"),w&&(n.etag[d]=w)),204===a||"HEAD"===k.type?x="nocontent":304===a?x="notmodified":(x=u.state,r=u.data,s=u.error,j=!s)):(s=x,(a||!x)&&(x="error",0>a&&(a=0))),v.status=a,v.statusText=(b||x)+"",j?o.resolveWith(l,[r,x,v]):o.rejectWith(l,[v,x,s]),v.statusCode(q),q=void 0,i&&m.trigger(j?"ajaxSuccess":"ajaxError",[v,k,j?r:s]),p.fireWith(l,[v,x]),i&&(m.trigger("ajaxComplete",[v,k]),--n.active||n.event.trigger("ajaxStop")))}return v},getJSON:function(a,b,c){return n.get(a,b,c,"json")},getScript:function(a,b){return n.get(a,void 0,b,"script")}}),n.each(["get","post"],function(a,b){n[b]=function(a,c,d,e){return n.isFunction(c)&&(e=e||d,d=c,c=void 0),n.ajax({url:a,type:b,dataType:e,data:c,success:d})}}),n._evalUrl=function(a){return n.ajax({url:a,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0})},n.fn.extend({wrapAll:function(a){var b;return n.isFunction(a)?this.each(function(b){n(this).wrapAll(a.call(this,b))}):(this[0]&&(b=n(a,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstElementChild)a=a.firstElementChild;return a}).append(this)),this)},wrapInner:function(a){return this.each(n.isFunction(a)?function(b){n(this).wrapInner(a.call(this,b))}:function(){var b=n(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=n.isFunction(a);return this.each(function(c){n(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){n.nodeName(this,"body")||n(this).replaceWith(this.childNodes)}).end()}}),n.expr.filters.hidden=function(a){return a.offsetWidth<=0&&a.offsetHeight<=0},n.expr.filters.visible=function(a){return!n.expr.filters.hidden(a)};var vc=/%20/g,wc=/\[\]$/,xc=/\r?\n/g,yc=/^(?:submit|button|image|reset|file)$/i,zc=/^(?:input|select|textarea|keygen)/i;function Ac(a,b,c,d){var e;if(n.isArray(b))n.each(b,function(b,e){c||wc.test(a)?d(a,e):Ac(a+"["+("object"==typeof e?b:"")+"]",e,c,d)});else if(c||"object"!==n.type(b))d(a,b);else for(e in b)Ac(a+"["+e+"]",b[e],c,d)}n.param=function(a,b){var c,d=[],e=function(a,b){b=n.isFunction(b)?b():null==b?"":b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};if(void 0===b&&(b=n.ajaxSettings&&n.ajaxSettings.traditional),n.isArray(a)||a.jquery&&!n.isPlainObject(a))n.each(a,function(){e(this.name,this.value)});else for(c in a)Ac(c,a[c],b,e);return d.join("&").replace(vc,"+")},n.fn.extend({serialize:function(){return n.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var a=n.prop(this,"elements");return a?n.makeArray(a):this}).filter(function(){var a=this.type;return this.name&&!n(this).is(":disabled")&&zc.test(this.nodeName)&&!yc.test(a)&&(this.checked||!T.test(a))}).map(function(a,b){var c=n(this).val();return null==c?null:n.isArray(c)?n.map(c,function(a){return{name:b.name,value:a.replace(xc,"\r\n")}}):{name:b.name,value:c.replace(xc,"\r\n")}}).get()}}),n.ajaxSettings.xhr=function(){try{return new XMLHttpRequest}catch(a){}};var 
Bc=0,Cc={},Dc={0:200,1223:204},Ec=n.ajaxSettings.xhr();a.attachEvent&&a.attachEvent("onunload",function(){for(var a in Cc)Cc[a]()}),k.cors=!!Ec&&"withCredentials"in Ec,k.ajax=Ec=!!Ec,n.ajaxTransport(function(a){var b;return k.cors||Ec&&!a.crossDomain?{send:function(c,d){var e,f=a.xhr(),g=++Bc;if(f.open(a.type,a.url,a.async,a.username,a.password),a.xhrFields)for(e in a.xhrFields)f[e]=a.xhrFields[e];a.mimeType&&f.overrideMimeType&&f.overrideMimeType(a.mimeType),a.crossDomain||c["X-Requested-With"]||(c["X-Requested-With"]="XMLHttpRequest");for(e in c)f.setRequestHeader(e,c[e]);b=function(a){return function(){b&&(delete Cc[g],b=f.onload=f.onerror=null,"abort"===a?f.abort():"error"===a?d(f.status,f.statusText):d(Dc[f.status]||f.status,f.statusText,"string"==typeof f.responseText?{text:f.responseText}:void 0,f.getAllResponseHeaders()))}},f.onload=b(),f.onerror=b("error"),b=Cc[g]=b("abort");try{f.send(a.hasContent&&a.data||null)}catch(h){if(b)throw h}},abort:function(){b&&b()}}:void 0}),n.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/(?:java|ecma)script/},converters:{"text script":function(a){return n.globalEval(a),a}}}),n.ajaxPrefilter("script",function(a){void 0===a.cache&&(a.cache=!1),a.crossDomain&&(a.type="GET")}),n.ajaxTransport("script",function(a){if(a.crossDomain){var b,c;return{send:function(d,e){b=n("<script>").prop({async:!0,charset:a.scriptCharset,src:a.url}).on("load error",c=function(a){b.remove(),c=null,a&&e("error"===a.type?404:200,a.type)}),l.head.appendChild(b[0])},abort:function(){c&&c()}}}});var Fc=[],Gc=/(=)\?(?=&|$)|\?\?/;n.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=Fc.pop()||n.expando+"_"+cc++;return this[a]=!0,a}}),n.ajaxPrefilter("json jsonp",function(b,c,d){var e,f,g,h=b.jsonp!==!1&&(Gc.test(b.url)?"url":"string"==typeof b.data&&!(b.contentType||"").indexOf("application/x-www-form-urlencoded")&&Gc.test(b.data)&&"data");return h||"jsonp"===b.dataTypes[0]?(e=b.jsonpCallback=n.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,h?b[h]=b[h].replace(Gc,"$1"+e):b.jsonp!==!1&&(b.url+=(dc.test(b.url)?"&":"?")+b.jsonp+"="+e),b.converters["script json"]=function(){return g||n.error(e+" was not called"),g[0]},b.dataTypes[0]="json",f=a[e],a[e]=function(){g=arguments},d.always(function(){a[e]=f,b[e]&&(b.jsonpCallback=c.jsonpCallback,Fc.push(e)),g&&n.isFunction(f)&&f(g[0]),g=f=void 0}),"script"):void 0}),n.parseHTML=function(a,b,c){if(!a||"string"!=typeof a)return null;"boolean"==typeof b&&(c=b,b=!1),b=b||l;var d=v.exec(a),e=!c&&[];return d?[b.createElement(d[1])]:(d=n.buildFragment([a],b,e),e&&e.length&&n(e).remove(),n.merge([],d.childNodes))};var Hc=n.fn.load;n.fn.load=function(a,b,c){if("string"!=typeof a&&Hc)return Hc.apply(this,arguments);var d,e,f,g=this,h=a.indexOf(" ");return h>=0&&(d=n.trim(a.slice(h)),a=a.slice(0,h)),n.isFunction(b)?(c=b,b=void 0):b&&"object"==typeof b&&(e="POST"),g.length>0&&n.ajax({url:a,type:e,dataType:"html",data:b}).done(function(a){f=arguments,g.html(d?n("<div>").append(n.parseHTML(a)).find(d):a)}).complete(c&&function(a,b){g.each(c,f||[a.responseText,b,a])}),this},n.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(a,b){n.fn[b]=function(a){return this.on(b,a)}}),n.expr.filters.animated=function(a){return n.grep(n.timers,function(b){return a===b.elem}).length};var Ic=a.document.documentElement;function Jc(a){return 
n.isWindow(a)?a:9===a.nodeType&&a.defaultView}n.offset={setOffset:function(a,b,c){var d,e,f,g,h,i,j,k=n.css(a,"position"),l=n(a),m={};"static"===k&&(a.style.position="relative"),h=l.offset(),f=n.css(a,"top"),i=n.css(a,"left"),j=("absolute"===k||"fixed"===k)&&(f+i).indexOf("auto")>-1,j?(d=l.position(),g=d.top,e=d.left):(g=parseFloat(f)||0,e=parseFloat(i)||0),n.isFunction(b)&&(b=b.call(a,c,h)),null!=b.top&&(m.top=b.top-h.top+g),null!=b.left&&(m.left=b.left-h.left+e),"using"in b?b.using.call(a,m):l.css(m)}},n.fn.extend({offset:function(a){if(arguments.length)return void 0===a?this:this.each(function(b){n.offset.setOffset(this,a,b)});var b,c,d=this[0],e={top:0,left:0},f=d&&d.ownerDocument;if(f)return b=f.documentElement,n.contains(b,d)?(typeof d.getBoundingClientRect!==U&&(e=d.getBoundingClientRect()),c=Jc(f),{top:e.top+c.pageYOffset-b.clientTop,left:e.left+c.pageXOffset-b.clientLeft}):e},position:function(){if(this[0]){var a,b,c=this[0],d={top:0,left:0};return"fixed"===n.css(c,"position")?b=c.getBoundingClientRect():(a=this.offsetParent(),b=this.offset(),n.nodeName(a[0],"html")||(d=a.offset()),d.top+=n.css(a[0],"borderTopWidth",!0),d.left+=n.css(a[0],"borderLeftWidth",!0)),{top:b.top-d.top-n.css(c,"marginTop",!0),left:b.left-d.left-n.css(c,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||Ic;while(a&&!n.nodeName(a,"html")&&"static"===n.css(a,"position"))a=a.offsetParent;return a||Ic})}}),n.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(b,c){var d="pageYOffset"===c;n.fn[b]=function(e){return J(this,function(b,e,f){var g=Jc(b);return void 0===f?g?g[c]:b[e]:void(g?g.scrollTo(d?a.pageXOffset:f,d?f:a.pageYOffset):b[e]=f)},b,e,arguments.length,null)}}),n.each(["top","left"],function(a,b){n.cssHooks[b]=yb(k.pixelPosition,function(a,c){return c?(c=xb(a,b),vb.test(c)?n(a).position()[b]+"px":c):void 0})}),n.each({Height:"height",Width:"width"},function(a,b){n.each({padding:"inner"+a,content:b,"":"outer"+a},function(c,d){n.fn[d]=function(d,e){var f=arguments.length&&(c||"boolean"!=typeof d),g=c||(d===!0||e===!0?"margin":"border");return J(this,function(b,c,d){var e;return n.isWindow(b)?b.document.documentElement["client"+a]:9===b.nodeType?(e=b.documentElement,Math.max(b.body["scroll"+a],e["scroll"+a],b.body["offset"+a],e["offset"+a],e["client"+a])):void 0===d?n.css(b,c,g):n.style(b,c,d,g)},b,f?d:void 0,f,null)}})}),n.fn.size=function(){return this.length},n.fn.andSelf=n.fn.addBack,"function"==typeof define&&define.amd&&define("jquery",[],function(){return n});var Kc=a.jQuery,Lc=a.$;return n.noConflict=function(b){return a.$===n&&(a.$=Lc),b&&a.jQuery===n&&(a.jQuery=Kc),n},typeof b===U&&(a.jQuery=a.$=n),n});
diff --git a/static/client/register/js/recaptcha_ajax.js b/synapse/static/client/register/js/recaptcha_ajax.js
index d0e71e5b88..d0e71e5b88 100644
--- a/static/client/register/js/recaptcha_ajax.js
+++ b/synapse/static/client/register/js/recaptcha_ajax.js
diff --git a/static/client/register/js/register.js b/synapse/static/client/register/js/register.js
index b62763a293..b62763a293 100644
--- a/static/client/register/js/register.js
+++ b/synapse/static/client/register/js/register.js
diff --git a/static/client/register/register_config.sample.js b/synapse/static/client/register/register_config.sample.js
index c7ea180dee..c7ea180dee 100644
--- a/static/client/register/register_config.sample.js
+++ b/synapse/static/client/register/register_config.sample.js
diff --git a/static/client/register/style.css b/synapse/static/client/register/style.css
index 5a7b6eebf2..5a7b6eebf2 100644
--- a/static/client/register/style.css
+++ b/synapse/static/client/register/style.css
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index a1bd9c4ce9..e7443f2838 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -41,6 +41,7 @@ from .end_to_end_keys import EndToEndKeyStore
from .receipts import ReceiptsStore
from .search import SearchStore
+from .tags import TagsStore
import logging
@@ -71,6 +72,7 @@ class DataStore(RoomMemberStore, RoomStore,
ReceiptsStore,
EndToEndKeyStore,
SearchStore,
+ TagsStore,
):
def __init__(self, hs):
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
new file mode 100644
index 0000000000..45fccc2e5e
--- /dev/null
+++ b/synapse/storage/background_updates.py
@@ -0,0 +1,256 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import SQLBaseStore
+
+from twisted.internet import defer
+
+import ujson as json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class BackgroundUpdatePerformance(object):
+    """Tracks how long a background update is taking to update its items"""
+
+ def __init__(self, name):
+ self.name = name
+ self.total_item_count = 0
+ self.total_duration_ms = 0
+ self.avg_item_count = 0
+ self.avg_duration_ms = 0
+
+ def update(self, item_count, duration_ms):
+ """Update the stats after doing an update"""
+ self.total_item_count += item_count
+ self.total_duration_ms += duration_ms
+
+ # Exponential moving averages for the number of items updated and
+ # the duration.
+ self.avg_item_count += 0.1 * (item_count - self.avg_item_count)
+ self.avg_duration_ms += 0.1 * (duration_ms - self.avg_duration_ms)
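+        # e.g. with this 0.1 smoothing factor, a batch of 500 items moves
+        # avg_item_count a tenth of the way from its previous value towards 500.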
+
+ def average_items_per_ms(self):
+        """An estimate of how many items can be updated per millisecond.
+        Returns:
+            A rate in items per ms as a float, or None if nothing has been processed.
+ """
+ if self.total_item_count == 0:
+ return None
+ else:
+ # Use the exponential moving average so that we can adapt to
+ # changes in how long the update process takes.
+ return float(self.avg_item_count) / float(self.avg_duration_ms)
+
+ def total_items_per_ms(self):
+        """An estimate of how many items can be updated per millisecond, over all work so far.
+        Returns:
+            A rate in items per ms as a float, or None if nothing has been processed.
+ """
+ if self.total_item_count == 0:
+ return None
+ else:
+ return float(self.total_item_count) / float(self.total_duration_ms)
+
+
+class BackgroundUpdateStore(SQLBaseStore):
+ """ Background updates are updates to the database that run in the
+ background. Each update processes a batch of data at once. We attempt to
+ limit the impact of each update by monitoring how long each batch takes to
+ process and autotuning the batch size.
+ """
+
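+    # Illustrative example: a handler that has been averaging roughly two
+    # items per millisecond with a 100ms target duration gets a batch size
+    # of about 200 items, never smaller than MINIMUM_BACKGROUND_BATCH_SIZE.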
+ MINIMUM_BACKGROUND_BATCH_SIZE = 100
+ DEFAULT_BACKGROUND_BATCH_SIZE = 100
+ BACKGROUND_UPDATE_INTERVAL_MS = 1000
+ BACKGROUND_UPDATE_DURATION_MS = 100
+
+ def __init__(self, hs):
+ super(BackgroundUpdateStore, self).__init__(hs)
+ self._background_update_performance = {}
+ self._background_update_queue = []
+ self._background_update_handlers = {}
+ self._background_update_timer = None
+
+ @defer.inlineCallbacks
+ def start_doing_background_updates(self):
+ while True:
+ if self._background_update_timer is not None:
+ return
+
+ sleep = defer.Deferred()
+ self._background_update_timer = self._clock.call_later(
+ self.BACKGROUND_UPDATE_INTERVAL_MS / 1000., sleep.callback, None
+ )
+ try:
+ yield sleep
+ finally:
+ self._background_update_timer = None
+
+ try:
+ result = yield self.do_background_update(
+ self.BACKGROUND_UPDATE_DURATION_MS
+ )
+            except Exception:
+                logger.exception("Error doing update")
+                continue
+ if result is None:
+ logger.info(
+ "No more background updates to do."
+ " Unscheduling background update task."
+ )
+ return
+
+ @defer.inlineCallbacks
+ def do_background_update(self, desired_duration_ms):
+ """Does some amount of work on a background update
+ Args:
+ desired_duration_ms(float): How long we want to spend
+ updating.
+ Returns:
+ A deferred that completes once some amount of work is done.
+ The deferred will have a value of None if there is currently
+ no more work to do.
+ """
+ if not self._background_update_queue:
+ updates = yield self._simple_select_list(
+ "background_updates",
+ keyvalues=None,
+ retcols=("update_name",),
+ )
+ for update in updates:
+ self._background_update_queue.append(update['update_name'])
+
+ if not self._background_update_queue:
+ defer.returnValue(None)
+
+ update_name = self._background_update_queue.pop(0)
+ self._background_update_queue.append(update_name)
+
+ update_handler = self._background_update_handlers[update_name]
+
+ performance = self._background_update_performance.get(update_name)
+
+ if performance is None:
+ performance = BackgroundUpdatePerformance(update_name)
+ self._background_update_performance[update_name] = performance
+
+ items_per_ms = performance.average_items_per_ms()
+
+ if items_per_ms is not None:
+ batch_size = int(desired_duration_ms * items_per_ms)
+ # Clamp the batch size so that we always make progress
+ batch_size = max(batch_size, self.MINIMUM_BACKGROUND_BATCH_SIZE)
+ else:
+ batch_size = self.DEFAULT_BACKGROUND_BATCH_SIZE
+
+ progress_json = yield self._simple_select_one_onecol(
+ "background_updates",
+ keyvalues={"update_name": update_name},
+ retcol="progress_json"
+ )
+
+ progress = json.loads(progress_json)
+
+ time_start = self._clock.time_msec()
+ items_updated = yield update_handler(progress, batch_size)
+ time_stop = self._clock.time_msec()
+
+ duration_ms = time_stop - time_start
+
+ logger.info(
+ "Updating %r. Updated %r items in %rms."
+ " (total_rate=%r/ms, current_rate=%r/ms, total_updated=%r)",
+ update_name, items_updated, duration_ms,
+ performance.total_items_per_ms(),
+ performance.average_items_per_ms(),
+ performance.total_item_count,
+ )
+
+ performance.update(items_updated, duration_ms)
+
+ defer.returnValue(len(self._background_update_performance))
+
+ def register_background_update_handler(self, update_name, update_handler):
+ """Register a handler for doing a background update.
+
+ The handler should take two arguments:
+
+ * A dict of the current progress
+ * An integer count of the number of items to update in this batch.
+
+ The handler should return a deferred integer count of items updated.
+        The handler is responsible for updating the progress of the update.
+
+ Args:
+ update_name(str): The name of the update that this code handles.
+ update_handler(function): The function that does the update.
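+
+        A hypothetical registration might look like::
+
+            @defer.inlineCallbacks
+            def _example_update(progress, batch_size):
+                # ... do up to batch_size items of work, persist new progress ...
+                defer.returnValue(num_items_updated)
+
+            self.register_background_update_handler("example_update", _example_update)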
+ """
+ self._background_update_handlers[update_name] = update_handler
+
+ def start_background_update(self, update_name, progress):
+ """Starts a background update running.
+
+ Args:
+ update_name: The update to set running.
+ progress: The initial state of the progress of the update.
+
+ Returns:
+ A deferred that completes once the task has been added to the
+ queue.
+ """
+ # Clear the background update queue so that we will pick up the new
+ # task on the next iteration of do_background_update.
+ self._background_update_queue = []
+ progress_json = json.dumps(progress)
+
+ return self._simple_insert(
+ "background_updates",
+ {"update_name": update_name, "progress_json": progress_json}
+ )
+
+ def _end_background_update(self, update_name):
+ """Removes a completed background update task from the queue.
+
+ Args:
+ update_name(str): The name of the completed task to remove
+ Returns:
+ A deferred that completes once the task is removed.
+ """
+ self._background_update_queue = [
+ name for name in self._background_update_queue if name != update_name
+ ]
+ return self._simple_delete_one(
+ "background_updates", keyvalues={"update_name": update_name}
+ )
+
+ def _background_update_progress_txn(self, txn, update_name, progress):
+ """Update the progress of a background update
+
+ Args:
+ txn(cursor): The transaction.
+ update_name(str): The name of the background update task
+ progress(dict): The progress of the update.
+ """
+
+ progress_json = json.dumps(progress)
+
+ self._simple_update_one_txn(
+ txn,
+ "background_updates",
+ keyvalues={"update_name": update_name},
+ updatevalues={"progress_json": progress_json},
+ )
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index e6c1abfc27..5d35ca90b9 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -311,6 +311,10 @@ class EventsStore(SQLBaseStore):
self._store_room_message_txn(txn, event)
elif event.type == EventTypes.Redaction:
self._store_redaction(txn, event)
+ elif event.type == EventTypes.RoomHistoryVisibility:
+ self._store_history_visibility_txn(txn, event)
+ elif event.type == EventTypes.GuestAccess:
+ self._store_guest_access_txn(txn, event)
self._store_room_members_txn(
txn,
@@ -827,7 +831,8 @@ class EventsStore(SQLBaseStore):
allow_none=True,
)
if prev:
- ev.unsigned["prev_content"] = prev.get_dict()["content"]
+ ev.unsigned["prev_content"] = prev.content
+ ev.unsigned["prev_sender"] = prev.sender
self._get_event_cache.prefill(
(ev.event_id, check_redacted, get_prev_content), ev
@@ -884,7 +889,8 @@ class EventsStore(SQLBaseStore):
get_prev_content=False,
)
if prev:
- ev.unsigned["prev_content"] = prev.get_dict()["content"]
+ ev.unsigned["prev_content"] = prev.content
+ ev.unsigned["prev_sender"] = prev.sender
self._get_event_cache.prefill(
(ev.event_id, check_redacted, get_prev_content), ev
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index b454dd5b3a..2e5eddd259 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -102,13 +102,14 @@ class RegistrationStore(SQLBaseStore):
400, "User ID already taken.", errcode=Codes.USER_IN_USE
)
- # it's possible for this to get a conflict, but only for a single user
- # since tokens are namespaced based on their user ID
- txn.execute(
- "INSERT INTO access_tokens(id, user_id, token)"
- " VALUES (?,?,?)",
- (next_id, user_id, token,)
- )
+ if token:
+ # it's possible for this to get a conflict, but only for a single user
+ # since tokens are namespaced based on their user ID
+ txn.execute(
+ "INSERT INTO access_tokens(id, user_id, token)"
+ " VALUES (?,?,?)",
+ (next_id, user_id, token,)
+ )
def get_user_by_id(self, user_id):
return self._simple_select_one(
diff --git a/synapse/storage/room.py b/synapse/storage/room.py
index 13441fcdce..4f08df478c 100644
--- a/synapse/storage/room.py
+++ b/synapse/storage/room.py
@@ -99,34 +99,39 @@ class RoomStore(SQLBaseStore):
"""
def f(txn):
- topic_subquery = (
- "SELECT topics.event_id as event_id, "
- "topics.room_id as room_id, topic "
- "FROM topics "
- "INNER JOIN current_state_events as c "
- "ON c.event_id = topics.event_id "
- )
-
- name_subquery = (
- "SELECT room_names.event_id as event_id, "
- "room_names.room_id as room_id, name "
- "FROM room_names "
- "INNER JOIN current_state_events as c "
- "ON c.event_id = room_names.event_id "
- )
+ def subquery(table_name, column_name=None):
+ column_name = column_name or table_name
+ return (
+ "SELECT %(table_name)s.event_id as event_id, "
+ "%(table_name)s.room_id as room_id, %(column_name)s "
+ "FROM %(table_name)s "
+ "INNER JOIN current_state_events as c "
+ "ON c.event_id = %(table_name)s.event_id " % {
+ "column_name": column_name,
+ "table_name": table_name,
+ }
+ )
- # We use non printing ascii character US (\x1F) as a separator
sql = (
- "SELECT r.room_id, max(n.name), max(t.topic)"
+ "SELECT"
+ " r.room_id,"
+ " max(n.name),"
+ " max(t.topic),"
+ " max(v.history_visibility),"
+ " max(g.guest_access)"
" FROM rooms AS r"
" LEFT JOIN (%(topic)s) AS t ON t.room_id = r.room_id"
" LEFT JOIN (%(name)s) AS n ON n.room_id = r.room_id"
+ " LEFT JOIN (%(history_visibility)s) AS v ON v.room_id = r.room_id"
+ " LEFT JOIN (%(guest_access)s) AS g ON g.room_id = r.room_id"
" WHERE r.is_public = ?"
- " GROUP BY r.room_id"
- ) % {
- "topic": topic_subquery,
- "name": name_subquery,
- }
+ " GROUP BY r.room_id" % {
+ "topic": subquery("topics", "topic"),
+ "name": subquery("room_names", "name"),
+ "history_visibility": subquery("history_visibility"),
+ "guest_access": subquery("guest_access"),
+ }
+ )
txn.execute(sql, (is_public,))
@@ -156,10 +161,12 @@ class RoomStore(SQLBaseStore):
"room_id": r[0],
"name": r[1],
"topic": r[2],
- "aliases": r[3],
+ "world_readable": r[3] == "world_readable",
+ "guest_can_join": r[4] == "can_join",
+ "aliases": r[5],
}
for r in rows
- if r[3] # We only return rooms that have at least one alias.
+ if r[5] # We only return rooms that have at least one alias.
]
defer.returnValue(ret)
@@ -202,6 +209,25 @@ class RoomStore(SQLBaseStore):
txn, event, "content.body", event.content["body"]
)
+ def _store_history_visibility_txn(self, txn, event):
+ self._store_content_index_txn(txn, event, "history_visibility")
+
+ def _store_guest_access_txn(self, txn, event):
+ self._store_content_index_txn(txn, event, "guest_access")
+
+ def _store_content_index_txn(self, txn, event, key):
+ if hasattr(event, "content") and key in event.content:
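+            # %(key)s names both the table and its column; the only callers
+            # pass the fixed strings "history_visibility" and "guest_access",
+            # so interpolating it into the SQL is not user-controlled input.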
+ sql = (
+ "INSERT INTO %(key)s"
+ " (event_id, room_id, %(key)s)"
+ " VALUES (?, ?, ?)" % {"key": key}
+ )
+ txn.execute(sql, (
+ event.event_id,
+ event.room_id,
+ event.content[key]
+ ))
+
def _store_event_search_txn(self, txn, event, key, value):
if isinstance(self.database_engine, PostgresEngine):
sql = (
diff --git a/synapse/storage/schema/delta/25/00background_updates.sql b/synapse/storage/schema/delta/25/00background_updates.sql
new file mode 100644
index 0000000000..41a9b59b1b
--- /dev/null
+++ b/synapse/storage/schema/delta/25/00background_updates.sql
@@ -0,0 +1,21 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+CREATE TABLE IF NOT EXISTS background_updates(
+ update_name TEXT NOT NULL, -- The name of the background update.
+ progress_json TEXT NOT NULL, -- The current progress of the update as JSON.
+ CONSTRAINT background_updates_uniqueness UNIQUE (update_name)
+);
diff --git a/synapse/storage/schema/delta/25/fts.py b/synapse/storage/schema/delta/25/fts.py
index b7cd0ce3b8..5239d69073 100644
--- a/synapse/storage/schema/delta/25/fts.py
+++ b/synapse/storage/schema/delta/25/fts.py
@@ -22,7 +22,7 @@ import ujson
logger = logging.getLogger(__name__)
-POSTGRES_SQL = """
+POSTGRES_TABLE = """
CREATE TABLE IF NOT EXISTS event_search (
event_id TEXT,
room_id TEXT,
@@ -31,22 +31,6 @@ CREATE TABLE IF NOT EXISTS event_search (
vector tsvector
);
-INSERT INTO event_search SELECT
- event_id, room_id, json::json->>'sender', 'content.body',
- to_tsvector('english', json::json->'content'->>'body')
- FROM events NATURAL JOIN event_json WHERE type = 'm.room.message';
-
-INSERT INTO event_search SELECT
- event_id, room_id, json::json->>'sender', 'content.name',
- to_tsvector('english', json::json->'content'->>'name')
- FROM events NATURAL JOIN event_json WHERE type = 'm.room.name';
-
-INSERT INTO event_search SELECT
- event_id, room_id, json::json->>'sender', 'content.topic',
- to_tsvector('english', json::json->'content'->>'topic')
- FROM events NATURAL JOIN event_json WHERE type = 'm.room.topic';
-
-
CREATE INDEX event_search_fts_idx ON event_search USING gin(vector);
CREATE INDEX event_search_ev_idx ON event_search(event_id);
CREATE INDEX event_search_ev_ridx ON event_search(room_id);
@@ -61,67 +45,34 @@ SQLITE_TABLE = (
def run_upgrade(cur, database_engine, *args, **kwargs):
if isinstance(database_engine, PostgresEngine):
- run_postgres_upgrade(cur)
- return
+ for statement in get_statements(POSTGRES_TABLE.splitlines()):
+ cur.execute(statement)
+ elif isinstance(database_engine, Sqlite3Engine):
+ cur.execute(SQLITE_TABLE)
+ else:
+ raise Exception("Unrecognized database engine")
- if isinstance(database_engine, Sqlite3Engine):
- run_sqlite_upgrade(cur)
- return
+ cur.execute("SELECT MIN(stream_ordering) FROM events")
+ rows = cur.fetchall()
+ min_stream_id = rows[0][0]
+ cur.execute("SELECT MAX(stream_ordering) FROM events")
+ rows = cur.fetchall()
+ max_stream_id = rows[0][0]
-def run_postgres_upgrade(cur):
- for statement in get_statements(POSTGRES_SQL.splitlines()):
- cur.execute(statement)
+ if min_stream_id is not None and max_stream_id is not None:
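+        # Rather than reindexing every existing event inline during the schema
+        # upgrade, record the stream range that still needs indexing and let
+        # the "event_search" background update work through it in batches.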
+ progress = {
+ "target_min_stream_id_inclusive": min_stream_id,
+ "max_stream_id_exclusive": max_stream_id + 1,
+ "rows_inserted": 0,
+ }
+ progress_json = ujson.dumps(progress)
+ sql = (
+ "INSERT into background_updates (update_name, progress_json)"
+ " VALUES (?, ?)"
+ )
-def run_sqlite_upgrade(cur):
- cur.execute(SQLITE_TABLE)
+ sql = database_engine.convert_param_style(sql)
- rowid = -1
- while True:
- cur.execute(
- "SELECT rowid, json FROM event_json"
- " WHERE rowid > ?"
- " ORDER BY rowid ASC LIMIT 100",
- (rowid,)
- )
-
- res = cur.fetchall()
-
- if not res:
- break
-
- events = [
- ujson.loads(js)
- for _, js in res
- ]
-
- rowid = max(rid for rid, _ in res)
-
- rows = []
- for ev in events:
- content = ev.get("content", {})
- body = content.get("body", None)
- name = content.get("name", None)
- topic = content.get("topic", None)
- sender = ev.get("sender", None)
- if ev["type"] == "m.room.message" and body:
- rows.append((
- ev["event_id"], ev["room_id"], sender, "content.body", body
- ))
- if ev["type"] == "m.room.name" and name:
- rows.append((
- ev["event_id"], ev["room_id"], sender, "content.name", name
- ))
- if ev["type"] == "m.room.topic" and topic:
- rows.append((
- ev["event_id"], ev["room_id"], sender, "content.topic", topic
- ))
-
- if rows:
- logger.info(rows)
- cur.executemany(
- "INSERT INTO event_search (event_id, room_id, sender, key, value)"
- " VALUES (?,?,?,?,?)",
- rows
- )
+ cur.execute(sql, ("event_search", progress_json))
diff --git a/synapse/storage/schema/delta/25/guest_access.sql b/synapse/storage/schema/delta/25/guest_access.sql
new file mode 100644
index 0000000000..bdb90e7118
--- /dev/null
+++ b/synapse/storage/schema/delta/25/guest_access.sql
@@ -0,0 +1,25 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * This is a manual index of guest_access content of state events,
+ * so that we can join on them in SELECT statements.
+ */
+CREATE TABLE IF NOT EXISTS guest_access(
+ event_id TEXT NOT NULL,
+ room_id TEXT NOT NULL,
+ guest_access TEXT NOT NULL,
+ UNIQUE (event_id)
+);
diff --git a/synapse/storage/schema/delta/25/history_visibility.sql b/synapse/storage/schema/delta/25/history_visibility.sql
new file mode 100644
index 0000000000..532cb05151
--- /dev/null
+++ b/synapse/storage/schema/delta/25/history_visibility.sql
@@ -0,0 +1,25 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * This is a manual index of history_visibility content of state events,
+ * so that we can join on them in SELECT statements.
+ */
+CREATE TABLE IF NOT EXISTS history_visibility(
+ event_id TEXT NOT NULL,
+ room_id TEXT NOT NULL,
+ history_visibility TEXT NOT NULL,
+ UNIQUE (event_id)
+);
diff --git a/synapse/storage/schema/delta/25/tags.sql b/synapse/storage/schema/delta/25/tags.sql
new file mode 100644
index 0000000000..527424c998
--- /dev/null
+++ b/synapse/storage/schema/delta/25/tags.sql
@@ -0,0 +1,38 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+CREATE TABLE IF NOT EXISTS room_tags(
+ user_id TEXT NOT NULL,
+ room_id TEXT NOT NULL,
+ tag TEXT NOT NULL, -- The name of the tag.
+ content TEXT NOT NULL, -- The JSON content of the tag.
+ CONSTRAINT room_tag_uniqueness UNIQUE (user_id, room_id, tag)
+);
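+-- For illustration only, a row might look like:
+--   ('@alice:example.com', '!abc123:example.com', 'm.favourite', '{"order": 0.1}')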
+
+CREATE TABLE IF NOT EXISTS room_tags_revisions (
+ user_id TEXT NOT NULL,
+ room_id TEXT NOT NULL,
+ stream_id BIGINT NOT NULL, -- The current version of the room tags.
+ CONSTRAINT room_tag_revisions_uniqueness UNIQUE (user_id, room_id)
+);
+
+CREATE TABLE IF NOT EXISTS private_user_data_max_stream_id(
+ Lock CHAR(1) NOT NULL DEFAULT 'X' UNIQUE, -- Makes sure this table only has one row.
+ stream_id BIGINT NOT NULL,
+ CHECK (Lock='X')
+);
+
+INSERT INTO private_user_data_max_stream_id (stream_id) VALUES (0);
diff --git a/synapse/storage/search.py b/synapse/storage/search.py
index cdf003502f..380270b009 100644
--- a/synapse/storage/search.py
+++ b/synapse/storage/search.py
@@ -15,22 +15,116 @@
from twisted.internet import defer
-from _base import SQLBaseStore
+from .background_updates import BackgroundUpdateStore
+from synapse.api.errors import SynapseError
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
-from collections import namedtuple
+import logging
-"""The result of a search.
-Fields:
- rank_map (dict): Mapping event_id -> rank
- event_map (dict): Mapping event_id -> event
- pagination_token (str): Pagination token
-"""
-SearchResult = namedtuple("SearchResult", ("rank_map", "event_map", "pagination_token"))
+logger = logging.getLogger(__name__)
-class SearchStore(SQLBaseStore):
+class SearchStore(BackgroundUpdateStore):
+
+ EVENT_SEARCH_UPDATE_NAME = "event_search"
+
+ def __init__(self, hs):
+ super(SearchStore, self).__init__(hs)
+ self.register_background_update_handler(
+ self.EVENT_SEARCH_UPDATE_NAME, self._background_reindex_search
+ )
+
+ @defer.inlineCallbacks
+ def _background_reindex_search(self, progress, batch_size):
+ target_min_stream_id = progress["target_min_stream_id_inclusive"]
+ max_stream_id = progress["max_stream_id_exclusive"]
+ rows_inserted = progress.get("rows_inserted", 0)
+
+ INSERT_CLUMP_SIZE = 1000
+ TYPES = ["m.room.name", "m.room.message", "m.room.topic"]
+
+ def reindex_search_txn(txn):
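+            # TYPES are fixed literals defined above, so it is safe to build
+            # the type filter by string interpolation rather than bind params.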
+ sql = (
+ "SELECT stream_ordering, event_id FROM events"
+ " WHERE ? <= stream_ordering AND stream_ordering < ?"
+ " AND (%s)"
+ " ORDER BY stream_ordering DESC"
+ " LIMIT ?"
+ ) % (" OR ".join("type = '%s'" % (t,) for t in TYPES),)
+
+ txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
+
+ rows = txn.fetchall()
+ if not rows:
+ return 0
+
+ min_stream_id = rows[-1][0]
+ event_ids = [row[1] for row in rows]
+
+ events = self._get_events_txn(txn, event_ids)
+
+ event_search_rows = []
+ for event in events:
+ try:
+ event_id = event.event_id
+ room_id = event.room_id
+ content = event.content
+ if event.type == "m.room.message":
+ key = "content.body"
+ value = content["body"]
+ elif event.type == "m.room.topic":
+ key = "content.topic"
+ value = content["topic"]
+ elif event.type == "m.room.name":
+ key = "content.name"
+ value = content["name"]
+ except (KeyError, AttributeError):
+ # If the event is missing a necessary field then
+ # skip over it.
+ continue
+
+ event_search_rows.append((event_id, room_id, key, value))
+
+ if isinstance(self.database_engine, PostgresEngine):
+ sql = (
+ "INSERT INTO event_search (event_id, room_id, key, vector)"
+ " VALUES (?,?,?,to_tsvector('english', ?))"
+ )
+ elif isinstance(self.database_engine, Sqlite3Engine):
+ sql = (
+ "INSERT INTO event_search (event_id, room_id, key, value)"
+ " VALUES (?,?,?,?)"
+ )
+ else:
+ # This should be unreachable.
+ raise Exception("Unrecognized database engine")
+
+ for index in range(0, len(event_search_rows), INSERT_CLUMP_SIZE):
+ clump = event_search_rows[index:index + INSERT_CLUMP_SIZE]
+ txn.executemany(sql, clump)
+
+ progress = {
+ "target_min_stream_id_inclusive": target_min_stream_id,
+ "max_stream_id_exclusive": min_stream_id,
+ "rows_inserted": rows_inserted + len(event_search_rows)
+ }
+
+ self._background_update_progress_txn(
+ txn, self.EVENT_SEARCH_UPDATE_NAME, progress
+ )
+
+ return len(event_search_rows)
+
+ result = yield self.runInteraction(
+ self.EVENT_SEARCH_UPDATE_NAME, reindex_search_txn
+ )
+
+ if not result:
+ yield self._end_background_update(self.EVENT_SEARCH_UPDATE_NAME)
+
+ defer.returnValue(result)
+
@defer.inlineCallbacks
def search_msgs(self, room_ids, search_term, keys):
"""Performs a full text search over events with given keys.
@@ -42,7 +136,7 @@ class SearchStore(SQLBaseStore):
"content.body", "content.name", "content.topic"
Returns:
- SearchResult
+ list of dicts
"""
clauses = []
args = []
@@ -100,12 +194,114 @@ class SearchStore(SQLBaseStore):
for ev in events
}
- defer.returnValue(SearchResult(
+ defer.returnValue([
+ {
+ "event": event_map[r["event_id"]],
+ "rank": r["rank"],
+ }
+ for r in results
+ if r["event_id"] in event_map
+ ])
+
+ @defer.inlineCallbacks
+ def search_room(self, room_id, search_term, keys, limit, pagination_token=None):
+ """Performs a full text search over events with given keys.
+
+ Args:
+ room_id (str): The room_id to search in
+ search_term (str): Search term to search for
+ keys (list): List of keys to search in, currently supports
+ "content.body", "content.name", "content.topic"
+            pagination_token (str): A pagination token previously returned
+                from this method, in the form "<topological_ordering>,<stream_ordering>"
+
+ Returns:
+ list of dicts
+ """
+ clauses = []
+ args = [search_term, room_id]
+
+ local_clauses = []
+ for key in keys:
+ local_clauses.append("key = ?")
+ args.append(key)
+
+ clauses.append(
+ "(%s)" % (" OR ".join(local_clauses),)
+ )
+
+ if pagination_token:
+ try:
+ topo, stream = pagination_token.split(",")
+ topo = int(topo)
+ stream = int(stream)
+            except Exception:
+ raise SynapseError(400, "Invalid pagination token")
+
+ clauses.append(
+ "(topological_ordering < ?"
+ " OR (topological_ordering = ? AND stream_ordering < ?))"
+ )
+ args.extend([topo, topo, stream])
+
+ if isinstance(self.database_engine, PostgresEngine):
+ sql = (
+ "SELECT ts_rank_cd(vector, query) as rank,"
+ " topological_ordering, stream_ordering, room_id, event_id"
+ " FROM plainto_tsquery('english', ?) as query, event_search"
+ " NATURAL JOIN events"
+ " WHERE vector @@ query AND room_id = ?"
+ )
+ elif isinstance(self.database_engine, Sqlite3Engine):
+ # We use CROSS JOIN here to ensure we use the right indexes.
+ # https://sqlite.org/optoverview.html#crossjoin
+ #
+ # We want to use the full text search index on event_search to
+ # extract all possible matches first, then lookup those matches
+ # in the events table to get the topological ordering. We need
+ # to use the indexes in this order because sqlite refuses to
+ # MATCH unless it uses the full text search index
+ sql = (
+ "SELECT rank(matchinfo) as rank, room_id, event_id,"
+ " topological_ordering, stream_ordering"
+ " FROM (SELECT key, event_id, matchinfo(event_search) as matchinfo"
+ " FROM event_search"
+ " WHERE value MATCH ?"
+ " )"
+ " CROSS JOIN events USING (event_id)"
+ " WHERE room_id = ?"
+ )
+ else:
+ # This should be unreachable.
+ raise Exception("Unrecognized database engine")
+
+ for clause in clauses:
+ sql += " AND " + clause
+
+ # We add an arbitrary limit here to ensure we don't try to pull the
+ # entire table from the database.
+ sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"
+
+ args.append(limit)
+
+ results = yield self._execute(
+ "search_rooms", self.cursor_to_dict, sql, *args
+ )
+
+ events = yield self._get_events([r["event_id"] for r in results])
+
+ event_map = {
+ ev.event_id: ev
+ for ev in events
+ }
+
+ defer.returnValue([
{
- r["event_id"]: r["rank"]
- for r in results
- if r["event_id"] in event_map
- },
- event_map,
- None
- ))
+ "event": event_map[r["event_id"]],
+ "rank": r["rank"],
+ "pagination_token": "%s,%s" % (
+ r["topological_ordering"], r["stream_ordering"]
+ ),
+ }
+ for r in results
+ if r["event_id"] in event_map
+ ])
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index acfb322a53..80e9b63f50 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -237,6 +237,20 @@ class StateStore(SQLBaseStore):
defer.returnValue({event: event_to_state[event] for event in event_ids})
+ @defer.inlineCallbacks
+ def get_state_for_event(self, event_id, types=None):
+ """
+ Get the state dict corresponding to a particular event
+
+ :param str event_id: event whose state should be returned
+ :param list[(str, str)]|None types: List of (type, state_key) tuples
+ which are used to filter the state fetched. May be None, which
+ matches any key
+ :return: a deferred dict from (type, state_key) -> state_event
+ """
+ state_map = yield self.get_state_for_events([event_id], types)
+ defer.returnValue(state_map[event_id])
+
@cached(num_args=2, lru=True, max_entries=10000)
def _get_state_group_for_event(self, room_id, event_id):
return self._simple_select_one_onecol(
diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py
index 15d4c2bf68..be8ba76aae 100644
--- a/synapse/storage/stream.py
+++ b/synapse/storage/stream.py
@@ -158,14 +158,40 @@ class StreamStore(SQLBaseStore):
defer.returnValue(results)
@log_function
- def get_room_events_stream(self, user_id, from_key, to_key, room_id,
- limit=0):
- current_room_membership_sql = (
- "SELECT m.room_id FROM room_memberships as m "
- " INNER JOIN current_state_events as c"
- " ON m.event_id = c.event_id AND c.state_key = m.user_id"
- " WHERE m.user_id = ? AND m.membership = 'join'"
- )
+ def get_room_events_stream(
+ self,
+ user_id,
+ from_key,
+ to_key,
+ limit=0,
+ is_guest=False,
+ room_ids=None
+ ):
+ room_ids = room_ids or []
+ room_ids = [r for r in room_ids]
+ if is_guest:
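+            # Guests may only stream events from rooms whose history is
+            # world_readable, so restrict the requested room_ids to those.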
+ current_room_membership_sql = (
+ "SELECT c.room_id FROM history_visibility AS h"
+ " INNER JOIN current_state_events AS c"
+ " ON h.event_id = c.event_id"
+ " WHERE c.room_id IN (%s) AND h.history_visibility = 'world_readable'" % (
+ ",".join(map(lambda _: "?", room_ids))
+ )
+ )
+ current_room_membership_args = room_ids
+ else:
+ current_room_membership_sql = (
+ "SELECT m.room_id FROM room_memberships as m "
+ " INNER JOIN current_state_events as c"
+ " ON m.event_id = c.event_id AND c.state_key = m.user_id"
+ " WHERE m.user_id = ? AND m.membership = 'join'"
+ )
+ current_room_membership_args = [user_id]
+ if room_ids:
+ current_room_membership_sql += " AND m.room_id in (%s)" % (
+ ",".join(map(lambda _: "?", room_ids))
+ )
+ current_room_membership_args = [user_id] + room_ids
# We also want to get any membership events about that user, e.g.
# invites or leave notifications.
@@ -174,6 +200,7 @@ class StreamStore(SQLBaseStore):
"INNER JOIN current_state_events as c ON m.event_id = c.event_id "
"WHERE m.user_id = ? "
)
+ membership_args = [user_id]
if limit:
limit = max(limit, MAX_STREAM_SIZE)
@@ -200,7 +227,9 @@ class StreamStore(SQLBaseStore):
}
def f(txn):
- txn.execute(sql, (False, user_id, user_id, from_id.stream, to_id.stream,))
+ args = ([False] + current_room_membership_args + membership_args +
+ [from_id.stream, to_id.stream])
+ txn.execute(sql, args)
rows = self.cursor_to_dict(txn)
diff --git a/synapse/storage/tags.py b/synapse/storage/tags.py
new file mode 100644
index 0000000000..bf695b7800
--- /dev/null
+++ b/synapse/storage/tags.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import SQLBaseStore
+from synapse.util.caches.descriptors import cached
+from twisted.internet import defer
+from .util.id_generators import StreamIdGenerator
+
+import ujson as json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class TagsStore(SQLBaseStore):
+ def __init__(self, hs):
+ super(TagsStore, self).__init__(hs)
+
+ self._private_user_data_id_gen = StreamIdGenerator(
+ "private_user_data_max_stream_id", "stream_id"
+ )
+
+ def get_max_private_user_data_stream_id(self):
+ """Get the current max stream id for the private user data stream
+
+ Returns:
+ A deferred int.
+ """
+ return self._private_user_data_id_gen.get_max_token(self)
+
+ @cached()
+ def get_tags_for_user(self, user_id):
+ """Get all the tags for a user.
+
+        Args:
+            user_id(str): The user to get the tags for.
+        Returns:
+            A deferred dict mapping from room_id strings to dicts mapping from
+            tag strings to tag content.
+ """
+
+ deferred = self._simple_select_list(
+ "room_tags", {"user_id": user_id}, ["room_id", "tag", "content"]
+ )
+
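+        # addCallback is used as a decorator here: tags_by_room receives the
+        # selected rows and its return value becomes the deferred's result.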
+ @deferred.addCallback
+ def tags_by_room(rows):
+ tags_by_room = {}
+ for row in rows:
+ room_tags = tags_by_room.setdefault(row["room_id"], {})
+ room_tags[row["tag"]] = json.loads(row["content"])
+ return tags_by_room
+
+ return deferred
+
+ @defer.inlineCallbacks
+ def get_updated_tags(self, user_id, stream_id):
+ """Get all the tags for the rooms where the tags have changed since the
+ given version
+
+ Args:
+ user_id(str): The user to get the tags for.
+ stream_id(int): The earliest update to get for the user.
+ Returns:
+            A deferred dict mapping from room_id strings to dicts of tag name to
+            tag content, for all the rooms that changed since the stream_id token.
+ """
+ def get_updated_tags_txn(txn):
+ sql = (
+ "SELECT room_id from room_tags_revisions"
+ " WHERE user_id = ? AND stream_id > ?"
+ )
+ txn.execute(sql, (user_id, stream_id))
+ room_ids = [row[0] for row in txn.fetchall()]
+ return room_ids
+
+ room_ids = yield self.runInteraction(
+ "get_updated_tags", get_updated_tags_txn
+ )
+
+ results = {}
+ if room_ids:
+ tags_by_room = yield self.get_tags_for_user(user_id)
+ for room_id in room_ids:
+ results[room_id] = tags_by_room.get(room_id, {})
+
+ defer.returnValue(results)
+
+ def get_tags_for_room(self, user_id, room_id):
+ """Get all the tags for the given room
+ Args:
+ user_id(str): The user to get tags for
+ room_id(str): The room to get tags for
+ Returns:
+            A deferred dict mapping tag strings to tag content.
+ """
+ return self._simple_select_list(
+ table="room_tags",
+ keyvalues={"user_id": user_id, "room_id": room_id},
+ retcols=("tag", "content"),
+ desc="get_tags_for_room",
+ ).addCallback(lambda rows: {
+ row["tag"]: json.loads(row["content"]) for row in rows
+ })
+
+ @defer.inlineCallbacks
+ def add_tag_to_room(self, user_id, room_id, tag, content):
+ """Add a tag to a room for a user.
+ Args:
+ user_id(str): The user to add a tag for.
+ room_id(str): The room to add a tag for.
+ tag(str): The tag name to add.
+ content(dict): A json object to associate with the tag.
+ Returns:
+ A deferred that completes once the tag has been added.
+ """
+ content_json = json.dumps(content)
+
+ def add_tag_txn(txn, next_id):
+ self._simple_upsert_txn(
+ txn,
+ table="room_tags",
+ keyvalues={
+ "user_id": user_id,
+ "room_id": room_id,
+ "tag": tag,
+ },
+ values={
+ "content": content_json,
+ }
+ )
+ self._update_revision_txn(txn, user_id, room_id, next_id)
+
+ with (yield self._private_user_data_id_gen.get_next(self)) as next_id:
+ yield self.runInteraction("add_tag", add_tag_txn, next_id)
+
+ self.get_tags_for_user.invalidate((user_id,))
+
+ result = yield self._private_user_data_id_gen.get_max_token(self)
+ defer.returnValue(result)
+
+ @defer.inlineCallbacks
+ def remove_tag_from_room(self, user_id, room_id, tag):
+ """Remove a tag from a room for a user.
+ Returns:
+ A deferred that completes once the tag has been removed
+ """
+ def remove_tag_txn(txn, next_id):
+ sql = (
+ "DELETE FROM room_tags "
+ " WHERE user_id = ? AND room_id = ? AND tag = ?"
+ )
+ txn.execute(sql, (user_id, room_id, tag))
+ self._update_revision_txn(txn, user_id, room_id, next_id)
+
+ with (yield self._private_user_data_id_gen.get_next(self)) as next_id:
+ yield self.runInteraction("remove_tag", remove_tag_txn, next_id)
+
+ self.get_tags_for_user.invalidate((user_id,))
+
+ result = yield self._private_user_data_id_gen.get_max_token(self)
+ defer.returnValue(result)
+
+ def _update_revision_txn(self, txn, user_id, room_id, next_id):
+ """Update the latest revision of the tags for the given user and room.
+
+ Args:
+ txn: The database cursor
+ user_id(str): The ID of the user.
+ room_id(str): The ID of the room.
+            next_id(int): The revision to advance to.
+ """
+
+ update_max_id_sql = (
+ "UPDATE private_user_data_max_stream_id"
+ " SET stream_id = ?"
+ " WHERE stream_id < ?"
+ )
+ txn.execute(update_max_id_sql, (next_id, next_id))
+
+ update_sql = (
+ "UPDATE room_tags_revisions"
+ " SET stream_id = ?"
+ " WHERE user_id = ?"
+ " AND room_id = ?"
+ )
+ txn.execute(update_sql, (next_id, user_id, room_id))
+
+ if txn.rowcount == 0:
+ insert_sql = (
+ "INSERT INTO room_tags_revisions (user_id, room_id, stream_id)"
+ " VALUES (?, ?, ?)"
+ )
+ try:
+ txn.execute(insert_sql, (user_id, room_id, next_id))
+ except self.database_engine.module.IntegrityError:
+ # Ignore insertion errors. It doesn't matter if the row wasn't
+                # inserted because if two updates happened concurrently the one
+ # with the higher stream_id will not be reported to a client
+ # unless the previous update has completed. It doesn't matter
+ # which stream_id ends up in the table, as long as it is higher
+ # than the id that the client has.
+ pass
diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py
index 15695e9831..ad099775eb 100644
--- a/synapse/storage/transactions.py
+++ b/synapse/storage/transactions.py
@@ -59,7 +59,7 @@ class TransactionStore(SQLBaseStore):
allow_none=True,
)
- if result and result.response_code:
+ if result and result["response_code"]:
return result["response_code"], result["response_json"]
else:
return None
@@ -253,16 +253,6 @@ class TransactionStore(SQLBaseStore):
retry_interval (int) - how long until next retry in ms
"""
- # As this is the new value, we might as well prefill the cache
- self.get_destination_retry_timings.prefill(
- destination,
- {
- "destination": destination,
- "retry_last_ts": retry_last_ts,
- "retry_interval": retry_interval
- },
- )
-
# XXX: we could chose to not bother persisting this if our cache thinks
# this is a NOOP
return self.runInteraction(
@@ -275,31 +265,25 @@ class TransactionStore(SQLBaseStore):
def _set_destination_retry_timings(self, txn, destination,
retry_last_ts, retry_interval):
- query = (
- "UPDATE destinations"
- " SET retry_last_ts = ?, retry_interval = ?"
- " WHERE destination = ?"
- )
+ txn.call_after(self.get_destination_retry_timings.invalidate, (destination,))
- txn.execute(
- query,
- (
- retry_last_ts, retry_interval, destination,
- )
+ self._simple_upsert_txn(
+ txn,
+ "destinations",
+ keyvalues={
+ "destination": destination,
+ },
+ values={
+ "retry_last_ts": retry_last_ts,
+ "retry_interval": retry_interval,
+ },
+ insertion_values={
+ "destination": destination,
+ "retry_last_ts": retry_last_ts,
+ "retry_interval": retry_interval,
+ }
)
- if txn.rowcount == 0:
- # destination wasn't already in table. Insert it.
- self._simple_insert_txn(
- txn,
- table="destinations",
- values={
- "destination": destination,
- "retry_last_ts": retry_last_ts,
- "retry_interval": retry_interval,
- }
- )
-
def get_destinations_needing_retry(self):
"""Get all destinations which are due a retry for sending a transaction.
diff --git a/synapse/streams/events.py b/synapse/streams/events.py
index 699083ae12..f0d68b5bf2 100644
--- a/synapse/streams/events.py
+++ b/synapse/streams/events.py
@@ -21,6 +21,7 @@ from synapse.handlers.presence import PresenceEventSource
from synapse.handlers.room import RoomEventSource
from synapse.handlers.typing import TypingNotificationEventSource
from synapse.handlers.receipts import ReceiptEventSource
+from synapse.handlers.private_user_data import PrivateUserDataEventSource
class EventSources(object):
@@ -29,6 +30,7 @@ class EventSources(object):
"presence": PresenceEventSource,
"typing": TypingNotificationEventSource,
"receipt": ReceiptEventSource,
+ "private_user_data": PrivateUserDataEventSource,
}
def __init__(self, hs):
@@ -52,5 +54,8 @@ class EventSources(object):
receipt_key=(
yield self.sources["receipt"].get_current_key()
),
+ private_user_data_key=(
+ yield self.sources["private_user_data"].get_current_key()
+ ),
)
defer.returnValue(token)
diff --git a/synapse/types.py b/synapse/types.py
index 8c51e00e8a..28344d8b36 100644
--- a/synapse/types.py
+++ b/synapse/types.py
@@ -98,10 +98,13 @@ class EventID(DomainSpecificString):
class StreamToken(
- namedtuple(
- "Token",
- ("room_key", "presence_key", "typing_key", "receipt_key")
- )
+ namedtuple("Token", (
+ "room_key",
+ "presence_key",
+ "typing_key",
+ "receipt_key",
+ "private_user_data_key",
+ ))
):
_SEPARATOR = "_"
@@ -109,7 +112,7 @@ class StreamToken(
def from_string(cls, string):
try:
keys = string.split(cls._SEPARATOR)
- if len(keys) == len(cls._fields) - 1:
+ while len(keys) < len(cls._fields):
# i.e. old token from before receipt_key
keys.append("0")
return cls(*keys)
@@ -128,13 +131,14 @@ class StreamToken(
else:
return int(self.room_key[1:].split("-")[-1])
- def is_after(self, other_token):
+ def is_after(self, other):
"""Does this token contain events that the other doesn't?"""
return (
- (other_token.room_stream_id < self.room_stream_id)
- or (int(other_token.presence_key) < int(self.presence_key))
- or (int(other_token.typing_key) < int(self.typing_key))
- or (int(other_token.receipt_key) < int(self.receipt_key))
+ (other.room_stream_id < self.room_stream_id)
+ or (int(other.presence_key) < int(self.presence_key))
+ or (int(other.typing_key) < int(self.typing_key))
+ or (int(other.receipt_key) < int(self.receipt_key))
+ or (int(other.private_user_data_key) < int(self.private_user_data_key))
)
def copy_and_advance(self, key, new_value):
diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index 1d123ccefc..d69c7cb991 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -53,6 +53,14 @@ class Clock(object):
loop.stop()
def call_later(self, delay, callback, *args, **kwargs):
+ """Call something later
+
+ Args:
+ delay(float): How long to wait in seconds.
+            callback(function): Function to call.
+            *args: Positional arguments to pass to the function.
+            **kwargs: Keyword arguments to pass to the function.
+ """
current_context = LoggingContext.current_context()
def wrapped_callback(*args, **kwargs):
diff --git a/synapse/util/lockutils.py b/synapse/util/lockutils.py
deleted file mode 100644
index 33edc5c20e..0000000000
--- a/synapse/util/lockutils.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014, 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from twisted.internet import defer
-
-import logging
-
-
-logger = logging.getLogger(__name__)
-
-
-class Lock(object):
-
- def __init__(self, deferred, key):
- self._deferred = deferred
- self.released = False
- self.key = key
-
- def release(self):
- self.released = True
- self._deferred.callback(None)
-
- def __del__(self):
- if not self.released:
- logger.critical("Lock was destructed but never released!")
- self.release()
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- logger.debug("Releasing lock for key=%r", self.key)
- self.release()
-
-
-class LockManager(object):
- """ Utility class that allows us to lock based on a `key` """
-
- def __init__(self):
- self._lock_deferreds = {}
-
- @defer.inlineCallbacks
- def lock(self, key):
- """ Allows us to block until it is our turn.
- Args:
- key (str)
- Returns:
- Lock
- """
- new_deferred = defer.Deferred()
- old_deferred = self._lock_deferreds.get(key)
- self._lock_deferreds[key] = new_deferred
-
- if old_deferred:
- logger.debug("Queueing on lock for key=%r", key)
- yield old_deferred
- logger.debug("Obtained lock for key=%r", key)
- else:
- logger.debug("Entering uncontended lock for key=%r", key)
-
- defer.returnValue(Lock(new_deferred, key))
diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py
index a42138f556..2fe6814807 100644
--- a/synapse/util/retryutils.py
+++ b/synapse/util/retryutils.py
@@ -18,6 +18,7 @@ from twisted.internet import defer
from synapse.api.errors import CodeMessageException
import logging
+import random
logger = logging.getLogger(__name__)
@@ -85,8 +86,9 @@ def get_retry_limiter(destination, clock, store, **kwargs):
class RetryDestinationLimiter(object):
def __init__(self, destination, clock, store, retry_interval,
- min_retry_interval=5000, max_retry_interval=60 * 60 * 1000,
- multiplier_retry_interval=2,):
+ min_retry_interval=10 * 60 * 1000,
+ max_retry_interval=24 * 60 * 60 * 1000,
+ multiplier_retry_interval=5,):
"""Marks the destination as "down" if an exception is thrown in the
context, except for CodeMessageException with code < 500.
@@ -140,6 +142,7 @@ class RetryDestinationLimiter(object):
# We couldn't connect.
if self.retry_interval:
self.retry_interval *= self.multiplier_retry_interval
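+                # Apply jitter so that destinations which failed at the same
+                # time do not all retry in lockstep.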
+                self.retry_interval = int(self.retry_interval * random.uniform(0.8, 1.4))
if self.retry_interval >= self.max_retry_interval:
self.retry_interval = self.max_retry_interval
diff --git a/synapse/util/third_party_invites.py b/synapse/util/third_party_invites.py
deleted file mode 100644
index 31d186740d..0000000000
--- a/synapse/util/third_party_invites.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from twisted.internet import defer
-from synapse.api.errors import AuthError
-
-
-INVITE_KEYS = {"id_server", "medium", "address", "display_name"}
-
-JOIN_KEYS = {
- "token",
- "public_key",
- "key_validity_url",
- "sender",
- "signed",
-}
-
-
-def has_invite_keys(content):
- for key in INVITE_KEYS:
- if key not in content:
- return False
- return True
-
-
-def has_join_keys(content):
- for key in JOIN_KEYS:
- if key not in content:
- return False
- return True
-
-
-def join_has_third_party_invite(content):
- if "third_party_invite" not in content:
- return False
- return has_join_keys(content["third_party_invite"])
-
-
-def extract_join_keys(src):
- return {
- key: value
- for key, value in src.items()
- if key in JOIN_KEYS
- }
-
-
-@defer.inlineCallbacks
-def check_key_valid(http_client, event):
- try:
- response = yield http_client.get_json(
- event.content["third_party_invite"]["key_validity_url"],
- {"public_key": event.content["third_party_invite"]["public_key"]}
- )
- except Exception:
- raise AuthError(502, "Third party certificate could not be checked")
- if "valid" not in response or not response["valid"]:
- raise AuthError(403, "Third party certificate was invalid")
diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py
index c96273480d..70d928defe 100644
--- a/tests/api/test_auth.py
+++ b/tests/api/test_auth.py
@@ -51,7 +51,7 @@ class AuthTestCase(unittest.TestCase):
request = Mock(args={})
request.args["access_token"] = [self.test_token]
request.requestHeaders.getRawHeaders = Mock(return_value=[""])
- (user, _) = yield self.auth.get_user_by_req(request)
+ (user, _, _) = yield self.auth.get_user_by_req(request)
self.assertEquals(user.to_string(), self.test_user)
def test_get_user_by_req_user_bad_token(self):
@@ -86,7 +86,7 @@ class AuthTestCase(unittest.TestCase):
request = Mock(args={})
request.args["access_token"] = [self.test_token]
request.requestHeaders.getRawHeaders = Mock(return_value=[""])
- (user, _) = yield self.auth.get_user_by_req(request)
+ (user, _, _) = yield self.auth.get_user_by_req(request)
self.assertEquals(user.to_string(), self.test_user)
def test_get_user_by_req_appservice_bad_token(self):
@@ -121,7 +121,7 @@ class AuthTestCase(unittest.TestCase):
request.args["access_token"] = [self.test_token]
request.args["user_id"] = [masquerading_user_id]
request.requestHeaders.getRawHeaders = Mock(return_value=[""])
- (user, _) = yield self.auth.get_user_by_req(request)
+ (user, _, _) = yield self.auth.get_user_by_req(request)
self.assertEquals(user.to_string(), masquerading_user_id)
def test_get_user_by_req_appservice_valid_token_bad_user_id(self):
@@ -159,6 +159,25 @@ class AuthTestCase(unittest.TestCase):
self.assertEqual(UserID.from_string(user_id), user)
@defer.inlineCallbacks
+ def test_get_guest_user_from_macaroon(self):
+ user_id = "@baldrick:matrix.org"
+ macaroon = pymacaroons.Macaroon(
+ location=self.hs.config.server_name,
+ identifier="key",
+ key=self.hs.config.macaroon_secret_key)
+ macaroon.add_first_party_caveat("gen = 1")
+ macaroon.add_first_party_caveat("type = access")
+ macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
+ macaroon.add_first_party_caveat("guest = true")
+ serialized = macaroon.serialize()
+
+ user_info = yield self.auth._get_user_from_macaroon(serialized)
+ user = user_info["user"]
+ is_guest = user_info["is_guest"]
+ self.assertEqual(UserID.from_string(user_id), user)
+ self.assertTrue(is_guest)
+
+ @defer.inlineCallbacks
def test_get_user_from_macaroon_user_db_mismatch(self):
self.store.get_user_by_access_token = Mock(
return_value={"name": "@percy:matrix.org"}
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 29372d488a..10d4482cce 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -650,9 +650,30 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
{"presence": ONLINE}
)
+ # Apple sees self-reflection even without room_id
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_apple,
+ from_key=0,
+ )
+
+ self.assertEquals(self.event_source.get_current_key(), 1)
+ self.assertEquals(events,
+ [
+ {"type": "m.presence",
+ "content": {
+ "user_id": "@apple:test",
+ "presence": ONLINE,
+ "last_active_ago": 0,
+ }},
+ ],
+ msg="Presence event should be visible to self-reflection"
+ )
+
# Apple sees self-reflection
- (events, _) = yield self.event_source.get_new_events_for_user(
- self.u_apple, 0, None
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_apple,
+ from_key=0,
+ room_ids=[self.room_id],
)
self.assertEquals(self.event_source.get_current_key(), 1)
@@ -684,8 +705,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
)
# Banana sees it because of presence subscription
- (events, _) = yield self.event_source.get_new_events_for_user(
- self.u_banana, 0, None
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_banana,
+ from_key=0,
+ room_ids=[self.room_id],
)
self.assertEquals(self.event_source.get_current_key(), 1)
@@ -702,8 +725,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
)
# Elderberry sees it because of same room
- (events, _) = yield self.event_source.get_new_events_for_user(
- self.u_elderberry, 0, None
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_elderberry,
+ from_key=0,
+ room_ids=[self.room_id],
)
self.assertEquals(self.event_source.get_current_key(), 1)
@@ -720,8 +745,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
)
# Durian is not in the room, should not see this event
- (events, _) = yield self.event_source.get_new_events_for_user(
- self.u_durian, 0, None
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_durian,
+ from_key=0,
+ room_ids=[],
)
self.assertEquals(self.event_source.get_current_key(), 1)
@@ -767,8 +794,9 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
"accepted": True},
], presence)
- (events, _) = yield self.event_source.get_new_events_for_user(
- self.u_apple, 1, None
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_apple,
+ from_key=1,
)
self.assertEquals(self.event_source.get_current_key(), 2)
@@ -858,8 +886,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
)
)
- (events, _) = yield self.event_source.get_new_events_for_user(
- self.u_apple, 0, None
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_apple,
+ from_key=0,
+ room_ids=[self.room_id],
)
self.assertEquals(self.event_source.get_current_key(), 1)
@@ -905,8 +935,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
self.assertEquals(self.event_source.get_current_key(), 1)
- (events, _) = yield self.event_source.get_new_events_for_user(
- self.u_apple, 0, None
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_apple,
+ from_key=0,
+ room_ids=[self.room_id,]
)
self.assertEquals(events,
[
@@ -932,8 +964,10 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
self.assertEquals(self.event_source.get_current_key(), 2)
- (events, _) = yield self.event_source.get_new_events_for_user(
- self.u_apple, 0, None
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_apple,
+ from_key=0,
+ room_ids=[self.room_id,]
)
self.assertEquals(events,
[
@@ -966,8 +1000,9 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
self.room_members.append(self.u_clementine)
- (events, _) = yield self.event_source.get_new_events_for_user(
- self.u_apple, 0, None
+ (events, _) = yield self.event_source.get_new_events(
+ user=self.u_apple,
+ from_key=0,
)
self.assertEquals(self.event_source.get_current_key(), 1)
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py
index 41bb08b7ca..2d7ba43561 100644
--- a/tests/handlers/test_typing.py
+++ b/tests/handlers/test_typing.py
@@ -187,7 +187,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
])
self.assertEquals(self.event_source.get_current_key(), 1)
- events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+ events = yield self.event_source.get_new_events(
+ room_ids=[self.room_id],
+ from_key=0,
+ )
self.assertEquals(
events[0],
[
@@ -250,7 +253,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
])
self.assertEquals(self.event_source.get_current_key(), 1)
- events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+ events = yield self.event_source.get_new_events(
+ room_ids=[self.room_id],
+ from_key=0
+ )
self.assertEquals(
events[0],
[
@@ -306,7 +312,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
yield put_json.await_calls()
self.assertEquals(self.event_source.get_current_key(), 1)
- events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+ events = yield self.event_source.get_new_events(
+ room_ids=[self.room_id],
+ from_key=0,
+ )
self.assertEquals(
events[0],
[
@@ -337,7 +346,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 1)
- events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+ events = yield self.event_source.get_new_events(
+ room_ids=[self.room_id],
+ from_key=0,
+ )
self.assertEquals(
events[0],
[
@@ -356,7 +368,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
])
self.assertEquals(self.event_source.get_current_key(), 2)
- events = yield self.event_source.get_new_events_for_user(self.u_apple, 1, None)
+ events = yield self.event_source.get_new_events(
+ room_ids=[self.room_id],
+ from_key=1,
+ )
self.assertEquals(
events[0],
[
@@ -383,7 +398,10 @@ class TypingNotificationsTestCase(unittest.TestCase):
self.on_new_event.reset_mock()
self.assertEquals(self.event_source.get_current_key(), 3)
- events = yield self.event_source.get_new_events_for_user(self.u_apple, 0, None)
+ events = yield self.event_source.get_new_events(
+ room_ids=[self.room_id],
+ from_key=0,
+ )
self.assertEquals(
events[0],
[
diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/v1/test_presence.py
index 29d9bbaad4..8581796f72 100644
--- a/tests/rest/client/v1/test_presence.py
+++ b/tests/rest/client/v1/test_presence.py
@@ -47,7 +47,14 @@ class NullSource(object):
def __init__(self, hs):
pass
- def get_new_events_for_user(self, user, from_key, limit):
+ def get_new_events(
+ self,
+ user,
+ from_key,
+ room_ids=None,
+ limit=None,
+ is_guest=None
+ ):
return defer.succeed(([], from_key))
def get_current_key(self, direction='f'):
@@ -86,10 +93,11 @@ class PresenceStateTestCase(unittest.TestCase):
return defer.succeed([])
self.datastore.get_presence_list = get_presence_list
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(myid),
"token_id": 1,
+ "is_guest": False,
}
hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
@@ -173,10 +181,11 @@ class PresenceListTestCase(unittest.TestCase):
)
self.datastore.has_presence_state = has_presence_state
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(myid),
"token_id": 1,
+ "is_guest": False,
}
hs.handlers.room_member_handler = Mock(
@@ -291,8 +300,8 @@ class PresenceEventStreamTestCase(unittest.TestCase):
hs.get_clock().time_msec.return_value = 1000000
- def _get_user_by_req(req=None):
- return (UserID.from_string(myid), "")
+ def _get_user_by_req(req=None, allow_guest=False):
+ return (UserID.from_string(myid), "", False)
hs.get_v1auth().get_user_by_req = _get_user_by_req
@@ -312,6 +321,9 @@ class PresenceEventStreamTestCase(unittest.TestCase):
hs.handlers.room_member_handler.get_room_members = (
lambda r: self.room_members if r == "a-room" else []
)
+ hs.handlers.room_member_handler._filter_events_for_client = (
+ lambda user_id, events, **kwargs: events
+ )
self.mock_datastore = hs.get_datastore()
self.mock_datastore.get_app_service_by_token = Mock(return_value=None)
@@ -369,7 +381,7 @@ class PresenceEventStreamTestCase(unittest.TestCase):
# all be ours
# I'll already get my own presence state change
- self.assertEquals({"start": "0_1_0_0", "end": "0_1_0_0", "chunk": []},
+ self.assertEquals({"start": "0_1_0_0_0", "end": "0_1_0_0_0", "chunk": []},
response
)
@@ -388,7 +400,7 @@ class PresenceEventStreamTestCase(unittest.TestCase):
"/events?from=s0_1_0&timeout=0", None)
self.assertEquals(200, code)
- self.assertEquals({"start": "s0_1_0_0", "end": "s0_2_0_0", "chunk": [
+ self.assertEquals({"start": "s0_1_0_0_0", "end": "s0_2_0_0_0", "chunk": [
{"type": "m.presence",
"content": {
"user_id": "@banana:test",
diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py
index 929e5e5dd4..adcc1d1969 100644
--- a/tests/rest/client/v1/test_profile.py
+++ b/tests/rest/client/v1/test_profile.py
@@ -52,8 +52,8 @@ class ProfileTestCase(unittest.TestCase):
replication_layer=Mock(),
)
- def _get_user_by_req(request=None):
- return (UserID.from_string(myid), "")
+ def _get_user_by_req(request=None, allow_guest=False):
+ return (UserID.from_string(myid), "", False)
hs.get_v1auth().get_user_by_req = _get_user_by_req
diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py
index 93896dd076..7749378064 100644
--- a/tests/rest/client/v1/test_rooms.py
+++ b/tests/rest/client/v1/test_rooms.py
@@ -54,10 +54,11 @@ class RoomPermissionsTestCase(RestTestCase):
hs.get_handlers().federation_handler = Mock()
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
+ "is_guest": False,
}
hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
@@ -439,10 +440,11 @@ class RoomsMemberListTestCase(RestTestCase):
self.auth_user_id = self.user_id
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
+ "is_guest": False,
}
hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
@@ -517,10 +519,11 @@ class RoomsCreateTestCase(RestTestCase):
hs.get_handlers().federation_handler = Mock()
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
+ "is_guest": False,
}
hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
@@ -608,10 +611,11 @@ class RoomTopicTestCase(RestTestCase):
hs.get_handlers().federation_handler = Mock()
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
+ "is_guest": False,
}
hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
@@ -713,10 +717,11 @@ class RoomMemberStateTestCase(RestTestCase):
hs.get_handlers().federation_handler = Mock()
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
+ "is_guest": False,
}
hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
@@ -838,10 +843,11 @@ class RoomMessagesTestCase(RestTestCase):
hs.get_handlers().federation_handler = Mock()
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
+ "is_guest": False,
}
hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
@@ -933,10 +939,11 @@ class RoomInitialSyncTestCase(RestTestCase):
hs.get_handlers().federation_handler = Mock()
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
+ "is_guest": False,
}
hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
@@ -987,3 +994,59 @@ class RoomInitialSyncTestCase(RestTestCase):
}
self.assertTrue(self.user_id in presence_by_user)
self.assertEquals("m.presence", presence_by_user[self.user_id]["type"])
+
+
+class RoomMessageListTestCase(RestTestCase):
+ """ Tests /rooms/$room_id/messages REST events. """
+ user_id = "@sid1:red"
+
+ @defer.inlineCallbacks
+ def setUp(self):
+ self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
+ self.auth_user_id = self.user_id
+
+ hs = yield setup_test_homeserver(
+ "red",
+ http_client=None,
+ replication_layer=Mock(),
+ ratelimiter=NonCallableMock(spec_set=["send_message"]),
+ )
+ self.ratelimiter = hs.get_ratelimiter()
+ self.ratelimiter.send_message.return_value = (True, 0)
+
+ hs.get_handlers().federation_handler = Mock()
+
+ def _get_user_by_access_token(token=None, allow_guest=False):
+ return {
+ "user": UserID.from_string(self.auth_user_id),
+ "token_id": 1,
+ "is_guest": False,
+ }
+ hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
+
+ def _insert_client_ip(*args, **kwargs):
+ return defer.succeed(None)
+ hs.get_datastore().insert_client_ip = _insert_client_ip
+
+ synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource)
+
+ self.room_id = yield self.create_room_as(self.user_id)
+
+ @defer.inlineCallbacks
+ def test_topo_token_is_accepted(self):
+ token = "t1-0_0_0_0_0"
+ (code, response) = yield self.mock_resource.trigger_get(
+ "/rooms/%s/messages?access_token=x&from=%s" %
+ (self.room_id, token))
+ self.assertEquals(200, code)
+ self.assertTrue("start" in response)
+ self.assertEquals(token, response['start'])
+ self.assertTrue("chunk" in response)
+ self.assertTrue("end" in response)
+
+ @defer.inlineCallbacks
+ def test_stream_token_is_rejected(self):
+ (code, response) = yield self.mock_resource.trigger_get(
+ "/rooms/%s/messages?access_token=x&from=s0_0_0_0" %
+ self.room_id)
+ self.assertEquals(400, code)
diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/v1/test_typing.py
index 6395ce79db..61b9cc743b 100644
--- a/tests/rest/client/v1/test_typing.py
+++ b/tests/rest/client/v1/test_typing.py
@@ -61,10 +61,11 @@ class RoomTypingTestCase(RestTestCase):
hs.get_handlers().federation_handler = Mock()
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
+ "is_guest": False,
}
hs.get_v1auth()._get_user_by_access_token = _get_user_by_access_token
@@ -115,7 +116,10 @@ class RoomTypingTestCase(RestTestCase):
self.assertEquals(200, code)
self.assertEquals(self.event_source.get_current_key(), 1)
- events = yield self.event_source.get_new_events_for_user(self.user, 0, None)
+ events = yield self.event_source.get_new_events(
+ from_key=0,
+ room_ids=[self.room_id],
+ )
self.assertEquals(
events[0],
[
diff --git a/tests/rest/client/v2_alpha/__init__.py b/tests/rest/client/v2_alpha/__init__.py
index f45570a1c0..fa9e17ec4f 100644
--- a/tests/rest/client/v2_alpha/__init__.py
+++ b/tests/rest/client/v2_alpha/__init__.py
@@ -43,10 +43,11 @@ class V2AlphaRestTestCase(unittest.TestCase):
resource_for_federation=self.mock_resource,
)
- def _get_user_by_access_token(token=None):
+ def _get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.USER_ID),
"token_id": 1,
+ "is_guest": False,
}
hs.get_auth()._get_user_by_access_token = _get_user_by_access_token
diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py
new file mode 100644
index 0000000000..29289fa9b4
--- /dev/null
+++ b/tests/storage/test_background_update.py
@@ -0,0 +1,76 @@
+from tests import unittest
+from twisted.internet import defer
+
+from synapse.api.constants import EventTypes
+from synapse.types import UserID, RoomID, RoomAlias
+
+from tests.utils import setup_test_homeserver
+
+from mock import Mock
+
+class BackgroundUpdateTestCase(unittest.TestCase):
+
+ @defer.inlineCallbacks
+ def setUp(self):
+ hs = yield setup_test_homeserver()
+ self.store = hs.get_datastore()
+ self.clock = hs.get_clock()
+
+ self.update_handler = Mock()
+
+ yield self.store.register_background_update_handler(
+ "test_update", self.update_handler
+ )
+
+ @defer.inlineCallbacks
+ def test_do_background_update(self):
+        desired_count = 1000
+        duration_ms = 42
+
+ @defer.inlineCallbacks
+ def update(progress, count):
+ self.clock.advance_time_msec(count * duration_ms)
+ progress = {"my_key": progress["my_key"] + 1}
+ yield self.store.runInteraction(
+ "update_progress",
+ self.store._background_update_progress_txn,
+ "test_update",
+ progress,
+ )
+ defer.returnValue(count)
+
+ self.update_handler.side_effect = update
+
+ yield self.store.start_background_update("test_update", {"my_key": 1})
+
+ self.update_handler.reset_mock()
+ result = yield self.store.do_background_update(
+ duration_ms * desired_count
+ )
+ self.assertIsNotNone(result)
+ self.update_handler.assert_called_once_with(
+ {"my_key": 1}, self.store.DEFAULT_BACKGROUND_BATCH_SIZE
+ )
+
+ @defer.inlineCallbacks
+ def update(progress, count):
+ yield self.store._end_background_update("test_update")
+ defer.returnValue(count)
+
+ self.update_handler.side_effect = update
+
+ self.update_handler.reset_mock()
+ result = yield self.store.do_background_update(
+ duration_ms * desired_count
+ )
+ self.assertIsNotNone(result)
+ self.update_handler.assert_called_once_with(
+ {"my_key": 2}, desired_count
+ )
+
+ self.update_handler.reset_mock()
+ result = yield self.store.do_background_update(
+ duration_ms * desired_count
+ )
+ self.assertIsNone(result)
+ self.assertFalse(self.update_handler.called)
diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py
index b57006fcb4..dbf9700e6a 100644
--- a/tests/storage/test_redaction.py
+++ b/tests/storage/test_redaction.py
@@ -120,7 +120,6 @@ class RedactionTestCase(unittest.TestCase):
self.u_alice.to_string(),
start,
end,
- None, # Is currently ignored
)
self.assertEqual(1, len(results))
@@ -149,7 +148,6 @@ class RedactionTestCase(unittest.TestCase):
self.u_alice.to_string(),
start,
end,
- None, # Is currently ignored
)
self.assertEqual(1, len(results))
@@ -199,7 +197,6 @@ class RedactionTestCase(unittest.TestCase):
self.u_alice.to_string(),
start,
end,
- None, # Is currently ignored
)
self.assertEqual(1, len(results))
@@ -228,7 +225,6 @@ class RedactionTestCase(unittest.TestCase):
self.u_alice.to_string(),
start,
end,
- None, # Is currently ignored
)
self.assertEqual(1, len(results))
diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py
index caffce64e3..91c967548d 100644
--- a/tests/storage/test_room.py
+++ b/tests/storage/test_room.py
@@ -73,6 +73,8 @@ class RoomStoreTestCase(unittest.TestCase):
"room_id": self.room.to_string(),
"topic": None,
"aliases": [self.alias.to_string()],
+ "world_readable": False,
+ "guest_can_join": False,
}, rooms[0])
diff --git a/tests/storage/test_stream.py b/tests/storage/test_stream.py
index a658a789aa..e5c2c5cc8e 100644
--- a/tests/storage/test_stream.py
+++ b/tests/storage/test_stream.py
@@ -68,7 +68,6 @@ class StreamStoreTestCase(unittest.TestCase):
self.u_bob.to_string(),
start,
end,
- None, # Is currently ignored
)
self.assertEqual(1, len(results))
@@ -105,7 +104,6 @@ class StreamStoreTestCase(unittest.TestCase):
self.u_alice.to_string(),
start,
end,
- None, # Is currently ignored
)
self.assertEqual(1, len(results))
@@ -147,7 +145,6 @@ class StreamStoreTestCase(unittest.TestCase):
self.u_bob.to_string(),
start,
end,
- None, # Is currently ignored
)
# We should not get the message, as it happened *after* bob left.
@@ -175,7 +172,6 @@ class StreamStoreTestCase(unittest.TestCase):
self.u_bob.to_string(),
start,
end,
- None, # Is currently ignored
)
# We should not get the message, as it happened *after* bob left.
diff --git a/tests/test_state.py b/tests/test_state.py
index 0274c4bc18..e4e995b756 100644
--- a/tests/test_state.py
+++ b/tests/test_state.py
@@ -318,6 +318,99 @@ class StateTestCase(unittest.TestCase):
)
@defer.inlineCallbacks
+ def test_branch_have_perms_conflict(self):
+ userid1 = "@user_id:example.com"
+ userid2 = "@user_id2:example.com"
+
+ nodes = {
+ "A1": DictObj(
+ type=EventTypes.Create,
+ state_key="",
+ content={"creator": userid1},
+ depth=1,
+ ),
+ "A2": DictObj(
+ type=EventTypes.Member,
+ state_key=userid1,
+ content={"membership": Membership.JOIN},
+ membership=Membership.JOIN,
+ ),
+ "A3": DictObj(
+ type=EventTypes.Member,
+ state_key=userid2,
+ content={"membership": Membership.JOIN},
+ membership=Membership.JOIN,
+ ),
+ "A4": DictObj(
+ type=EventTypes.PowerLevels,
+ state_key="",
+ content={
+ "events": {"m.room.name": 50},
+ "users": {userid1: 100,
+ userid2: 60},
+ },
+ ),
+ "A5": DictObj(
+ type=EventTypes.Name,
+ state_key="",
+ ),
+ "B": DictObj(
+ type=EventTypes.PowerLevels,
+ state_key="",
+ content={
+ "events": {"m.room.name": 50},
+ "users": {userid2: 30},
+ },
+ ),
+ "C": DictObj(
+ type=EventTypes.Name,
+ state_key="",
+ sender=userid2,
+ ),
+ "D": DictObj(
+ type=EventTypes.Message,
+ ),
+ }
+ edges = {
+ "A2": ["A1"],
+ "A3": ["A2"],
+ "A4": ["A3"],
+ "A5": ["A4"],
+ "B": ["A5"],
+ "C": ["A5"],
+ "D": ["B", "C"]
+ }
+ self._add_depths(nodes, edges)
+ graph = Graph(nodes, edges)
+
+ store = StateGroupStore()
+ self.store.get_state_groups.side_effect = store.get_state_groups
+
+ context_store = {}
+
+ for event in graph.walk():
+ context = yield self.state.compute_event_context(event)
+ store.store_state_groups(event, context)
+ context_store[event.event_id] = context
+
+ self.assertSetEqual(
+ {"A1", "A2", "A3", "A5", "B"},
+ {e.event_id for e in context_store["D"].current_state.values()}
+ )
+
+ def _add_depths(self, nodes, edges):
+ def _get_depth(ev):
+ node = nodes[ev]
+ if 'depth' not in node:
+ prevs = edges[ev]
+ depth = max(_get_depth(prev) for prev in prevs) + 1
+ node['depth'] = depth
+ return node['depth']
+
+ for n in nodes:
+ _get_depth(n)
+
+ @defer.inlineCallbacks
def test_annotate_with_old_message(self):
event = create_event(type="test_message", name="event")
diff --git a/tests/util/test_lock.py b/tests/util/test_lock.py
deleted file mode 100644
index 6a1e521b1e..0000000000
--- a/tests/util/test_lock.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from twisted.internet import defer
-from tests import unittest
-
-from synapse.util.lockutils import LockManager
-
-
-class LockManagerTestCase(unittest.TestCase):
-
- def setUp(self):
- self.lock_manager = LockManager()
-
- @defer.inlineCallbacks
- def test_one_lock(self):
- key = "test"
- deferred_lock1 = self.lock_manager.lock(key)
-
- self.assertTrue(deferred_lock1.called)
-
- lock1 = yield deferred_lock1
-
- self.assertFalse(lock1.released)
-
- lock1.release()
-
- self.assertTrue(lock1.released)
-
- @defer.inlineCallbacks
- def test_concurrent_locks(self):
- key = "test"
- deferred_lock1 = self.lock_manager.lock(key)
- deferred_lock2 = self.lock_manager.lock(key)
-
- self.assertTrue(deferred_lock1.called)
- self.assertFalse(deferred_lock2.called)
-
- lock1 = yield deferred_lock1
-
- self.assertFalse(lock1.released)
- self.assertFalse(deferred_lock2.called)
-
- lock1.release()
-
- self.assertTrue(lock1.released)
- self.assertTrue(deferred_lock2.called)
-
- lock2 = yield deferred_lock2
-
- lock2.release()
-
- @defer.inlineCallbacks
- def test_sequential_locks(self):
- key = "test"
- deferred_lock1 = self.lock_manager.lock(key)
-
- self.assertTrue(deferred_lock1.called)
-
- lock1 = yield deferred_lock1
-
- self.assertFalse(lock1.released)
-
- lock1.release()
-
- self.assertTrue(lock1.released)
-
- deferred_lock2 = self.lock_manager.lock(key)
-
- self.assertTrue(deferred_lock2.called)
-
- lock2 = yield deferred_lock2
-
- self.assertFalse(lock2.released)
-
- lock2.release()
-
- self.assertTrue(lock2.released)
-
- @defer.inlineCallbacks
- def test_with_statement(self):
- key = "test"
- with (yield self.lock_manager.lock(key)) as lock:
- self.assertFalse(lock.released)
-
- self.assertTrue(lock.released)
-
- @defer.inlineCallbacks
- def test_two_with_statement(self):
- key = "test"
- with (yield self.lock_manager.lock(key)):
- pass
-
- with (yield self.lock_manager.lock(key)):
- pass
diff --git a/tests/utils.py b/tests/utils.py
index 4da51291a4..91040c2efd 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -243,6 +243,9 @@ class MockClock(object):
else:
self.timers.append(t)
+ def advance_time_msec(self, ms):
+ self.advance_time(ms / 1000.)
+
class SQLiteMemoryDbPool(ConnectionPool, object):
def __init__(self):
@@ -335,7 +338,7 @@ class MemoryDataStore(object):
]
def get_room_events_stream(self, user_id=None, from_key=None, to_key=None,
- room_id=None, limit=0, with_feedback=False):
+ limit=0, with_feedback=False):
return ([], from_key) # TODO
def get_joined_hosts_for_room(self, room_id):
|