diff --git a/changelog.d/3778.misc b/changelog.d/3778.misc
new file mode 100644
index 0000000000..b78a2c9f42
--- /dev/null
+++ b/changelog.d/3778.misc
@@ -0,0 +1 @@
+Fix build of the Docker image with docker-compose.
diff --git a/changelog.d/4004.feature b/changelog.d/4004.feature
new file mode 100644
index 0000000000..ef5cdaf5ec
--- /dev/null
+++ b/changelog.d/4004.feature
@@ -0,0 +1 @@
+Add `m.login.terms` to the registration flow when consent tracking is enabled. **This makes some template arguments optional, depending on a new `public_version` variable; update your privacy templates to support this.**
diff --git a/changelog.d/4127.bugfix b/changelog.d/4127.bugfix
new file mode 100644
index 0000000000..0701d2ceaa
--- /dev/null
+++ b/changelog.d/4127.bugfix
@@ -0,0 +1 @@
+If the typing stream ID goes backwards (as on a worker when the master restarts), the worker's typing handler will no longer erroneously report rooms containing new typing events.
diff --git a/changelog.d/4132.bugfix b/changelog.d/4132.bugfix
new file mode 100644
index 0000000000..2304a40f05
--- /dev/null
+++ b/changelog.d/4132.bugfix
@@ -0,0 +1 @@
+Fix table lock of device_lists_remote_cache which could freeze the application.
diff --git a/changelog.d/4133.feature b/changelog.d/4133.feature
new file mode 100644
index 0000000000..ef5cdaf5ec
--- /dev/null
+++ b/changelog.d/4133.feature
@@ -0,0 +1 @@
+Add `m.login.terms` to the registration flow when consent tracking is enabled. **This makes some template arguments optional, depending on a new `public_version` variable; update your privacy templates to support this.**
diff --git a/changelog.d/4135.bugfix b/changelog.d/4135.bugfix
new file mode 100644
index 0000000000..6879b1c162
--- /dev/null
+++ b/changelog.d/4135.bugfix
@@ -0,0 +1 @@
+Fix an exception when using the state res v2 algorithm.
diff --git a/contrib/docker/docker-compose.yml b/contrib/docker/docker-compose.yml
index b1f6fcb7da..2c1f0671b2 100644
--- a/contrib/docker/docker-compose.yml
+++ b/contrib/docker/docker-compose.yml
@@ -6,9 +6,11 @@ version: '3'
services:
synapse:
- build: ../..
+ build:
+ context: ../..
+ dockerfile: docker/Dockerfile
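+ # the context is the repository root so that the whole synapse source
+ # tree is available to the build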
image: docker.io/matrixdotorg/synapse:latest
- # Since snyapse does not retry to connect to the database, restart upon
+ # Since synapse does not retry connecting to the database, restart upon
# failure
restart: unless-stopped
# See the readme for a full documentation of the environment settings
diff --git a/docs/consent_tracking.md b/docs/consent_tracking.md
index 064eae82f7..3634d13d4f 100644
--- a/docs/consent_tracking.md
+++ b/docs/consent_tracking.md
@@ -31,7 +31,7 @@ Note that the templates must be stored under a name giving the language of the
template - currently this must always be `en` (for "English");
internationalisation support is intended for the future.
-The template for the policy itself should be versioned and named according to
+The template for the policy itself should be versioned and named according to
the version: for example `1.0.html`. The version of the policy which the user
has agreed to is stored in the database.
@@ -81,9 +81,9 @@ should be a matter of `pip install Jinja2`. On debian, try `apt-get install
python-jinja2`.
Once this is complete, and the server has been restarted, try visiting
-`https://<server>/_matrix/consent`. If correctly configured, this should give
-an error "Missing string query parameter 'u'". It is now possible to manually
-construct URIs where users can give their consent.
+`https://<server>/_matrix/consent`. If correctly configured, you should see a
+default policy document. It is now possible to manually construct URIs where
+users can give their consent.
### Constructing the consent URI
@@ -106,6 +106,11 @@ query parameters:
`https://<server>/_matrix/consent?u=<user>&h=68a152465a4d...`.
+Note that omitting the `u` parameter is interpreted as a request to view the
+document from an unauthenticated perspective, such as prior to registration;
+in that case the `h` parameter is not required either.
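+
+For example, `https://<server>/_matrix/consent?v=1.0` (assuming a policy
+version of `1.0`) renders the policy document without the consent form.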
+
+
Sending users a server notice asking them to agree to the policy
----------------------------------------------------------------
diff --git a/docs/privacy_policy_templates/en/1.0.html b/docs/privacy_policy_templates/en/1.0.html
index 55c5e4b612..321c7e4671 100644
--- a/docs/privacy_policy_templates/en/1.0.html
+++ b/docs/privacy_policy_templates/en/1.0.html
@@ -12,12 +12,15 @@
<p>
All your base are belong to us.
</p>
- <form method="post" action="consent">
- <input type="hidden" name="v" value="{{version}}"/>
- <input type="hidden" name="u" value="{{user}}"/>
- <input type="hidden" name="h" value="{{userhmac}}"/>
- <input type="submit" value="Sure thing!"/>
- </form>
+ {% if not public_version %}
+ <!-- The variables used here are only provided when the 'u' param is given to the homeserver -->
+ <form method="post" action="consent">
+ <input type="hidden" name="v" value="{{version}}"/>
+ <input type="hidden" name="u" value="{{user}}"/>
+ <input type="hidden" name="h" value="{{userhmac}}"/>
+ <input type="submit" value="Sure thing!"/>
+ </form>
+ {% endif %}
{% endif %}
</body>
</html>
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index e63b1e8a38..f20e0fcf0b 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -51,6 +51,7 @@ class LoginType(object):
EMAIL_IDENTITY = u"m.login.email.identity"
MSISDN = u"m.login.msisdn"
RECAPTCHA = u"m.login.recaptcha"
+ TERMS = u"m.login.terms"
DUMMY = u"m.login.dummy"
# Only for C/S API v1
diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py
index 3926c7f263..0354e82bf8 100644
--- a/synapse/app/synchrotron.py
+++ b/synapse/app/synchrotron.py
@@ -226,7 +226,15 @@ class SynchrotronPresence(object):
class SynchrotronTyping(object):
def __init__(self, hs):
self._latest_room_serial = 0
+ self._reset()
+
+ def _reset(self):
+ """
+ Reset the typing handler's data caches.
+ """
+ # map room IDs to serial numbers
self._room_serials = {}
+ # map room IDs to sets of users currently typing
self._room_typing = {}
def stream_positions(self):
@@ -236,6 +244,12 @@ class SynchrotronTyping(object):
return {"typing": self._latest_room_serial}
def process_replication_rows(self, token, rows):
+ if self._latest_room_serial > token:
+ # The master has gone backwards. To prevent inconsistent data, just
+ # clear everything.
+ self._reset()
+
+ # Set the latest serial token to whatever the server gave us.
self._latest_room_serial = token
for row in rows:
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 329e3c7d71..85fc1fc525 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -59,6 +59,7 @@ class AuthHandler(BaseHandler):
LoginType.EMAIL_IDENTITY: self._check_email_identity,
LoginType.MSISDN: self._check_msisdn,
LoginType.DUMMY: self._check_dummy_auth,
+ LoginType.TERMS: self._check_terms_auth,
}
self.bcrypt_rounds = hs.config.bcrypt_rounds
@@ -431,6 +432,9 @@ class AuthHandler(BaseHandler):
def _check_dummy_auth(self, authdict, _):
return defer.succeed(True)
+ def _check_terms_auth(self, authdict, _):
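+ # Nothing to verify here: submitting this stage at all indicates that
+ # the user has agreed to the terms.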
+ return defer.succeed(True)
+
@defer.inlineCallbacks
def _check_threepid(self, medium, authdict):
if 'threepid_creds' not in authdict:
@@ -462,6 +466,22 @@ class AuthHandler(BaseHandler):
def _get_params_recaptcha(self):
return {"public_key": self.hs.config.recaptcha_public_key}
+ def _get_params_terms(self):
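+ # These parameters are handed to clients as part of the m.login.terms
+ # stage of the user-interactive auth flow.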
+ return {
+ "policies": {
+ "privacy_policy": {
+ "version": self.hs.config.user_consent_version,
+ "en": {
+ "name": "Privacy Policy",
+ "url": "%s/_matrix/consent?v=%s" % (
+ self.hs.config.public_baseurl,
+ self.hs.config.user_consent_version,
+ ),
+ },
+ },
+ },
+ }
+
def _auth_dict_for_flows(self, flows, session):
public_flows = []
for f in flows:
@@ -469,6 +489,7 @@ class AuthHandler(BaseHandler):
get_params = {
LoginType.RECAPTCHA: self._get_params_recaptcha,
+ LoginType.TERMS: self._get_params_terms,
}
params = {}
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index c610933dd4..a61bbf9392 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -63,11 +63,8 @@ class TypingHandler(object):
self._member_typing_until = {} # clock time we expect to stop
self._member_last_federation_poke = {}
- # map room IDs to serial numbers
- self._room_serials = {}
self._latest_room_serial = 0
- # map room IDs to sets of users currently typing
- self._room_typing = {}
+ self._reset()
# caches which room_ids changed at which serials
self._typing_stream_change_cache = StreamChangeCache(
@@ -79,6 +76,15 @@ class TypingHandler(object):
5000,
)
+ def _reset(self):
+ """
+ Reset the typing handler's data caches.
+ """
+ # map room IDs to serial numbers
+ self._room_serials = {}
+ # map room IDs to sets of users currently typing
+ self._room_typing = {}
+
def _handle_timeouts(self):
logger.info("Checking for typing timeouts")
diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py
index 693b303881..a8d8ed6590 100644
--- a/synapse/rest/client/v2_alpha/auth.py
+++ b/synapse/rest/client/v2_alpha/auth.py
@@ -68,6 +68,29 @@ function captchaDone() {
</html>
"""
+TERMS_TEMPLATE = """
+<html>
+<head>
+<title>Authentication</title>
+<meta name='viewport' content='width=device-width, initial-scale=1,
+ user-scalable=no, minimum-scale=1.0, maximum-scale=1.0'>
+<link rel="stylesheet" href="/_matrix/static/client/register/style.css">
+</head>
+<body>
+<form id="registrationForm" method="post" action="%(myurl)s">
+ <div>
+ <p>
+ Please click the button below if you agree to the
+ <a href="%(terms_url)s">privacy policy of this homeserver</a>.
+ </p>
+ <input type="hidden" name="session" value="%(session)s" />
+ <input type="submit" value="Agree" />
+ </div>
+</form>
+</body>
+</html>
+"""
+
SUCCESS_TEMPLATE = """
<html>
<head>
@@ -133,13 +156,34 @@ class AuthRestServlet(RestServlet):
request.write(html_bytes)
finish_request(request)
defer.returnValue(None)
+ elif stagetype == LoginType.TERMS:
+ if ('session' not in request.args or
+ len(request.args['session']) == 0):
+ raise SynapseError(400, "No session supplied")
+
+ session = request.args['session'][0]
+
+ html = TERMS_TEMPLATE % {
+ 'session': session,
+ 'terms_url': "%s/_matrix/consent?v=%s" % (
+ self.hs.config.public_baseurl,
+ self.hs.config.user_consent_version,
+ ),
+ 'myurl': "%s/auth/%s/fallback/web" % (
+ CLIENT_V2_ALPHA_PREFIX, LoginType.TERMS
+ ),
+ }
+ html_bytes = html.encode("utf8")
+ request.setResponseCode(200)
+ request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
+ request.setHeader(b"Content-Length", b"%d" % (len(html_bytes),))
+
+ request.write(html_bytes)
+ finish_request(request)
+ defer.returnValue(None)
else:
raise SynapseError(404, "Unknown auth stage type")
@defer.inlineCallbacks
def on_POST(self, request, stagetype):
yield
- if stagetype == "m.login.recaptcha":
+ if stagetype == LoginType.RECAPTCHA:
if ('g-recaptcha-response' not in request.args or
len(request.args['g-recaptcha-response'])) == 0:
raise SynapseError(400, "No captcha response supplied")
@@ -179,6 +223,41 @@ class AuthRestServlet(RestServlet):
finish_request(request)
defer.returnValue(None)
+ elif stagetype == LoginType.TERMS:
+ if ('session' not in request.args or
+ len(request.args['session']) == 0):
+ raise SynapseError(400, "No session supplied")
+
+ session = request.args['session'][0]
+ authdict = {'session': session}
+
+ success = yield self.auth_handler.add_oob_auth(
+ LoginType.TERMS,
+ authdict,
+ self.hs.get_ip_from_request(request)
+ )
+
+ if success:
+ html = SUCCESS_TEMPLATE
+ else:
+ html = TERMS_TEMPLATE % {
+ 'session': session,
+ 'terms_url': "%s/_matrix/consent?v=%s" % (
+ self.hs.config.public_baseurl,
+ self.hs.config.user_consent_version,
+ ),
+ 'myurl': "%s/auth/%s/fallback/web" % (
+ CLIENT_V2_ALPHA_PREFIX, LoginType.TERMS
+ ),
+ }
+ html_bytes = html.encode("utf8")
+ request.setResponseCode(200)
+ request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
+ request.setHeader(b"Content-Length", b"%d" % (len(html_bytes),))
+
+ request.write(html_bytes)
+ finish_request(request)
+ defer.returnValue(None)
else:
raise SynapseError(404, "Unknown auth stage type")
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index 192f52e462..c5214330ad 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -359,6 +359,13 @@ class RegisterRestServlet(RestServlet):
[LoginType.MSISDN, LoginType.EMAIL_IDENTITY]
])
+ # Append m.login.terms to all flows if we're requiring consent
+ if self.hs.config.block_events_without_consent_error is not None:
+ for flow in flows:
+ flow.append(LoginType.TERMS)
+
auth_result, params, session_id = yield self.auth_handler.check_auth(
flows, body, self.hs.get_ip_from_request(request)
)
@@ -445,6 +452,12 @@ class RegisterRestServlet(RestServlet):
params.get("bind_msisdn")
)
+ if auth_result and LoginType.TERMS in auth_result:
+ logger.info("%s has consented to the privacy policy", registered_user_id)
+ yield self.store.user_set_consent_version(
+ registered_user_id, self.hs.config.user_consent_version,
+ )
+
defer.returnValue((200, return_dict))
def on_OPTIONS(self, _):
diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py
index 7362e1858d..89b82b0591 100644
--- a/synapse/rest/consent/consent_resource.py
+++ b/synapse/rest/consent/consent_resource.py
@@ -137,27 +137,31 @@ class ConsentResource(Resource):
request (twisted.web.http.Request):
"""
- version = parse_string(request, "v",
- default=self._default_consent_version)
- username = parse_string(request, "u", required=True)
- userhmac = parse_string(request, "h", required=True, encoding=None)
-
- self._check_hash(username, userhmac)
-
- if username.startswith('@'):
- qualified_user_id = username
- else:
- qualified_user_id = UserID(username, self.hs.hostname).to_string()
-
- u = yield self.store.get_user_by_id(qualified_user_id)
- if u is None:
- raise NotFoundError("Unknown user")
+ version = parse_string(request, "v", default=self._default_consent_version)
+ username = parse_string(request, "u", required=False, default="")
+ userhmac = None
+ has_consented = False
+ public_version = username == ""
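+ # With no username we render the policy without the consent form, e.g.
+ # for viewing prior to registration.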
+ if not public_version:
+ userhmac = parse_string(request, "h", required=True, encoding=None)
+
+ self._check_hash(username, userhmac)
+
+ if username.startswith('@'):
+ qualified_user_id = username
+ else:
+ qualified_user_id = UserID(username, self.hs.hostname).to_string()
+
+ u = yield self.store.get_user_by_id(qualified_user_id)
+ if u is None:
+ raise NotFoundError("Unknown user")
+ has_consented = u["consent_version"] == version
try:
self._render_template(
request, "%s.html" % (version,),
user=username, userhmac=userhmac, version=version,
- has_consented=(u["consent_version"] == version),
+ has_consented=has_consented, public_version=public_version,
)
except TemplateNotFound:
raise NotFoundError("Unknown policy version")
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index 5d06f7e928..dbc9688c56 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -53,6 +53,10 @@ def resolve_events_with_store(state_sets, event_map, state_res_store):
logger.debug("Computing conflicted state")
+ # We use event_map as a cache, so if it's None we need to initialize it
+ if event_map is None:
+ event_map = {}
+
# First split up the un/conflicted state
unconflicted_state, conflicted_state = _seperate(state_sets)
diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py
index 62497ab63f..ecdab34e7d 100644
--- a/synapse/storage/devices.py
+++ b/synapse/storage/devices.py
@@ -22,14 +22,19 @@ from twisted.internet import defer
from synapse.api.errors import StoreError
from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.storage.background_updates import BackgroundUpdateStore
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
-from ._base import Cache, SQLBaseStore, db_to_json
+from ._base import Cache, db_to_json
logger = logging.getLogger(__name__)
+DROP_DEVICE_LIST_STREAMS_NON_UNIQUE_INDEXES = (
+ "drop_device_list_streams_non_unique_indexes"
+)
-class DeviceStore(SQLBaseStore):
+
+class DeviceStore(BackgroundUpdateStore):
def __init__(self, db_conn, hs):
super(DeviceStore, self).__init__(db_conn, hs)
@@ -52,6 +57,30 @@ class DeviceStore(SQLBaseStore):
columns=["user_id", "device_id"],
)
+ # create a unique index on device_lists_remote_cache
+ self.register_background_index_update(
+ "device_lists_remote_cache_unique_idx",
+ index_name="device_lists_remote_cache_unique_id",
+ table="device_lists_remote_cache",
+ columns=["user_id", "device_id"],
+ unique=True,
+ )
+
+ # And one on device_lists_remote_extremeties
+ self.register_background_index_update(
+ "device_lists_remote_extremeties_unique_idx",
+ index_name="device_lists_remote_extremeties_unique_idx",
+ table="device_lists_remote_extremeties",
+ columns=["user_id"],
+ unique=True,
+ )
+
+ # once they complete, we can remove the old non-unique indexes.
+ self.register_background_update_handler(
+ DROP_DEVICE_LIST_STREAMS_NON_UNIQUE_INDEXES,
+ self._drop_device_list_streams_non_unique_indexes,
+ )
+
@defer.inlineCallbacks
def store_device(self, user_id, device_id,
initial_device_display_name):
@@ -239,7 +268,19 @@ class DeviceStore(SQLBaseStore):
def update_remote_device_list_cache_entry(self, user_id, device_id, content,
stream_id):
- """Updates a single user's device in the cache.
+ """Updates a single device in the cache of a remote user's devicelist.
+
+ Note: assumes that we are the only thread that can be updating this user's
+ device list.
+
+ Args:
+ user_id (str): User to update device list for
+ device_id (str): ID of the device being updated
+ content (dict): new data on this device
+ stream_id (int): the version of the device list
+
+ Returns:
+ Deferred[None]
"""
return self.runInteraction(
"update_remote_device_list_cache_entry",
@@ -272,7 +313,11 @@ class DeviceStore(SQLBaseStore):
},
values={
"content": json.dumps(content),
- }
+ },
+
+ # we don't need to lock, because we assume we are the only thread
+ # updating this user's devices.
+ lock=False,
)
txn.call_after(self._get_cached_user_device.invalidate, (user_id, device_id,))
@@ -289,11 +334,26 @@ class DeviceStore(SQLBaseStore):
},
values={
"stream_id": stream_id,
- }
+ },
+
+ # again, we can assume we are the only thread updating this user's
+ # extremity.
+ lock=False,
)
def update_remote_device_list_cache(self, user_id, devices, stream_id):
- """Replace the cache of the remote user's devices.
+ """Replace the entire cache of the remote user's devices.
+
+ Note: assumes that we are the only thread that can be updating this user's
+ device list.
+
+ Args:
+ user_id (str): User to update device list for
+ devices (list[dict]): list of device objects supplied over federation
+ stream_id (int): the version of the device list
+
+ Returns:
+ Deferred[None]
"""
return self.runInteraction(
"update_remote_device_list_cache",
@@ -338,7 +398,11 @@ class DeviceStore(SQLBaseStore):
},
values={
"stream_id": stream_id,
- }
+ },
+
+ # we don't need to lock, because we can assume we are the only thread
+ # updating this user's extremity.
+ lock=False,
)
def get_devices_by_remote(self, destination, from_stream_id):
@@ -722,3 +786,19 @@ class DeviceStore(SQLBaseStore):
"_prune_old_outbound_device_pokes",
_prune_txn,
)
+
+ @defer.inlineCallbacks
+ def _drop_device_list_streams_non_unique_indexes(self, progress, batch_size):
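+ """Drop the old non-unique indexes on device_lists_remote_cache and
+ device_lists_remote_extremeties, now that the unique indexes exist.
+ """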
+ def f(conn):
+ txn = conn.cursor()
+ txn.execute(
+ "DROP INDEX IF EXISTS device_lists_remote_cache_id"
+ )
+ txn.execute(
+ "DROP INDEX IF EXISTS device_lists_remote_extremeties_id"
+ )
+ txn.close()
+
+ yield self.runWithConnection(f)
+ yield self._end_background_update(DROP_DEVICE_LIST_STREAMS_NON_UNIQUE_INDEXES)
+ defer.returnValue(1)
diff --git a/synapse/storage/schema/delta/40/device_list_streams.sql b/synapse/storage/schema/delta/40/device_list_streams.sql
index 54841b3843..dd6dcb65f1 100644
--- a/synapse/storage/schema/delta/40/device_list_streams.sql
+++ b/synapse/storage/schema/delta/40/device_list_streams.sql
@@ -20,9 +20,6 @@ CREATE TABLE device_lists_remote_cache (
content TEXT NOT NULL
);
-CREATE INDEX device_lists_remote_cache_id ON device_lists_remote_cache(user_id, device_id);
-
-
-- The last update we got for a user. Empty if we're not receiving updates for
-- that user.
CREATE TABLE device_lists_remote_extremeties (
@@ -30,7 +27,11 @@ CREATE TABLE device_lists_remote_extremeties (
stream_id TEXT NOT NULL
);
-CREATE INDEX device_lists_remote_extremeties_id ON device_lists_remote_extremeties(user_id, stream_id);
+-- we used to create non-unique indexes on these tables, but as of update 52 we create
+-- unique indexes concurrently:
+--
+-- CREATE INDEX device_lists_remote_cache_id ON device_lists_remote_cache(user_id, device_id);
+-- CREATE INDEX device_lists_remote_extremeties_id ON device_lists_remote_extremeties(user_id, stream_id);
-- Stream of device lists updates. Includes both local and remotes
diff --git a/synapse/storage/schema/delta/52/device_list_streams_unique_idx.sql b/synapse/storage/schema/delta/52/device_list_streams_unique_idx.sql
new file mode 100644
index 0000000000..bfa49e6f92
--- /dev/null
+++ b/synapse/storage/schema/delta/52/device_list_streams_unique_idx.sql
@@ -0,0 +1,36 @@
+/* Copyright 2018 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- register a background update which will create a unique index on
+-- device_lists_remote_cache
+INSERT into background_updates (update_name, progress_json)
+ VALUES ('device_lists_remote_cache_unique_idx', '{}');
+
+-- and one on device_lists_remote_extremeties
+INSERT into background_updates (update_name, progress_json, depends_on)
+ VALUES (
+ 'device_lists_remote_extremeties_unique_idx', '{}',
+
+ -- doesn't really depend on this, but we need to make sure both happen
+ -- before we drop the old indexes.
+ 'device_lists_remote_cache_unique_idx'
+ );
+
+-- once they complete, we can drop the old indexes.
+INSERT into background_updates (update_name, progress_json, depends_on)
+ VALUES (
+ 'drop_device_list_streams_non_unique_indexes', '{}',
+ 'device_lists_remote_extremeties_unique_idx'
+ );
diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py
index 4c30c5f258..99b716f00a 100644
--- a/tests/rest/client/v2_alpha/test_sync.py
+++ b/tests/rest/client/v2_alpha/test_sync.py
@@ -15,9 +15,11 @@
from mock import Mock
+from synapse.rest.client.v1 import admin, login, room
from synapse.rest.client.v2_alpha import sync
from tests import unittest
+from tests.server import TimedOutException
class FilterTestCase(unittest.HomeserverTestCase):
@@ -65,3 +67,124 @@ class FilterTestCase(unittest.HomeserverTestCase):
["next_batch", "rooms", "account_data", "to_device", "device_lists"]
).issubset(set(channel.json_body.keys()))
)
+
+
+class SyncTypingTests(unittest.HomeserverTestCase):
+
+ servlets = [
+ admin.register_servlets,
+ room.register_servlets,
+ login.register_servlets,
+ sync.register_servlets,
+ ]
+ user_id = True
+ hijack_auth = False
+
+ def test_sync_backwards_typing(self):
+ """
+ If the typing serial goes backwards and the typing handler is then reset
+ (such as when the master restarts and sets the typing serial to 0), we
+ do not incorrectly return typing information that had a serial greater
+ than the now-reset serial.
+ """
+ typing_url = "/rooms/%s/typing/%s?access_token=%s"
+ sync_url = "/sync?timeout=3000000&access_token=%s&since=%s"
+
+ # Register the user who gets notified
+ user_id = self.register_user("user", "pass")
+ access_token = self.login("user", "pass")
+
+ # Register the user who sends the message
+ other_user_id = self.register_user("otheruser", "pass")
+ other_access_token = self.login("otheruser", "pass")
+
+ # Create a room
+ room = self.helper.create_room_as(user_id, tok=access_token)
+
+ # Invite the other person
+ self.helper.invite(room=room, src=user_id, tok=access_token, targ=other_user_id)
+
+ # The other user joins
+ self.helper.join(room=room, user=other_user_id, tok=other_access_token)
+
+ # The other user sends some messages
+ self.helper.send(room, body="Hi!", tok=other_access_token)
+ self.helper.send(room, body="There!", tok=other_access_token)
+
+ # Start typing.
+ request, channel = self.make_request(
+ "PUT",
+ typing_url % (room, other_user_id, other_access_token),
+ b'{"typing": true, "timeout": 30000}',
+ )
+ self.render(request)
+ self.assertEquals(200, channel.code)
+
+ request, channel = self.make_request(
+ "GET", "/sync?access_token=%s" % (access_token,)
+ )
+ self.render(request)
+ self.assertEquals(200, channel.code)
+ next_batch = channel.json_body["next_batch"]
+
+ # Stop typing.
+ request, channel = self.make_request(
+ "PUT",
+ typing_url % (room, other_user_id, other_access_token),
+ b'{"typing": false}',
+ )
+ self.render(request)
+ self.assertEquals(200, channel.code)
+
+ # Start typing.
+ request, channel = self.make_request(
+ "PUT",
+ typing_url % (room, other_user_id, other_access_token),
+ b'{"typing": true, "timeout": 30000}',
+ )
+ self.render(request)
+ self.assertEquals(200, channel.code)
+
+ # Should return immediately
+ request, channel = self.make_request(
+ "GET", sync_url % (access_token, next_batch)
+ )
+ self.render(request)
+ self.assertEquals(200, channel.code)
+ next_batch = channel.json_body["next_batch"]
+
+ # Reset typing serial back to 0, as if the master had.
+ typing = self.hs.get_typing_handler()
+ typing._latest_room_serial = 0
+
+ # Since it checks the state token, we need some state to update to
+ # invalidate the stream token.
+ self.helper.send(room, body="There!", tok=other_access_token)
+
+ request, channel = self.make_request(
+ "GET", sync_url % (access_token, next_batch)
+ )
+ self.render(request)
+ self.assertEquals(200, channel.code)
+ next_batch = channel.json_body["next_batch"]
+
+ # This should time out! But it does not, because our stream token is
+ # ahead, and therefore it's saying the typing (that we've actually
+ # already seen) is new, since it's got a token above our new, now-reset
+ # stream token.
+ request, channel = self.make_request(
+ "GET", sync_url % (access_token, next_batch)
+ )
+ self.render(request)
+ self.assertEquals(200, channel.code)
+ next_batch = channel.json_body["next_batch"]
+
+ # Clear the typing information, so that it doesn't think everything is
+ # in the future.
+ typing._reset()
+
+ # Now it SHOULD fail as it never completes!
+ request, channel = self.make_request(
+ "GET", sync_url % (access_token, next_batch)
+ )
+ self.assertRaises(TimedOutException, self.render, request)
diff --git a/tests/server.py b/tests/server.py
index 819c854448..cc6dbe04ac 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -21,6 +21,12 @@ from synapse.util import Clock
from tests.utils import setup_test_homeserver as _sth
+class TimedOutException(Exception):
+ """
+ A web query timed out.
+ """
+
+
@attr.s
class FakeChannel(object):
"""
@@ -153,7 +159,7 @@ def wait_until_result(clock, request, timeout=100):
x += 1
if x > timeout:
- raise Exception("Timed out waiting for request to finish.")
+ raise TimedOutException("Timed out waiting for request to finish.")
clock.advance(0.1)
diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py
index efd85ebe6c..d67f59b2c7 100644
--- a/tests/state/test_v2.py
+++ b/tests/state/test_v2.py
@@ -544,8 +544,7 @@ class StateTestCase(unittest.TestCase):
state_res_store=TestStateResolutionStore(event_map),
)
- self.assertTrue(state_d.called)
- state_before = state_d.result
+ state_before = self.successResultOf(state_d)
state_after = dict(state_before)
if fake_event.state_key is not None:
@@ -599,6 +598,103 @@ class LexicographicalTestCase(unittest.TestCase):
self.assertEqual(["o", "l", "n", "m", "p"], res)
+class SimpleParamStateTestCase(unittest.TestCase):
+ def setUp(self):
+ # We build up a simple DAG.
+
+ event_map = {}
+
+ create_event = FakeEvent(
+ id="CREATE",
+ sender=ALICE,
+ type=EventTypes.Create,
+ state_key="",
+ content={"creator": ALICE},
+ ).to_event([], [])
+ event_map[create_event.event_id] = create_event
+
+ alice_member = FakeEvent(
+ id="IMA",
+ sender=ALICE,
+ type=EventTypes.Member,
+ state_key=ALICE,
+ content=MEMBERSHIP_CONTENT_JOIN,
+ ).to_event([create_event.event_id], [create_event.event_id])
+ event_map[alice_member.event_id] = alice_member
+
+ join_rules = FakeEvent(
+ id="IJR",
+ sender=ALICE,
+ type=EventTypes.JoinRules,
+ state_key="",
+ content={"join_rule": JoinRules.PUBLIC},
+ ).to_event(
+ auth_events=[create_event.event_id, alice_member.event_id],
+ prev_events=[alice_member.event_id],
+ )
+ event_map[join_rules.event_id] = join_rules
+
+ # Bob and Charlie join at the same time, so there is a fork
+ bob_member = FakeEvent(
+ id="IMB",
+ sender=BOB,
+ type=EventTypes.Member,
+ state_key=BOB,
+ content=MEMBERSHIP_CONTENT_JOIN,
+ ).to_event(
+ auth_events=[create_event.event_id, join_rules.event_id],
+ prev_events=[join_rules.event_id],
+ )
+ event_map[bob_member.event_id] = bob_member
+
+ charlie_member = FakeEvent(
+ id="IMC",
+ sender=CHARLIE,
+ type=EventTypes.Member,
+ state_key=CHARLIE,
+ content=MEMBERSHIP_CONTENT_JOIN,
+ ).to_event(
+ auth_events=[create_event.event_id, join_rules.event_id],
+ prev_events=[join_rules.event_id],
+ )
+ event_map[charlie_member.event_id] = charlie_member
+
+ self.event_map = event_map
+ self.create_event = create_event
+ self.alice_member = alice_member
+ self.join_rules = join_rules
+ self.bob_member = bob_member
+ self.charlie_member = charlie_member
+
+ self.state_at_bob = {
+ (e.type, e.state_key): e.event_id
+ for e in [create_event, alice_member, join_rules, bob_member]
+ }
+
+ self.state_at_charlie = {
+ (e.type, e.state_key): e.event_id
+ for e in [create_event, alice_member, join_rules, charlie_member]
+ }
+
+ self.expected_combined_state = {
+ (e.type, e.state_key): e.event_id
+ for e in [create_event, alice_member, join_rules, bob_member, charlie_member]
+ }
+
+ def test_event_map_none(self):
+ # Test that we correctly handle passing `None` as the event_map
+
+ state_d = resolve_events_with_store(
+ [self.state_at_bob, self.state_at_charlie],
+ event_map=None,
+ state_res_store=TestStateResolutionStore(self.event_map),
+ )
+
+ state = self.successResultOf(state_d)
+
+ self.assert_dict(self.expected_combined_state, state)
+
+
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = itertools.tee(iterable)
diff --git a/tests/test_terms_auth.py b/tests/test_terms_auth.py
new file mode 100644
index 0000000000..7deab5266f
--- /dev/null
+++ b/tests/test_terms_auth.py
@@ -0,0 +1,123 @@
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+import six
+from mock import Mock
+
+from twisted.test.proto_helpers import MemoryReactorClock
+
+from synapse.rest.client.v2_alpha.register import register_servlets
+from synapse.util import Clock
+
+from tests import unittest
+from tests.server import make_request
+
+
+class TermsTestCase(unittest.HomeserverTestCase):
+ servlets = [register_servlets]
+
+ def prepare(self, reactor, clock, hs):
+ self.clock = MemoryReactorClock()
+ self.hs_clock = Clock(self.clock)
+ self.url = "/_matrix/client/r0/register"
+ self.registration_handler = Mock()
+ self.auth_handler = Mock()
+ self.device_handler = Mock()
+ hs.config.enable_registration = True
+ hs.config.registrations_require_3pid = []
+ hs.config.auto_join_rooms = []
+ hs.config.enable_registration_captcha = False
+
+ def test_ui_auth(self):
+ self.hs.config.block_events_without_consent_error = True
+ self.hs.config.public_baseurl = "https://example.org"
+ self.hs.config.user_consent_version = "1.0"
+
+ # Do a UI auth request
+ request, channel = self.make_request(b"POST", self.url, b"{}")
+ self.render(request)
+
+ self.assertEquals(channel.result["code"], b"401", channel.result)
+
+ self.assertTrue(channel.json_body is not None)
+ self.assertIsInstance(channel.json_body["session"], six.text_type)
+
+ self.assertIsInstance(channel.json_body["flows"], list)
+ for flow in channel.json_body["flows"]:
+ self.assertIsInstance(flow["stages"], list)
+ self.assertTrue(len(flow["stages"]) > 0)
+ self.assertEquals(flow["stages"][-1], "m.login.terms")
+
+ expected_params = {
+ "m.login.terms": {
+ "policies": {
+ "privacy_policy": {
+ "en": {
+ "name": "Privacy Policy",
+ "url": "https://example.org/_matrix/consent?v=1.0",
+ },
+ "version": "1.0"
+ },
+ },
+ },
+ }
+ self.assertIsInstance(channel.json_body["params"], dict)
+ self.assertDictContainsSubset(expected_params, channel.json_body["params"])
+
+ # We have to complete the dummy auth stage before completing the terms stage
+ request_data = json.dumps(
+ {
+ "username": "kermit",
+ "password": "monkey",
+ "auth": {
+ "session": channel.json_body["session"],
+ "type": "m.login.dummy",
+ },
+ }
+ )
+
+ self.registration_handler.check_username = Mock(return_value=True)
+
+ request, channel = make_request(b"POST", self.url, request_data)
+ self.render(request)
+
+ # We don't bother checking that the response is correct; we'll leave that
+ # to other tests. We just want to make sure we're on the right path.
+ self.assertEquals(channel.result["code"], b"401", channel.result)
+
+ # Finish the UI auth for terms
+ request_data = json.dumps(
+ {
+ "username": "kermit",
+ "password": "monkey",
+ "auth": {
+ "session": channel.json_body["session"],
+ "type": "m.login.terms",
+ },
+ }
+ )
+ request, channel = make_request(b"POST", self.url, request_data)
+ self.render(request)
+
+ # We're interested in getting a response that looks like a successful
+ # registration, not so much that the details are exactly what we want.
+
+ self.assertEquals(channel.result["code"], b"200", channel.result)
+
+ self.assertTrue(channel.json_body is not None)
+ self.assertIsInstance(channel.json_body["user_id"], six.text_type)
+ self.assertIsInstance(channel.json_body["access_token"], six.text_type)
+ self.assertIsInstance(channel.json_body["device_id"], six.text_type)
|