diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000..f36f86fbb7
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,5 @@
+Dockerfile
+.travis.yml
+.gitignore
+demo/etc
+tox.ini
diff --git a/.gitignore b/.gitignore
index c8901eb206..7acfe56d26 100644
--- a/.gitignore
+++ b/.gitignore
@@ -32,6 +32,7 @@ demo/media_store.*
demo/etc
uploads
+cache
.idea/
media_store/
@@ -48,3 +49,4 @@ env/
*.config
.vscode/
+.ropeproject/
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 3dcb1c2a89..e13ac5ad34 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -60,3 +60,6 @@ Niklas Riekenbrauck <nikriek at gmail dot.com>
Christoph Witzany <christoph at web.crofting.com>
* Add LDAP support for authentication
+
+Pierre Jaury <pierre at jaury.eu>
+* Docker packaging
\ No newline at end of file
diff --git a/CHANGES.rst b/CHANGES.rst
index 9d40b2ac1e..b769b0f046 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,5 +1,13 @@
-Changes in synapse <unreleased>
-===============================
+Changes in synapse v0.29.0 (2018-05-16)
+===========================================
+
+
+Changes in synapse v0.29.0-rc1 (2018-05-14)
+===========================================
+
+Notable changes: a Dockerfile for running Synapse (thanks to @kaiyou!) and a
+fix for a spec compliance bug in the Client-Server API. There is also further
+preparation for the Python 3 migration.
Potentially breaking change:
@@ -12,6 +20,66 @@ Potentially breaking change:
Thanks to @NotAFile for fixing this.
+Features:
+
+* Add a Dockerfile for synapse (PR #2846) Thanks to @kaiyou!
+
+Changes - General:
+
+* nuke-room-from-db.sh: added postgresql option and help (PR #2337) Thanks to @rubo77!
+* Part user from rooms on account deactivate (PR #3201)
+* Make 'unexpected logging context' into warnings (PR #3007)
+* Set Server header in SynapseRequest (PR #3208)
+* remove duplicates from groups tables (PR #3129)
+* Improve exception handling for background processes (PR #3138)
+* Add missing consumeErrors to improve exception handling (PR #3139)
+* reraise exceptions more carefully (PR #3142)
+* Remove redundant call to preserve_fn (PR #3143)
+* Trap exceptions thrown within run_in_background (PR #3144)
+
+Changes - Refactors:
+
+* Refactor /context to reuse pagination storage functions (PR #3193)
+* Refactor recent events func to use pagination func (PR #3195)
+* Refactor pagination DB API to return concrete type (PR #3196)
+* Refactor get_recent_events_for_room return type (PR #3198)
+* Refactor sync APIs to reuse pagination API (PR #3199)
+* Remove unused code path from member change DB func (PR #3200)
+* Refactor request handling wrappers (PR #3203)
+* transaction_id, destination defined twice (PR #3209) Thanks to @damir-manapov!
+* Refactor event storage to prepare for changes in state calculations (PR #3141)
+* Set Server header in SynapseRequest (PR #3208)
+* Use deferred.addTimeout instead of time_bound_deferred (PR #3127, #3178)
+* Use run_in_background in preference to preserve_fn (PR #3140)
+
+Changes - Python 3 migration:
+
+* Construct HMAC as bytes on py3 (PR #3156) Thanks to @NotAFile!
+* run config tests on py3 (PR #3159) Thanks to @NotAFile!
+* Open certificate files as bytes (PR #3084) Thanks to @NotAFile!
+* Open config file in non-bytes mode (PR #3085) Thanks to @NotAFile!
+* Make event properties raise AttributeError instead (PR #3102) Thanks to @NotAFile!
+* Use six.moves.urlparse (PR #3108) Thanks to @NotAFile!
+* Add py3 tests to tox with folders that work (PR #3145) Thanks to @NotAFile!
+* Don't yield in list comprehensions (PR #3150) Thanks to @NotAFile!
+* Move more xrange to six (PR #3151) Thanks to @NotAFile!
+* make imports local (PR #3152) Thanks to @NotAFile!
+* move httplib import to six (PR #3153) Thanks to @NotAFile!
+* Replace stringIO imports with six (PR #3154, #3168) Thanks to @NotAFile!
+* more bytes strings (PR #3155) Thanks to @NotAFile!
+
+Bug Fixes:
+
+* synapse fails to start under Twisted >= 18.4 (PR #3157)
+* Fix a class of logcontext leaks (PR #3170)
+* Fix a couple of logcontext leaks in unit tests (PR #3172)
+* Fix logcontext leak in media repo (PR #3174)
+* Escape label values in prometheus metrics (PR #3175, #3186)
+* Fix 'Unhandled Error' logs with Twisted 18.4 (PR #3182) Thanks to @Half-Shot!
+* Fix logcontext leaks in rate limiter (PR #3183)
+* notifications: Convert next_token to string according to the spec (PR #3190) Thanks to @mujx!
+* nuke-room-from-db.sh: fix deletion from search table (PR #3194) Thanks to @rubo77!
+* add guard for None on purge_history api (PR #3160) Thanks to @krombel!
Changes in synapse v0.28.1 (2018-05-01)
=======================================
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000..8085f3d354
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,19 @@
+FROM docker.io/python:2-alpine3.7
+
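+# Install runtime and build dependencies; su-exec is used by start.py to drop
+# root privileges to the configured UID/GID, the -dev packages are build
+# dependencies for synapse and psycopg2.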
+RUN apk add --no-cache --virtual .nacl_deps su-exec build-base libffi-dev zlib-dev libressl-dev libjpeg-turbo-dev linux-headers postgresql-dev
+
+COPY . /synapse
+
+# A wheel cache may be provided in ./cache for faster build
+RUN cd /synapse \
+ && pip install --upgrade pip setuptools psycopg2 \
+ && mkdir -p /synapse/cache \
+ && pip install -f /synapse/cache --upgrade --process-dependency-links . \
+ && mv /synapse/contrib/docker/start.py /synapse/contrib/docker/conf / \
+ && rm -rf setup.py setup.cfg synapse
+
+VOLUME ["/data"]
+
+EXPOSE 8008/tcp 8448/tcp
+
+ENTRYPOINT ["/start.py"]
diff --git a/MANIFEST.in b/MANIFEST.in
index afb60e12ee..e2a6623a63 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -25,6 +25,8 @@ recursive-include synapse/static *.js
exclude jenkins.sh
exclude jenkins*.sh
exclude jenkins*
+exclude Dockerfile
+exclude .dockerignore
recursive-exclude jenkins *.sh
prune .github
diff --git a/contrib/docker/README.md b/contrib/docker/README.md
new file mode 100644
index 0000000000..aed56646c2
--- /dev/null
+++ b/contrib/docker/README.md
@@ -0,0 +1,148 @@
+# Synapse Docker
+
+This Docker image will run Synapse as a single process. It does not provide a
+database server or a TURN server; you should run those separately.
+
+If you run a Postgres server, simply include it in the same Compose project or
+set the appropriate environment variables, and the image will automatically use
+that server.
+
+## Build
+
+Build the docker image with the `docker build` command from the root of the synapse repository.
+
+```
+docker build -t docker.io/matrixdotorg/synapse .
+```
+
+The `-t` option sets the image tag. Official images are tagged `matrixdotorg/synapse:<version>` where `<version>` is the same as the release tag in the synapse git repository.
+
+You may have a local Python wheel cache available, in which case copy the relevant packages into the ``cache/`` directory at the root of the project.
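+
+For instance, a wheel cache could be populated from a machine that already has
+the build dependencies installed (a sketch; adjust to your environment):
+
+```
+pip wheel -w cache/ .
+```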
+
+## Run
+
+This image is designed to run either with an automatically generated configuration
+file or with a custom configuration that requires manual editing.
+
+### Automated configuration
+
+It is recommended that you use Docker Compose to run your containers, including
+this image and a Postgres server. A sample ``docker-compose.yml`` is provided,
+including example labels for reverse proxying and other artifacts.
+
+Read the section about environment variables and set at least mandatory variables,
+then run the server:
+
+```
+docker-compose up -d
+```
+
+### Manual configuration
+
+A sample ``docker-compose.yml`` is provided, including example labels for
+reverse proxying and other artifacts.
+
+Specify a ``SYNAPSE_CONFIG_PATH``, preferably to a persistent path,
+to use manual configuration. To generate a fresh ``homeserver.yaml``, simply run:
+
+```
+docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host synapse generate
+```
+
+Then, customize your configuration and run the server:
+
+```
+docker-compose up -d
+```
+
+### Without Compose
+
+If you do not wish to use Compose, you may still run this image using plain
+Docker commands. Note that the following is just a guideline; you may need to
+add parameters to the docker run command to account for how your postgres
+database is reachable on the network (see the second example below).
+
+```
+docker run \
+ -d \
+ --name synapse \
+ -v ${DATA_PATH}:/data \
+ -e SYNAPSE_SERVER_NAME=my.matrix.host \
+ -e SYNAPSE_REPORT_STATS=yes \
+ docker.io/matrixdotorg/synapse:latest
+```
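+
+For example, assuming a Postgres container reachable as ``postgres-synapse`` on
+a user-defined Docker network called ``matrix`` (both names are illustrative),
+the database parameters could be passed like this:
+
+```
+docker run \
+    -d \
+    --name synapse \
+    --network matrix \
+    -v ${DATA_PATH}:/data \
+    -e SYNAPSE_SERVER_NAME=my.matrix.host \
+    -e SYNAPSE_REPORT_STATS=yes \
+    -e POSTGRES_HOST=postgres-synapse \
+    -e POSTGRES_USER=synapse \
+    -e POSTGRES_PASSWORD=changeme \
+    docker.io/matrixdotorg/synapse:latest
+```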
+
+## Volumes
+
+The image expects a single volume, located at ``/data``, that will hold:
+
+* temporary files during uploads;
+* uploaded media and thumbnails;
+* the SQLite database if you do not configure postgres;
+* the appservices configuration.
+
+You are free to use separate volumes depending on storage endpoints at your
+disposal. For instance, ``/data/media`` could be stored on large but
+low-performance HDD storage while other files could be stored on
+high-performance endpoints.
+
+In order to set up an application service, simply create an ``appservices``
+directory in the data volume and write the application service YAML
+configuration file there. Multiple application services are supported.
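+
+As an illustration, a registration file such as
+``/data/appservices/my-bridge.yaml`` (all values below are hypothetical and
+depend on the application service you deploy) typically looks like:
+
+```
+id: my-bridge
+url: http://bridge:9000
+as_token: <random string>
+hs_token: <random string>
+sender_localpart: bridge-bot
+namespaces:
+  users:
+    - exclusive: true
+      regex: "@bridge_.*:my.matrix.host"
+  aliases: []
+  rooms: []
+```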
+
+## Environment
+
+Unless you specify a custom path for the configuration file, a very generic
+file will be generated, based on the following environment settings.
+These are a good starting point for setting up your own deployment.
+
+Global settings:
+
+* ``UID``, the user id Synapse will run as [default 991]
+* ``GID``, the group id Synapse will run as [default 991]
+* ``SYNAPSE_CONFIG_PATH``, path to a custom config file
+
+If ``SYNAPSE_CONFIG_PATH`` is set, you should generate a configuration file
+then customize it manually. No other environment variable is required.
+
+Otherwise, a dynamic configuration file will be used. The following environment
+variables are available for configuration:
+
+* ``SYNAPSE_SERVER_NAME`` (mandatory), the public hostname of this server.
+* ``SYNAPSE_REPORT_STATS`` (mandatory, ``yes`` or ``no``), enable anonymous
+  statistics reporting back to the Matrix project which helps us to get funding.
+* ``SYNAPSE_MACAROON_SECRET_KEY`` (mandatory) secret for signing access tokens
+ to the server, set this to a proper random key.
+* ``SYNAPSE_NO_TLS``, set this variable to disable TLS in Synapse (use this if
+ you run your own TLS-capable reverse proxy).
+* ``SYNAPSE_ENABLE_REGISTRATION``, set this variable to enable registration on
+ the Synapse instance.
+* ``SYNAPSE_ALLOW_GUEST``, set this variable to allow guests to join this server.
+* ``SYNAPSE_EVENT_CACHE_SIZE``, the event cache size [default `10K`].
+* ``SYNAPSE_CACHE_FACTOR``, the cache factor [default `0.5`].
+* ``SYNAPSE_RECAPTCHA_PUBLIC_KEY``, set this variable to the recaptcha public
+ key in order to enable recaptcha upon registration.
+* ``SYNAPSE_RECAPTCHA_PRIVATE_KEY``, set this variable to the recaptcha private
+ key in order to enable recaptcha upon registration.
+* ``SYNAPSE_TURN_URIS``, set this variable to the comma-separated list of TURN
+  URIs to enable TURN for this homeserver.
+* ``SYNAPSE_TURN_SECRET``, set this to the TURN shared secret if required.
+
+Shared secrets, which will be initialized to random values if not set:
+
+* ``SYNAPSE_REGISTRATION_SHARED_SECRET``, secret for registering users if
+  registration is disabled.
+
+Database specific values (will use SQLite if not set):
+
+* `POSTGRES_DB` - The database name for the synapse postgres database. [default: `synapse`]
+* `POSTGRES_HOST` - The host of the postgres database if you wish to use postgresql instead of sqlite3. [default: `db` which is useful when using a container on the same docker network in a compose file where the postgres service is called `db`]
+* `POSTGRES_PASSWORD` - The password for the synapse postgres database. **If this is set then postgres will be used instead of sqlite3.** [default: none] **NOTE**: You are highly encouraged to use postgresql! Please use the compose file to make it easier to deploy.
+* `POSTGRES_USER` - The user for the synapse postgres database. [default: `matrix`]
+
+Mail server specific values (will not send emails if not set):
+
+* ``SYNAPSE_SMTP_HOST``, hostname of the mail server.
+* ``SYNAPSE_SMTP_PORT``, TCP port for accessing the mail server [default ``25``].
+* ``SYNAPSE_SMTP_USER``, username for authenticating against the mail server if any.
+* ``SYNAPSE_SMTP_PASSWORD``, password for authenticating against the mail server if any.
diff --git a/contrib/docker/conf/homeserver.yaml b/contrib/docker/conf/homeserver.yaml
new file mode 100644
index 0000000000..6bc25bb45f
--- /dev/null
+++ b/contrib/docker/conf/homeserver.yaml
@@ -0,0 +1,219 @@
+# vim:ft=yaml
+
+## TLS ##
+
+tls_certificate_path: "/data/{{ SYNAPSE_SERVER_NAME }}.tls.crt"
+tls_private_key_path: "/data/{{ SYNAPSE_SERVER_NAME }}.tls.key"
+tls_dh_params_path: "/data/{{ SYNAPSE_SERVER_NAME }}.tls.dh"
+no_tls: {{ "True" if SYNAPSE_NO_TLS else "False" }}
+tls_fingerprints: []
+
+## Server ##
+
+server_name: "{{ SYNAPSE_SERVER_NAME }}"
+pid_file: /homeserver.pid
+web_client: False
+soft_file_limit: 0
+
+## Ports ##
+
+listeners:
+ {% if not SYNAPSE_NO_TLS %}
+ -
+ port: 8448
+ bind_addresses: ['0.0.0.0']
+ type: http
+ tls: true
+ x_forwarded: false
+ resources:
+ - names: [client]
+ compress: true
+ - names: [federation] # Federation APIs
+ compress: false
+ {% endif %}
+
+ - port: 8008
+ tls: false
+ bind_addresses: ['0.0.0.0']
+ type: http
+ x_forwarded: false
+
+ resources:
+ - names: [client]
+ compress: true
+ - names: [federation]
+ compress: false
+
+## Database ##
+
+{% if POSTGRES_PASSWORD %}
+database:
+ name: "psycopg2"
+ args:
+ user: "{{ POSTGRES_USER or "synapse" }}"
+ password: "{{ POSTGRES_PASSWORD }}"
+ database: "{{ POSTGRES_DB or "synapse" }}"
+ host: "{{ POSTGRES_HOST or "db" }}"
+ port: "{{ POSTGRES_PORT or "5432" }}"
+ cp_min: 5
+ cp_max: 10
+{% else %}
+database:
+ name: "sqlite3"
+ args:
+ database: "/data/homeserver.db"
+{% endif %}
+
+## Performance ##
+
+event_cache_size: "{{ SYNAPSE_EVENT_CACHE_SIZE or "10K" }}"
+verbose: 0
+log_file: "/data/homeserver.log"
+log_config: "/compiled/log.config"
+
+## Ratelimiting ##
+
+rc_messages_per_second: 0.2
+rc_message_burst_count: 10.0
+federation_rc_window_size: 1000
+federation_rc_sleep_limit: 10
+federation_rc_sleep_delay: 500
+federation_rc_reject_limit: 50
+federation_rc_concurrent: 3
+
+## Files ##
+
+media_store_path: "/data/media"
+uploads_path: "/data/uploads"
+max_upload_size: "10M"
+max_image_pixels: "32M"
+dynamic_thumbnails: false
+
+# List of thumbnail sizes to precalculate when an image is uploaded.
+thumbnail_sizes:
+- width: 32
+ height: 32
+ method: crop
+- width: 96
+ height: 96
+ method: crop
+- width: 320
+ height: 240
+ method: scale
+- width: 640
+ height: 480
+ method: scale
+- width: 800
+ height: 600
+ method: scale
+
+url_preview_enabled: False
+max_spider_size: "10M"
+
+## Captcha ##
+
+{% if SYNAPSE_RECAPTCHA_PUBLIC_KEY %}
+recaptcha_public_key: "{{ SYNAPSE_RECAPTCHA_PUBLIC_KEY }}"
+recaptcha_private_key: "{{ SYNAPSE_RECAPTCHA_PRIVATE_KEY }}"
+enable_registration_captcha: True
+recaptcha_siteverify_api: "https://www.google.com/recaptcha/api/siteverify"
+{% else %}
+recaptcha_public_key: "YOUR_PUBLIC_KEY"
+recaptcha_private_key: "YOUR_PRIVATE_KEY"
+enable_registration_captcha: False
+recaptcha_siteverify_api: "https://www.google.com/recaptcha/api/siteverify"
+{% endif %}
+
+## Turn ##
+
+{% if SYNAPSE_TURN_URIS %}
+turn_uris:
+{% for uri in SYNAPSE_TURN_URIS.split(',') %} - "{{ uri }}"
+{% endfor %}
+turn_shared_secret: "{{ SYNAPSE_TURN_SECRET }}"
+turn_user_lifetime: "1h"
+turn_allow_guests: True
+{% else %}
+turn_uris: []
+turn_shared_secret: "YOUR_SHARED_SECRET"
+turn_user_lifetime: "1h"
+turn_allow_guests: True
+{% endif %}
+
+## Registration ##
+
+enable_registration: {{ "True" if SYNAPSE_ENABLE_REGISTRATION else "False" }}
+registration_shared_secret: "{{ SYNAPSE_REGISTRATION_SHARED_SECRET }}"
+bcrypt_rounds: 12
+allow_guest_access: {{ "True" if SYNAPSE_ALLOW_GUEST else "False" }}
+enable_group_creation: true
+
+# The list of identity servers trusted to verify third party
+# identifiers by this server.
+trusted_third_party_id_servers:
+ - matrix.org
+ - vector.im
+ - riot.im
+
+## Metrics ###
+
+{% if SYNAPSE_REPORT_STATS.lower() == "yes" %}
+enable_metrics: True
+report_stats: True
+{% else %}
+enable_metrics: False
+report_stats: False
+{% endif %}
+
+## API Configuration ##
+
+room_invite_state_types:
+ - "m.room.join_rules"
+ - "m.room.canonical_alias"
+ - "m.room.avatar"
+ - "m.room.name"
+
+{% if SYNAPSE_APPSERVICES %}
+app_service_config_files:
+{% for appservice in SYNAPSE_APPSERVICES %} - "{{ appservice }}"
+{% endfor %}
+{% else %}
+app_service_config_files: []
+{% endif %}
+
+macaroon_secret_key: "{{ SYNAPSE_MACAROON_SECRET_KEY }}"
+expire_access_token: False
+
+## Signing Keys ##
+
+signing_key_path: "/data/{{ SYNAPSE_SERVER_NAME }}.signing.key"
+old_signing_keys: {}
+key_refresh_interval: "1d" # 1 Day.
+
+# The trusted servers to download signing keys from.
+perspectives:
+ servers:
+ "matrix.org":
+ verify_keys:
+ "ed25519:auto":
+ key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
+
+password_config:
+ enabled: true
+
+{% if SYNAPSE_SMTP_HOST %}
+email:
+ enable_notifs: false
+ smtp_host: "{{ SYNAPSE_SMTP_HOST }}"
+ smtp_port: {{ SYNAPSE_SMTP_PORT or "25" }}
+ smtp_user: "{{ SYNAPSE_SMTP_USER }}"
+ smtp_pass: "{{ SYNAPSE_SMTP_PASSWORD }}"
+ require_transport_security: False
+ notif_from: "{{ SYNAPSE_SMTP_FROM or "hostmaster@" + SYNAPSE_SERVER_NAME }}"
+ app_name: Matrix
+ template_dir: res/templates
+ notif_template_html: notif_mail.html
+ notif_template_text: notif_mail.txt
+ notif_for_new_users: True
+ riot_base_url: "https://{{ SYNAPSE_SERVER_NAME }}"
+{% endif %}
diff --git a/contrib/docker/conf/log.config b/contrib/docker/conf/log.config
new file mode 100644
index 0000000000..1851995802
--- /dev/null
+++ b/contrib/docker/conf/log.config
@@ -0,0 +1,29 @@
+version: 1
+
+formatters:
+ precise:
+ format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s- %(message)s'
+
+filters:
+ context:
+ (): synapse.util.logcontext.LoggingContextFilter
+ request: ""
+
+handlers:
+ console:
+ class: logging.StreamHandler
+ formatter: precise
+ filters: [context]
+
+loggers:
+ synapse:
+ level: {{ SYNAPSE_LOG_LEVEL or "WARNING" }}
+
+ synapse.storage.SQL:
+ # beware: increasing this to DEBUG will make synapse log sensitive
+ # information such as access tokens.
+ level: {{ SYNAPSE_LOG_LEVEL or "WARNING" }}
+
+root:
+ level: {{ SYNAPSE_LOG_LEVEL or "WARNING" }}
+ handlers: [console]
diff --git a/contrib/docker/docker-compose.yml b/contrib/docker/docker-compose.yml
new file mode 100644
index 0000000000..0b531949e0
--- /dev/null
+++ b/contrib/docker/docker-compose.yml
@@ -0,0 +1,49 @@
+# This compose file is compatible with Compose itself; it might need some
+# adjustments to run properly with docker stack.
+
+version: '3'
+
+services:
+
+ synapse:
+ image: docker.io/matrixdotorg/synapse:latest
+    # Since synapse does not retry connecting to the database, restart upon
+    # failure
+ restart: unless-stopped
+    # See the README for full documentation of the environment settings
+ environment:
+ - SYNAPSE_SERVER_NAME=my.matrix.host
+ - SYNAPSE_REPORT_STATS=no
+ - SYNAPSE_ENABLE_REGISTRATION=yes
+ - SYNAPSE_LOG_LEVEL=INFO
+ - POSTGRES_PASSWORD=changeme
+ volumes:
+ # You may either store all the files in a local folder
+ - ./files:/data
+ # .. or you may split this between different storage points
+ # - ./files:/data
+ # - /path/to/ssd:/data/uploads
+ # - /path/to/large_hdd:/data/media
+ depends_on:
+ - db
+    # In order to expose Synapse, remove one of the following; you might for
+    # instance expose the TLS port directly:
+ ports:
+ - 8448:8448/tcp
+ # ... or use a reverse proxy, here is an example for traefik:
+ labels:
+ - traefik.enable=true
+      - traefik.frontend.rule=Host:my.matrix.host
+ - traefik.port=8448
+
+ db:
+ image: docker.io/postgres:10-alpine
+ # Change that password, of course!
+ environment:
+ - POSTGRES_USER=synapse
+ - POSTGRES_PASSWORD=changeme
+ volumes:
+ # You may store the database tables in a local folder..
+ - ./schemas:/var/lib/postgresql/data
+ # .. or store them on some high performance storage for better results
+      # - /path/to/ssd/storage:/var/lib/postgresql/data
diff --git a/contrib/docker/start.py b/contrib/docker/start.py
new file mode 100755
index 0000000000..90e8b9c51a
--- /dev/null
+++ b/contrib/docker/start.py
@@ -0,0 +1,66 @@
+#!/usr/local/bin/python
+
+import jinja2
+import os
+import sys
+import subprocess
+import glob
+
+# Utility functions
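+# convert(src, dst, environ): render the Jinja2 template at `src` with the
+# given environment dict and write the result to `dst`.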
+convert = lambda src, dst, environ: open(dst, "w").write(jinja2.Template(open(src).read()).render(**environ))
+
+def check_arguments(environ, args):
+ for argument in args:
+ if argument not in environ:
+ print("Environment variable %s is mandatory, exiting." % argument)
+ sys.exit(2)
+
+def generate_secrets(environ, secrets):
+ for name, secret in secrets.items():
+ if secret not in environ:
+ filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name)
+ if os.path.exists(filename):
+ with open(filename) as handle: value = handle.read()
+ else:
+ print("Generating a random secret for {}".format(name))
+ value = os.urandom(32).encode("hex")
+ with open(filename, "w") as handle: handle.write(value)
+ environ[secret] = value
+
+# Prepare the configuration
+mode = sys.argv[1] if len(sys.argv) > 1 else None
+environ = os.environ.copy()
+ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))
+args = ["python", "-m", "synapse.app.homeserver"]
+
+# In generate mode, generate a configuration, missing keys, then exit
+if mode == "generate":
+ check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH"))
+ args += [
+ "--server-name", environ["SYNAPSE_SERVER_NAME"],
+ "--report-stats", environ["SYNAPSE_REPORT_STATS"],
+ "--config-path", environ["SYNAPSE_CONFIG_PATH"],
+ "--generate-config"
+ ]
+ os.execv("/usr/local/bin/python", args)
+
+# In normal mode, generate missing keys if any, then run synapse
+else:
+ # Parse the configuration file
+ if "SYNAPSE_CONFIG_PATH" in environ:
+ args += ["--config-path", environ["SYNAPSE_CONFIG_PATH"]]
+ else:
+ check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"))
+ generate_secrets(environ, {
+ "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
+ "macaroon": "SYNAPSE_MACAROON_SECRET_KEY"
+ })
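+    # Expose the appservice registration files found in the data volume so that
+    # the homeserver.yaml template can list them under app_service_config_files.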
+ environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
+ if not os.path.exists("/compiled"): os.mkdir("/compiled")
+ convert("/conf/homeserver.yaml", "/compiled/homeserver.yaml", environ)
+ convert("/conf/log.config", "/compiled/log.config", environ)
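+    # Make sure the data volume is owned by the configured UID/GID before
+    # dropping privileges via su-exec below.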
+ subprocess.check_output(["chown", "-R", ownership, "/data"])
+ args += ["--config-path", "/compiled/homeserver.yaml"]
+ # Generate missing keys and start synapse
+ subprocess.check_output(args + ["--generate-keys"])
+ os.execv("/sbin/su-exec", ["su-exec", ownership] + args)
diff --git a/docs/privacy_policy_templates/README.md b/docs/privacy_policy_templates/README.md
new file mode 100644
index 0000000000..8e91c516b3
--- /dev/null
+++ b/docs/privacy_policy_templates/README.md
@@ -0,0 +1,23 @@
+If enabling the 'consent' resource in synapse, you will need some templates
+for the HTML to be served to the user. This directory contains very simple
+examples of the sort of thing that can be done.
+
+You'll need to add this sort of thing to your homeserver.yaml:
+
+```
+form_secret: <unique but arbitrary secret>
+
+user_consent:
+ template_dir: docs/privacy_policy_templates
+ default_version: 1.0
+```
+
+You should then be able to enable the `consent` resource under a `listener`
+entry. For example:
+
+```
+listeners:
+ - port: 8008
+ resources:
+ - names: [client, consent]
+```
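+
+The hidden `h` field in the sample form in `en/1.0.html` is an HMAC over the
+user ID, keyed by the `form_secret` introduced in this change (see the
+`synapse/config/key.py` hunk below). A minimal sketch of how such a value could
+be computed, assuming HMAC-SHA256 over the full Matrix user ID (check the
+`ConsentResource` implementation for the authoritative details):
+
+```
+import hashlib
+import hmac
+
+form_secret = "<form_secret from homeserver.yaml>"  # illustrative value
+user_id = "@someone:example.com"                    # illustrative value
+
+# Keyed hash that the consent form echoes back, so the server can check that
+# the "u" (user) field has not been tampered with.
+userhmac = hmac.new(
+    key=form_secret.encode("ascii"),
+    msg=user_id.encode("ascii"),
+    digestmod=hashlib.sha256,
+).hexdigest()
+
+print(userhmac)
+```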
diff --git a/docs/privacy_policy_templates/en/1.0.html b/docs/privacy_policy_templates/en/1.0.html
new file mode 100644
index 0000000000..ab8666f0c3
--- /dev/null
+++ b/docs/privacy_policy_templates/en/1.0.html
@@ -0,0 +1,17 @@
+<!doctype html>
+<html lang="en">
+ <head>
+ <title>Matrix.org Privacy policy</title>
+ </head>
+ <body>
+ <p>
+ All your base are belong to us.
+ </p>
+ <form method="post" action="consent">
+ <input type="hidden" name="v" value="{{version}}"/>
+ <input type="hidden" name="u" value="{{user}}"/>
+ <input type="hidden" name="h" value="{{userhmac}}"/>
+ <input type="submit" value="Sure thing!"/>
+ </form>
+ </body>
+</html>
diff --git a/docs/privacy_policy_templates/en/success.html b/docs/privacy_policy_templates/en/success.html
new file mode 100644
index 0000000000..d55e90c94f
--- /dev/null
+++ b/docs/privacy_policy_templates/en/success.html
@@ -0,0 +1,11 @@
+<!doctype html>
+<html lang="en">
+ <head>
+ <title>Matrix.org Privacy policy</title>
+ </head>
+ <body>
+ <p>
+ Sweet.
+ </p>
+ </body>
+</html>
diff --git a/synapse/__init__.py b/synapse/__init__.py
index f31cb9a3cb..d94c20505e 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
""" This is a reference implementation of a Matrix home server.
"""
-__version__ = "0.28.1"
+__version__ = "0.29.0"
diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py
index 58f2c9d68c..b1efacc9f8 100644
--- a/synapse/app/appservice.py
+++ b/synapse/app/appservice.py
@@ -74,6 +74,7 @@ class AppserviceServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index 267d34c881..38b98382c6 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -98,6 +98,7 @@ class ClientReaderServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py
index b915d12d53..bd7f3d5679 100644
--- a/synapse/app/event_creator.py
+++ b/synapse/app/event_creator.py
@@ -114,6 +114,7 @@ class EventCreatorServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index c1dc66dd17..6e10b27b9e 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -87,6 +87,7 @@ class FederationReaderServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py
index a08af83a4c..6f24e32d6d 100644
--- a/synapse/app/federation_sender.py
+++ b/synapse/app/federation_sender.py
@@ -101,6 +101,7 @@ class FederationSenderServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index b349e3e3ce..0f700ee786 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -152,6 +152,7 @@ class FrontendProxyServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index a0e465d644..bceb21a8d5 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -41,6 +41,7 @@ from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, \
from synapse.replication.http import ReplicationRestResource, REPLICATION_PREFIX
from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
from synapse.rest import ClientRestResource
+from synapse.rest.consent.consent_resource import ConsentResource
from synapse.rest.key.v1.server_key_resource import LocalKey
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.rest.media.v0.content_repository import ContentRepoResource
@@ -140,6 +141,7 @@ class SynapseHomeServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
),
self.tls_server_context_factory,
)
@@ -153,6 +155,7 @@ class SynapseHomeServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
logger.info("Synapse now listening on port %d", port)
@@ -182,6 +185,14 @@ class SynapseHomeServer(HomeServer):
"/_matrix/client/versions": client_resource,
})
+ if name == "consent":
+ consent_resource = ConsentResource(self)
+ if compress:
+ consent_resource = gz_wrap(consent_resource)
+ resources.update({
+ "/_matrix/consent": consent_resource,
+ })
+
if name == "federation":
resources.update({
FEDERATION_PREFIX: TransportLayerServer(self),
@@ -473,6 +484,14 @@ def run(hs):
" changes across releases."
)
+ def generate_user_daily_visit_stats():
+ hs.get_datastore().generate_user_daily_visits()
+
+ # Rather than update on per session basis, batch up the requests.
+ # If you increase the loop period, the accuracy of user_daily_visits
+ # table will decrease
+ clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000)
+
if hs.config.report_stats:
logger.info("Scheduling stats reporting for 3 hour intervals")
clock.looping_call(phone_stats_home, 3 * 60 * 60 * 1000)
diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py
index fc8282bbc1..9c93195f0a 100644
--- a/synapse/app/media_repository.py
+++ b/synapse/app/media_repository.py
@@ -94,6 +94,7 @@ class MediaRepositoryServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py
index 26930d1b3b..3912eae48c 100644
--- a/synapse/app/pusher.py
+++ b/synapse/app/pusher.py
@@ -104,6 +104,7 @@ class PusherServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py
index 7152b1deb4..c6294a7a0c 100644
--- a/synapse/app/synchrotron.py
+++ b/synapse/app/synchrotron.py
@@ -281,6 +281,7 @@ class SynchrotronServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py
index 5ba7e9b416..53eb3474da 100644
--- a/synapse/app/user_dir.py
+++ b/synapse/app/user_dir.py
@@ -126,6 +126,7 @@ class UserDirectoryServer(HomeServer):
site_tag,
listener_config,
root_resource,
+ self.version_string,
)
)
diff --git a/synapse/config/__init__.py b/synapse/config/__init__.py
index bfebb0f644..f2a5a41e92 100644
--- a/synapse/config/__init__.py
+++ b/synapse/config/__init__.py
@@ -12,3 +12,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
+from ._base import ConfigError
+
+# export ConfigError if somebody does import *
+# this is largely a fudge to stop PEP8 moaning about the import
+__all__ = ["ConfigError"]
diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py
new file mode 100644
index 0000000000..675fce0911
--- /dev/null
+++ b/synapse/config/consent_config.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import Config
+
+DEFAULT_CONFIG = """\
+# User Consent configuration
+#
+# uncomment and configure if enabling the 'consent' resource under 'listeners'.
+#
+# 'template_dir' gives the location of the templates for the HTML forms.
+# This directory should contain one subdirectory per language (eg, 'en', 'fr'),
+# and each language directory should contain the policy document (named as
+# '<version>.html') and a success page (success.html).
+#
+# 'default_version' gives the version of the policy document to serve up if
+# there is no 'v' parameter.
+#
+# user_consent:
+# template_dir: res/templates/privacy
+# default_version: 1.0
+"""
+
+
+class ConsentConfig(Config):
+ def read_config(self, config):
+ self.consent_config = config.get("user_consent")
+
+ def default_config(self, **kwargs):
+ return DEFAULT_CONFIG
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index bf19cfee29..fb6bd3b421 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
from .tls import TlsConfig
from .server import ServerConfig
from .logger import LoggingConfig
@@ -37,6 +37,7 @@ from .push import PushConfig
from .spam_checker import SpamCheckerConfig
from .groups import GroupsConfig
from .user_directory import UserDirectoryConfig
+from .consent_config import ConsentConfig
class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
@@ -45,12 +46,13 @@ class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
AppServiceConfig, KeyConfig, SAML2Config, CasConfig,
JWTConfig, PasswordConfig, EmailConfig,
WorkerConfig, PasswordAuthProviderConfig, PushConfig,
- SpamCheckerConfig, GroupsConfig, UserDirectoryConfig,):
+ SpamCheckerConfig, GroupsConfig, UserDirectoryConfig,
+ ConsentConfig):
pass
if __name__ == '__main__':
import sys
sys.stdout.write(
- HomeServerConfig().generate_config(sys.argv[1], sys.argv[2])[0]
+ HomeServerConfig().generate_config(sys.argv[1], sys.argv[2], True)[0]
)
diff --git a/synapse/config/key.py b/synapse/config/key.py
index 4b8fc063d0..d1382ad9ac 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -59,14 +59,20 @@ class KeyConfig(Config):
self.expire_access_token = config.get("expire_access_token", False)
+ # a secret which is used to calculate HMACs for form values, to stop
+ # falsification of values
+ self.form_secret = config.get("form_secret", None)
+
def default_config(self, config_dir_path, server_name, is_generating_file=False,
**kwargs):
base_key_name = os.path.join(config_dir_path, server_name)
if is_generating_file:
macaroon_secret_key = random_string_with_symbols(50)
+ form_secret = '"%s"' % random_string_with_symbols(50)
else:
macaroon_secret_key = None
+ form_secret = 'null'
return """\
macaroon_secret_key: "%(macaroon_secret_key)s"
@@ -74,6 +80,10 @@ class KeyConfig(Config):
# Used to enable access token expiration.
expire_access_token: False
+ # a secret which is used to calculate HMACs for form values, to stop
+ # falsification of values
+ form_secret: %(form_secret)s
+
## Signing Keys ##
# Path to the signing key to sign messages with
diff --git a/synapse/federation/units.py b/synapse/federation/units.py
index 3f645acc43..01c5b8fe17 100644
--- a/synapse/federation/units.py
+++ b/synapse/federation/units.py
@@ -74,8 +74,6 @@ class Transaction(JsonEncodedObject):
"previous_ids",
"pdus",
"edus",
- "transaction_id",
- "destination",
"pdu_failures",
]
diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index b1d3814909..4eb18775e8 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2017 New Vector Ltd
+# Copyright 2017, 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,9 +12,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from twisted.internet import defer
+from twisted.internet import defer, reactor
from ._base import BaseHandler
+from synapse.types import UserID, create_requester
+from synapse.util.logcontext import run_in_background
import logging
@@ -27,6 +29,14 @@ class DeactivateAccountHandler(BaseHandler):
super(DeactivateAccountHandler, self).__init__(hs)
self._auth_handler = hs.get_auth_handler()
self._device_handler = hs.get_device_handler()
+ self._room_member_handler = hs.get_room_member_handler()
+
+ # Flag that indicates whether the process to part users from rooms is running
+ self._user_parter_running = False
+
+ # Start the user parter loop so it can resume parting users from rooms where
+ # it left off (if it has work left to do).
+ reactor.callWhenRunning(self._start_user_parting)
@defer.inlineCallbacks
def deactivate_account(self, user_id):
@@ -50,3 +60,70 @@ class DeactivateAccountHandler(BaseHandler):
yield self.store.user_delete_threepids(user_id)
yield self.store.user_set_password_hash(user_id, None)
+
+        # Add the user to a table of users pending deactivation (i.e.
+        # removal from all the rooms they're a member of)
+ yield self.store.add_user_pending_deactivation(user_id)
+
+ # Now start the process that goes through that list and
+ # parts users from rooms (if it isn't already running)
+ self._start_user_parting()
+
+ def _start_user_parting(self):
+ """
+ Start the process that goes through the table of users
+ pending deactivation, if it isn't already running.
+
+ Returns:
+ None
+ """
+ if not self._user_parter_running:
+ run_in_background(self._user_parter_loop)
+
+ @defer.inlineCallbacks
+ def _user_parter_loop(self):
+ """Loop that parts deactivated users from rooms
+
+ Returns:
+ None
+ """
+ self._user_parter_running = True
+ logger.info("Starting user parter")
+ try:
+ while True:
+ user_id = yield self.store.get_user_pending_deactivation()
+ if user_id is None:
+ break
+ logger.info("User parter parting %r", user_id)
+ yield self._part_user(user_id)
+ yield self.store.del_user_pending_deactivation(user_id)
+ logger.info("User parter finished parting %r", user_id)
+ logger.info("User parter finished: stopping")
+ finally:
+ self._user_parter_running = False
+
+ @defer.inlineCallbacks
+ def _part_user(self, user_id):
+ """Causes the given user_id to leave all the rooms they're joined to
+
+ Returns:
+ None
+ """
+ user = UserID.from_string(user_id)
+
+ rooms_for_user = yield self.store.get_rooms_for_user(user_id)
+ for room_id in rooms_for_user:
+ logger.info("User parter parting %r from %r", user_id, room_id)
+ try:
+ yield self._room_member_handler.update_membership(
+ create_requester(user),
+ user,
+ room_id,
+ "leave",
+ ratelimit=False,
+ )
+ except Exception:
+ logger.exception(
+ "Failed to part user %r from room %r: ignoring and continuing",
+ user_id, room_id,
+ )
diff --git a/synapse/http/additional_resource.py b/synapse/http/additional_resource.py
index 343e932cb1..a797396ade 100644
--- a/synapse/http/additional_resource.py
+++ b/synapse/http/additional_resource.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from synapse.http.server import wrap_request_handler
+from synapse.http.server import wrap_json_request_handler
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
@@ -42,14 +42,13 @@ class AdditionalResource(Resource):
Resource.__init__(self)
self._handler = handler
- # these are required by the request_handler wrapper
- self.version_string = hs.version_string
+ # required by the request_handler wrapper
self.clock = hs.get_clock()
def render(self, request):
self._async_render(request)
return NOT_DONE_YET
- @wrap_request_handler
+ @wrap_json_request_handler
def _async_render(self, request):
return self._handler(request)
diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py
new file mode 100644
index 0000000000..8c850bf23f
--- /dev/null
+++ b/synapse/http/request_metrics.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+import synapse.metrics
+from synapse.util.logcontext import LoggingContext
+
+logger = logging.getLogger(__name__)
+
+metrics = synapse.metrics.get_metrics_for("synapse.http.server")
+
+# total number of responses served, split by method/servlet/tag
+response_count = metrics.register_counter(
+ "response_count",
+ labels=["method", "servlet", "tag"],
+ alternative_names=(
+ # the following are all deprecated aliases for the same metric
+ metrics.name_prefix + x for x in (
+ "_requests",
+ "_response_time:count",
+ "_response_ru_utime:count",
+ "_response_ru_stime:count",
+ "_response_db_txn_count:count",
+ "_response_db_txn_duration:count",
+ )
+ )
+)
+
+requests_counter = metrics.register_counter(
+ "requests_received",
+ labels=["method", "servlet", ],
+)
+
+outgoing_responses_counter = metrics.register_counter(
+ "responses",
+ labels=["method", "code"],
+)
+
+response_timer = metrics.register_counter(
+ "response_time_seconds",
+ labels=["method", "servlet", "tag"],
+ alternative_names=(
+ metrics.name_prefix + "_response_time:total",
+ ),
+)
+
+response_ru_utime = metrics.register_counter(
+ "response_ru_utime_seconds", labels=["method", "servlet", "tag"],
+ alternative_names=(
+ metrics.name_prefix + "_response_ru_utime:total",
+ ),
+)
+
+response_ru_stime = metrics.register_counter(
+ "response_ru_stime_seconds", labels=["method", "servlet", "tag"],
+ alternative_names=(
+ metrics.name_prefix + "_response_ru_stime:total",
+ ),
+)
+
+response_db_txn_count = metrics.register_counter(
+ "response_db_txn_count", labels=["method", "servlet", "tag"],
+ alternative_names=(
+ metrics.name_prefix + "_response_db_txn_count:total",
+ ),
+)
+
+# seconds spent waiting for db txns, excluding scheduling time, when processing
+# this request
+response_db_txn_duration = metrics.register_counter(
+ "response_db_txn_duration_seconds", labels=["method", "servlet", "tag"],
+ alternative_names=(
+ metrics.name_prefix + "_response_db_txn_duration:total",
+ ),
+)
+
+# seconds spent waiting for a db connection, when processing this request
+response_db_sched_duration = metrics.register_counter(
+ "response_db_sched_duration_seconds", labels=["method", "servlet", "tag"]
+)
+
+# size in bytes of the response written
+response_size = metrics.register_counter(
+ "response_size", labels=["method", "servlet", "tag"]
+)
+
+
+class RequestMetrics(object):
+ def start(self, time_msec, name):
+ self.start = time_msec
+ self.start_context = LoggingContext.current_context()
+ self.name = name
+
+ def stop(self, time_msec, request):
+ context = LoggingContext.current_context()
+
+ tag = ""
+ if context:
+ tag = context.tag
+
+ if context != self.start_context:
+ logger.warn(
+                "Context has unexpectedly changed %r, %r",
+ context, self.start_context
+ )
+ return
+
+ outgoing_responses_counter.inc(request.method, str(request.code))
+
+ response_count.inc(request.method, self.name, tag)
+
+ response_timer.inc_by(
+ time_msec - self.start, request.method,
+ self.name, tag
+ )
+
+ ru_utime, ru_stime = context.get_resource_usage()
+
+ response_ru_utime.inc_by(
+ ru_utime, request.method, self.name, tag
+ )
+ response_ru_stime.inc_by(
+ ru_stime, request.method, self.name, tag
+ )
+ response_db_txn_count.inc_by(
+ context.db_txn_count, request.method, self.name, tag
+ )
+ response_db_txn_duration.inc_by(
+ context.db_txn_duration_ms / 1000., request.method, self.name, tag
+ )
+ response_db_sched_duration.inc_by(
+ context.db_sched_duration_ms / 1000., request.method, self.name, tag
+ )
+
+ response_size.inc_by(request.sentLength, request.method, self.name, tag)
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 55b9ad5251..faf700851a 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -13,11 +13,15 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
+import cgi
+from six.moves import http_client
from synapse.api.errors import (
cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError, Codes
)
+from synapse.http.request_metrics import (
+ requests_counter,
+)
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
from synapse.util.caches import intern_dict
from synapse.util.metrics import Measure
@@ -41,178 +45,174 @@ import simplejson
logger = logging.getLogger(__name__)
-metrics = synapse.metrics.get_metrics_for(__name__)
-
-# total number of responses served, split by method/servlet/tag
-response_count = metrics.register_counter(
- "response_count",
- labels=["method", "servlet", "tag"],
- alternative_names=(
- # the following are all deprecated aliases for the same metric
- metrics.name_prefix + x for x in (
- "_requests",
- "_response_time:count",
- "_response_ru_utime:count",
- "_response_ru_stime:count",
- "_response_db_txn_count:count",
- "_response_db_txn_duration:count",
- )
- )
-)
+HTML_ERROR_TEMPLATE = """<!DOCTYPE html>
+<html lang=en>
+ <head>
+ <meta charset="utf-8">
+ <title>Error {code}</title>
+ </head>
+ <body>
+ <p>{msg}</p>
+ </body>
+</html>
+"""
-requests_counter = metrics.register_counter(
- "requests_received",
- labels=["method", "servlet", ],
-)
-outgoing_responses_counter = metrics.register_counter(
- "responses",
- labels=["method", "code"],
-)
+def wrap_json_request_handler(h):
+ """Wraps a request handler method with exception handling.
-response_timer = metrics.register_counter(
- "response_time_seconds",
- labels=["method", "servlet", "tag"],
- alternative_names=(
- metrics.name_prefix + "_response_time:total",
- ),
-)
+ Also adds logging as per wrap_request_handler_with_logging.
-response_ru_utime = metrics.register_counter(
- "response_ru_utime_seconds", labels=["method", "servlet", "tag"],
- alternative_names=(
- metrics.name_prefix + "_response_ru_utime:total",
- ),
-)
+ The handler method must have a signature of "handle_foo(self, request)",
+ where "self" must have a "clock" attribute (and "request" must be a
+ SynapseRequest).
-response_ru_stime = metrics.register_counter(
- "response_ru_stime_seconds", labels=["method", "servlet", "tag"],
- alternative_names=(
- metrics.name_prefix + "_response_ru_stime:total",
- ),
-)
-
-response_db_txn_count = metrics.register_counter(
- "response_db_txn_count", labels=["method", "servlet", "tag"],
- alternative_names=(
- metrics.name_prefix + "_response_db_txn_count:total",
- ),
-)
+ The handler must return a deferred. If the deferred succeeds we assume that
+ a response has been sent. If the deferred fails with a SynapseError we use
+    it to send a JSON response with the appropriate HTTP response code. If the
+    deferred fails with any other type of error we send a 500 response.
+ """
-# seconds spent waiting for db txns, excluding scheduling time, when processing
-# this request
-response_db_txn_duration = metrics.register_counter(
- "response_db_txn_duration_seconds", labels=["method", "servlet", "tag"],
- alternative_names=(
- metrics.name_prefix + "_response_db_txn_duration:total",
- ),
-)
+ @defer.inlineCallbacks
+ def wrapped_request_handler(self, request):
+ try:
+ yield h(self, request)
+ except CodeMessageException as e:
+ code = e.code
+ if isinstance(e, SynapseError):
+ logger.info(
+ "%s SynapseError: %s - %s", request, code, e.msg
+ )
+ else:
+ logger.exception(e)
+ respond_with_json(
+ request, code, cs_exception(e), send_cors=True,
+ pretty_print=_request_user_agent_is_curl(request),
+ )
-# seconds spent waiting for a db connection, when processing this request
-response_db_sched_duration = metrics.register_counter(
- "response_db_sched_duration_seconds", labels=["method", "servlet", "tag"]
-)
+ except Exception:
+ # failure.Failure() fishes the original Failure out
+ # of our stack, and thus gives us a sensible stack
+ # trace.
+ f = failure.Failure()
+ logger.error(
+ "Failed handle request via %r: %r: %s",
+ h,
+ request,
+ f.getTraceback().rstrip(),
+ )
+ respond_with_json(
+ request,
+ 500,
+ {
+ "error": "Internal server error",
+ "errcode": Codes.UNKNOWN,
+ },
+ send_cors=True,
+ pretty_print=_request_user_agent_is_curl(request),
+ )
-# size in bytes of the response written
-response_size = metrics.register_counter(
- "response_size", labels=["method", "servlet", "tag"]
-)
+ return wrap_request_handler_with_logging(wrapped_request_handler)
-_next_request_id = 0
+def wrap_html_request_handler(h):
+ """Wraps a request handler method with exception handling.
-def request_handler(include_metrics=False):
- """Decorator for ``wrap_request_handler``"""
- return lambda request_handler: wrap_request_handler(request_handler, include_metrics)
+ Also adds logging as per wrap_request_handler_with_logging.
+ The handler method must have a signature of "handle_foo(self, request)",
+ where "self" must have a "clock" attribute (and "request" must be a
+ SynapseRequest).
+ """
+ def wrapped_request_handler(self, request):
+ d = defer.maybeDeferred(h, self, request)
+ d.addErrback(_return_html_error, request)
+ return d
-def wrap_request_handler(request_handler, include_metrics=False):
- """Wraps a method that acts as a request handler with the necessary logging
- and exception handling.
+ return wrap_request_handler_with_logging(wrapped_request_handler)
- The method must have a signature of "handle_foo(self, request)". The
- argument "self" must have "version_string" and "clock" attributes. The
- argument "request" must be a twisted HTTP request.
- The method must return a deferred. If the deferred succeeds we assume that
- a response has been sent. If the deferred fails with a SynapseError we use
- it to send a JSON response with the appropriate HTTP reponse code. If the
- deferred fails with any other type of error we send a 500 reponse.
+def _return_html_error(f, request):
+ """Sends an HTML error page corresponding to the given failure
- We insert a unique request-id into the logging context for this request and
- log the response and duration for this request.
+ Args:
+ f (twisted.python.failure.Failure):
+ request (twisted.web.iweb.IRequest):
"""
+ if f.check(CodeMessageException):
+ cme = f.value
+ code = cme.code
+ msg = cme.msg
+
+ if isinstance(cme, SynapseError):
+ logger.info(
+ "%s SynapseError: %s - %s", request, code, msg
+ )
+ else:
+ logger.error(
+ "Failed handle request %r: %s",
+ request,
+ f.getTraceback().rstrip(),
+ )
+ else:
+ code = http_client.INTERNAL_SERVER_ERROR
+ msg = "Internal server error"
+
+ logger.error(
+ "Failed handle request %r: %s",
+ request,
+ f.getTraceback().rstrip(),
+ )
+
+ body = HTML_ERROR_TEMPLATE.format(
+ code=code, msg=cgi.escape(msg),
+ ).encode("utf-8")
+ request.setResponseCode(code)
+ request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
+ request.setHeader(b"Content-Length", b"%i" % (len(body),))
+ request.write(body)
+ finish_request(request)
+
+
+def wrap_request_handler_with_logging(h):
+ """Wraps a request handler to provide logging and metrics
+ The handler method must have a signature of "handle_foo(self, request)",
+ where "self" must have a "clock" attribute (and "request" must be a
+ SynapseRequest).
+
+ As well as calling `request.processing` (which will log the response and
+ duration for this request), the wrapped request handler will insert the
+ request id into the logging context.
+ """
@defer.inlineCallbacks
def wrapped_request_handler(self, request):
- global _next_request_id
- request_id = "%s-%s" % (request.method, _next_request_id)
- _next_request_id += 1
+ """
+ Args:
+ self:
+ request (synapse.http.site.SynapseRequest):
+ """
+ request_id = request.get_request_id()
with LoggingContext(request_id) as request_context:
+ request_context.request = request_id
with Measure(self.clock, "wrapped_request_handler"):
- request_metrics = RequestMetrics()
# we start the request metrics timer here with an initial stab
# at the servlet name. For most requests that name will be
# JsonResource (or a subclass), and JsonResource._async_render
# will update it once it picks a servlet.
servlet_name = self.__class__.__name__
- request_metrics.start(self.clock, name=servlet_name)
-
- request_context.request = request_id
- with request.processing():
- try:
- with PreserveLoggingContext(request_context):
- if include_metrics:
- yield request_handler(self, request, request_metrics)
- else:
- requests_counter.inc(request.method, servlet_name)
- yield request_handler(self, request)
- except CodeMessageException as e:
- code = e.code
- if isinstance(e, SynapseError):
- logger.info(
- "%s SynapseError: %s - %s", request, code, e.msg
- )
- else:
- logger.exception(e)
- outgoing_responses_counter.inc(request.method, str(code))
- respond_with_json(
- request, code, cs_exception(e), send_cors=True,
- pretty_print=_request_user_agent_is_curl(request),
- version_string=self.version_string,
- )
- except Exception:
- # failure.Failure() fishes the original Failure out
- # of our stack, and thus gives us a sensible stack
- # trace.
- f = failure.Failure()
- logger.error(
- "Failed handle request %s.%s on %r: %r: %s",
- request_handler.__module__,
- request_handler.__name__,
- self,
- request,
- f.getTraceback().rstrip(),
- )
- respond_with_json(
- request,
- 500,
- {
- "error": "Internal server error",
- "errcode": Codes.UNKNOWN,
- },
- send_cors=True,
- pretty_print=_request_user_agent_is_curl(request),
- version_string=self.version_string,
- )
- finally:
- try:
- request_metrics.stop(
- self.clock, request
- )
- except Exception as e:
- logger.warn("Failed to stop metrics: %r", e)
+ with request.processing(servlet_name):
+ with PreserveLoggingContext(request_context):
+ d = defer.maybeDeferred(h, self, request)
+
+ # record the arrival of the request *after*
+ # dispatching to the handler, so that the handler
+ # can update the servlet name in the request
+ # metrics
+ requests_counter.inc(request.method,
+ request.request_metrics.name)
+ yield d
return wrapped_request_handler
@@ -262,7 +262,6 @@ class JsonResource(HttpServer, resource.Resource):
self.canonical_json = canonical_json
self.clock = hs.get_clock()
self.path_regexs = {}
- self.version_string = hs.version_string
self.hs = hs
def register_paths(self, method, path_patterns, callback):
@@ -278,13 +277,9 @@ class JsonResource(HttpServer, resource.Resource):
self._async_render(request)
return server.NOT_DONE_YET
- # Disable metric reporting because _async_render does its own metrics.
- # It does its own metric reporting because _async_render dispatches to
- # a callback and it's the class name of that callback we want to report
- # against rather than the JsonResource itself.
- @request_handler(include_metrics=True)
+ @wrap_json_request_handler
@defer.inlineCallbacks
- def _async_render(self, request, request_metrics):
+ def _async_render(self, request):
""" This gets called from render() every time someone sends us a request.
This checks if anyone has registered a callback for that method and
path.
@@ -296,9 +291,7 @@ class JsonResource(HttpServer, resource.Resource):
servlet_classname = servlet_instance.__class__.__name__
else:
servlet_classname = "%r" % callback
-
- request_metrics.name = servlet_classname
- requests_counter.inc(request.method, servlet_classname)
+ request.request_metrics.name = servlet_classname
# Now trigger the callback. If it returns a response, we send it
# here. If it throws an exception, that is handled by the wrapper
@@ -345,15 +338,12 @@ class JsonResource(HttpServer, resource.Resource):
def _send_response(self, request, code, response_json_object,
response_code_message=None):
- outgoing_responses_counter.inc(request.method, str(code))
-
# TODO: Only enable CORS for the requests that need it.
respond_with_json(
request, code, response_json_object,
send_cors=True,
response_code_message=response_code_message,
pretty_print=_request_user_agent_is_curl(request),
- version_string=self.version_string,
canonical_json=self.canonical_json,
)
@@ -386,54 +376,6 @@ def _unrecognised_request_handler(request):
raise UnrecognizedRequestError()
-class RequestMetrics(object):
- def start(self, clock, name):
- self.start = clock.time_msec()
- self.start_context = LoggingContext.current_context()
- self.name = name
-
- def stop(self, clock, request):
- context = LoggingContext.current_context()
-
- tag = ""
- if context:
- tag = context.tag
-
- if context != self.start_context:
- logger.warn(
- "Context have unexpectedly changed %r, %r",
- context, self.start_context
- )
- return
-
- response_count.inc(request.method, self.name, tag)
-
- response_timer.inc_by(
- clock.time_msec() - self.start, request.method,
- self.name, tag
- )
-
- ru_utime, ru_stime = context.get_resource_usage()
-
- response_ru_utime.inc_by(
- ru_utime, request.method, self.name, tag
- )
- response_ru_stime.inc_by(
- ru_stime, request.method, self.name, tag
- )
- response_db_txn_count.inc_by(
- context.db_txn_count, request.method, self.name, tag
- )
- response_db_txn_duration.inc_by(
- context.db_txn_duration_ms / 1000., request.method, self.name, tag
- )
- response_db_sched_duration.inc_by(
- context.db_sched_duration_ms / 1000., request.method, self.name, tag
- )
-
- response_size.inc_by(request.sentLength, request.method, self.name, tag)
-
-
class RootRedirect(resource.Resource):
"""Redirects the root '/' path to another path."""
@@ -452,7 +394,7 @@ class RootRedirect(resource.Resource):
def respond_with_json(request, code, json_object, send_cors=False,
response_code_message=None, pretty_print=False,
- version_string="", canonical_json=True):
+ canonical_json=True):
# could alternatively use request.notifyFinish() and flip a flag when
# the Deferred fires, but since the flag is RIGHT THERE it seems like
# a waste.
@@ -474,12 +416,11 @@ def respond_with_json(request, code, json_object, send_cors=False,
request, code, json_bytes,
send_cors=send_cors,
response_code_message=response_code_message,
- version_string=version_string
)
def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
- version_string="", response_code_message=None):
+ response_code_message=None):
"""Sends encoded JSON in response to the given request.
Args:
@@ -493,7 +434,6 @@ def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
request.setResponseCode(code, message=response_code_message)
request.setHeader(b"Content-Type", b"application/json")
- request.setHeader(b"Server", version_string)
request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),))
request.setHeader(b"Cache-Control", b"no-cache, no-store, must-revalidate")
diff --git a/synapse/http/site.py b/synapse/http/site.py
index c8b46e1af2..202a990508 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -12,24 +12,48 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from synapse.util.logcontext import LoggingContext
-from twisted.web.server import Site, Request
-
import contextlib
import logging
import re
import time
+from twisted.web.server import Site, Request
+
+from synapse.http.request_metrics import RequestMetrics
+from synapse.util.logcontext import LoggingContext
+
+logger = logging.getLogger(__name__)
+
ACCESS_TOKEN_RE = re.compile(br'(\?.*access(_|%5[Ff])token=)[^&]*(.*)$')
+_next_request_seq = 0
+
class SynapseRequest(Request):
+ """Class which encapsulates an HTTP request to synapse.
+
+ All of the requests processed in synapse are of this type.
+
+    It extends twisted.web.server.Request, and adds:
+ * Unique request ID
+ * Redaction of access_token query-params in __repr__
+ * Logging at start and end
+ * Metrics to record CPU, wallclock and DB time by endpoint.
+
+ It provides a method `processing` which should be called by the Resource
+ which is handling the request, and returns a context manager.
+
+ """
def __init__(self, site, *args, **kw):
Request.__init__(self, *args, **kw)
self.site = site
self.authenticated_entity = None
self.start_time = 0
+ global _next_request_seq
+ self.request_seq = _next_request_seq
+ _next_request_seq += 1
+
def __repr__(self):
# We overwrite this so that we don't log ``access_token``
return '<%s at 0x%x method=%s uri=%s clientproto=%s site=%s>' % (
@@ -41,6 +65,9 @@ class SynapseRequest(Request):
self.site.site_tag,
)
+ def get_request_id(self):
+ return "%s-%i" % (self.method, self.request_seq)
+
def get_redacted_uri(self):
return ACCESS_TOKEN_RE.sub(
br'\1<redacted>\3',
@@ -50,7 +77,16 @@ class SynapseRequest(Request):
def get_user_agent(self):
return self.requestHeaders.getRawHeaders(b"User-Agent", [None])[-1]
- def started_processing(self):
+ def render(self, resrc):
+ # override the Server header which is set by twisted
+ self.setHeader("Server", self.site.server_version_string)
+ return Request.render(self, resrc)
+
+ def _started_processing(self, servlet_name):
+ self.start_time = int(time.time() * 1000)
+ self.request_metrics = RequestMetrics()
+ self.request_metrics.start(self.start_time, name=servlet_name)
+
self.site.access_logger.info(
"%s - %s - Received request: %s %s",
self.getClientIP(),
@@ -58,10 +94,8 @@ class SynapseRequest(Request):
self.method,
self.get_redacted_uri()
)
- self.start_time = int(time.time() * 1000)
-
- def finished_processing(self):
+ def _finished_processing(self):
try:
context = LoggingContext.current_context()
ru_utime, ru_stime = context.get_resource_usage()
@@ -72,6 +106,8 @@ class SynapseRequest(Request):
ru_utime, ru_stime = (0, 0)
db_txn_count, db_txn_duration_ms = (0, 0)
+ end_time = int(time.time() * 1000)
+
self.site.access_logger.info(
"%s - %s - {%s}"
" Processed request: %dms (%dms, %dms) (%dms/%dms/%d)"
@@ -79,7 +115,7 @@ class SynapseRequest(Request):
self.getClientIP(),
self.site.site_tag,
self.authenticated_entity,
- int(time.time() * 1000) - self.start_time,
+ end_time - self.start_time,
int(ru_utime * 1000),
int(ru_stime * 1000),
db_sched_duration_ms,
@@ -93,11 +129,38 @@ class SynapseRequest(Request):
self.get_user_agent(),
)
+ try:
+ self.request_metrics.stop(end_time, self)
+ except Exception as e:
+ logger.warn("Failed to stop metrics: %r", e)
+
@contextlib.contextmanager
- def processing(self):
- self.started_processing()
+ def processing(self, servlet_name):
+ """Record the fact that we are processing this request.
+
+ Returns a context manager; the correct way to use this is:
+
+ @defer.inlineCallbacks
+ def handle_request(request):
+ with request.processing("FooServlet"):
+ yield really_handle_the_request()
+
+ This will log the request's arrival. Once the context manager is
+ closed, the completion of the request will be logged, and the various
+ metrics will be updated.
+
+ Args:
+ servlet_name (str): the name of the servlet which will be
+ processing this request. This is used in the metrics.
+
+ It is possible to update this afterwards by updating
+            self.request_metrics.name.
+ """
+ # TODO: we should probably just move this into render() and finish(),
+ # to save having to call a separate method.
+ self._started_processing(servlet_name)
yield
- self.finished_processing()
+ self._finished_processing()
class XForwardedForRequest(SynapseRequest):
@@ -135,7 +198,8 @@ class SynapseSite(Site):
Subclass of a twisted http Site that does access logging with python's
standard logging
"""
- def __init__(self, logger_name, site_tag, config, resource, *args, **kwargs):
+ def __init__(self, logger_name, site_tag, config, resource,
+ server_version_string, *args, **kwargs):
Site.__init__(self, resource, *args, **kwargs)
self.site_tag = site_tag
@@ -143,6 +207,7 @@ class SynapseSite(Site):
proxied = config.get("x_forwarded", False)
self.requestFactory = SynapseRequestFactory(self, proxied)
self.access_logger = logging.getLogger(logger_name)
+ self.server_version_string = server_version_string
def log(self, request):
pass
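As a rough illustration of the new request API (a sketch only, not part of this change): a Twisted resource handling a SynapseRequest can wrap its work in request.processing() and, as JsonResource does above, update the per-request metrics name once it knows which servlet is handling the request. ExampleResource and its handler body are hypothetical.

from twisted.internet import defer
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET


class ExampleResource(Resource):
    """Hypothetical resource showing the request.processing() pattern."""
    isLeaf = True

    def render_GET(self, request):
        self._async_render_GET(request)
        return NOT_DONE_YET

    @defer.inlineCallbacks
    def _async_render_GET(self, request):
        # 'request' is assumed to be a SynapseRequest, so it exposes
        # processing() and gains a RequestMetrics object on entry
        with request.processing("ExampleServlet"):
            # the recorded servlet name may be refined after dispatch,
            # as JsonResource does once it has resolved the callback
            request.request_metrics.name = "ExampleServlet"
            yield defer.succeed(None)  # stand-in for the real handler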
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index 0206e664c1..40e523cc5f 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -176,7 +176,6 @@ class PushersRemoveRestServlet(RestServlet):
request.setResponseCode(200)
request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
- request.setHeader(b"Server", self.hs.version_string)
request.setHeader(b"Content-Length", b"%d" % (
len(PushersRemoveRestServlet.SUCCESS_HTML),
))
diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py
index 8e5577148f..d6f3a19648 100644
--- a/synapse/rest/client/v2_alpha/auth.py
+++ b/synapse/rest/client/v2_alpha/auth.py
@@ -129,7 +129,6 @@ class AuthRestServlet(RestServlet):
html_bytes = html.encode("utf8")
request.setResponseCode(200)
request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
- request.setHeader(b"Server", self.hs.version_string)
request.setHeader(b"Content-Length", b"%d" % (len(html_bytes),))
request.write(html_bytes)
@@ -175,7 +174,6 @@ class AuthRestServlet(RestServlet):
html_bytes = html.encode("utf8")
request.setResponseCode(200)
request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
- request.setHeader(b"Server", self.hs.version_string)
request.setHeader(b"Content-Length", b"%d" % (len(html_bytes),))
request.write(html_bytes)
diff --git a/synapse/rest/consent/__init__.py b/synapse/rest/consent/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/synapse/rest/consent/__init__.py
diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py
new file mode 100644
index 0000000000..d791302278
--- /dev/null
+++ b/synapse/rest/consent/consent_resource.py
@@ -0,0 +1,210 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from hashlib import sha256
+import hmac
+import logging
+from os import path
+from six.moves import http_client
+
+import jinja2
+from jinja2 import TemplateNotFound
+from twisted.internet import defer
+from twisted.web.resource import Resource
+from twisted.web.server import NOT_DONE_YET
+
+from synapse.api.errors import NotFoundError, SynapseError, StoreError
+from synapse.config import ConfigError
+from synapse.http.server import (
+ finish_request,
+ wrap_html_request_handler,
+)
+from synapse.http.servlet import parse_string
+from synapse.types import UserID
+
+
+# language to use for the templates. TODO: figure this out from Accept-Language
+TEMPLATE_LANGUAGE = "en"
+
+logger = logging.getLogger(__name__)
+
+# use hmac.compare_digest if we have it (python 2.7.7+), else just use equality
+if hasattr(hmac, "compare_digest"):
+ compare_digest = hmac.compare_digest
+else:
+ def compare_digest(a, b):
+ return a == b
+
+
+class ConsentResource(Resource):
+ """A twisted Resource to display a privacy policy and gather consent to it
+
+ When accessed via GET, returns the privacy policy via a template.
+
+ When accessed via POST, records the user's consent in the database and
+ displays a success page.
+
+ The config should include a template_dir setting which contains templates
+ for the HTML. The directory should contain one subdirectory per language
+ (eg, 'en', 'fr'), and each language directory should contain the policy
+ document (named as '<version>.html') and a success page (success.html).
+
+ Both forms take a set of parameters from the browser. For the POST form,
+ these are normally sent as form parameters (but may be query-params); for
+ GET requests they must be query params. These are:
+
+ u: the complete mxid, or the localpart of the user giving their
+ consent. Required for both GET (where it is used as an input to the
+ template) and for POST (where it is used to find the row in the db
+ to update).
+
+       h: hmac_sha256(secret, u), where 'secret' is the form_secret in the
+ config file. If it doesn't match, the request is 403ed.
+
+ v: the version of the privacy policy being agreed to.
+
+ For GET: optional, and defaults to whatever was set in the config
+            config file. Used to choose which version of the policy to load
+ templates directory.
+
+ For POST: required; gives the value to be recorded in the database
+ against the user.
+ """
+ def __init__(self, hs):
+ """
+ Args:
+ hs (synapse.server.HomeServer): homeserver
+ """
+ Resource.__init__(self)
+
+ self.hs = hs
+ self.store = hs.get_datastore()
+
+        # this is required by the @wrap_html_request_handler wrapper
+ self.clock = hs.get_clock()
+
+ consent_config = hs.config.consent_config
+ if consent_config is None:
+ raise ConfigError(
+ "Consent resource is enabled but user_consent section is "
+ "missing in config file.",
+ )
+
+ # daemonize changes the cwd to /, so make the path absolute now.
+ consent_template_directory = path.abspath(
+ consent_config["template_dir"],
+ )
+ if not path.isdir(consent_template_directory):
+ raise ConfigError(
+ "Could not find template directory '%s'" % (
+ consent_template_directory,
+ ),
+ )
+
+ loader = jinja2.FileSystemLoader(consent_template_directory)
+ self._jinja_env = jinja2.Environment(loader=loader)
+
+        self._default_consent_version = consent_config["default_version"]
+
+ if hs.config.form_secret is None:
+ raise ConfigError(
+ "Consent resource is enabled but form_secret is not set in "
+ "config file. It should be set to an arbitrary secret string.",
+ )
+
+ self._hmac_secret = hs.config.form_secret.encode("utf-8")
+
+ def render_GET(self, request):
+ self._async_render_GET(request)
+ return NOT_DONE_YET
+
+ @wrap_html_request_handler
+ def _async_render_GET(self, request):
+ """
+ Args:
+ request (twisted.web.http.Request):
+ """
+
+ version = parse_string(request, "v",
+                               default=self._default_consent_version)
+ username = parse_string(request, "u", required=True)
+ userhmac = parse_string(request, "h", required=True)
+
+ self._check_hash(username, userhmac)
+
+ try:
+ self._render_template(
+ request, "%s.html" % (version,),
+ user=username, userhmac=userhmac, version=version,
+ )
+ except TemplateNotFound:
+ raise NotFoundError("Unknown policy version")
+
+ def render_POST(self, request):
+ self._async_render_POST(request)
+ return NOT_DONE_YET
+
+ @wrap_html_request_handler
+ @defer.inlineCallbacks
+ def _async_render_POST(self, request):
+ """
+ Args:
+ request (twisted.web.http.Request):
+ """
+ version = parse_string(request, "v", required=True)
+ username = parse_string(request, "u", required=True)
+ userhmac = parse_string(request, "h", required=True)
+
+ self._check_hash(username, userhmac)
+
+ if username.startswith('@'):
+ qualified_user_id = username
+ else:
+ qualified_user_id = UserID(username, self.hs.hostname).to_string()
+
+ try:
+ yield self.store.user_set_consent_version(qualified_user_id, version)
+ except StoreError as e:
+ if e.code != 404:
+ raise
+ raise NotFoundError("Unknown user")
+
+ try:
+ self._render_template(request, "success.html")
+ except TemplateNotFound:
+ raise NotFoundError("success.html not found")
+
+ def _render_template(self, request, template_name, **template_args):
+ # get_template checks for ".." so we don't need to worry too much
+ # about path traversal here.
+ template_html = self._jinja_env.get_template(
+ path.join(TEMPLATE_LANGUAGE, template_name)
+ )
+ html_bytes = template_html.render(**template_args).encode("utf8")
+
+ request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
+ request.setHeader(b"Content-Length", b"%i" % len(html_bytes))
+ request.write(html_bytes)
+ finish_request(request)
+
+ def _check_hash(self, userid, userhmac):
+ want_mac = hmac.new(
+ key=self._hmac_secret,
+ msg=userid,
+ digestmod=sha256,
+ ).hexdigest()
+
+ if not compare_digest(want_mac, userhmac):
+ raise SynapseError(http_client.FORBIDDEN, "HMAC incorrect")
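For context, a hypothetical sketch (not part of this change) of how a server-side tool might build the links that ConsentResource accepts: the 'h' parameter described in the class docstring is the hex HMAC-SHA256 of the username keyed with form_secret, which is exactly what _check_hash recomputes. build_consent_url, base_url and the /_matrix/consent mount point are illustrative assumptions.

import hmac
from hashlib import sha256

from six.moves.urllib.parse import urlencode


def build_consent_url(base_url, form_secret, username, version=None):
    """Hypothetical helper: build a link that ConsentResource will accept.

    'h' must be hmac_sha256(form_secret, username), hex-encoded, so that
    the compare_digest check in _check_hash succeeds; 'v' may be omitted
    for GET requests, in which case the default version is used.
    """
    mac = hmac.new(
        key=form_secret.encode("utf-8"),
        msg=username.encode("utf-8"),
        digestmod=sha256,
    ).hexdigest()
    params = {"u": username, "h": mac}
    if version is not None:
        params["v"] = version
    # the mount point depends on how the homeserver exposes the resource;
    # "/_matrix/consent" is used here purely for illustration
    return "%s/_matrix/consent?%s" % (base_url, urlencode(params))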
diff --git a/synapse/rest/key/v1/server_key_resource.py b/synapse/rest/key/v1/server_key_resource.py
index bd4fea5774..1498d188c1 100644
--- a/synapse/rest/key/v1/server_key_resource.py
+++ b/synapse/rest/key/v1/server_key_resource.py
@@ -49,7 +49,6 @@ class LocalKey(Resource):
"""
def __init__(self, hs):
- self.version_string = hs.version_string
self.response_body = encode_canonical_json(
self.response_json_object(hs.config)
)
@@ -84,7 +83,6 @@ class LocalKey(Resource):
def render_GET(self, request):
return respond_with_json_bytes(
request, 200, self.response_body,
- version_string=self.version_string
)
def getChild(self, name, request):
diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py
index be68d9a096..04775b3c45 100644
--- a/synapse/rest/key/v2/local_key_resource.py
+++ b/synapse/rest/key/v2/local_key_resource.py
@@ -63,7 +63,6 @@ class LocalKey(Resource):
isLeaf = True
def __init__(self, hs):
- self.version_string = hs.version_string
self.config = hs.config
self.clock = hs.clock
self.update_response_body(self.clock.time_msec())
@@ -115,5 +114,4 @@ class LocalKey(Resource):
self.update_response_body(time_now)
return respond_with_json_bytes(
request, 200, self.response_body,
- version_string=self.version_string
)
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 17e6079cba..21b4c1175e 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -12,7 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from synapse.http.server import request_handler, respond_with_json_bytes
+from synapse.http.server import (
+ respond_with_json_bytes, wrap_json_request_handler,
+)
from synapse.http.servlet import parse_integer, parse_json_object_from_request
from synapse.api.errors import SynapseError, Codes
from synapse.crypto.keyring import KeyLookupError
@@ -91,7 +93,6 @@ class RemoteKey(Resource):
def __init__(self, hs):
self.keyring = hs.get_keyring()
self.store = hs.get_datastore()
- self.version_string = hs.version_string
self.clock = hs.get_clock()
self.federation_domain_whitelist = hs.config.federation_domain_whitelist
@@ -99,7 +100,7 @@ class RemoteKey(Resource):
self.async_render_GET(request)
return NOT_DONE_YET
- @request_handler()
+ @wrap_json_request_handler
@defer.inlineCallbacks
def async_render_GET(self, request):
if len(request.postpath) == 1:
@@ -124,7 +125,7 @@ class RemoteKey(Resource):
self.async_render_POST(request)
return NOT_DONE_YET
- @request_handler()
+ @wrap_json_request_handler
@defer.inlineCallbacks
def async_render_POST(self, request):
content = parse_json_object_from_request(request)
@@ -240,5 +241,4 @@ class RemoteKey(Resource):
respond_with_json_bytes(
request, 200, result_io.getvalue(),
- version_string=self.version_string
)
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index fe7e17596f..8cf8820c31 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -12,16 +12,18 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import synapse.http.servlet
+import logging
-from ._base import parse_media_id, respond_404
+from twisted.internet import defer
from twisted.web.resource import Resource
-from synapse.http.server import request_handler, set_cors_headers
-
from twisted.web.server import NOT_DONE_YET
-from twisted.internet import defer
-import logging
+from synapse.http.server import (
+ set_cors_headers,
+ wrap_json_request_handler,
+)
+import synapse.http.servlet
+from ._base import parse_media_id, respond_404
logger = logging.getLogger(__name__)
@@ -35,15 +37,14 @@ class DownloadResource(Resource):
self.media_repo = media_repo
self.server_name = hs.hostname
- # Both of these are expected by @request_handler()
+ # this is expected by @wrap_json_request_handler
self.clock = hs.get_clock()
- self.version_string = hs.version_string
def render_GET(self, request):
self._async_render_GET(request)
return NOT_DONE_YET
- @request_handler()
+ @wrap_json_request_handler
@defer.inlineCallbacks
def _async_render_GET(self, request):
set_cors_headers(request)
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index 9290d7946f..2839207abc 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -40,8 +40,9 @@ from synapse.util.stringutils import random_string
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.http.client import SpiderHttpClient
from synapse.http.server import (
- request_handler, respond_with_json_bytes,
+ respond_with_json_bytes,
respond_with_json,
+ wrap_json_request_handler,
)
from synapse.util.async import ObservableDeferred
from synapse.util.stringutils import is_ascii
@@ -57,7 +58,6 @@ class PreviewUrlResource(Resource):
self.auth = hs.get_auth()
self.clock = hs.get_clock()
- self.version_string = hs.version_string
self.filepaths = media_repo.filepaths
self.max_spider_size = hs.config.max_spider_size
self.server_name = hs.hostname
@@ -90,7 +90,7 @@ class PreviewUrlResource(Resource):
self._async_render_GET(request)
return NOT_DONE_YET
- @request_handler()
+ @wrap_json_request_handler
@defer.inlineCallbacks
def _async_render_GET(self, request):
diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py
index 58ada49711..aae6e464e8 100644
--- a/synapse/rest/media/v1/thumbnail_resource.py
+++ b/synapse/rest/media/v1/thumbnail_resource.py
@@ -14,18 +14,21 @@
# limitations under the License.
-from ._base import (
- parse_media_id, respond_404, respond_with_file, FileInfo,
- respond_with_responder,
-)
-from twisted.web.resource import Resource
-from synapse.http.servlet import parse_string, parse_integer
-from synapse.http.server import request_handler, set_cors_headers
+import logging
-from twisted.web.server import NOT_DONE_YET
from twisted.internet import defer
+from twisted.web.resource import Resource
+from twisted.web.server import NOT_DONE_YET
-import logging
+from synapse.http.server import (
+ set_cors_headers,
+ wrap_json_request_handler,
+)
+from synapse.http.servlet import parse_integer, parse_string
+from ._base import (
+ FileInfo, parse_media_id, respond_404, respond_with_file,
+ respond_with_responder,
+)
logger = logging.getLogger(__name__)
@@ -41,14 +44,13 @@ class ThumbnailResource(Resource):
self.media_storage = media_storage
self.dynamic_thumbnails = hs.config.dynamic_thumbnails
self.server_name = hs.hostname
- self.version_string = hs.version_string
self.clock = hs.get_clock()
def render_GET(self, request):
self._async_render_GET(request)
return NOT_DONE_YET
- @request_handler()
+ @wrap_json_request_handler
@defer.inlineCallbacks
def _async_render_GET(self, request):
set_cors_headers(request)
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index a31e75cb46..7567476fce 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -13,16 +13,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from synapse.http.server import respond_with_json, request_handler
-
-from synapse.api.errors import SynapseError
+import logging
-from twisted.web.server import NOT_DONE_YET
from twisted.internet import defer
-
from twisted.web.resource import Resource
+from twisted.web.server import NOT_DONE_YET
-import logging
+from synapse.api.errors import SynapseError
+from synapse.http.server import (
+ respond_with_json,
+ wrap_json_request_handler,
+)
logger = logging.getLogger(__name__)
@@ -40,7 +41,6 @@ class UploadResource(Resource):
self.server_name = hs.hostname
self.auth = hs.get_auth()
self.max_upload_size = hs.config.max_upload_size
- self.version_string = hs.version_string
self.clock = hs.get_clock()
def render_POST(self, request):
@@ -51,7 +51,7 @@ class UploadResource(Resource):
respond_with_json(request, 200, {}, send_cors=True)
return NOT_DONE_YET
- @request_handler()
+ @wrap_json_request_handler
@defer.inlineCallbacks
def _async_render_POST(self, request):
requester = yield self.auth.get_user_by_req(request)
diff --git a/synapse/server.py b/synapse/server.py
index ebdea6b0c4..21cde5b6fc 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -97,6 +97,9 @@ class HomeServer(object):
which must be implemented by the subclass. This code may call any of the
required "get" methods on the instance to obtain the sub-dependencies that
one requires.
+
+ Attributes:
+ config (synapse.config.homeserver.HomeserverConfig):
"""
DEPENDENCIES = [
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index 8cdfd50f90..4551cf8774 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -14,6 +14,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import datetime
+from dateutil import tz
+import time
+import logging
+
from synapse.storage.devices import DeviceStore
from .appservice import (
ApplicationServiceStore, ApplicationServiceTransactionStore
@@ -55,10 +60,6 @@ from .engines import PostgresEngine
from synapse.api.constants import PresenceState
from synapse.util.caches.stream_change_cache import StreamChangeCache
-
-import logging
-
-
logger = logging.getLogger(__name__)
@@ -213,6 +214,9 @@ class DataStore(RoomMemberStore, RoomStore,
self._stream_order_on_start = self.get_room_max_stream_ordering()
self._min_stream_order_on_start = self.get_room_min_stream_ordering()
+ # Used in _generate_user_daily_visits to keep track of progress
+ self._last_user_visit_update = self._get_start_of_day()
+
super(DataStore, self).__init__(db_conn, hs)
def take_presence_startup_info(self):
@@ -347,6 +351,65 @@ class DataStore(RoomMemberStore, RoomStore,
return self.runInteraction("count_r30_users", _count_r30_users)
+ def _get_start_of_day(self):
+ """
+ Returns millisecond unixtime for start of UTC day.
+ """
+ now = datetime.datetime.utcnow()
+ today_start = datetime.datetime(now.year, now.month,
+ now.day, tzinfo=tz.tzutc())
+ return int(time.mktime(today_start.timetuple())) * 1000
+
+ def generate_user_daily_visits(self):
+ """
+        Generates daily visit data for use in cohort/retention analysis
+ """
+ def _generate_user_daily_visits(txn):
+ logger.info("Calling _generate_user_daily_visits")
+ today_start = self._get_start_of_day()
+ a_day_in_milliseconds = 24 * 60 * 60 * 1000
+ now = self.clock.time_msec()
+
+ sql = """
+ INSERT INTO user_daily_visits (user_id, device_id, timestamp)
+ SELECT u.user_id, u.device_id, ?
+ FROM user_ips AS u
+ LEFT JOIN (
+ SELECT user_id, device_id, timestamp FROM user_daily_visits
+ WHERE timestamp IS ?
+ ) udv
+ ON u.user_id = udv.user_id AND u.device_id=udv.device_id
+ WHERE last_seen > ? AND last_seen <= ? AND udv.timestamp IS NULL
+ """
+
+            # If the day has rolled over since the last update, there could
+            # still be entries from the previous day. There is an edge case
+ # where if the user logs in at 23:59 and overwrites their
+ # last_seen at 00:01 then they will not be counted in the
+ # previous day's stats - it is important that the query is run
+ # often to minimise this case.
+ if today_start > self._last_user_visit_update:
+ yesterday_start = today_start - a_day_in_milliseconds
+ txn.execute(sql, (
+ yesterday_start, yesterday_start,
+ self._last_user_visit_update, today_start
+ ))
+ self._last_user_visit_update = today_start
+
+ txn.execute(sql, (
+ today_start, today_start,
+ self._last_user_visit_update,
+ now
+ ))
+ # Update _last_user_visit_update to now. The reason to do this
+            # rather than just clamping to the beginning of the day is to limit
+ # the size of the join - meaning that the query can be run more
+ # frequently
+ self._last_user_visit_update = now
+
+ return self.runInteraction("generate_user_daily_visits",
+ _generate_user_daily_visits)
+
def get_users(self):
"""Function to reterive a list of users in users table.
diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py
index 7b44dae0fc..ba46907737 100644
--- a/synapse/storage/client_ips.py
+++ b/synapse/storage/client_ips.py
@@ -55,6 +55,13 @@ class ClientIpStore(background_updates.BackgroundUpdateStore):
columns=["user_id", "last_seen"],
)
+ self.register_background_index_update(
+ "user_ips_last_seen_only_index",
+ index_name="user_ips_last_seen_only",
+ table="user_ips",
+ columns=["last_seen"],
+ )
+
# (user_id, access_token, ip) -> (user_agent, device_id, last_seen)
self._batch_row_update = {}
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index 04411a665f..c08e9cd65a 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
# Remember to update this number every time a change is made to database
# schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 48
+SCHEMA_VERSION = 49
dir_path = os.path.abspath(os.path.dirname(__file__))
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index a50717db2d..8d1a01f1ee 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -286,6 +286,24 @@ class RegistrationStore(RegistrationWorkerStore,
"user_set_password_hash", user_set_password_hash_txn
)
+ def user_set_consent_version(self, user_id, consent_version):
+ """Updates the user table to record privacy policy consent
+
+ Args:
+ user_id (str): full mxid of the user to update
+ consent_version (str): version of the policy the user has consented
+ to
+
+ Raises:
+ StoreError(404) if user not found
+ """
+ return self._simple_update_one(
+ table='users',
+ keyvalues={'name': user_id, },
+ updatevalues={'consent_version': consent_version, },
+ desc="user_set_consent_version"
+ )
+
def user_delete_access_tokens(self, user_id, except_token_id=None,
device_id=None):
"""
@@ -526,3 +544,42 @@ class RegistrationStore(RegistrationWorkerStore,
except self.database_engine.module.IntegrityError:
ret = yield self.get_3pid_guest_access_token(medium, address)
defer.returnValue(ret)
+
+ def add_user_pending_deactivation(self, user_id):
+ """
+ Adds a user to the table of users who need to be parted from all the rooms they're
+ in
+ """
+ return self._simple_insert(
+ "users_pending_deactivation",
+ values={
+ "user_id": user_id,
+ },
+ desc="add_user_pending_deactivation",
+ )
+
+ def del_user_pending_deactivation(self, user_id):
+ """
+        Removes the given user from the table of users who need to be parted from all the
+ rooms they're in, effectively marking that user as fully deactivated.
+ """
+ return self._simple_delete_one(
+ "users_pending_deactivation",
+ keyvalues={
+ "user_id": user_id,
+ },
+ desc="del_user_pending_deactivation",
+ )
+
+ def get_user_pending_deactivation(self):
+ """
+ Gets one user from the table of users waiting to be parted from all the rooms
+ they're in.
+ """
+ return self._simple_select_one_onecol(
+ "users_pending_deactivation",
+ keyvalues={},
+ retcol="user_id",
+ allow_none=True,
+ desc="get_users_pending_deactivation",
+ )
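A rough sketch (hypothetical, not part of this change) of how the users_pending_deactivation helpers above are intended to be driven: a worker marks the user pending, parts them from their rooms, then clears the marker, and can resume after a restart by polling get_user_pending_deactivation. part_user_from_rooms stands in for whatever handler actually removes the user from their rooms.

from twisted.internet import defer


@defer.inlineCallbacks
def resume_pending_deactivations(store, part_user_from_rooms):
    """Hypothetical worker loop over users_pending_deactivation.

    Processes one pending user at a time until the table is empty, so a
    restart part-way through a deactivation simply resumes where it left off.
    """
    while True:
        user_id = yield store.get_user_pending_deactivation()
        if user_id is None:
            break
        yield part_user_from_rooms(user_id)
        yield store.del_user_pending_deactivation(user_id)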
diff --git a/synapse/storage/schema/delta/48/add_user_consent.sql b/synapse/storage/schema/delta/48/add_user_consent.sql
new file mode 100644
index 0000000000..5237491506
--- /dev/null
+++ b/synapse/storage/schema/delta/48/add_user_consent.sql
@@ -0,0 +1,18 @@
+/* Copyright 2018 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* record the version of the privacy policy the user has consented to
+ */
+ALTER TABLE users ADD COLUMN consent_version TEXT;
diff --git a/synapse/storage/schema/delta/48/deactivated_users.sql b/synapse/storage/schema/delta/48/deactivated_users.sql
new file mode 100644
index 0000000000..e9013a6969
--- /dev/null
+++ b/synapse/storage/schema/delta/48/deactivated_users.sql
@@ -0,0 +1,25 @@
+/* Copyright 2018 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Store any accounts that have been requested to be deactivated.
+ * We part the account from all the rooms it's in when it's
+ * deactivated. This can take some time and synapse may be restarted
+ * before it completes, so store the user IDs here until the process
+ * is complete.
+ */
+CREATE TABLE users_pending_deactivation (
+ user_id TEXT NOT NULL
+);
diff --git a/synapse/storage/schema/delta/49/add_user_daily_visits.sql b/synapse/storage/schema/delta/49/add_user_daily_visits.sql
new file mode 100644
index 0000000000..3dd478196f
--- /dev/null
+++ b/synapse/storage/schema/delta/49/add_user_daily_visits.sql
@@ -0,0 +1,21 @@
+/* Copyright 2018 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+CREATE TABLE user_daily_visits ( user_id TEXT NOT NULL,
+ device_id TEXT,
+ timestamp BIGINT NOT NULL );
+CREATE INDEX user_daily_visits_uts_idx ON user_daily_visits(user_id, timestamp);
+CREATE INDEX user_daily_visits_ts_idx ON user_daily_visits(timestamp);
diff --git a/synapse/storage/schema/delta/49/add_user_ips_last_seen_only_index.sql b/synapse/storage/schema/delta/49/add_user_ips_last_seen_only_index.sql
new file mode 100644
index 0000000000..3a4ed59b5b
--- /dev/null
+++ b/synapse/storage/schema/delta/49/add_user_ips_last_seen_only_index.sql
@@ -0,0 +1,17 @@
+/* Copyright 2018 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+INSERT into background_updates (update_name, progress_json)
+ VALUES ('user_ips_last_seen_only_index', '{}');
|