diff --git a/AUTHORS.rst b/AUTHORS.rst
index d7224ff5de..54ced67000 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -42,3 +42,6 @@ Ivan Shapovalov <intelfx100 at gmail.com>
Eric Myhre <hash at exultant.us>
* Fix bug where ``media_store_path`` config option was ignored by v0 content
repository API.
+
+Muthu Subramanian <muthu.subramanian.karunanidhi at ericsson.com>
+ * Add SAML2 support for registration and logins.
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index fe0ccb6eb7..d77f045406 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -25,12 +25,13 @@ from .registration import RegistrationConfig
from .metrics import MetricsConfig
from .appservice import AppServiceConfig
from .key import KeyConfig
+from .saml2 import SAML2Config
class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
- VoipConfig, RegistrationConfig,
- MetricsConfig, AppServiceConfig, KeyConfig,):
+ VoipConfig, RegistrationConfig, MetricsConfig,
+ AppServiceConfig, KeyConfig, SAML2Config, ):
pass
diff --git a/synapse/config/saml2.py b/synapse/config/saml2.py
new file mode 100644
index 0000000000..1532036876
--- /dev/null
+++ b/synapse/config/saml2.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 Ericsson
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._base import Config
+
+
+class SAML2Config(Config):
+ """SAML2 Configuration
+ Synapse uses the pysaml2 library to provide SAML2 support.
+
+ config_path: Path to the sp_conf.py configuration file
+ idp_redirect_url: Identity provider URL which will redirect
+ the user back to /login/saml2 with the SAML2 authentication response.
+
+ The sp_conf.py file looks something like this:
+ https://github.com/rohe/pysaml2/blob/master/example/sp-repoze/sp_conf.py.example
+
+ More information: https://pythonhosted.org/pysaml2/howto/config.html
+ """
+
+ def read_config(self, config):
+ saml2_config = config.get("saml2_config", None)
+ if saml2_config:
+ self.saml2_enabled = True
+ self.saml2_config_path = saml2_config["config_path"]
+ self.saml2_idp_redirect_url = saml2_config["idp_redirect_url"]
+ else:
+ self.saml2_enabled = False
+ self.saml2_config_path = None
+ self.saml2_idp_redirect_url = None
+
+ def default_config(self, config_dir_path, server_name):
+ return """
+ # Enable SAML2 for registration and login. Uses the pysaml2 library.
+ # config_path: Path to the sp_conf.py configuration file
+ # idp_redirect_url: Identity provider URL which will redirect
+ # the user back to /login/saml2 with the SAML2 authentication response.
+ # See the pysaml2 docs for the format of the config file.
+ #saml2_config:
+ # config_path: "%s/sp_conf.py"
+ # idp_redirect_url: "http://%s/idp"
+ """ % (config_dir_path, server_name)
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index ecb2d42c1f..6c1df35e80 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -27,6 +27,7 @@ class TlsConfig(Config):
self.tls_certificate = self.read_tls_certificate(
config.get("tls_certificate_path")
)
+ self.tls_certificate_file = config.get("tls_certificate_path")
self.no_tls = config.get("no_tls", False)
@@ -49,7 +50,11 @@ class TlsConfig(Config):
tls_dh_params_path = base_key_name + ".tls.dh"
return """\
- # PEM encoded X509 certificate for TLS
+ # PEM encoded X509 certificate for TLS.
+ # You can replace the self-signed certificate that synapse
+ # autogenerates on launch with your own SSL certificate + key pair
+ # if you like. Any required intermediate certificates can be
+ # appended after the primary certificate, in certificate-chain order.
tls_certificate_path: "%(tls_certificate_path)s"
# PEM encoded private key for TLS
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 2f8618a0df..c4390f3b2b 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -35,9 +35,9 @@ class ServerContextFactory(ssl.ContextFactory):
_ecCurve = _OpenSSLECCurve(_defaultCurveName)
_ecCurve.addECKeyToContext(context)
except:
- logger.exception("Failed to enable eliptic curve for TLS")
+ logger.exception("Failed to enable elliptic curve for TLS")
context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
- context.use_certificate(config.tls_certificate)
+ context.use_certificate_chain_file(config.tls_certificate_file)
if not config.no_tls:
context.use_privatekey(config.tls_private_key)
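
Since the context factory now loads the certificate with use_certificate_chain_file(), the file at tls_certificate_path can simply be the server certificate followed by its intermediates. A minimal sketch of assembling such a chain file (all file names hypothetical):

    # Concatenate the leaf certificate and any intermediates (leaf first,
    # then each issuer in turn) into the single PEM file that
    # use_certificate_chain_file() expects.
    cert_parts = ["homeserver.crt", "intermediate.crt"]   # hypothetical files
    with open("homeserver.tls.crt", "w") as chain_file:
        for part in cert_parts:
            with open(part) as f:
                chain_file.write(f.read())
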
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 7b68585a17..a1288b4252 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -193,6 +193,35 @@ class RegistrationHandler(BaseHandler):
logger.info("Valid captcha entered from %s", ip)
@defer.inlineCallbacks
+ def register_saml2(self, localpart):
+ """
+ Registers the given localpart for SAML2-based auth (no password is stored).
+ """
+ if urllib.quote(localpart) != localpart:
+ raise SynapseError(
+ 400,
+ "User ID must only contain characters which do not"
+ " require URL encoding."
+ )
+ user = UserID(localpart, self.hs.hostname)
+ user_id = user.to_string()
+
+ yield self.check_user_id_is_valid(user_id)
+ token = self._generate_token(user_id)
+ try:
+ yield self.store.register(
+ user_id=user_id,
+ token=token,
+ password_hash=None
+ )
+ yield self.distributor.fire("registered_user", user)
+ except Exception, e:
+ yield self.store.add_access_token_to_user(user_id, token)
+ # Ignore registration errors (e.g. the user already exists); just log them.
+ logger.exception(e)
+ defer.returnValue((user_id, token))
+
+ @defer.inlineCallbacks
def register_email(self, threepidCreds):
"""
Registers emails with an identity server.
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 807ff95c65..b60e905a62 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -207,7 +207,7 @@ class JsonResource(HttpServer, resource.Resource):
incoming_requests_counter.inc(request.method, servlet_classname)
args = [
- urllib.unquote(u).decode("UTF-8") for u in m.groups()
+ urllib.unquote(u).decode("UTF-8") if u else u for u in m.groups()
]
callback_return = yield callback(request, *args)
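
This guard matters because optional groups in a servlet pattern (such as KeyQueryServlet's, added later in this patch) come back from match.groups() as None. A quick illustration of the failure the change avoids, in the same Python 2 style as the code above:

    import re
    import urllib

    # A cut-down version of the /keys/query pattern: user_id is optional.
    pattern = re.compile(r"^/keys/query(?:/(?P<user_id>[^/]*))?$")
    m = pattern.match("/keys/query")
    print m.groups()   # (None,)

    # Previously urllib.unquote(None) blew up before the callback ran;
    # with the guard, None is passed through to the handler untouched.
    args = [urllib.unquote(u).decode("UTF-8") if u else u for u in m.groups()]
    print args         # [None]
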
diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py
index f3d1cf5c5f..1f015a7f2e 100644
--- a/synapse/push/baserules.py
+++ b/synapse/push/baserules.py
@@ -164,7 +164,7 @@ def make_base_append_underride_rules(user):
]
},
{
- 'rule_id': 'global/override/.m.rule.contains_display_name',
+ 'rule_id': 'global/underride/.m.rule.contains_display_name',
'conditions': [
{
'kind': 'contains_display_name'
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index f9e59dd917..17587170c8 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -31,6 +31,7 @@ REQUIREMENTS = {
"pillow": ["PIL"],
"pydenticon": ["pydenticon"],
"ujson": ["ujson"],
+ "pysaml2": ["saml2"],
}
CONDITIONAL_REQUIREMENTS = {
"web_client": {
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index b2257b749d..998d4d44c6 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -20,14 +20,32 @@ from synapse.types import UserID
from base import ClientV1RestServlet, client_path_pattern
import simplejson as json
+import urllib
+
+import logging
+from saml2 import BINDING_HTTP_POST
+from saml2 import config
+from saml2.client import Saml2Client
+
+
+logger = logging.getLogger(__name__)
class LoginRestServlet(ClientV1RestServlet):
PATTERN = client_path_pattern("/login$")
PASS_TYPE = "m.login.password"
+ SAML2_TYPE = "m.login.saml2"
+
+ def __init__(self, hs):
+ super(LoginRestServlet, self).__init__(hs)
+ self.idp_redirect_url = hs.config.saml2_idp_redirect_url
+ self.saml2_enabled = hs.config.saml2_enabled
def on_GET(self, request):
- return (200, {"flows": [{"type": LoginRestServlet.PASS_TYPE}]})
+ flows = [{"type": LoginRestServlet.PASS_TYPE}]
+ if self.saml2_enabled:
+ flows.append({"type": LoginRestServlet.SAML2_TYPE})
+ return (200, {"flows": flows})
def on_OPTIONS(self, request):
return (200, {})
@@ -39,6 +57,16 @@ class LoginRestServlet(ClientV1RestServlet):
if login_submission["type"] == LoginRestServlet.PASS_TYPE:
result = yield self.do_password_login(login_submission)
defer.returnValue(result)
+ elif self.saml2_enabled and (login_submission["type"] ==
+ LoginRestServlet.SAML2_TYPE):
+ relay_state = ""
+ if "relay_state" in login_submission:
+ relay_state = "&RelayState="+urllib.quote(
+ login_submission["relay_state"])
+ result = {
+ "uri": "%s%s" % (self.idp_redirect_url, relay_state)
+ }
+ defer.returnValue((200, result))
else:
raise SynapseError(400, "Bad login type.")
except KeyError:
@@ -94,6 +122,49 @@ class PasswordResetRestServlet(ClientV1RestServlet):
)
+class SAML2RestServlet(ClientV1RestServlet):
+ PATTERN = client_path_pattern("/login/saml2")
+
+ def __init__(self, hs):
+ super(SAML2RestServlet, self).__init__(hs)
+ self.sp_config = hs.config.saml2_config_path
+
+ @defer.inlineCallbacks
+ def on_POST(self, request):
+ saml2_auth = None
+ try:
+ conf = config.SPConfig()
+ conf.load_file(self.sp_config)
+ SP = Saml2Client(conf)
+ saml2_auth = SP.parse_authn_request_response(
+ request.args['SAMLResponse'][0], BINDING_HTTP_POST)
+ except Exception, e: # Not authenticated
+ logger.exception(e)
+ if saml2_auth and saml2_auth.status_ok() and not saml2_auth.not_signed:
+ username = saml2_auth.name_id.text
+ handler = self.handlers.registration_handler
+ (user_id, token) = yield handler.register_saml2(username)
+ # Forward to the RelayState callback along with ava
+ if 'RelayState' in request.args:
+ request.redirect(urllib.unquote(
+ request.args['RelayState'][0]) +
+ '?status=authenticated&access_token=' +
+ token + '&user_id=' + user_id + '&ava=' +
+ urllib.quote(json.dumps(saml2_auth.ava)))
+ request.finish()
+ defer.returnValue(None)
+ defer.returnValue((200, {"status": "authenticated",
+ "user_id": user_id, "token": token,
+ "ava": saml2_auth.ava}))
+ elif 'RelayState' in request.args:
+ request.redirect(urllib.unquote(
+ request.args['RelayState'][0]) +
+ '?status=not_authenticated')
+ request.finish()
+ defer.returnValue(None)
+ defer.returnValue((200, {"status": "not_authenticated"}))
+
+
def _parse_json(request):
try:
content = json.loads(request.content.read())
@@ -106,4 +177,6 @@ def _parse_json(request):
def register_servlets(hs, http_server):
LoginRestServlet(hs).register(http_server)
+ if hs.config.saml2_enabled:
+ SAML2RestServlet(hs).register(http_server)
# TODO PasswordResetRestServlet(hs).register(http_server)
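
From the client's point of view, the new m.login.saml2 flow is small: GET /login now advertises the extra flow, and a POST with type m.login.saml2 simply returns a redirect URI built from idp_redirect_url plus an optional RelayState. A sketch of that URI construction with hypothetical URLs (note the servlet joins with '&', so the configured IdP URL is expected to carry a query string already):

    import urllib

    # Hypothetical values: the configured IdP redirect URL and the client's
    # own callback page, sent as relay_state in the login submission.
    idp_redirect_url = "https://idp.example.com/sso?sp=synapse"
    login_submission = {
        "type": "m.login.saml2",
        "relay_state": "https://client.example.com/#/login_done",
    }

    relay_state = ""
    if "relay_state" in login_submission:
        relay_state = "&RelayState=" + urllib.quote(login_submission["relay_state"])

    response = {"uri": "%s%s" % (idp_redirect_url, relay_state)}
    # => {"uri": "https://idp.example.com/sso?sp=synapse"
    #            "&RelayState=https%3A//client.example.com/%23/login_done"}

The IdP then POSTs the SAMLResponse back to /login/saml2, where SAML2RestServlet registers the user and either redirects to the decoded RelayState or returns the access token directly.
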
diff --git a/synapse/rest/client/v2_alpha/__init__.py b/synapse/rest/client/v2_alpha/__init__.py
index 7d1aff4307..c3323d2a8a 100644
--- a/synapse/rest/client/v2_alpha/__init__.py
+++ b/synapse/rest/client/v2_alpha/__init__.py
@@ -18,7 +18,8 @@ from . import (
filter,
account,
register,
- auth
+ auth,
+ keys,
)
from synapse.http.server import JsonResource
@@ -38,3 +39,4 @@ class ClientV2AlphaRestResource(JsonResource):
account.register_servlets(hs, client_resource)
register.register_servlets(hs, client_resource)
auth.register_servlets(hs, client_resource)
+ keys.register_servlets(hs, client_resource)
diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py
new file mode 100644
index 0000000000..f031267751
--- /dev/null
+++ b/synapse/rest/client/v2_alpha/keys.py
@@ -0,0 +1,276 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.errors import SynapseError
+from synapse.http.servlet import RestServlet
+from syutil.jsonutil import encode_canonical_json
+
+from ._base import client_v2_pattern
+
+import simplejson as json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class KeyUploadServlet(RestServlet):
+ """
+ POST /keys/upload/<device_id> HTTP/1.1
+ Content-Type: application/json
+
+ {
+ "device_keys": {
+ "user_id": "<user_id>",
+ "device_id": "<device_id>",
+ "valid_until_ts": <millisecond_timestamp>,
+ "algorithms": [
+ "m.olm.curve25519-aes-sha256",
+ ],
+ "keys": {
+ "<algorithm>:<device_id>": "<key_base64>",
+ },
+ "signatures": {
+ "<user_id>": {
+ "<algorithm>:<device_id>": "<signature_base64>"
+ } } },
+ "one_time_keys": {
+ "<algorithm>:<key_id>": "<key_base64>"
+ },
+ }
+ """
+ PATTERN = client_v2_pattern("/keys/upload/(?P<device_id>[^/]*)")
+
+ def __init__(self, hs):
+ super(KeyUploadServlet, self).__init__()
+ self.store = hs.get_datastore()
+ self.clock = hs.get_clock()
+ self.auth = hs.get_auth()
+
+ @defer.inlineCallbacks
+ def on_POST(self, request, device_id):
+ auth_user, client_info = yield self.auth.get_user_by_req(request)
+ user_id = auth_user.to_string()
+ # TODO: Check that the device_id matches that in the authentication
+ # or derive the device_id from the authentication instead.
+ try:
+ body = json.loads(request.content.read())
+ except:
+ raise SynapseError(400, "Invalid key JSON")
+ time_now = self.clock.time_msec()
+
+ # TODO: Validate the JSON to make sure it has the right keys.
+ device_keys = body.get("device_keys", None)
+ if device_keys:
+ logger.info(
+ "Updating device_keys for device %r for user %r at %d",
+ device_id, auth_user, time_now
+ )
+ # TODO: Sign the JSON with the server key
+ yield self.store.set_e2e_device_keys(
+ user_id, device_id, time_now,
+ encode_canonical_json(device_keys)
+ )
+
+ one_time_keys = body.get("one_time_keys", None)
+ if one_time_keys:
+ logger.info(
+ "Adding %d one_time_keys for device %r for user %r at %d",
+ len(one_time_keys), device_id, user_id, time_now
+ )
+ key_list = []
+ for key_id, key_json in one_time_keys.items():
+ algorithm, key_id = key_id.split(":")
+ key_list.append((
+ algorithm, key_id, encode_canonical_json(key_json)
+ ))
+
+ yield self.store.add_e2e_one_time_keys(
+ user_id, device_id, time_now, key_list
+ )
+
+ result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
+ defer.returnValue((200, {"one_time_key_counts": result}))
+
+ @defer.inlineCallbacks
+ def on_GET(self, request, device_id):
+ auth_user, client_info = yield self.auth.get_user_by_req(request)
+ user_id = auth_user.to_string()
+
+ result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
+ defer.returnValue((200, {"one_time_key_counts": result}))
+
+
+class KeyQueryServlet(RestServlet):
+ """
+ GET /keys/query/<user_id> HTTP/1.1
+
+ GET /keys/query/<user_id>/<device_id> HTTP/1.1
+
+ POST /keys/query HTTP/1.1
+ Content-Type: application/json
+ {
+ "device_keys": {
+ "<user_id>": ["<device_id>"]
+ } }
+
+ HTTP/1.1 200 OK
+ {
+ "device_keys": {
+ "<user_id>": {
+ "<device_id>": {
+ "user_id": "<user_id>", // Duplicated to be signed
+ "device_id": "<device_id>", // Duplicated to be signed
+ "valid_until_ts": <millisecond_timestamp>,
+ "algorithms": [ // List of supported algorithms
+ "m.olm.curve25519-aes-sha256",
+ ],
+ "keys": { // Must include a ed25519 signing key
+ "<algorithm>:<key_id>": "<key_base64>",
+ },
+ "signatures:" {
+ // Must be signed with device's ed25519 key
+ "<user_id>/<device_id>": {
+ "<algorithm>:<key_id>": "<signature_base64>"
+ }
+ // Must be signed by this server.
+ "<server_name>": {
+ "<algorithm>:<key_id>": "<signature_base64>"
+ } } } } } }
+ """
+
+ PATTERN = client_v2_pattern(
+ "/keys/query(?:"
+ "/(?P<user_id>[^/]*)(?:"
+ "/(?P<device_id>[^/]*)"
+ ")?"
+ ")?"
+ )
+
+ def __init__(self, hs):
+ super(KeyQueryServlet, self).__init__()
+ self.store = hs.get_datastore()
+ self.auth = hs.get_auth()
+
+ @defer.inlineCallbacks
+ def on_POST(self, request, user_id, device_id):
+ logger.debug("onPOST")
+ yield self.auth.get_user_by_req(request)
+ try:
+ body = json.loads(request.content.read())
+ except:
+ raise SynapseError(400, "Invalid key JSON")
+ query = []
+ for user_id, device_ids in body.get("device_keys", {}).items():
+ if not device_ids:
+ query.append((user_id, None))
+ else:
+ for device_id in device_ids:
+ query.append((user_id, device_id))
+ results = yield self.store.get_e2e_device_keys(query)
+ defer.returnValue(self.json_result(request, results))
+
+ @defer.inlineCallbacks
+ def on_GET(self, request, user_id, device_id):
+ auth_user, client_info = yield self.auth.get_user_by_req(request)
+ auth_user_id = auth_user.to_string()
+ if not user_id:
+ user_id = auth_user_id
+ if not device_id:
+ device_id = None
+ # Returns a map of user_id->device_id->json_bytes.
+ results = yield self.store.get_e2e_device_keys([(user_id, device_id)])
+ defer.returnValue(self.json_result(request, results))
+
+ def json_result(self, request, results):
+ json_result = {}
+ for user_id, device_keys in results.items():
+ for device_id, json_bytes in device_keys.items():
+ json_result.setdefault(user_id, {})[device_id] = json.loads(
+ json_bytes
+ )
+ return (200, {"device_keys": json_result})
+
+
+class OneTimeKeyServlet(RestServlet):
+ """
+ GET /keys/take/<user-id>/<device-id>/<algorithm> HTTP/1.1
+
+ POST /keys/take HTTP/1.1
+ {
+ "one_time_keys": {
+ "<user_id>": {
+ "<device_id>": "<algorithm>"
+ } } }
+
+ HTTP/1.1 200 OK
+ {
+ "one_time_keys": {
+ "<user_id>": {
+ "<device_id>": {
+ "<algorithm>:<key_id>": "<key_base64>"
+ } } } }
+
+ """
+ PATTERN = client_v2_pattern(
+ "/keys/take(?:/?|(?:/"
+ "(?P<user_id>[^/]*)/(?P<device_id>[^/]*)/(?P<algorithm>[^/]*)"
+ ")?)"
+ )
+
+ def __init__(self, hs):
+ super(OneTimeKeyServlet, self).__init__()
+ self.store = hs.get_datastore()
+ self.auth = hs.get_auth()
+ self.clock = hs.get_clock()
+
+ @defer.inlineCallbacks
+ def on_GET(self, request, user_id, device_id, algorithm):
+ yield self.auth.get_user_by_req(request)
+ results = yield self.store.take_e2e_one_time_keys(
+ [(user_id, device_id, algorithm)]
+ )
+ defer.returnValue(self.json_result(request, results))
+
+ @defer.inlineCallbacks
+ def on_POST(self, request, user_id, device_id, algorithm):
+ yield self.auth.get_user_by_req(request)
+ try:
+ body = json.loads(request.content.read())
+ except:
+ raise SynapseError(400, "Invalid key JSON")
+ query = []
+ for user_id, device_keys in body.get("one_time_keys", {}).items():
+ for device_id, algorithm in device_keys.items():
+ query.append((user_id, device_id, algorithm))
+ results = yield self.store.take_e2e_one_time_keys(query)
+ defer.returnValue(self.json_result(request, results))
+
+ def json_result(self, request, results):
+ json_result = {}
+ for user_id, device_keys in results.items():
+ for device_id, keys in device_keys.items():
+ for key_id, json_bytes in keys.items():
+ json_result.setdefault(user_id, {})[device_id] = {
+ key_id: json.loads(json_bytes)
+ }
+ return (200, {"one_time_keys": json_result})
+
+
+def register_servlets(hs, http_server):
+ KeyUploadServlet(hs).register(http_server)
+ KeyQueryServlet(hs).register(http_server)
+ OneTimeKeyServlet(hs).register(http_server)
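
A concrete upload body for the servlet above, with hypothetical identifiers and placeholder base64 blobs; POSTed as JSON to /_matrix/client/v2_alpha/keys/upload/<device_id> (with a valid access_token) it stores the device keys, adds the one-time key, and the reply reports the per-algorithm one-time key counts:

    import json

    upload_body = {
        "device_keys": {
            "user_id": "@alice:example.com",        # hypothetical user
            "device_id": "ABCDEFGH",                # hypothetical device
            "valid_until_ts": 1435000000000,
            "algorithms": ["m.olm.curve25519-aes-sha256"],
            "keys": {"curve25519:ABCDEFGH": "<key_base64>"},
            "signatures": {
                "@alice:example.com": {"ed25519:ABCDEFGH": "<signature_base64>"},
            },
        },
        "one_time_keys": {
            "curve25519:AAAAAQ": "<key_base64>",
        },
    }

    print json.dumps(upload_body, indent=2)
    # Expected reply shape: {"one_time_key_counts": {"curve25519": 1}}
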
diff --git a/synapse/rest/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py
index 6c83a9478c..c43ae0314b 100644
--- a/synapse/rest/media/v1/base_resource.py
+++ b/synapse/rest/media/v1/base_resource.py
@@ -27,9 +27,11 @@ from twisted.web.resource import Resource
from twisted.protocols.basic import FileSender
from synapse.util.async import ObservableDeferred
+from synapse.util.stringutils import is_ascii
import os
+import cgi
import logging
logger = logging.getLogger(__name__)
@@ -37,8 +39,13 @@ logger = logging.getLogger(__name__)
def parse_media_id(request):
try:
- server_name, media_id = request.postpath
- return (server_name, media_id)
+ # This allows users to append e.g. /test.png to the URL. Useful for
+ # clients that parse the URL to see content type.
+ server_name, media_id = request.postpath[:2]
+ if len(request.postpath) > 2 and is_ascii(request.postpath[-1]):
+ return server_name, media_id, request.postpath[-1]
+ else:
+ return server_name, media_id, None
except:
raise SynapseError(
404,
@@ -128,12 +135,21 @@ class BaseMediaResource(Resource):
media_type = headers["Content-Type"][0]
time_now_ms = self.clock.time_msec()
+ content_disposition = headers.get("Content-Disposition", None)
+ if content_disposition:
+ _, params = cgi.parse_header(content_disposition[0],)
+ upload_name = params.get("filename", None)
+ if upload_name and not is_ascii(upload_name):
+ upload_name = None
+ else:
+ upload_name = None
+
yield self.store.store_cached_remote_media(
origin=server_name,
media_id=media_id,
media_type=media_type,
time_now_ms=self.clock.time_msec(),
- upload_name=None,
+ upload_name=upload_name,
media_length=length,
filesystem_id=file_id,
)
@@ -144,7 +160,7 @@ class BaseMediaResource(Resource):
media_info = {
"media_type": media_type,
"media_length": length,
- "upload_name": None,
+ "upload_name": upload_name,
"created_ts": time_now_ms,
"filesystem_id": file_id,
}
@@ -157,11 +173,16 @@ class BaseMediaResource(Resource):
@defer.inlineCallbacks
def _respond_with_file(self, request, media_type, file_path,
- file_size=None):
+ file_size=None, upload_name=None):
logger.debug("Responding with %r", file_path)
if os.path.isfile(file_path):
request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
+ if upload_name:
+ request.setHeader(
+ b"Content-Disposition",
+ b"inline; filename=%s" % (upload_name.encode("utf-8"),),
+ )
# cache for at least a day.
# XXX: we might want to turn this off for data we don't want to
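
The filename handling above hinges on two small pieces: clients may append a trailing /<filename> segment to the download URL (picked up by parse_media_id), and remote media gets its name from the Content-Disposition header via cgi.parse_header plus the new is_ascii() check. A quick illustration of the header parsing with a hypothetical value:

    import cgi

    # Split a Content-Disposition header into its value and parameters,
    # the same way the remote-media download path recovers the filename.
    value, params = cgi.parse_header('inline; filename="cat.jpeg"')
    print value                    # inline
    print params.get("filename")   # cat.jpeg

    # Non-ASCII filenames are discarded (upload_name stays None) rather
    # than being echoed back verbatim in a response header later on.
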
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index 0fe6abf647..ab384e5388 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -32,14 +32,16 @@ class DownloadResource(BaseMediaResource):
@request_handler
@defer.inlineCallbacks
def _async_render_GET(self, request):
- server_name, media_id = parse_media_id(request)
+ server_name, media_id, name = parse_media_id(request)
if server_name == self.server_name:
- yield self._respond_local_file(request, media_id)
+ yield self._respond_local_file(request, media_id, name)
else:
- yield self._respond_remote_file(request, server_name, media_id)
+ yield self._respond_remote_file(
+ request, server_name, media_id, name
+ )
@defer.inlineCallbacks
- def _respond_local_file(self, request, media_id):
+ def _respond_local_file(self, request, media_id, name):
media_info = yield self.store.get_local_media(media_id)
if not media_info:
self._respond_404(request)
@@ -47,24 +49,28 @@ class DownloadResource(BaseMediaResource):
media_type = media_info["media_type"]
media_length = media_info["media_length"]
+ upload_name = name if name else media_info["upload_name"]
file_path = self.filepaths.local_media_filepath(media_id)
yield self._respond_with_file(
- request, media_type, file_path, media_length
+ request, media_type, file_path, media_length,
+ upload_name=upload_name,
)
@defer.inlineCallbacks
- def _respond_remote_file(self, request, server_name, media_id):
+ def _respond_remote_file(self, request, server_name, media_id, name):
media_info = yield self._get_remote_media(server_name, media_id)
media_type = media_info["media_type"]
media_length = media_info["media_length"]
filesystem_id = media_info["filesystem_id"]
+ upload_name = name if name else media_info["upload_name"]
file_path = self.filepaths.remote_media_filepath(
server_name, filesystem_id
)
yield self._respond_with_file(
- request, media_type, file_path, media_length
+ request, media_type, file_path, media_length,
+ upload_name=upload_name,
)
diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py
index 1dadd880b2..4a9b6d8eeb 100644
--- a/synapse/rest/media/v1/thumbnail_resource.py
+++ b/synapse/rest/media/v1/thumbnail_resource.py
@@ -36,7 +36,7 @@ class ThumbnailResource(BaseMediaResource):
@request_handler
@defer.inlineCallbacks
def _async_render_GET(self, request):
- server_name, media_id = parse_media_id(request)
+ server_name, media_id, _ = parse_media_id(request)
width = parse_integer(request, "width")
height = parse_integer(request, "height")
method = parse_string(request, "method", "scale")
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index cc571976a5..cdd1d44e07 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -15,7 +15,7 @@
from synapse.http.server import respond_with_json, request_handler
-from synapse.util.stringutils import random_string
+from synapse.util.stringutils import random_string, is_ascii
from synapse.api.errors import SynapseError
from twisted.web.server import NOT_DONE_YET
@@ -84,6 +84,12 @@ class UploadResource(BaseMediaResource):
code=413,
)
+ upload_name = request.args.get("filename", None)
+ if upload_name:
+ upload_name = upload_name[0]
+ if upload_name and not is_ascii(upload_name):
+ raise SynapseError(400, "filename must be ascii")
+
headers = request.requestHeaders
if headers.hasHeader("Content-Type"):
@@ -99,7 +105,7 @@ class UploadResource(BaseMediaResource):
# TODO(markjh): parse content-disposition
content_uri = yield self.create_content(
- media_type, None, request.content.read(),
+ media_type, upload_name, request.content.read(),
content_length, auth_user
)
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index c137f47820..e089d81675 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -37,6 +37,7 @@ from .rejections import RejectionsStore
from .state import StateStore
from .signatures import SignatureStore
from .filtering import FilteringStore
+from .end_to_end_keys import EndToEndKeyStore
import fnmatch
@@ -51,7 +52,7 @@ logger = logging.getLogger(__name__)
# Remember to update this number every time a change is made to database
# schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 20
+SCHEMA_VERSION = 21
dir_path = os.path.abspath(os.path.dirname(__file__))
@@ -74,6 +75,7 @@ class DataStore(RoomMemberStore, RoomStore,
PushRuleStore,
ApplicationServiceTransactionStore,
EventsStore,
+ EndToEndKeyStore,
):
def __init__(self, hs):
diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py
new file mode 100644
index 0000000000..99dc864e46
--- /dev/null
+++ b/synapse/storage/end_to_end_keys.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from _base import SQLBaseStore
+
+
+class EndToEndKeyStore(SQLBaseStore):
+ def set_e2e_device_keys(self, user_id, device_id, time_now, json_bytes):
+ return self._simple_upsert(
+ table="e2e_device_keys_json",
+ keyvalues={
+ "user_id": user_id,
+ "device_id": device_id,
+ },
+ values={
+ "ts_added_ms": time_now,
+ "key_json": json_bytes,
+ }
+ )
+
+ def get_e2e_device_keys(self, query_list):
+ """Fetch a list of device keys.
+ Args:
+ query_list(list): List of pairs of user_ids and device_ids.
+ Returns:
+ Dict mapping from user-id to dict mapping from device_id to
+ key json byte strings.
+ """
+ def _get_e2e_device_keys(txn):
+ result = {}
+ for user_id, device_id in query_list:
+ user_result = result.setdefault(user_id, {})
+ keyvalues = {"user_id": user_id}
+ if device_id:
+ keyvalues["device_id"] = device_id
+ rows = self._simple_select_list_txn(
+ txn, table="e2e_device_keys_json",
+ keyvalues=keyvalues,
+ retcols=["device_id", "key_json"]
+ )
+ for row in rows:
+ user_result[row["device_id"]] = row["key_json"]
+ return result
+ return self.runInteraction("get_e2e_device_keys", _get_e2e_device_keys)
+
+ def add_e2e_one_time_keys(self, user_id, device_id, time_now, key_list):
+ def _add_e2e_one_time_keys(txn):
+ for (algorithm, key_id, json_bytes) in key_list:
+ self._simple_upsert_txn(
+ txn, table="e2e_one_time_keys_json",
+ keyvalues={
+ "user_id": user_id,
+ "device_id": device_id,
+ "algorithm": algorithm,
+ "key_id": key_id,
+ },
+ values={
+ "ts_added_ms": time_now,
+ "key_json": json_bytes,
+ }
+ )
+ return self.runInteraction(
+ "add_e2e_one_time_keys", _add_e2e_one_time_keys
+ )
+
+ def count_e2e_one_time_keys(self, user_id, device_id):
+ """ Count the number of one time keys the server has for a device
+ Returns:
+ Dict mapping from algorithm to number of keys for that algorithm.
+ """
+ def _count_e2e_one_time_keys(txn):
+ sql = (
+ "SELECT algorithm, COUNT(key_id) FROM e2e_one_time_keys_json"
+ " WHERE user_id = ? AND device_id = ?"
+ " GROUP BY algorithm"
+ )
+ txn.execute(sql, (user_id, device_id))
+ result = {}
+ for algorithm, key_count in txn.fetchall():
+ result[algorithm] = key_count
+ return result
+ return self.runInteraction(
+ "count_e2e_one_time_keys", _count_e2e_one_time_keys
+ )
+
+ def take_e2e_one_time_keys(self, query_list):
+ """Take a list of one time keys out of the database"""
+ def _take_e2e_one_time_keys(txn):
+ sql = (
+ "SELECT key_id, key_json FROM e2e_one_time_keys_json"
+ " WHERE user_id = ? AND device_id = ? AND algorithm = ?"
+ " LIMIT 1"
+ )
+ result = {}
+ delete = []
+ for user_id, device_id, algorithm in query_list:
+ user_result = result.setdefault(user_id, {})
+ device_result = user_result.setdefault(device_id, {})
+ txn.execute(sql, (user_id, device_id, algorithm))
+ for key_id, key_json in txn.fetchall():
+ device_result[algorithm + ":" + key_id] = key_json
+ delete.append((user_id, device_id, algorithm, key_id))
+ sql = (
+ "DELETE FROM e2e_one_time_keys_json"
+ " WHERE user_id = ? AND device_id = ? AND algorithm = ?"
+ " AND key_id = ?"
+ )
+ for user_id, device_id, algorithm, key_id in delete:
+ txn.execute(sql, (user_id, device_id, algorithm, key_id))
+ return result
+ return self.runInteraction(
+ "take_e2e_one_time_keys", _take_e2e_one_time_keys
+ )
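
For reference, the shapes these methods resolve to, with hypothetical identifiers; key_json is stored and handed back as the opaque JSON string the client uploaded:

    # get_e2e_device_keys([("@alice:example.com", None)]) -> one entry per
    # known device for the user, mapping to the uploaded key JSON string:
    device_keys_result = {
        "@alice:example.com": {
            "ABCDEFGH": '{"algorithms": ["m.olm.curve25519-aes-sha256"], ...}',
        },
    }

    # take_e2e_one_time_keys([("@alice:example.com", "ABCDEFGH", "curve25519")])
    # claims at most one matching key, deletes it in the same transaction,
    # and returns it keyed by "<algorithm>:<key_id>":
    one_time_keys_result = {
        "@alice:example.com": {
            "ABCDEFGH": {
                "curve25519:AAAAAQ": '"<key_base64>"',
            },
        },
    }
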
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index ace7459538..c71019d93b 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -408,10 +408,12 @@ class EventFederationStore(SQLBaseStore):
keyvalues={
"event_id": event_id,
},
- retcol="depth"
+ retcol="depth",
+ allow_none=True,
)
- queue.put((-depth, event_id))
+ if depth:
+ queue.put((-depth, event_id))
while not queue.empty() and len(event_results) < limit:
try:
diff --git a/synapse/storage/schema/delta/21/end_to_end_keys.sql b/synapse/storage/schema/delta/21/end_to_end_keys.sql
new file mode 100644
index 0000000000..8b4a380d11
--- /dev/null
+++ b/synapse/storage/schema/delta/21/end_to_end_keys.sql
@@ -0,0 +1,34 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+CREATE TABLE IF NOT EXISTS e2e_device_keys_json (
+ user_id TEXT NOT NULL, -- The user these keys are for.
+ device_id TEXT NOT NULL, -- Which of the user's devices these keys are for.
+ ts_added_ms BIGINT NOT NULL, -- When the keys were uploaded.
+ key_json TEXT NOT NULL, -- The keys for the device as a JSON blob.
+ CONSTRAINT e2e_device_keys_json_uniqueness UNIQUE (user_id, device_id)
+);
+
+
+CREATE TABLE IF NOT EXISTS e2e_one_time_keys_json (
+ user_id TEXT NOT NULL, -- The user this one-time key is for.
+ device_id TEXT NOT NULL, -- The device this one-time key is for.
+ algorithm TEXT NOT NULL, -- Which algorithm this one-time key is for.
+ key_id TEXT NOT NULL, -- An id for suppressing duplicate uploads.
+ ts_added_ms BIGINT NOT NULL, -- When this key was uploaded.
+ key_json TEXT NOT NULL, -- The key as a JSON blob.
+ CONSTRAINT e2e_one_time_keys_json_uniqueness UNIQUE (user_id, device_id, algorithm, key_id)
+);
diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py
index 52e66beaee..7a1e96af37 100644
--- a/synapse/util/stringutils.py
+++ b/synapse/util/stringutils.py
@@ -33,3 +33,12 @@ def random_string_with_symbols(length):
return ''.join(
random.choice(_string_with_symbols) for _ in xrange(length)
)
+
+
+def is_ascii(s):
+ try:
+ s.encode("ascii")
+ except UnicodeDecodeError:
+ return False
+ else:
+ return True
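
Within this patch is_ascii() is only ever fed byte strings (request.args values, postpath segments, parsed header parameters), so under Python 2 the failure mode is the implicit ascii decode inside str.encode(); a quick sanity check:

    from synapse.util.stringutils import is_ascii

    print is_ascii("cat.jpeg")         # True
    print is_ascii("caf\xc3\xa9.txt")  # False: the UTF-8 bytes for "é" trip
                                       # the implicit ascii decode
    # Note: a unicode string containing non-ASCII characters would raise
    # UnicodeEncodeError instead, which this helper does not catch; that is
    # fine for the byte-string call sites above.
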