diff --git a/synapse/__init__.py b/synapse/__init__.py
index 5853165a21..d85bb3dce0 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
""" This is a reference implementation of a Matrix home server.
"""
-__version__ = "0.10.0-rc1"
+__version__ = "0.10.0"
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 0a77a76cb8..df788230fa 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -20,7 +20,7 @@ from twisted.internet import defer
from synapse.api.constants import EventTypes, Membership, JoinRules
from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.util.logutils import log_function
-from synapse.types import UserID
+from synapse.types import UserID, EventID
import logging
import pymacaroons
@@ -72,6 +72,14 @@ class Auth(object):
# FIXME
return True
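+            # Reject events for rooms we know nothing about: the room's
+            # m.room.create event must be among the auth events.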
+ creation_event = auth_events.get((EventTypes.Create, ""), None)
+
+ if not creation_event:
+ raise SynapseError(
+ 403,
+ "Room %r does not exist" % (event.room_id,)
+ )
+
# FIXME: Temp hack
if event.type == EventTypes.Aliases:
return True
@@ -98,7 +106,7 @@ class Auth(object):
self._check_power_levels(event, auth_events)
if event.type == EventTypes.Redaction:
- self._check_redaction(event, auth_events)
+ self.check_redaction(event, auth_events)
logger.debug("Allowing! %s", event)
except AuthError as e:
@@ -627,16 +635,35 @@ class Auth(object):
return True
- def _check_redaction(self, event, auth_events):
+ def check_redaction(self, event, auth_events):
+ """Check whether the event sender is allowed to redact the target event.
+
+ Returns:
+            True if the sender is allowed to redact the target event only if
+            the target event was created by them (the caller must verify this).
+ False if the sender is allowed to redact the target event with no
+ further checks.
+
+ Raises:
+ AuthError if the event sender is definitely not allowed to redact
+ the target event.
+ """
user_level = self._get_user_power_level(event.user_id, auth_events)
redact_level = self._get_named_level(auth_events, "redact", 50)
- if user_level < redact_level:
- raise AuthError(
- 403,
- "You don't have permission to redact events"
- )
+ if user_level > redact_level:
+ return False
+
+ redacter_domain = EventID.from_string(event.event_id).domain
+ redactee_domain = EventID.from_string(event.redacts).domain
+ if redacter_domain == redactee_domain:
+ return True
+
+ raise AuthError(
+ 403,
+ "You don't have permission to redact events"
+ )
def _check_power_levels(self, event, auth_events):
user_list = event.content.get("users", {})
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index f04493f92a..c23f853230 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -16,7 +16,7 @@
import sys
sys.dont_write_bytecode = True
-from synapse.python_dependencies import check_requirements
+from synapse.python_dependencies import check_requirements, DEPENDENCY_LINKS
if __name__ == '__main__':
check_requirements()
@@ -97,9 +97,25 @@ class SynapseHomeServer(HomeServer):
return JsonResource(self)
def build_resource_for_web_client(self):
- import syweb
- syweb_path = os.path.dirname(syweb.__file__)
- webclient_path = os.path.join(syweb_path, "webclient")
+ webclient_path = self.get_config().web_client_location
+ if not webclient_path:
+ try:
+ import syweb
+ except ImportError:
+ quit_with_error(
+ "Could not find a webclient.\n\n"
+ "Please either install the matrix-angular-sdk or configure\n"
+ "the location of the source to serve via the configuration\n"
+ "option `web_client_location`\n\n"
+ "To install the `matrix-angular-sdk` via pip, run:\n\n"
+ " pip install '%(dep)s'\n"
+ "\n"
+ "You can also disable hosting of the webclient via the\n"
+ "configuration option `web_client`\n"
+ % {"dep": DEPENDENCY_LINKS["matrix-angular-sdk"]}
+ )
+ syweb_path = os.path.dirname(syweb.__file__)
+ webclient_path = os.path.join(syweb_path, "webclient")
# GZip is disabled here due to
# https://twistedmatrix.com/trac/ticket/7678
# (It can stay enabled for the API resources: they call
@@ -259,11 +275,10 @@ class SynapseHomeServer(HomeServer):
def quit_with_error(error_string):
message_lines = error_string.split("\n")
- line_length = max([len(l) for l in message_lines]) + 2
+ line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
sys.stderr.write("*" * line_length + '\n')
for line in message_lines:
- if line.strip():
- sys.stderr.write(" %s\n" % (line.strip(),))
+ sys.stderr.write(" %s\n" % (line.rstrip(),))
sys.stderr.write("*" * line_length + '\n')
sys.exit(1)
@@ -326,7 +341,7 @@ def get_version_string():
)
).encode("ascii")
except Exception as e:
- logger.warn("Failed to check for git repository: %s", e)
+ logger.info("Failed to check for git repository: %s", e)
return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
@@ -388,7 +403,7 @@ def setup(config_options):
database_engine=database_engine,
)
- logger.info("Preparing database: %r...", config.database_config)
+ logger.info("Preparing database: %s...", config.database_config['name'])
try:
db_conn = database_engine.module.connect(
@@ -410,7 +425,7 @@ def setup(config_options):
)
sys.exit(1)
- logger.info("Database prepared in %r.", config.database_config)
+ logger.info("Database prepared in %s.", config.database_config['name'])
hs.start_listening()
diff --git a/synapse/config/__main__.py b/synapse/config/__main__.py
new file mode 100644
index 0000000000..f822d12036
--- /dev/null
+++ b/synapse/config/__main__.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if __name__ == "__main__":
+ import sys
+ from homeserver import HomeServerConfig
+
+ action = sys.argv[1]
+
+ if action == "read":
+ key = sys.argv[2]
+ config = HomeServerConfig.load_config("", sys.argv[3:])
+
+ print getattr(config, key)
+ sys.exit(0)
+ else:
+ sys.stderr.write("Unknown command %r\n" % (action,))
+ sys.exit(1)
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 73f6959959..8a75c48733 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -131,7 +131,8 @@ class Config(object):
"-c", "--config-path",
action="append",
metavar="CONFIG_FILE",
- help="Specify config file"
+ help="Specify config file. Can be given multiple times and"
+ " may specify directories containing *.yaml files."
)
config_parser.add_argument(
"--generate-config",
@@ -144,6 +145,13 @@ class Config(object):
help="Generate any missing key files then exit"
)
config_parser.add_argument(
+ "--keys-directory",
+ metavar="DIRECTORY",
+ help="Used with 'generate-*' options to specify where files such as"
+ " certs and signing keys should be stored in, unless explicitly"
+ " specified in the config."
+ )
+ config_parser.add_argument(
"-H", "--server-name",
help="The server name to generate a config file for"
)
@@ -151,16 +159,48 @@ class Config(object):
generate_keys = config_args.generate_keys
+ config_files = []
+ if config_args.config_path:
+ for config_path in config_args.config_path:
+ if os.path.isdir(config_path):
+                    # We accept specifying directories as config paths; we search
+ # inside that directory for all files matching *.yaml, and then
+ # we apply them in *sorted* order.
+ files = []
+ for entry in os.listdir(config_path):
+ entry_path = os.path.join(config_path, entry)
+ if not os.path.isfile(entry_path):
+ print (
+ "Found subdirectory in config directory: %r. IGNORING."
+ ) % (entry_path, )
+ continue
+
+ if not entry.endswith(".yaml"):
+ print (
+ "Found file in config directory that does not"
+ " end in '.yaml': %r. IGNORING."
+ ) % (entry_path, )
+ continue
+
+ files.append(entry_path)
+
+ config_files.extend(sorted(files))
+ else:
+ config_files.append(config_path)
+
if config_args.generate_config:
- if not config_args.config_path:
+ if not config_files:
config_parser.error(
"Must supply a config file.\nA config file can be automatically"
" generated using \"--generate-config -H SERVER_NAME"
" -c CONFIG-FILE\""
)
- (config_path,) = config_args.config_path
+ (config_path,) = config_files
if not os.path.exists(config_path):
- config_dir_path = os.path.dirname(config_path)
+ if config_args.keys_directory:
+ config_dir_path = config_args.keys_directory
+ else:
+ config_dir_path = os.path.dirname(config_path)
config_dir_path = os.path.abspath(config_dir_path)
server_name = config_args.server_name
@@ -202,19 +242,22 @@ class Config(object):
obj.invoke_all("add_arguments", parser)
args = parser.parse_args(remaining_args)
- if not config_args.config_path:
+ if not config_files:
config_parser.error(
"Must supply a config file.\nA config file can be automatically"
" generated using \"--generate-config -H SERVER_NAME"
" -c CONFIG-FILE\""
)
- config_dir_path = os.path.dirname(config_args.config_path[-1])
+ if config_args.keys_directory:
+ config_dir_path = config_args.keys_directory
+ else:
+ config_dir_path = os.path.dirname(config_args.config_path[-1])
config_dir_path = os.path.abspath(config_dir_path)
specified_config = {}
- for config_path in config_args.config_path:
- yaml_config = cls.read_config_file(config_path)
+ for config_file in config_files:
+ yaml_config = cls.read_config_file(config_file)
specified_config.update(yaml_config)
server_name = specified_config["server_name"]
diff --git a/synapse/config/server.py b/synapse/config/server.py
index f9a3b5f15b..a03e55c223 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -22,6 +22,7 @@ class ServerConfig(Config):
self.server_name = config["server_name"]
self.pid_file = self.abspath(config.get("pid_file"))
self.web_client = config["web_client"]
+ self.web_client_location = config.get("web_client_location", None)
self.soft_file_limit = config["soft_file_limit"]
self.daemonize = config.get("daemonize")
self.print_pidfile = config.get("print_pidfile")
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index a692cdbe55..e251ab6af3 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -162,7 +162,9 @@ class Keyring(object):
def remove_deferreds(res, server_name, group_id):
server_to_gids[server_name].discard(group_id)
if not server_to_gids[server_name]:
- server_to_deferred.pop(server_name).callback(None)
+ d = server_to_deferred.pop(server_name, None)
+ if d:
+ d.callback(None)
return res
for g_id, deferred in deferreds.items():
@@ -200,8 +202,15 @@ class Keyring(object):
else:
break
- for server_name, deferred in server_to_deferred:
- self.key_downloads[server_name] = ObservableDeferred(deferred)
+ for server_name, deferred in server_to_deferred.items():
+ d = ObservableDeferred(deferred)
+ self.key_downloads[server_name] = d
+
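+            # Drop the cached deferred once the download completes or fails.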
+ def rm(r, server_name):
+ self.key_downloads.pop(server_name, None)
+ return r
+
+ d.addBoth(rm, server_name)
def get_server_verify_keys(self, group_id_to_group, group_id_to_deferred):
"""Takes a dict of KeyGroups and tries to find at least one key for
@@ -220,9 +229,8 @@ class Keyring(object):
merged_results = {}
missing_keys = {
- group.server_name: key_id
+ group.server_name: set(group.key_ids)
for group in group_id_to_group.values()
- for key_id in group.key_ids
}
for fn in key_fetch_fns:
@@ -279,16 +287,15 @@ class Keyring(object):
def get_keys_from_store(self, server_name_and_key_ids):
res = yield defer.gatherResults(
[
- self.store.get_server_verify_keys(server_name, key_ids)
+ self.store.get_server_verify_keys(
+ server_name, key_ids
+ ).addCallback(lambda ks, server: (server, ks), server_name)
for server_name, key_ids in server_name_and_key_ids
],
consumeErrors=True,
).addErrback(unwrapFirstError)
- defer.returnValue(dict(zip(
- [server_name for server_name, _ in server_name_and_key_ids],
- res
- )))
+ defer.returnValue(dict(res))
@defer.inlineCallbacks
def get_keys_from_perspectives(self, server_name_and_key_ids):
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index e91f1129db..60ac6617ae 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -15,7 +15,7 @@
from twisted.internet import defer
-from synapse.api.errors import LimitExceededError, SynapseError
+from synapse.api.errors import LimitExceededError, SynapseError, AuthError
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.api.constants import Membership, EventTypes
from synapse.types import UserID, RoomAlias
@@ -107,6 +107,22 @@ class BaseHandler(object):
if not suppress_auth:
self.auth.check(event, auth_events=context.current_state)
+ if event.type == EventTypes.CanonicalAlias:
+            # Check the alias is actually valid (at this time at least)
+ room_alias_str = event.content.get("alias", None)
+ if room_alias_str:
+ room_alias = RoomAlias.from_string(room_alias_str)
+ directory_handler = self.hs.get_handlers().directory_handler
+ mapping = yield directory_handler.get_association(room_alias)
+
+ if mapping["room_id"] != event.room_id:
+ raise SynapseError(
+ 400,
+ "Room alias %s does not point to the room" % (
+ room_alias_str,
+ )
+ )
+
(event_stream_id, max_stream_id) = yield self.store.persist_event(
event, context=context
)
@@ -130,20 +146,19 @@ class BaseHandler(object):
returned_invite.signatures
)
- if event.type == EventTypes.CanonicalAlias:
- # Check the alias is acually valid (at this time at least)
- room_alias_str = event.content.get("alias", None)
- if room_alias_str:
- room_alias = RoomAlias.from_string(room_alias_str)
- directory_handler = self.hs.get_handlers().directory_handler
- mapping = yield directory_handler.get_association(room_alias)
-
- if mapping["room_id"] != event.room_id:
- raise SynapseError(
- 400,
- "Room alias %s does not point to the room" % (
- room_alias_str,
- )
+ if event.type == EventTypes.Redaction:
+ if self.auth.check_redaction(event, auth_events=context.current_state):
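+                # check_redaction returned True: the sender may only redact
+                # their own events, so fetch the original and compare senders.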
+ original_event = yield self.store.get_event(
+ event.redacts,
+ check_redacted=False,
+ get_prev_content=False,
+ allow_rejected=False,
+ allow_none=False
+ )
+ if event.user_id != original_event.user_id:
+ raise AuthError(
+ 403,
+ "You don't have permission to redact events"
)
destinations = set(extra_destinations)
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 1ab19cd1a6..59f687e0f1 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -324,7 +324,7 @@ class AuthHandler(BaseHandler):
def _check_password(self, user_id, password, stored_hash):
"""Checks that user_id has passed password, raises LoginError if not."""
- if not bcrypt.checkpw(password, stored_hash):
+ if not self.validate_hash(password, stored_hash):
logger.warn("Failed password login for user %s", user_id)
raise LoginError(403, "", errcode=Codes.FORBIDDEN)
@@ -369,7 +369,7 @@ class AuthHandler(BaseHandler):
@defer.inlineCallbacks
def set_password(self, user_id, newpassword):
- password_hash = bcrypt.hashpw(newpassword, bcrypt.gensalt())
+ password_hash = self.hash(newpassword)
yield self.store.user_set_password_hash(user_id, password_hash)
yield self.store.user_delete_access_tokens(user_id)
@@ -391,3 +391,26 @@ class AuthHandler(BaseHandler):
def _remove_session(self, session):
logger.debug("Removing session %s", session)
del self.sessions[session["id"]]
+
+ def hash(self, password):
+ """Computes a secure hash of password.
+
+ Args:
+ password (str): Password to hash.
+
+ Returns:
+ Hashed password (str).
+ """
+ return bcrypt.hashpw(password, bcrypt.gensalt())
+
+ def validate_hash(self, password, stored_hash):
+ """Validates that self.hash(password) == stored_hash.
+
+ Args:
+ password (str): Password to hash.
+ stored_hash (str): Expected hash value.
+
+ Returns:
+ Whether self.hash(password) == stored_hash (bool).
+ """
+ return bcrypt.checkpw(password, stored_hash)
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index f9ca2f8634..891502c04f 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -49,7 +49,12 @@ class EventStreamHandler(BaseHandler):
@defer.inlineCallbacks
@log_function
def get_stream(self, auth_user_id, pagin_config, timeout=0,
- as_client_event=True, affect_presence=True):
+ as_client_event=True, affect_presence=True,
+ only_room_events=False):
+ """Fetches the events stream for a given user.
+
+        If `only_room_events` is `True`, only room events will be returned.
+ """
auth_user = UserID.from_string(auth_user_id)
try:
@@ -89,7 +94,8 @@ class EventStreamHandler(BaseHandler):
timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
events, tokens = yield self.notifier.get_events_for(
- auth_user, room_ids, pagin_config, timeout
+ auth_user, room_ids, pagin_config, timeout,
+ only_room_events=only_room_events
)
time_now = self.clock.time_msec()
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 56d125f753..ef4081e3fe 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -25,7 +25,6 @@ import synapse.util.stringutils as stringutils
from synapse.util.async import run_on_reactor
from synapse.http.client import CaptchaServerHttpClient
-import bcrypt
import logging
import urllib
@@ -82,7 +81,7 @@ class RegistrationHandler(BaseHandler):
yield run_on_reactor()
password_hash = None
if password:
- password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
+ password_hash = self.auth_handler().hash(password)
if localpart:
yield self.check_username(localpart)
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index 026bd2b9d4..d7096aab8c 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -204,15 +204,11 @@ class TypingNotificationHandler(BaseHandler):
)
def _push_update_local(self, room_id, user, typing):
- if room_id not in self._room_serials:
- self._room_serials[room_id] = 0
- self._room_typing[room_id] = set()
-
- room_set = self._room_typing[room_id]
+ room_set = self._room_typing.setdefault(room_id, set())
if typing:
room_set.add(user)
- elif user in room_set:
- room_set.remove(user)
+ else:
+ room_set.discard(user)
self._latest_room_serial += 1
self._room_serials[room_id] = self._latest_room_serial
@@ -260,8 +256,8 @@ class TypingNotificationEventSource(object):
)
events = []
- for room_id in handler._room_serials:
- if room_id not in joined_room_ids:
+ for room_id in joined_room_ids:
+ if room_id not in handler._room_serials:
continue
if handler._room_serials[room_id] <= from_key:
continue
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index d7bcad8a8a..943d637459 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -17,7 +17,7 @@
from __future__ import absolute_import
import logging
-from resource import getrusage, getpagesize, RUSAGE_SELF
+from resource import getrusage, RUSAGE_SELF
import functools
import os
import stat
@@ -100,7 +100,6 @@ def render_all():
# process resource usage
rusage = None
-PAGE_SIZE = getpagesize()
def update_resource_metrics():
@@ -113,8 +112,8 @@ resource_metrics = get_metrics_for("process.resource")
resource_metrics.register_callback("utime", lambda: rusage.ru_utime * 1000)
resource_metrics.register_callback("stime", lambda: rusage.ru_stime * 1000)
-# pages
-resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * PAGE_SIZE)
+# kilobytes
+resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * 1024)
TYPES = {
stat.S_IFSOCK: "SOCK",
@@ -131,6 +130,10 @@ def _process_fds():
counts = {(k,): 0 for k in TYPES.values()}
counts[("other",)] = 0
+ # Not every OS will have a /proc/self/fd directory
+ if not os.path.exists("/proc/self/fd"):
+ return counts
+
for fd in os.listdir("/proc/self/fd"):
try:
s = os.stat("/proc/self/fd/%s" % (fd))
diff --git a/synapse/notifier.py b/synapse/notifier.py
index dbd8efe9fb..f998fc83bf 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -328,10 +328,13 @@ class Notifier(object):
defer.returnValue(result)
@defer.inlineCallbacks
- def get_events_for(self, user, rooms, pagination_config, timeout):
+ def get_events_for(self, user, rooms, pagination_config, timeout,
+ only_room_events=False):
""" For the given user and rooms, return any new events for them. If
there are no new events wait for up to `timeout` milliseconds for any
new events to happen before returning.
+
+        If `only_room_events` is `True`, only room events will be returned.
"""
from_token = pagination_config.from_token
if not from_token:
@@ -352,6 +355,8 @@ class Notifier(object):
after_id = getattr(after_token, keyname)
if before_id == after_id:
continue
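+            # Skip non-room event sources (e.g. presence, typing) when only
+            # room events were requested.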
+ if only_room_events and name != "room":
+ continue
new_events, new_key = yield source.get_new_events_for_user(
user, getattr(from_token, keyname), limit,
)
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
index 13002e0db4..f1952b5a0f 100644
--- a/synapse/push/__init__.py
+++ b/synapse/push/__init__.py
@@ -249,7 +249,9 @@ class Pusher(object):
# we fail to dispatch the push)
config = PaginationConfig(from_token=None, limit='1')
chunk = yield self.evStreamHandler.get_stream(
- self.user_name, config, timeout=0)
+ self.user_name, config, timeout=0, affect_presence=False,
+ only_room_events=True
+ )
self.last_token = chunk['end']
self.store.update_pusher_last_token(
self.app_id, self.pushkey, self.user_name, self.last_token
@@ -280,8 +282,8 @@ class Pusher(object):
config = PaginationConfig(from_token=from_tok, limit='1')
timeout = (300 + random.randint(-60, 60)) * 1000
chunk = yield self.evStreamHandler.get_stream(
- self.user_name, config,
- timeout=timeout, affect_presence=False
+ self.user_name, config, timeout=timeout, affect_presence=False,
+ only_room_events=True
)
# limiting to 1 may get 1 event plus 1 presence event, so
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 4c07aa07aa..795ef27182 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -46,8 +46,8 @@ CONDITIONAL_REQUIREMENTS = {
def requirements(config=None, include_conditional=False):
reqs = REQUIREMENTS.copy()
- for key, req in CONDITIONAL_REQUIREMENTS.items():
- if (config and getattr(config, key)) or include_conditional:
+ if include_conditional:
+ for _, req in CONDITIONAL_REQUIREMENTS.items():
reqs.update(req)
return reqs
@@ -55,13 +55,8 @@ def requirements(config=None, include_conditional=False):
def github_link(project, version, egg):
return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)
-DEPENDENCY_LINKS = [
- github_link(
- project="matrix-org/matrix-angular-sdk",
- version="v0.6.6",
- egg="matrix_angular_sdk-0.6.6",
- ),
-]
+DEPENDENCY_LINKS = {
+}
class MissingRequirementError(Exception):
@@ -129,7 +124,7 @@ def check_requirements(config=None):
def list_requirements():
result = []
linked = []
- for link in DEPENDENCY_LINKS:
+ for link in DEPENDENCY_LINKS.values():
egg = link.split("#egg=")[1]
linked.append(egg.split('-')[0])
result.append(link)
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index b5edffdb60..4692ba413c 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -96,6 +96,7 @@ class ThreepidRestServlet(RestServlet):
self.hs = hs
self.identity_handler = hs.get_handlers().identity_handler
self.auth = hs.get_auth()
+ self.auth_handler = hs.get_handlers().auth_handler
@defer.inlineCallbacks
def on_GET(self, request):
diff --git a/synapse/rest/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py
index 4e21527c3d..b2aeb8c909 100644
--- a/synapse/rest/media/v1/base_resource.py
+++ b/synapse/rest/media/v1/base_resource.py
@@ -33,6 +33,8 @@ import os
import cgi
import logging
+import urllib
+import urlparse
logger = logging.getLogger(__name__)
@@ -42,10 +44,13 @@ def parse_media_id(request):
# This allows users to append e.g. /test.png to the URL. Useful for
# clients that parse the URL to see content type.
server_name, media_id = request.postpath[:2]
- if len(request.postpath) > 2 and is_ascii(request.postpath[-1]):
- return server_name, media_id, request.postpath[-1]
- else:
- return server_name, media_id, None
+ file_name = None
+ if len(request.postpath) > 2:
+ try:
+ file_name = urlparse.unquote(request.postpath[-1]).decode("utf-8")
+ except UnicodeDecodeError:
+ pass
+ return server_name, media_id, file_name
except:
raise SynapseError(
404,
@@ -140,9 +145,26 @@ class BaseMediaResource(Resource):
content_disposition = headers.get("Content-Disposition", None)
if content_disposition:
_, params = cgi.parse_header(content_disposition[0],)
- upload_name = params.get("filename", None)
- if upload_name and not is_ascii(upload_name):
- upload_name = None
+ upload_name = None
+
+ # First check if there is a valid UTF-8 filename
+ upload_name_utf8 = params.get("filename*", None)
+ if upload_name_utf8:
+ if upload_name_utf8.lower().startswith("utf-8''"):
+ upload_name = upload_name_utf8[7:]
+
+ # If there isn't check for an ascii name.
+ if not upload_name:
+ upload_name_ascii = params.get("filename", None)
+ if upload_name_ascii and is_ascii(upload_name_ascii):
+ upload_name = upload_name_ascii
+
+ if upload_name:
+ upload_name = urlparse.unquote(upload_name)
+ try:
+ upload_name = upload_name.decode("utf-8")
+ except UnicodeDecodeError:
+ upload_name = None
else:
upload_name = None
@@ -181,10 +203,20 @@ class BaseMediaResource(Resource):
if os.path.isfile(file_path):
request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
if upload_name:
- request.setHeader(
- b"Content-Disposition",
- b"inline; filename=%s" % (upload_name.encode("utf-8"),),
- )
+ if is_ascii(upload_name):
+ request.setHeader(
+ b"Content-Disposition",
+ b"inline; filename=%s" % (
+ urllib.quote(upload_name.encode("utf-8")),
+ ),
+ )
+ else:
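+                        # Non-ASCII names use the RFC 5987 filename* form.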
+ request.setHeader(
+ b"Content-Disposition",
+ b"inline; filename*=utf-8''%s" % (
+ urllib.quote(upload_name.encode("utf-8")),
+ ),
+ )
# cache for at least a day.
# XXX: we might want to turn this off for data we don't want to
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 439d5a30a8..6abaf56b25 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -15,7 +15,7 @@
from synapse.http.server import respond_with_json, request_handler
-from synapse.util.stringutils import random_string, is_ascii
+from synapse.util.stringutils import random_string
from synapse.api.errors import SynapseError
from twisted.web.server import NOT_DONE_YET
@@ -86,9 +86,13 @@ class UploadResource(BaseMediaResource):
upload_name = request.args.get("filename", None)
if upload_name:
- upload_name = upload_name[0]
- if upload_name and not is_ascii(upload_name):
- raise SynapseError(400, "filename must be ascii")
+ try:
+ upload_name = upload_name[0].decode('UTF-8')
+ except UnicodeDecodeError:
+ raise SynapseError(
+ msg="Invalid UTF-8 filename parameter: %r" % (upload_name),
+ code=400,
+ )
headers = request.requestHeaders
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index ce71389f02..495ef087c9 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -167,7 +167,7 @@ class SQLBaseStore(object):
self._get_event_cache = Cache("*getEvent*", keylen=3, lru=True,
max_entries=hs.config.event_cache_size)
- self._state_group_cache = DictionaryCache("*stateGroupCache*", 100000)
+ self._state_group_cache = DictionaryCache("*stateGroupCache*", 2000)
self._event_fetch_lock = threading.Condition()
self._event_fetch_list = []
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index bc90e17c63..989ad340b0 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -331,7 +331,10 @@ class EventFederationStore(SQLBaseStore):
txn.executemany(
query,
- [(ev.event_id, ev.room_id, ev.event_id) for ev in events]
+ [
+ (ev.event_id, ev.room_id, ev.event_id) for ev in events
+ if not ev.internal_metadata.is_outlier()
+ ]
)
query = (
@@ -358,7 +361,10 @@ class EventFederationStore(SQLBaseStore):
)
txn.executemany(
query,
- [(ev.event_id, ev.room_id) for ev in events]
+ [
+ (ev.event_id, ev.room_id) for ev in events
+ if not ev.internal_metadata.is_outlier()
+ ]
)
for room_id in events_by_room:
diff --git a/synapse/storage/schema/delta/23/drop_state_index.sql b/synapse/storage/schema/delta/23/drop_state_index.sql
new file mode 100644
index 0000000000..07d0ea5cb2
--- /dev/null
+++ b/synapse/storage/schema/delta/23/drop_state_index.sql
@@ -0,0 +1,16 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+DROP INDEX IF EXISTS state_groups_state_tuple;
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index c9110e6304..9630efcfcc 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -403,8 +403,15 @@ class StateStore(SQLBaseStore):
state_dict = results[group]
for event_id in state_ids:
- state_event = state_events[event_id]
- state_dict[(state_event.type, state_event.state_key)] = state_event
+ try:
+ state_event = state_events[event_id]
+ state_dict[(state_event.type, state_event.state_key)] = state_event
+ except KeyError:
+                # Hmm. So we don't have that state event? Interesting.
+ logger.warn(
+ "Can't find state event %r for state group %r",
+ event_id, group,
+ )
self._state_group_cache.update(
cache_seq_num,
diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py
index 7a1e96af37..f3a36340e4 100644
--- a/synapse/util/stringutils.py
+++ b/synapse/util/stringutils.py
@@ -38,6 +38,8 @@ def random_string_with_symbols(length):
def is_ascii(s):
try:
s.encode("ascii")
+ except UnicodeEncodeError:
+ return False
except UnicodeDecodeError:
return False
else:
|