diff --git a/.gitignore b/.gitignore
index 4c336b710d..f8c4000134 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,3 +43,6 @@ build/
localhost-800*/
static/client/register/register_config.js
.tox
+
+env/
+*.config
diff --git a/CHANGES.rst b/CHANGES.rst
index e6d1a37307..2ec10516fd 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,4 +1,54 @@
-Changes in synapse v0.10.0-rc1 (2015-08-20)
+Changes in synapse v0.10.0 (2015-09-03)
+=======================================
+
+No change from release candidate.
+
+Changes in synapse v0.10.0-rc6 (2015-09-02)
+===========================================
+
+* Remove some of the old database upgrade scripts.
+* Fix the database port script to work with newly-created SQLite databases.
+
+Changes in synapse v0.10.0-rc5 (2015-08-27)
+===========================================
+
+* Fix a bug that broke downloading files with ASCII filenames across federation.
+
+Changes in synapse v0.10.0-rc4 (2015-08-27)
+===========================================
+
+* Allow UTF-8 filenames for upload. (PR #259)
+
+Changes in synapse v0.10.0-rc3 (2015-08-25)
+===========================================
+
+* Add ``--keys-directory`` config option to specify where files such as
+  certs and signing keys should be stored, when using ``--generate-config``
+ or ``--generate-keys``. (PR #250)
+* Allow ``--config-path`` to specify a directory, causing synapse to use all
+ \*.yaml files in the directory as config files. (PR #249)
+* Add ``web_client_location`` config option to specify static files to be
+ hosted by synapse under ``/_matrix/client``. (PR #245)
+* Add helper utility to synapse to read and parse the config files and extract
+ the value of a given key. For example::
+
+ $ python -m synapse.config read server_name -c homeserver.yaml
+ localhost
+
+ (PR #246)
+
+
+Changes in synapse v0.10.0-rc2 (2015-08-24)
+===========================================
+
+* Fix a bug where we incorrectly populated the ``event_forward_extremities``
+  table, resulting in problems joining large remote rooms (e.g.
+  ``#matrix:matrix.org``).
+* Reduce the number of times we wake up pushers by not listening for presence
+ or typing events, reducing the CPU cost of each pusher.
+
+
+Changes in synapse v0.10.0-rc1 (2015-08-21)
===========================================
Also see v0.9.4-rc1 changelog, which has been amalgamated into this release.
@@ -10,6 +60,9 @@ General:
(PR #208)
* Add support for logging in with email address (PR #234)
* Add support for new ``m.room.canonical_alias`` event. (PR #233)
+* Change synapse to treat user IDs case-insensitively during registration and
+  login. (If two users already exist whose user IDs differ only by case,
+  synapse will continue to require them to specify their user IDs exactly;
+  a sketch of this matching appears below.)
* Error if a user tries to register with an email already in use. (PR #211)
* Add extra and improve existing caches (PR #212, #219, #226, #228)
 * Batch various storage requests (PR #226, #228)
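
As a minimal sketch of the case-insensitive user ID matching described above
(not Synapse's actual lookup code; names are assumed): an exact match always
wins, a unique case-insensitive match is accepted, and ambiguous matches fall
back to requiring the exact ID::

    def find_user(known_user_ids, requested):
        # Exact matches always win.
        if requested in known_user_ids:
            return requested
        # Accept a case-insensitive match only if it is unambiguous.
        matches = [u for u in known_user_ids
                   if u.lower() == requested.lower()]
        return matches[0] if len(matches) == 1 else None
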
diff --git a/README.rst b/README.rst
index d8d179135b..6c8431aa86 100644
--- a/README.rst
+++ b/README.rst
@@ -94,6 +94,7 @@ Synapse is the reference python/twisted Matrix homeserver implementation.
System requirements:
- POSIX-compliant system (tested on Linux & OS X)
- Python 2.7
+- At least 512 MB RAM
 Synapse is written in Python but some of the libraries it uses are written in
C. So before we can install synapse itself we need a working C compiler and the
@@ -120,6 +121,7 @@ To install the synapse homeserver run::
virtualenv -p python2.7 ~/.synapse
source ~/.synapse/bin/activate
+ pip install --upgrade setuptools
pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
This installs synapse, along with the libraries it uses, into a virtual
@@ -284,6 +286,11 @@ may need to manually upgrade it::
sudo pip install --upgrade pip
+Installation may fail with the error ``mock requires setuptools>=17.1. Aborting installation``.
+You can fix this by upgrading setuptools::
+
+ pip install --upgrade setuptools
+
 If pip crashes mid-installation for any reason (e.g. a lost terminal), pip may
refuse to run until you remove the temporary installation directory it
created. To reset the installation::
diff --git a/docs/postgres.rst b/docs/postgres.rst
index 19d8391115..b5027fefb0 100644
--- a/docs/postgres.rst
+++ b/docs/postgres.rst
@@ -55,9 +55,8 @@ Porting from SQLite
Overview
~~~~~~~~
-The script ``port_from_sqlite_to_postgres.py`` allows porting an existing
-synapse server backed by SQLite to using PostgreSQL. This is done in as a two
-phase process:
+The script ``synapse_port_db`` allows porting an existing synapse server
+backed by SQLite to PostgreSQL. This is done as a two-phase process:
1. Copy the existing SQLite database to a separate location (while the server
    is down) and run the port script against that offline database.
@@ -86,8 +85,7 @@ Assuming your new config file (as described in the section *Synapse config*)
is named ``homeserver-postgres.yaml`` and the SQLite snapshot is at
``homeserver.db.snapshot`` then simply run::
- python scripts/port_from_sqlite_to_postgres.py \
- --sqlite-database homeserver.db.snapshot \
+ synapse_port_db --sqlite-database homeserver.db.snapshot \
--postgres-config homeserver-postgres.yaml
The flag ``--curses`` displays a coloured curses progress UI.
@@ -100,8 +98,7 @@ To complete the conversion shut down the synapse server and run the port
script one last time, e.g. if the SQLite database is at ``homeserver.db``
run::
- python scripts/port_from_sqlite_to_postgres.py \
- --sqlite-database homeserver.db \
+ synapse_port_db --sqlite-database homeserver.db \
--postgres-config database_config.yaml
Once that has completed, change the synapse config to point at the PostgreSQL
diff --git a/scripts-dev/check_auth.py b/scripts-dev/check_auth.py
index b889ac7fa7..4fa8792a5f 100644
--- a/scripts-dev/check_auth.py
+++ b/scripts-dev/check_auth.py
@@ -56,10 +56,9 @@ if __name__ == '__main__':
js = json.load(args.json)
-
auth = Auth(Mock())
check_auth(
auth,
[FrozenEvent(d) for d in js["auth_chain"]],
- [FrozenEvent(d) for d in js["pdus"]],
+ [FrozenEvent(d) for d in js.get("pdus", [])],
)
diff --git a/scripts/database-prepare-for-0.0.1.sh b/scripts/database-prepare-for-0.0.1.sh
deleted file mode 100755
index 43d759a5cd..0000000000
--- a/scripts/database-prepare-for-0.0.1.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-# This is will prepare a synapse database for running with v0.0.1 of synapse.
-# It will store all the user information, but will *delete* all messages and
-# room data.
-
-set -e
-
-cp "$1" "$1.bak"
-
-DUMP=$(sqlite3 "$1" << 'EOF'
-.dump users
-.dump access_tokens
-.dump presence
-.dump profiles
-EOF
-)
-
-rm "$1"
-
-sqlite3 "$1" <<< "$DUMP"
diff --git a/scripts/database-prepare-for-0.5.0.sh b/scripts/database-prepare-for-0.5.0.sh
deleted file mode 100755
index e824cb583e..0000000000
--- a/scripts/database-prepare-for-0.5.0.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-# This is will prepare a synapse database for running with v0.5.0 of synapse.
-# It will store all the user information, but will *delete* all messages and
-# room data.
-
-set -e
-
-cp "$1" "$1.bak"
-
-DUMP=$(sqlite3 "$1" << 'EOF'
-.dump users
-.dump access_tokens
-.dump presence
-.dump profiles
-EOF
-)
-
-rm "$1"
-
-sqlite3 "$1" <<< "$DUMP"
diff --git a/scripts/port_from_sqlite_to_postgres.py b/scripts/synapse_port_db
index e7ed4c309b..6aba72e459 100755
--- a/scripts/port_from_sqlite_to_postgres.py
+++ b/scripts/synapse_port_db
@@ -29,7 +29,7 @@ import traceback
import yaml
-logger = logging.getLogger("port_from_sqlite_to_postgres")
+logger = logging.getLogger("synapse_port_db")
BOOLEAN_COLUMNS = {
@@ -412,14 +412,17 @@ class Porter(object):
self._convert_rows("sent_transactions", headers, rows)
inserted_rows = len(rows)
- max_inserted_rowid = max(r[0] for r in rows)
+ if inserted_rows:
+ max_inserted_rowid = max(r[0] for r in rows)
- def insert(txn):
- self.postgres_store.insert_many_txn(
- txn, "sent_transactions", headers[1:], rows
- )
+ def insert(txn):
+ self.postgres_store.insert_many_txn(
+ txn, "sent_transactions", headers[1:], rows
+ )
- yield self.postgres_store.execute(insert)
+ yield self.postgres_store.execute(insert)
+ else:
+ max_inserted_rowid = 0
def get_start_id(txn):
txn.execute(
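
The guard added above avoids calling ``max()`` on an empty sequence, which
raises ``ValueError`` when a table has no rows to port. A hedged one-line
equivalent of the same fix::

    max_inserted_rowid = max(r[0] for r in rows) if rows else 0
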
diff --git a/scripts/upgrade_db_to_v0.6.0.py b/scripts/upgrade_db_to_v0.6.0.py
deleted file mode 100755
index cd4be28b86..0000000000
--- a/scripts/upgrade_db_to_v0.6.0.py
+++ /dev/null
@@ -1,326 +0,0 @@
-#!/usr/bin/env python
-from synapse.storage import SCHEMA_VERSION, read_schema
-from synapse.storage._base import SQLBaseStore
-from synapse.storage.signatures import SignatureStore
-from synapse.storage.event_federation import EventFederationStore
-
-from unpaddedbase64 import encode_base64, decode_base64
-
-from synapse.crypto.event_signing import compute_event_signature
-
-from synapse.events.builder import EventBuilder
-from synapse.events.utils import prune_event
-
-from synapse.crypto.event_signing import check_event_content_hash
-
-from signedjson.sign import verify_signed_json, SignatureVerifyException
-from signedjson.key import decode_verify_key_bytes
-
-from canonicaljson import encode_canonical_json
-
-import argparse
-# import dns.resolver
-import hashlib
-import httplib
-import json
-import sqlite3
-import urllib2
-
-
-delta_sql = """
-CREATE TABLE IF NOT EXISTS event_json(
- event_id TEXT NOT NULL,
- room_id TEXT NOT NULL,
- internal_metadata NOT NULL,
- json BLOB NOT NULL,
- CONSTRAINT ev_j_uniq UNIQUE (event_id)
-);
-
-CREATE INDEX IF NOT EXISTS event_json_id ON event_json(event_id);
-CREATE INDEX IF NOT EXISTS event_json_room_id ON event_json(room_id);
-
-PRAGMA user_version = 10;
-"""
-
-
-class Store(object):
- _get_event_signatures_txn = SignatureStore.__dict__["_get_event_signatures_txn"]
- _get_event_content_hashes_txn = SignatureStore.__dict__["_get_event_content_hashes_txn"]
- _get_event_reference_hashes_txn = SignatureStore.__dict__["_get_event_reference_hashes_txn"]
- _get_prev_event_hashes_txn = SignatureStore.__dict__["_get_prev_event_hashes_txn"]
- _get_prev_events_and_state = EventFederationStore.__dict__["_get_prev_events_and_state"]
- _get_auth_events = EventFederationStore.__dict__["_get_auth_events"]
- cursor_to_dict = SQLBaseStore.__dict__["cursor_to_dict"]
- _simple_select_onecol_txn = SQLBaseStore.__dict__["_simple_select_onecol_txn"]
- _simple_select_list_txn = SQLBaseStore.__dict__["_simple_select_list_txn"]
- _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
-
- def _generate_event_json(self, txn, rows):
- events = []
- for row in rows:
- d = dict(row)
-
- d.pop("stream_ordering", None)
- d.pop("topological_ordering", None)
- d.pop("processed", None)
-
- if "origin_server_ts" not in d:
- d["origin_server_ts"] = d.pop("ts", 0)
- else:
- d.pop("ts", 0)
-
- d.pop("prev_state", None)
- d.update(json.loads(d.pop("unrecognized_keys")))
-
- d["sender"] = d.pop("user_id")
-
- d["content"] = json.loads(d["content"])
-
- if "age_ts" not in d:
- # For compatibility
- d["age_ts"] = d.get("origin_server_ts", 0)
-
- d.setdefault("unsigned", {})["age_ts"] = d.pop("age_ts")
-
- outlier = d.pop("outlier", False)
-
- # d.pop("membership", None)
-
- d.pop("state_hash", None)
-
- d.pop("replaces_state", None)
-
- b = EventBuilder(d)
- b.internal_metadata.outlier = outlier
-
- events.append(b)
-
- for i, ev in enumerate(events):
- signatures = self._get_event_signatures_txn(
- txn, ev.event_id,
- )
-
- ev.signatures = {
- n: {
- k: encode_base64(v) for k, v in s.items()
- }
- for n, s in signatures.items()
- }
-
- hashes = self._get_event_content_hashes_txn(
- txn, ev.event_id,
- )
-
- ev.hashes = {
- k: encode_base64(v) for k, v in hashes.items()
- }
-
- prevs = self._get_prev_events_and_state(txn, ev.event_id)
-
- ev.prev_events = [
- (e_id, h)
- for e_id, h, is_state in prevs
- if is_state == 0
- ]
-
- # ev.auth_events = self._get_auth_events(txn, ev.event_id)
-
- hashes = dict(ev.auth_events)
-
- for e_id, hash in ev.prev_events:
- if e_id in hashes and not hash:
- hash.update(hashes[e_id])
- #
- # if hasattr(ev, "state_key"):
- # ev.prev_state = [
- # (e_id, h)
- # for e_id, h, is_state in prevs
- # if is_state == 1
- # ]
-
- return [e.build() for e in events]
-
-
-store = Store()
-
-
-# def get_key(server_name):
-# print "Getting keys for: %s" % (server_name,)
-# targets = []
-# if ":" in server_name:
-# target, port = server_name.split(":")
-# targets.append((target, int(port)))
-# try:
-# answers = dns.resolver.query("_matrix._tcp." + server_name, "SRV")
-# for srv in answers:
-# targets.append((srv.target, srv.port))
-# except dns.resolver.NXDOMAIN:
-# targets.append((server_name, 8448))
-# except:
-# print "Failed to lookup keys for %s" % (server_name,)
-# return {}
-#
-# for target, port in targets:
-# url = "https://%s:%i/_matrix/key/v1" % (target, port)
-# try:
-# keys = json.load(urllib2.urlopen(url, timeout=2))
-# verify_keys = {}
-# for key_id, key_base64 in keys["verify_keys"].items():
-# verify_key = decode_verify_key_bytes(
-# key_id, decode_base64(key_base64)
-# )
-# verify_signed_json(keys, server_name, verify_key)
-# verify_keys[key_id] = verify_key
-# print "Got keys for: %s" % (server_name,)
-# return verify_keys
-# except urllib2.URLError:
-# pass
-# except urllib2.HTTPError:
-# pass
-# except httplib.HTTPException:
-# pass
-#
-# print "Failed to get keys for %s" % (server_name,)
-# return {}
-
-
-def reinsert_events(cursor, server_name, signing_key):
- print "Running delta: v10"
-
- cursor.executescript(delta_sql)
-
- cursor.execute(
- "SELECT * FROM events ORDER BY rowid ASC"
- )
-
- print "Getting events..."
-
- rows = store.cursor_to_dict(cursor)
-
- events = store._generate_event_json(cursor, rows)
-
- print "Got events from DB."
-
- algorithms = {
- "sha256": hashlib.sha256,
- }
-
- key_id = "%s:%s" % (signing_key.alg, signing_key.version)
- verify_key = signing_key.verify_key
- verify_key.alg = signing_key.alg
- verify_key.version = signing_key.version
-
- server_keys = {
- server_name: {
- key_id: verify_key
- }
- }
-
- i = 0
- N = len(events)
-
- for event in events:
- if i % 100 == 0:
- print "Processed: %d/%d events" % (i,N,)
- i += 1
-
- # for alg_name in event.hashes:
- # if check_event_content_hash(event, algorithms[alg_name]):
- # pass
- # else:
- # pass
- # print "FAIL content hash %s %s" % (alg_name, event.event_id, )
-
- have_own_correctly_signed = False
- for host, sigs in event.signatures.items():
- pruned = prune_event(event)
-
- for key_id in sigs:
- if host not in server_keys:
- server_keys[host] = {} # get_key(host)
- if key_id in server_keys[host]:
- try:
- verify_signed_json(
- pruned.get_pdu_json(),
- host,
- server_keys[host][key_id]
- )
-
- if host == server_name:
- have_own_correctly_signed = True
- except SignatureVerifyException:
- print "FAIL signature check %s %s" % (
- key_id, event.event_id
- )
-
- # TODO: Re sign with our own server key
- if not have_own_correctly_signed:
- sigs = compute_event_signature(event, server_name, signing_key)
- event.signatures.update(sigs)
-
- pruned = prune_event(event)
-
- for key_id in event.signatures[server_name]:
- verify_signed_json(
- pruned.get_pdu_json(),
- server_name,
- server_keys[server_name][key_id]
- )
-
- event_json = encode_canonical_json(
- event.get_dict()
- ).decode("UTF-8")
-
- metadata_json = encode_canonical_json(
- event.internal_metadata.get_dict()
- ).decode("UTF-8")
-
- store._simple_insert_txn(
- cursor,
- table="event_json",
- values={
- "event_id": event.event_id,
- "room_id": event.room_id,
- "internal_metadata": metadata_json,
- "json": event_json,
- },
- or_replace=True,
- )
-
-
-def main(database, server_name, signing_key):
- conn = sqlite3.connect(database)
- cursor = conn.cursor()
-
- # Do other deltas:
- cursor.execute("PRAGMA user_version")
- row = cursor.fetchone()
-
- if row and row[0]:
- user_version = row[0]
- # Run every version since after the current version.
- for v in range(user_version + 1, 10):
- print "Running delta: %d" % (v,)
- sql_script = read_schema("delta/v%d" % (v,))
- cursor.executescript(sql_script)
-
- reinsert_events(cursor, server_name, signing_key)
-
- conn.commit()
-
- print "Success!"
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
-
- parser.add_argument("database")
- parser.add_argument("server_name")
- parser.add_argument(
- "signing_key", type=argparse.FileType('r'),
- )
- args = parser.parse_args()
-
- signing_key = signedjson.key.read_signing_keys(args.signing_key)
-
- main(args.database, args.server_name, signing_key[0])
diff --git a/setup.cfg b/setup.cfg
index abb649958e..ba027c7d13 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,9 +3,6 @@ source-dir = docs/sphinx
build-dir = docs/build
all_files = 1
-[aliases]
-test = trial
-
[trial]
test_suite = tests
diff --git a/setup.py b/setup.py
index 8ad20df7cb..9d24761d44 100755
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,8 @@
import glob
import os
-from setuptools import setup, find_packages
+from setuptools import setup, find_packages, Command
+import sys
here = os.path.abspath(os.path.dirname(__file__))
@@ -37,6 +38,39 @@ def exec_file(path_segments):
exec(code, result)
return result
+
+class Tox(Command):
+ user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
+
+ def initialize_options(self):
+ self.tox_args = None
+
+ def finalize_options(self):
+ self.test_args = []
+ self.test_suite = True
+
+ def run(self):
+        # import here, because outside the eggs aren't loaded
+ try:
+ import tox
+ except ImportError:
+ try:
+ self.distribution.fetch_build_eggs("tox")
+ import tox
+            except Exception:
+ raise RuntimeError(
+ "The tests need 'tox' to run. Please install 'tox'."
+ )
+ import shlex
+ args = self.tox_args
+ if args:
+ args = shlex.split(self.tox_args)
+ else:
+ args = []
+ errno = tox.cmdline(args=args)
+ sys.exit(errno)
+
+
version = exec_file(("synapse", "__init__.py"))["__version__"]
dependencies = exec_file(("synapse", "python_dependencies.py"))
long_description = read_file(("README.rst",))
@@ -47,9 +81,10 @@ setup(
packages=find_packages(exclude=["tests", "tests.*"]),
description="Reference Synapse Home Server",
install_requires=dependencies['requirements'](include_conditional=True).keys(),
- dependency_links=dependencies["DEPENDENCY_LINKS"],
+ dependency_links=dependencies["DEPENDENCY_LINKS"].values(),
include_package_data=True,
zip_safe=False,
long_description=long_description,
scripts=["synctl"] + glob.glob("scripts/*"),
+ cmdclass={'test': Tox},
)
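
With the ``Tox`` command registered under ``cmdclass`` above, the test suite
would typically be invoked through setuptools; for example (the ``-e py27``
environment name is illustrative)::

    $ python setup.py test
    $ python setup.py test --tox-args="-e py27"
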
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 5853165a21..d85bb3dce0 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
""" This is a reference implementation of a Matrix home server.
"""
-__version__ = "0.10.0-rc1"
+__version__ = "0.10.0"
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 0a77a76cb8..df788230fa 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -20,7 +20,7 @@ from twisted.internet import defer
from synapse.api.constants import EventTypes, Membership, JoinRules
from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.util.logutils import log_function
-from synapse.types import UserID
+from synapse.types import UserID, EventID
import logging
import pymacaroons
@@ -72,6 +72,14 @@ class Auth(object):
# FIXME
return True
+ creation_event = auth_events.get((EventTypes.Create, ""), None)
+
+ if not creation_event:
+ raise SynapseError(
+ 403,
+ "Room %r does not exist" % (event.room_id,)
+ )
+
# FIXME: Temp hack
if event.type == EventTypes.Aliases:
return True
@@ -98,7 +106,7 @@ class Auth(object):
self._check_power_levels(event, auth_events)
if event.type == EventTypes.Redaction:
- self._check_redaction(event, auth_events)
+ self.check_redaction(event, auth_events)
logger.debug("Allowing! %s", event)
except AuthError as e:
@@ -627,16 +635,35 @@ class Auth(object):
return True
- def _check_redaction(self, event, auth_events):
+ def check_redaction(self, event, auth_events):
+ """Check whether the event sender is allowed to redact the target event.
+
+ Returns:
+            True if the sender is allowed to redact the target event, but only
+            when the target event was created by them (callers must check this).
+ False if the sender is allowed to redact the target event with no
+ further checks.
+
+ Raises:
+ AuthError if the event sender is definitely not allowed to redact
+ the target event.
+ """
user_level = self._get_user_power_level(event.user_id, auth_events)
redact_level = self._get_named_level(auth_events, "redact", 50)
- if user_level < redact_level:
- raise AuthError(
- 403,
- "You don't have permission to redact events"
- )
+ if user_level > redact_level:
+ return False
+
+ redacter_domain = EventID.from_string(event.event_id).domain
+ redactee_domain = EventID.from_string(event.redacts).domain
+ if redacter_domain == redactee_domain:
+ return True
+
+ raise AuthError(
+ 403,
+ "You don't have permission to redact events"
+ )
def _check_power_levels(self, event, auth_events):
user_list = event.content.get("users", {})
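
The tri-state contract of ``check_redaction`` above (``False``: allowed
outright; ``True``: allowed only for the sender's own events; otherwise an
``AuthError`` is raised) is consumed along these lines. A hedged sketch,
where ``original_sender`` is assumed to be the sender of the event being
redacted::

    if auth.check_redaction(event, auth_events):  # may raise AuthError
        # True: permitted only when redacting one's own event.
        allowed = event.user_id == original_sender
    else:
        # False: the sender's power level already permits the redaction.
        allowed = True
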
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index f04493f92a..c23f853230 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -16,7 +16,7 @@
import sys
sys.dont_write_bytecode = True
-from synapse.python_dependencies import check_requirements
+from synapse.python_dependencies import check_requirements, DEPENDENCY_LINKS
if __name__ == '__main__':
check_requirements()
@@ -97,9 +97,25 @@ class SynapseHomeServer(HomeServer):
return JsonResource(self)
def build_resource_for_web_client(self):
- import syweb
- syweb_path = os.path.dirname(syweb.__file__)
- webclient_path = os.path.join(syweb_path, "webclient")
+ webclient_path = self.get_config().web_client_location
+ if not webclient_path:
+ try:
+ import syweb
+ except ImportError:
+ quit_with_error(
+ "Could not find a webclient.\n\n"
+ "Please either install the matrix-angular-sdk or configure\n"
+ "the location of the source to serve via the configuration\n"
+ "option `web_client_location`\n\n"
+ "To install the `matrix-angular-sdk` via pip, run:\n\n"
+ " pip install '%(dep)s'\n"
+ "\n"
+ "You can also disable hosting of the webclient via the\n"
+ "configuration option `web_client`\n"
+ % {"dep": DEPENDENCY_LINKS["matrix-angular-sdk"]}
+ )
+ syweb_path = os.path.dirname(syweb.__file__)
+ webclient_path = os.path.join(syweb_path, "webclient")
# GZip is disabled here due to
# https://twistedmatrix.com/trac/ticket/7678
# (It can stay enabled for the API resources: they call
@@ -259,11 +275,10 @@ class SynapseHomeServer(HomeServer):
def quit_with_error(error_string):
message_lines = error_string.split("\n")
- line_length = max([len(l) for l in message_lines]) + 2
+    line_length = max([len(l) for l in message_lines if len(l) < 80] or [0]) + 2
sys.stderr.write("*" * line_length + '\n')
for line in message_lines:
- if line.strip():
- sys.stderr.write(" %s\n" % (line.strip(),))
+ sys.stderr.write(" %s\n" % (line.rstrip(),))
sys.stderr.write("*" * line_length + '\n')
sys.exit(1)
@@ -326,7 +341,7 @@ def get_version_string():
)
).encode("ascii")
except Exception as e:
- logger.warn("Failed to check for git repository: %s", e)
+ logger.info("Failed to check for git repository: %s", e)
return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
@@ -388,7 +403,7 @@ def setup(config_options):
database_engine=database_engine,
)
- logger.info("Preparing database: %r...", config.database_config)
+ logger.info("Preparing database: %s...", config.database_config['name'])
try:
db_conn = database_engine.module.connect(
@@ -410,7 +425,7 @@ def setup(config_options):
)
sys.exit(1)
- logger.info("Database prepared in %r.", config.database_config)
+ logger.info("Database prepared in %s.", config.database_config['name'])
hs.start_listening()
diff --git a/synapse/config/__main__.py b/synapse/config/__main__.py
new file mode 100644
index 0000000000..f822d12036
--- /dev/null
+++ b/synapse/config/__main__.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if __name__ == "__main__":
+ import sys
+ from homeserver import HomeServerConfig
+
+ action = sys.argv[1]
+
+ if action == "read":
+ key = sys.argv[2]
+ config = HomeServerConfig.load_config("", sys.argv[3:])
+
+ print getattr(config, key)
+ sys.exit(0)
+ else:
+ sys.stderr.write("Unknown command %r\n" % (action,))
+ sys.exit(1)
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index 73f6959959..8a75c48733 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -131,7 +131,8 @@ class Config(object):
"-c", "--config-path",
action="append",
metavar="CONFIG_FILE",
- help="Specify config file"
+ help="Specify config file. Can be given multiple times and"
+ " may specify directories containing *.yaml files."
)
config_parser.add_argument(
"--generate-config",
@@ -144,6 +145,13 @@ class Config(object):
help="Generate any missing key files then exit"
)
config_parser.add_argument(
+ "--keys-directory",
+ metavar="DIRECTORY",
+ help="Used with 'generate-*' options to specify where files such as"
+             " certs and signing keys should be stored, unless explicitly"
+ " specified in the config."
+ )
+ config_parser.add_argument(
"-H", "--server-name",
help="The server name to generate a config file for"
)
@@ -151,16 +159,48 @@ class Config(object):
generate_keys = config_args.generate_keys
+ config_files = []
+ if config_args.config_path:
+ for config_path in config_args.config_path:
+ if os.path.isdir(config_path):
+ # We accept specifying directories as config paths, we search
+ # inside that directory for all files matching *.yaml, and then
+ # we apply them in *sorted* order.
+ files = []
+ for entry in os.listdir(config_path):
+ entry_path = os.path.join(config_path, entry)
+ if not os.path.isfile(entry_path):
+ print (
+ "Found subdirectory in config directory: %r. IGNORING."
+ ) % (entry_path, )
+ continue
+
+ if not entry.endswith(".yaml"):
+ print (
+ "Found file in config directory that does not"
+ " end in '.yaml': %r. IGNORING."
+ ) % (entry_path, )
+ continue
+
+ files.append(entry_path)
+
+ config_files.extend(sorted(files))
+ else:
+ config_files.append(config_path)
+
if config_args.generate_config:
- if not config_args.config_path:
+ if not config_files:
config_parser.error(
"Must supply a config file.\nA config file can be automatically"
" generated using \"--generate-config -H SERVER_NAME"
" -c CONFIG-FILE\""
)
- (config_path,) = config_args.config_path
+ (config_path,) = config_files
if not os.path.exists(config_path):
- config_dir_path = os.path.dirname(config_path)
+ if config_args.keys_directory:
+ config_dir_path = config_args.keys_directory
+ else:
+ config_dir_path = os.path.dirname(config_path)
config_dir_path = os.path.abspath(config_dir_path)
server_name = config_args.server_name
@@ -202,19 +242,22 @@ class Config(object):
obj.invoke_all("add_arguments", parser)
args = parser.parse_args(remaining_args)
- if not config_args.config_path:
+ if not config_files:
config_parser.error(
"Must supply a config file.\nA config file can be automatically"
" generated using \"--generate-config -H SERVER_NAME"
" -c CONFIG-FILE\""
)
- config_dir_path = os.path.dirname(config_args.config_path[-1])
+ if config_args.keys_directory:
+ config_dir_path = config_args.keys_directory
+ else:
+ config_dir_path = os.path.dirname(config_args.config_path[-1])
config_dir_path = os.path.abspath(config_dir_path)
specified_config = {}
- for config_path in config_args.config_path:
- yaml_config = cls.read_config_file(config_path)
+ for config_file in config_files:
+ yaml_config = cls.read_config_file(config_file)
specified_config.update(yaml_config)
server_name = specified_config["server_name"]
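
A condensed sketch of the directory handling introduced above (the helper
name is assumed; the real code also prints a warning for each ignored
entry)::

    import os

    def yaml_files(config_dir):
        # Only regular files ending in .yaml count, applied in sorted order.
        paths = (os.path.join(config_dir, e) for e in os.listdir(config_dir))
        return sorted(p for p in paths
                      if os.path.isfile(p) and p.endswith(".yaml"))
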
diff --git a/synapse/config/server.py b/synapse/config/server.py
index f9a3b5f15b..a03e55c223 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -22,6 +22,7 @@ class ServerConfig(Config):
self.server_name = config["server_name"]
self.pid_file = self.abspath(config.get("pid_file"))
self.web_client = config["web_client"]
+ self.web_client_location = config.get("web_client_location", None)
self.soft_file_limit = config["soft_file_limit"]
self.daemonize = config.get("daemonize")
self.print_pidfile = config.get("print_pidfile")
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index a692cdbe55..e251ab6af3 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -162,7 +162,9 @@ class Keyring(object):
def remove_deferreds(res, server_name, group_id):
server_to_gids[server_name].discard(group_id)
if not server_to_gids[server_name]:
- server_to_deferred.pop(server_name).callback(None)
+ d = server_to_deferred.pop(server_name, None)
+ if d:
+ d.callback(None)
return res
for g_id, deferred in deferreds.items():
@@ -200,8 +202,15 @@ class Keyring(object):
else:
break
- for server_name, deferred in server_to_deferred:
- self.key_downloads[server_name] = ObservableDeferred(deferred)
+ for server_name, deferred in server_to_deferred.items():
+ d = ObservableDeferred(deferred)
+ self.key_downloads[server_name] = d
+
+ def rm(r, server_name):
+ self.key_downloads.pop(server_name, None)
+ return r
+
+ d.addBoth(rm, server_name)
def get_server_verify_keys(self, group_id_to_group, group_id_to_deferred):
"""Takes a dict of KeyGroups and tries to find at least one key for
@@ -220,9 +229,8 @@ class Keyring(object):
merged_results = {}
missing_keys = {
- group.server_name: key_id
+ group.server_name: set(group.key_ids)
for group in group_id_to_group.values()
- for key_id in group.key_ids
}
for fn in key_fetch_fns:
@@ -279,16 +287,15 @@ class Keyring(object):
def get_keys_from_store(self, server_name_and_key_ids):
res = yield defer.gatherResults(
[
- self.store.get_server_verify_keys(server_name, key_ids)
+ self.store.get_server_verify_keys(
+ server_name, key_ids
+ ).addCallback(lambda ks, server: (server, ks), server_name)
for server_name, key_ids in server_name_and_key_ids
],
consumeErrors=True,
).addErrback(unwrapFirstError)
- defer.returnValue(dict(zip(
- [server_name for server_name, _ in server_name_and_key_ids],
- res
- )))
+ defer.returnValue(dict(res))
@defer.inlineCallbacks
def get_keys_from_perspectives(self, server_name_and_key_ids):
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index e91f1129db..60ac6617ae 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -15,7 +15,7 @@
from twisted.internet import defer
-from synapse.api.errors import LimitExceededError, SynapseError
+from synapse.api.errors import LimitExceededError, SynapseError, AuthError
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.api.constants import Membership, EventTypes
from synapse.types import UserID, RoomAlias
@@ -107,6 +107,22 @@ class BaseHandler(object):
if not suppress_auth:
self.auth.check(event, auth_events=context.current_state)
+ if event.type == EventTypes.CanonicalAlias:
+            # Check the alias is actually valid (at this time at least)
+ room_alias_str = event.content.get("alias", None)
+ if room_alias_str:
+ room_alias = RoomAlias.from_string(room_alias_str)
+ directory_handler = self.hs.get_handlers().directory_handler
+ mapping = yield directory_handler.get_association(room_alias)
+
+ if mapping["room_id"] != event.room_id:
+ raise SynapseError(
+ 400,
+ "Room alias %s does not point to the room" % (
+ room_alias_str,
+ )
+ )
+
(event_stream_id, max_stream_id) = yield self.store.persist_event(
event, context=context
)
@@ -130,20 +146,19 @@ class BaseHandler(object):
returned_invite.signatures
)
- if event.type == EventTypes.CanonicalAlias:
- # Check the alias is acually valid (at this time at least)
- room_alias_str = event.content.get("alias", None)
- if room_alias_str:
- room_alias = RoomAlias.from_string(room_alias_str)
- directory_handler = self.hs.get_handlers().directory_handler
- mapping = yield directory_handler.get_association(room_alias)
-
- if mapping["room_id"] != event.room_id:
- raise SynapseError(
- 400,
- "Room alias %s does not point to the room" % (
- room_alias_str,
- )
+ if event.type == EventTypes.Redaction:
+ if self.auth.check_redaction(event, auth_events=context.current_state):
+ original_event = yield self.store.get_event(
+ event.redacts,
+ check_redacted=False,
+ get_prev_content=False,
+ allow_rejected=False,
+ allow_none=False
+ )
+ if event.user_id != original_event.user_id:
+ raise AuthError(
+ 403,
+ "You don't have permission to redact events"
)
destinations = set(extra_destinations)
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 1ab19cd1a6..59f687e0f1 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -324,7 +324,7 @@ class AuthHandler(BaseHandler):
def _check_password(self, user_id, password, stored_hash):
"""Checks that user_id has passed password, raises LoginError if not."""
- if not bcrypt.checkpw(password, stored_hash):
+ if not self.validate_hash(password, stored_hash):
logger.warn("Failed password login for user %s", user_id)
raise LoginError(403, "", errcode=Codes.FORBIDDEN)
@@ -369,7 +369,7 @@ class AuthHandler(BaseHandler):
@defer.inlineCallbacks
def set_password(self, user_id, newpassword):
- password_hash = bcrypt.hashpw(newpassword, bcrypt.gensalt())
+ password_hash = self.hash(newpassword)
yield self.store.user_set_password_hash(user_id, password_hash)
yield self.store.user_delete_access_tokens(user_id)
@@ -391,3 +391,26 @@ class AuthHandler(BaseHandler):
def _remove_session(self, session):
logger.debug("Removing session %s", session)
del self.sessions[session["id"]]
+
+ def hash(self, password):
+ """Computes a secure hash of password.
+
+ Args:
+ password (str): Password to hash.
+
+ Returns:
+ Hashed password (str).
+ """
+ return bcrypt.hashpw(password, bcrypt.gensalt())
+
+ def validate_hash(self, password, stored_hash):
+ """Validates that self.hash(password) == stored_hash.
+
+ Args:
+ password (str): Password to hash.
+ stored_hash (str): Expected hash value.
+
+ Returns:
+ Whether self.hash(password) == stored_hash (bool).
+ """
+ return bcrypt.checkpw(password, stored_hash)
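
A brief usage sketch of the two helpers above (``auth_handler`` is assumed to
be an ``AuthHandler`` instance). bcrypt embeds its salt in the hash, so
hashing the same password twice yields different strings that both
validate::

    h = auth_handler.hash("s3cret")
    assert auth_handler.validate_hash("s3cret", h)
    assert not auth_handler.validate_hash("wrong", h)
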
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index f9ca2f8634..891502c04f 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -49,7 +49,12 @@ class EventStreamHandler(BaseHandler):
@defer.inlineCallbacks
@log_function
def get_stream(self, auth_user_id, pagin_config, timeout=0,
- as_client_event=True, affect_presence=True):
+ as_client_event=True, affect_presence=True,
+ only_room_events=False):
+ """Fetches the events stream for a given user.
+
+        If `only_room_events` is `True`, only room events will be returned.
+ """
auth_user = UserID.from_string(auth_user_id)
try:
@@ -89,7 +94,8 @@ class EventStreamHandler(BaseHandler):
timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
events, tokens = yield self.notifier.get_events_for(
- auth_user, room_ids, pagin_config, timeout
+ auth_user, room_ids, pagin_config, timeout,
+ only_room_events=only_room_events
)
time_now = self.clock.time_msec()
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 56d125f753..ef4081e3fe 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -25,7 +25,6 @@ import synapse.util.stringutils as stringutils
from synapse.util.async import run_on_reactor
from synapse.http.client import CaptchaServerHttpClient
-import bcrypt
import logging
import urllib
@@ -82,7 +81,7 @@ class RegistrationHandler(BaseHandler):
yield run_on_reactor()
password_hash = None
if password:
- password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
+ password_hash = self.auth_handler().hash(password)
if localpart:
yield self.check_username(localpart)
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index 026bd2b9d4..d7096aab8c 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -204,15 +204,11 @@ class TypingNotificationHandler(BaseHandler):
)
def _push_update_local(self, room_id, user, typing):
- if room_id not in self._room_serials:
- self._room_serials[room_id] = 0
- self._room_typing[room_id] = set()
-
- room_set = self._room_typing[room_id]
+ room_set = self._room_typing.setdefault(room_id, set())
if typing:
room_set.add(user)
- elif user in room_set:
- room_set.remove(user)
+ else:
+ room_set.discard(user)
self._latest_room_serial += 1
self._room_serials[room_id] = self._latest_room_serial
@@ -260,8 +256,8 @@ class TypingNotificationEventSource(object):
)
events = []
- for room_id in handler._room_serials:
- if room_id not in joined_room_ids:
+ for room_id in joined_room_ids:
+ if room_id not in handler._room_serials:
continue
if handler._room_serials[room_id] <= from_key:
continue
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index d7bcad8a8a..943d637459 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -17,7 +17,7 @@
from __future__ import absolute_import
import logging
-from resource import getrusage, getpagesize, RUSAGE_SELF
+from resource import getrusage, RUSAGE_SELF
import functools
import os
import stat
@@ -100,7 +100,6 @@ def render_all():
# process resource usage
rusage = None
-PAGE_SIZE = getpagesize()
def update_resource_metrics():
@@ -113,8 +112,8 @@ resource_metrics = get_metrics_for("process.resource")
resource_metrics.register_callback("utime", lambda: rusage.ru_utime * 1000)
resource_metrics.register_callback("stime", lambda: rusage.ru_stime * 1000)
-# pages
-resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * PAGE_SIZE)
+# kilobytes
+resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * 1024)
TYPES = {
stat.S_IFSOCK: "SOCK",
@@ -131,6 +130,10 @@ def _process_fds():
counts = {(k,): 0 for k in TYPES.values()}
counts[("other",)] = 0
+ # Not every OS will have a /proc/self/fd directory
+ if not os.path.exists("/proc/self/fd"):
+ return counts
+
for fd in os.listdir("/proc/self/fd"):
try:
s = os.stat("/proc/self/fd/%s" % (fd))
diff --git a/synapse/notifier.py b/synapse/notifier.py
index dbd8efe9fb..f998fc83bf 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -328,10 +328,13 @@ class Notifier(object):
defer.returnValue(result)
@defer.inlineCallbacks
- def get_events_for(self, user, rooms, pagination_config, timeout):
+ def get_events_for(self, user, rooms, pagination_config, timeout,
+ only_room_events=False):
""" For the given user and rooms, return any new events for them. If
there are no new events wait for up to `timeout` milliseconds for any
new events to happen before returning.
+
+        If `only_room_events` is `True`, only room events will be returned.
"""
from_token = pagination_config.from_token
if not from_token:
@@ -352,6 +355,8 @@ class Notifier(object):
after_id = getattr(after_token, keyname)
if before_id == after_id:
continue
+ if only_room_events and name != "room":
+ continue
new_events, new_key = yield source.get_new_events_for_user(
user, getattr(from_token, keyname), limit,
)
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
index 13002e0db4..f1952b5a0f 100644
--- a/synapse/push/__init__.py
+++ b/synapse/push/__init__.py
@@ -249,7 +249,9 @@ class Pusher(object):
# we fail to dispatch the push)
config = PaginationConfig(from_token=None, limit='1')
chunk = yield self.evStreamHandler.get_stream(
- self.user_name, config, timeout=0)
+ self.user_name, config, timeout=0, affect_presence=False,
+ only_room_events=True
+ )
self.last_token = chunk['end']
self.store.update_pusher_last_token(
self.app_id, self.pushkey, self.user_name, self.last_token
@@ -280,8 +282,8 @@ class Pusher(object):
config = PaginationConfig(from_token=from_tok, limit='1')
timeout = (300 + random.randint(-60, 60)) * 1000
chunk = yield self.evStreamHandler.get_stream(
- self.user_name, config,
- timeout=timeout, affect_presence=False
+ self.user_name, config, timeout=timeout, affect_presence=False,
+ only_room_events=True
)
# limiting to 1 may get 1 event plus 1 presence event, so
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 4c07aa07aa..795ef27182 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -46,8 +46,8 @@ CONDITIONAL_REQUIREMENTS = {
def requirements(config=None, include_conditional=False):
reqs = REQUIREMENTS.copy()
- for key, req in CONDITIONAL_REQUIREMENTS.items():
- if (config and getattr(config, key)) or include_conditional:
+ if include_conditional:
+ for _, req in CONDITIONAL_REQUIREMENTS.items():
reqs.update(req)
return reqs
@@ -55,13 +55,8 @@ def requirements(config=None, include_conditional=False):
def github_link(project, version, egg):
return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)
-DEPENDENCY_LINKS = [
- github_link(
- project="matrix-org/matrix-angular-sdk",
- version="v0.6.6",
- egg="matrix_angular_sdk-0.6.6",
- ),
-]
+DEPENDENCY_LINKS = {
+}
class MissingRequirementError(Exception):
@@ -129,7 +124,7 @@ def check_requirements(config=None):
def list_requirements():
result = []
linked = []
- for link in DEPENDENCY_LINKS:
+ for link in DEPENDENCY_LINKS.values():
egg = link.split("#egg=")[1]
linked.append(egg.split('-')[0])
result.append(link)
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index b5edffdb60..4692ba413c 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -96,6 +96,7 @@ class ThreepidRestServlet(RestServlet):
self.hs = hs
self.identity_handler = hs.get_handlers().identity_handler
self.auth = hs.get_auth()
+ self.auth_handler = hs.get_handlers().auth_handler
@defer.inlineCallbacks
def on_GET(self, request):
diff --git a/synapse/rest/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py
index 4e21527c3d..b2aeb8c909 100644
--- a/synapse/rest/media/v1/base_resource.py
+++ b/synapse/rest/media/v1/base_resource.py
@@ -33,6 +33,8 @@ import os
import cgi
import logging
+import urllib
+import urlparse
logger = logging.getLogger(__name__)
@@ -42,10 +44,13 @@ def parse_media_id(request):
# This allows users to append e.g. /test.png to the URL. Useful for
# clients that parse the URL to see content type.
server_name, media_id = request.postpath[:2]
- if len(request.postpath) > 2 and is_ascii(request.postpath[-1]):
- return server_name, media_id, request.postpath[-1]
- else:
- return server_name, media_id, None
+ file_name = None
+ if len(request.postpath) > 2:
+ try:
+ file_name = urlparse.unquote(request.postpath[-1]).decode("utf-8")
+ except UnicodeDecodeError:
+ pass
+ return server_name, media_id, file_name
except:
raise SynapseError(
404,
@@ -140,9 +145,26 @@ class BaseMediaResource(Resource):
content_disposition = headers.get("Content-Disposition", None)
if content_disposition:
_, params = cgi.parse_header(content_disposition[0],)
- upload_name = params.get("filename", None)
- if upload_name and not is_ascii(upload_name):
- upload_name = None
+ upload_name = None
+
+ # First check if there is a valid UTF-8 filename
+ upload_name_utf8 = params.get("filename*", None)
+ if upload_name_utf8:
+ if upload_name_utf8.lower().startswith("utf-8''"):
+ upload_name = upload_name_utf8[7:]
+
+            # If there isn't one, check for an ASCII name.
+ if not upload_name:
+ upload_name_ascii = params.get("filename", None)
+ if upload_name_ascii and is_ascii(upload_name_ascii):
+ upload_name = upload_name_ascii
+
+ if upload_name:
+ upload_name = urlparse.unquote(upload_name)
+ try:
+ upload_name = upload_name.decode("utf-8")
+ except UnicodeDecodeError:
+ upload_name = None
else:
upload_name = None
@@ -181,10 +203,20 @@ class BaseMediaResource(Resource):
if os.path.isfile(file_path):
request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
if upload_name:
- request.setHeader(
- b"Content-Disposition",
- b"inline; filename=%s" % (upload_name.encode("utf-8"),),
- )
+ if is_ascii(upload_name):
+ request.setHeader(
+ b"Content-Disposition",
+ b"inline; filename=%s" % (
+ urllib.quote(upload_name.encode("utf-8")),
+ ),
+ )
+ else:
+ request.setHeader(
+ b"Content-Disposition",
+ b"inline; filename*=utf-8''%s" % (
+ urllib.quote(upload_name.encode("utf-8")),
+ ),
+ )
# cache for at least a day.
# XXX: we might want to turn this off for data we don't want to
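
A self-contained sketch of the Content-Disposition logic above (Python 2, to
match the codebase): a plain-ASCII name is sent as ``filename=``, anything
else is percent-encoded under the RFC 5987 ``filename*=utf-8''`` form::

    import urllib

    def disposition_for(upload_name):
        # upload_name is a unicode string, as produced by the parsing above.
        quoted = urllib.quote(upload_name.encode("utf-8"))
        try:
            upload_name.encode("ascii")
            return "inline; filename=%s" % (quoted,)
        except UnicodeEncodeError:
            return "inline; filename*=utf-8''%s" % (quoted,)
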
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 439d5a30a8..6abaf56b25 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -15,7 +15,7 @@
from synapse.http.server import respond_with_json, request_handler
-from synapse.util.stringutils import random_string, is_ascii
+from synapse.util.stringutils import random_string
from synapse.api.errors import SynapseError
from twisted.web.server import NOT_DONE_YET
@@ -86,9 +86,13 @@ class UploadResource(BaseMediaResource):
upload_name = request.args.get("filename", None)
if upload_name:
- upload_name = upload_name[0]
- if upload_name and not is_ascii(upload_name):
- raise SynapseError(400, "filename must be ascii")
+ try:
+ upload_name = upload_name[0].decode('UTF-8')
+ except UnicodeDecodeError:
+ raise SynapseError(
+                    msg="Invalid UTF-8 filename parameter: %r" % (upload_name,),
+ code=400,
+ )
headers = request.requestHeaders
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index ce71389f02..495ef087c9 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -167,7 +167,7 @@ class SQLBaseStore(object):
self._get_event_cache = Cache("*getEvent*", keylen=3, lru=True,
max_entries=hs.config.event_cache_size)
- self._state_group_cache = DictionaryCache("*stateGroupCache*", 100000)
+ self._state_group_cache = DictionaryCache("*stateGroupCache*", 2000)
self._event_fetch_lock = threading.Condition()
self._event_fetch_list = []
diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py
index bc90e17c63..989ad340b0 100644
--- a/synapse/storage/event_federation.py
+++ b/synapse/storage/event_federation.py
@@ -331,7 +331,10 @@ class EventFederationStore(SQLBaseStore):
txn.executemany(
query,
- [(ev.event_id, ev.room_id, ev.event_id) for ev in events]
+ [
+ (ev.event_id, ev.room_id, ev.event_id) for ev in events
+ if not ev.internal_metadata.is_outlier()
+ ]
)
query = (
@@ -358,7 +361,10 @@ class EventFederationStore(SQLBaseStore):
)
txn.executemany(
query,
- [(ev.event_id, ev.room_id) for ev in events]
+ [
+ (ev.event_id, ev.room_id) for ev in events
+ if not ev.internal_metadata.is_outlier()
+ ]
)
for room_id in events_by_room:
diff --git a/synapse/storage/schema/delta/23/drop_state_index.sql b/synapse/storage/schema/delta/23/drop_state_index.sql
new file mode 100644
index 0000000000..07d0ea5cb2
--- /dev/null
+++ b/synapse/storage/schema/delta/23/drop_state_index.sql
@@ -0,0 +1,16 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+DROP INDEX IF EXISTS state_groups_state_tuple;
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index c9110e6304..9630efcfcc 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -403,8 +403,15 @@ class StateStore(SQLBaseStore):
state_dict = results[group]
for event_id in state_ids:
- state_event = state_events[event_id]
- state_dict[(state_event.type, state_event.state_key)] = state_event
+ try:
+ state_event = state_events[event_id]
+ state_dict[(state_event.type, state_event.state_key)] = state_event
+ except KeyError:
+                # Hmm. So we don't have that state event? Interesting.
+ logger.warn(
+ "Can't find state event %r for state group %r",
+ event_id, group,
+ )
self._state_group_cache.update(
cache_seq_num,
diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py
index 7a1e96af37..f3a36340e4 100644
--- a/synapse/util/stringutils.py
+++ b/synapse/util/stringutils.py
@@ -38,6 +38,8 @@ def random_string_with_symbols(length):
def is_ascii(s):
try:
s.encode("ascii")
+ except UnicodeEncodeError:
+ return False
except UnicodeDecodeError:
return False
else:
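
Both except branches above matter on Python 2: a byte string containing high
bytes fails the implicit ASCII decode, while a unicode string containing
non-ASCII code points fails the encode. For example::

    is_ascii("plain")         # True
    is_ascii(u"caf\xe9")      # False: UnicodeEncodeError
    is_ascii("caf\xc3\xa9")   # False: UnicodeDecodeError
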
diff --git a/tests/test_state.py b/tests/test_state.py
index 5845358754..55f37c521f 100644
--- a/tests/test_state.py
+++ b/tests/test_state.py
@@ -204,8 +204,8 @@ class StateTestCase(unittest.TestCase):
nodes={
"START": DictObj(
type=EventTypes.Create,
- state_key="creator",
- content={"membership": "@user_id:example.com"},
+ state_key="",
+ content={"creator": "@user_id:example.com"},
depth=1,
),
"A": DictObj(
@@ -259,8 +259,8 @@ class StateTestCase(unittest.TestCase):
nodes={
"START": DictObj(
type=EventTypes.Create,
- state_key="creator",
- content={"membership": "@user_id:example.com"},
+ state_key="",
+ content={"creator": "@user_id:example.com"},
depth=1,
),
"A": DictObj(
@@ -432,13 +432,19 @@ class StateTestCase(unittest.TestCase):
def test_resolve_message_conflict(self):
event = create_event(type="test_message", name="event")
+ creation = create_event(
+ type=EventTypes.Create, state_key=""
+ )
+
old_state_1 = [
+ creation,
create_event(type="test1", state_key="1"),
create_event(type="test1", state_key="2"),
create_event(type="test2", state_key=""),
]
old_state_2 = [
+ creation,
create_event(type="test1", state_key="1"),
create_event(type="test3", state_key="2"),
create_event(type="test4", state_key=""),
@@ -446,7 +452,7 @@ class StateTestCase(unittest.TestCase):
context = yield self._get_context(event, old_state_1, old_state_2)
- self.assertEqual(len(context.current_state), 5)
+ self.assertEqual(len(context.current_state), 6)
self.assertIsNone(context.state_group)
@@ -454,13 +460,19 @@ class StateTestCase(unittest.TestCase):
def test_resolve_state_conflict(self):
event = create_event(type="test4", state_key="", name="event")
+ creation = create_event(
+ type=EventTypes.Create, state_key=""
+ )
+
old_state_1 = [
+ creation,
create_event(type="test1", state_key="1"),
create_event(type="test1", state_key="2"),
create_event(type="test2", state_key=""),
]
old_state_2 = [
+ creation,
create_event(type="test1", state_key="1"),
create_event(type="test3", state_key="2"),
create_event(type="test4", state_key=""),
@@ -468,7 +480,7 @@ class StateTestCase(unittest.TestCase):
context = yield self._get_context(event, old_state_1, old_state_2)
- self.assertEqual(len(context.current_state), 5)
+ self.assertEqual(len(context.current_state), 6)
self.assertIsNone(context.state_group)
@@ -484,36 +496,45 @@ class StateTestCase(unittest.TestCase):
}
)
+ creation = create_event(
+ type=EventTypes.Create, state_key="",
+ content={"creator": "@foo:bar"}
+ )
+
old_state_1 = [
+ creation,
member_event,
create_event(type="test1", state_key="1", depth=1),
]
old_state_2 = [
+ creation,
member_event,
create_event(type="test1", state_key="1", depth=2),
]
context = yield self._get_context(event, old_state_1, old_state_2)
- self.assertEqual(old_state_2[1], context.current_state[("test1", "1")])
+ self.assertEqual(old_state_2[2], context.current_state[("test1", "1")])
# Reverse the depth to make sure we are actually using the depths
# during state resolution.
old_state_1 = [
+ creation,
member_event,
create_event(type="test1", state_key="1", depth=2),
]
old_state_2 = [
+ creation,
member_event,
create_event(type="test1", state_key="1", depth=1),
]
context = yield self._get_context(event, old_state_1, old_state_2)
- self.assertEqual(old_state_1[1], context.current_state[("test1", "1")])
+ self.assertEqual(old_state_1[2], context.current_state[("test1", "1")])
def _get_context(self, event, old_state_1, old_state_2):
group_name_1 = "group_name_1"
diff --git a/tests/utils.py b/tests/utils.py
index 3766a994f2..dd19a16fc7 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -27,6 +27,7 @@ from twisted.enterprise.adbapi import ConnectionPool
from collections import namedtuple
from mock import patch, Mock
+import hashlib
import urllib
import urlparse
@@ -67,6 +68,18 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
**kargs
)
+    # bcrypt is far too slow to use in unit tests
+ def swap_out_hash_for_testing(old_build_handlers):
+ def build_handlers():
+ handlers = old_build_handlers()
+ auth_handler = handlers.auth_handler
+ auth_handler.hash = lambda p: hashlib.md5(p).hexdigest()
+ auth_handler.validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h
+ return handlers
+ return build_handlers
+
+ hs.build_handlers = swap_out_hash_for_testing(hs.build_handlers)
+
defer.returnValue(hs)
diff --git a/tox.ini b/tox.ini
index 58cf9613cb..a69948484f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -9,10 +9,8 @@ deps =
setenv =
PYTHONDONTWRITEBYTECODE = no_byte_code
commands =
- coverage run --source=./synapse {envbindir}/trial tests
+ coverage run --source=synapse {envbindir}/trial {posargs:tests}
coverage report -m
-install_command =
- pip install --process-dependency-links --pre {opts} {packages}
[testenv:packaging]
deps =
|