diff --git a/.gitignore b/.gitignore
index 4c336b710d..f8c4000134 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,3 +43,6 @@ build/
localhost-800*/
static/client/register/register_config.js
.tox
+
+env/
+*.config
diff --git a/docs/postgres.rst b/docs/postgres.rst
index 19d8391115..b5027fefb0 100644
--- a/docs/postgres.rst
+++ b/docs/postgres.rst
@@ -55,9 +55,8 @@ Porting from SQLite
Overview
~~~~~~~~
-The script ``port_from_sqlite_to_postgres.py`` allows porting an existing
-synapse server backed by SQLite to using PostgreSQL. This is done in as a two
-phase process:
+The script ``synapse_port_db`` allows porting an existing synapse server
+backed by SQLite to PostgreSQL. This is done as a two-phase process:
1. Copy the existing SQLite database to a separate location (while the server
   is down) and run the port script against that offline database.
@@ -86,8 +85,7 @@ Assuming your new config file (as described in the section *Synapse config*)
is named ``homeserver-postgres.yaml`` and the SQLite snapshot is at
``homeserver.db.snapshot`` then simply run::
- python scripts/port_from_sqlite_to_postgres.py \
- --sqlite-database homeserver.db.snapshot \
+ synapse_port_db --sqlite-database homeserver.db.snapshot \
--postgres-config homeserver-postgres.yaml
The flag ``--curses`` displays a coloured curses progress UI.
@@ -100,8 +98,7 @@ To complete the conversion shut down the synapse server and run the port
script one last time, e.g. if the SQLite database is at ``homeserver.db``
run::
- python scripts/port_from_sqlite_to_postgres.py \
- --sqlite-database homeserver.db \
+ synapse_port_db --sqlite-database homeserver.db \
--postgres-config database_config.yaml
Once that has completed, change the synapse config to point at the PostgreSQL
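
The two runs above are the whole port: phase one works on an offline snapshot while the server is briefly down, and phase two catches up the (much smaller) delta on the real database. As a sketch only — the idea of scripting the flow, and every path in it, is an assumption layered on the docs above rather than part of this change — the sequence could be driven like so:

```python
#!/usr/bin/env python
"""Illustrative driver for the two-phase SQLite -> PostgreSQL port.

A sketch based on the documentation above, not Synapse code: the paths
and config filenames are assumptions for illustration.
"""
import shutil
import subprocess

SQLITE_DB = "homeserver.db"             # assumed live SQLite database
SNAPSHOT = "homeserver.db.snapshot"     # offline copy for the first pass
PG_CONFIG = "homeserver-postgres.yaml"  # new config, as described above


def run_port(sqlite_path):
    # The port script is incremental, so the same invocation serves both
    # the snapshot pass and the final catch-up pass.
    subprocess.check_call([
        "synapse_port_db",
        "--sqlite-database", sqlite_path,
        "--postgres-config", PG_CONFIG,
    ])


# Phase 1: snapshot the database while the server is down, then port the
# snapshot (the server can be restarted while this runs).
shutil.copy(SQLITE_DB, SNAPSHOT)
run_port(SNAPSHOT)

# Phase 2: shut the server down again and port the remainder directly
# from the live database, then point the config at PostgreSQL.
run_port(SQLITE_DB)
```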
diff --git a/scripts/database-prepare-for-0.0.1.sh b/scripts/database-prepare-for-0.0.1.sh
deleted file mode 100755
index 43d759a5cd..0000000000
--- a/scripts/database-prepare-for-0.0.1.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-# This is will prepare a synapse database for running with v0.0.1 of synapse.
-# It will store all the user information, but will *delete* all messages and
-# room data.
-
-set -e
-
-cp "$1" "$1.bak"
-
-DUMP=$(sqlite3 "$1" << 'EOF'
-.dump users
-.dump access_tokens
-.dump presence
-.dump profiles
-EOF
-)
-
-rm "$1"
-
-sqlite3 "$1" <<< "$DUMP"
diff --git a/scripts/database-prepare-for-0.5.0.sh b/scripts/database-prepare-for-0.5.0.sh
deleted file mode 100755
index e824cb583e..0000000000
--- a/scripts/database-prepare-for-0.5.0.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-# This is will prepare a synapse database for running with v0.5.0 of synapse.
-# It will store all the user information, but will *delete* all messages and
-# room data.
-
-set -e
-
-cp "$1" "$1.bak"
-
-DUMP=$(sqlite3 "$1" << 'EOF'
-.dump users
-.dump access_tokens
-.dump presence
-.dump profiles
-EOF
-)
-
-rm "$1"
-
-sqlite3 "$1" <<< "$DUMP"
diff --git a/scripts/port_from_sqlite_to_postgres.py b/scripts/synapse_port_db
index e7ed4c309b..e7ed4c309b 100755
--- a/scripts/port_from_sqlite_to_postgres.py
+++ b/scripts/synapse_port_db
diff --git a/scripts/upgrade_db_to_v0.6.0.py b/scripts/upgrade_db_to_v0.6.0.py
deleted file mode 100755
index cd4be28b86..0000000000
--- a/scripts/upgrade_db_to_v0.6.0.py
+++ /dev/null
@@ -1,326 +0,0 @@
-#!/usr/bin/env python
-from synapse.storage import SCHEMA_VERSION, read_schema
-from synapse.storage._base import SQLBaseStore
-from synapse.storage.signatures import SignatureStore
-from synapse.storage.event_federation import EventFederationStore
-
-from unpaddedbase64 import encode_base64, decode_base64
-
-from synapse.crypto.event_signing import compute_event_signature
-
-from synapse.events.builder import EventBuilder
-from synapse.events.utils import prune_event
-
-from synapse.crypto.event_signing import check_event_content_hash
-
-from signedjson.sign import verify_signed_json, SignatureVerifyException
-from signedjson.key import decode_verify_key_bytes
-
-from canonicaljson import encode_canonical_json
-
-import argparse
-# import dns.resolver
-import hashlib
-import httplib
-import json
-import sqlite3
-import urllib2
-
-
-delta_sql = """
-CREATE TABLE IF NOT EXISTS event_json(
- event_id TEXT NOT NULL,
- room_id TEXT NOT NULL,
- internal_metadata NOT NULL,
- json BLOB NOT NULL,
- CONSTRAINT ev_j_uniq UNIQUE (event_id)
-);
-
-CREATE INDEX IF NOT EXISTS event_json_id ON event_json(event_id);
-CREATE INDEX IF NOT EXISTS event_json_room_id ON event_json(room_id);
-
-PRAGMA user_version = 10;
-"""
-
-
-class Store(object):
- _get_event_signatures_txn = SignatureStore.__dict__["_get_event_signatures_txn"]
- _get_event_content_hashes_txn = SignatureStore.__dict__["_get_event_content_hashes_txn"]
- _get_event_reference_hashes_txn = SignatureStore.__dict__["_get_event_reference_hashes_txn"]
- _get_prev_event_hashes_txn = SignatureStore.__dict__["_get_prev_event_hashes_txn"]
- _get_prev_events_and_state = EventFederationStore.__dict__["_get_prev_events_and_state"]
- _get_auth_events = EventFederationStore.__dict__["_get_auth_events"]
- cursor_to_dict = SQLBaseStore.__dict__["cursor_to_dict"]
- _simple_select_onecol_txn = SQLBaseStore.__dict__["_simple_select_onecol_txn"]
- _simple_select_list_txn = SQLBaseStore.__dict__["_simple_select_list_txn"]
- _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
-
- def _generate_event_json(self, txn, rows):
- events = []
- for row in rows:
- d = dict(row)
-
- d.pop("stream_ordering", None)
- d.pop("topological_ordering", None)
- d.pop("processed", None)
-
- if "origin_server_ts" not in d:
- d["origin_server_ts"] = d.pop("ts", 0)
- else:
- d.pop("ts", 0)
-
- d.pop("prev_state", None)
- d.update(json.loads(d.pop("unrecognized_keys")))
-
- d["sender"] = d.pop("user_id")
-
- d["content"] = json.loads(d["content"])
-
- if "age_ts" not in d:
- # For compatibility
- d["age_ts"] = d.get("origin_server_ts", 0)
-
- d.setdefault("unsigned", {})["age_ts"] = d.pop("age_ts")
-
- outlier = d.pop("outlier", False)
-
- # d.pop("membership", None)
-
- d.pop("state_hash", None)
-
- d.pop("replaces_state", None)
-
- b = EventBuilder(d)
- b.internal_metadata.outlier = outlier
-
- events.append(b)
-
- for i, ev in enumerate(events):
- signatures = self._get_event_signatures_txn(
- txn, ev.event_id,
- )
-
- ev.signatures = {
- n: {
- k: encode_base64(v) for k, v in s.items()
- }
- for n, s in signatures.items()
- }
-
- hashes = self._get_event_content_hashes_txn(
- txn, ev.event_id,
- )
-
- ev.hashes = {
- k: encode_base64(v) for k, v in hashes.items()
- }
-
- prevs = self._get_prev_events_and_state(txn, ev.event_id)
-
- ev.prev_events = [
- (e_id, h)
- for e_id, h, is_state in prevs
- if is_state == 0
- ]
-
- # ev.auth_events = self._get_auth_events(txn, ev.event_id)
-
- hashes = dict(ev.auth_events)
-
- for e_id, hash in ev.prev_events:
- if e_id in hashes and not hash:
- hash.update(hashes[e_id])
- #
- # if hasattr(ev, "state_key"):
- # ev.prev_state = [
- # (e_id, h)
- # for e_id, h, is_state in prevs
- # if is_state == 1
- # ]
-
- return [e.build() for e in events]
-
-
-store = Store()
-
-
-# def get_key(server_name):
-# print "Getting keys for: %s" % (server_name,)
-# targets = []
-# if ":" in server_name:
-# target, port = server_name.split(":")
-# targets.append((target, int(port)))
-# try:
-# answers = dns.resolver.query("_matrix._tcp." + server_name, "SRV")
-# for srv in answers:
-# targets.append((srv.target, srv.port))
-# except dns.resolver.NXDOMAIN:
-# targets.append((server_name, 8448))
-# except:
-# print "Failed to lookup keys for %s" % (server_name,)
-# return {}
-#
-# for target, port in targets:
-# url = "https://%s:%i/_matrix/key/v1" % (target, port)
-# try:
-# keys = json.load(urllib2.urlopen(url, timeout=2))
-# verify_keys = {}
-# for key_id, key_base64 in keys["verify_keys"].items():
-# verify_key = decode_verify_key_bytes(
-# key_id, decode_base64(key_base64)
-# )
-# verify_signed_json(keys, server_name, verify_key)
-# verify_keys[key_id] = verify_key
-# print "Got keys for: %s" % (server_name,)
-# return verify_keys
-# except urllib2.URLError:
-# pass
-# except urllib2.HTTPError:
-# pass
-# except httplib.HTTPException:
-# pass
-#
-# print "Failed to get keys for %s" % (server_name,)
-# return {}
-
-
-def reinsert_events(cursor, server_name, signing_key):
- print "Running delta: v10"
-
- cursor.executescript(delta_sql)
-
- cursor.execute(
- "SELECT * FROM events ORDER BY rowid ASC"
- )
-
- print "Getting events..."
-
- rows = store.cursor_to_dict(cursor)
-
- events = store._generate_event_json(cursor, rows)
-
- print "Got events from DB."
-
- algorithms = {
- "sha256": hashlib.sha256,
- }
-
- key_id = "%s:%s" % (signing_key.alg, signing_key.version)
- verify_key = signing_key.verify_key
- verify_key.alg = signing_key.alg
- verify_key.version = signing_key.version
-
- server_keys = {
- server_name: {
- key_id: verify_key
- }
- }
-
- i = 0
- N = len(events)
-
- for event in events:
- if i % 100 == 0:
- print "Processed: %d/%d events" % (i,N,)
- i += 1
-
- # for alg_name in event.hashes:
- # if check_event_content_hash(event, algorithms[alg_name]):
- # pass
- # else:
- # pass
- # print "FAIL content hash %s %s" % (alg_name, event.event_id, )
-
- have_own_correctly_signed = False
- for host, sigs in event.signatures.items():
- pruned = prune_event(event)
-
- for key_id in sigs:
- if host not in server_keys:
- server_keys[host] = {} # get_key(host)
- if key_id in server_keys[host]:
- try:
- verify_signed_json(
- pruned.get_pdu_json(),
- host,
- server_keys[host][key_id]
- )
-
- if host == server_name:
- have_own_correctly_signed = True
- except SignatureVerifyException:
- print "FAIL signature check %s %s" % (
- key_id, event.event_id
- )
-
- # TODO: Re sign with our own server key
- if not have_own_correctly_signed:
- sigs = compute_event_signature(event, server_name, signing_key)
- event.signatures.update(sigs)
-
- pruned = prune_event(event)
-
- for key_id in event.signatures[server_name]:
- verify_signed_json(
- pruned.get_pdu_json(),
- server_name,
- server_keys[server_name][key_id]
- )
-
- event_json = encode_canonical_json(
- event.get_dict()
- ).decode("UTF-8")
-
- metadata_json = encode_canonical_json(
- event.internal_metadata.get_dict()
- ).decode("UTF-8")
-
- store._simple_insert_txn(
- cursor,
- table="event_json",
- values={
- "event_id": event.event_id,
- "room_id": event.room_id,
- "internal_metadata": metadata_json,
- "json": event_json,
- },
- or_replace=True,
- )
-
-
-def main(database, server_name, signing_key):
- conn = sqlite3.connect(database)
- cursor = conn.cursor()
-
- # Do other deltas:
- cursor.execute("PRAGMA user_version")
- row = cursor.fetchone()
-
- if row and row[0]:
- user_version = row[0]
- # Run every version since after the current version.
- for v in range(user_version + 1, 10):
- print "Running delta: %d" % (v,)
- sql_script = read_schema("delta/v%d" % (v,))
- cursor.executescript(sql_script)
-
- reinsert_events(cursor, server_name, signing_key)
-
- conn.commit()
-
- print "Success!"
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
-
- parser.add_argument("database")
- parser.add_argument("server_name")
- parser.add_argument(
- "signing_key", type=argparse.FileType('r'),
- )
- args = parser.parse_args()
-
- signing_key = signedjson.key.read_signing_keys(args.signing_key)
-
- main(args.database, args.server_name, signing_key[0])
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index ff7807c2e6..ffc6299146 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -403,7 +403,7 @@ def setup(config_options):
database_engine=database_engine,
)
- logger.info("Preparing database: %r...", config.database_config)
+ logger.info("Preparing database: %s...", config.database_config['name'])
try:
db_conn = database_engine.module.connect(
@@ -425,7 +425,7 @@ def setup(config_options):
)
sys.exit(1)
- logger.info("Database prepared in %r.", config.database_config)
+ logger.info("Database prepared in %s.", config.database_config['name'])
hs.start_listening()
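
The two logging tweaks above stop passing the entire database config dict to the formatter. A plausible reading (an inference, not stated in the diff) is that ``%r`` of the whole dict can echo connection credentials into the logs, while the engine name is all the message needs. A self-contained sketch of the difference, with a made-up config:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

# Hypothetical config of the shape Synapse uses: for PostgreSQL the
# "args" section can carry a connection password.
database_config = {
    "name": "psycopg2",
    "args": {"user": "synapse", "password": "s3cret", "database": "synapse"},
}

# Old form: reprs the full dict, credentials and all.
logger.info("Preparing database: %r...", database_config)

# New form: only the engine name reaches the log.
logger.info("Preparing database: %s...", database_config["name"])
```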
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index d01235d31f..1a6784a714 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -182,6 +182,8 @@ class Config(object):
) % (entry_path, )
continue
+ files.add(config_path)
+
config_files.extend(sorted(files))
else:
config_files.append(config_path)
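
For context, the surrounding function lets a config path be a directory of ``*.yaml`` files that are applied in sorted order; the one-line fix makes sure matching entries actually land in the set that gets sorted. A standalone sketch of that expansion behaviour — names and details here are illustrative, not Synapse's implementation:

```python
import os


def expand_config_paths(config_paths):
    """Expand paths: a directory contributes its *.yaml files in sorted
    order; a plain file is used as-is. Illustrative sketch only."""
    config_files = []
    for config_path in config_paths:
        if os.path.isdir(config_path):
            files = set()
            for entry in os.listdir(config_path):
                entry_path = os.path.join(config_path, entry)
                if not entry.endswith(".yaml"):
                    continue
                # The step the fix adds: without it the set stays empty
                # and directory configs are silently ignored.
                files.add(entry_path)
            config_files.extend(sorted(files))
        else:
            config_files.append(config_path)
    return config_files


# e.g. expand_config_paths(["homeserver.yaml", "conf.d"])
```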
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index 1ab19cd1a6..59f687e0f1 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -324,7 +324,7 @@ class AuthHandler(BaseHandler):
def _check_password(self, user_id, password, stored_hash):
"""Checks that user_id has passed password, raises LoginError if not."""
- if not bcrypt.checkpw(password, stored_hash):
+ if not self.validate_hash(password, stored_hash):
logger.warn("Failed password login for user %s", user_id)
raise LoginError(403, "", errcode=Codes.FORBIDDEN)
@@ -369,7 +369,7 @@ class AuthHandler(BaseHandler):
@defer.inlineCallbacks
def set_password(self, user_id, newpassword):
- password_hash = bcrypt.hashpw(newpassword, bcrypt.gensalt())
+ password_hash = self.hash(newpassword)
yield self.store.user_set_password_hash(user_id, password_hash)
yield self.store.user_delete_access_tokens(user_id)
@@ -391,3 +391,26 @@ class AuthHandler(BaseHandler):
def _remove_session(self, session):
logger.debug("Removing session %s", session)
del self.sessions[session["id"]]
+
+ def hash(self, password):
+ """Computes a secure hash of password.
+
+ Args:
+ password (str): Password to hash.
+
+ Returns:
+ Hashed password (str).
+ """
+ return bcrypt.hashpw(password, bcrypt.gensalt())
+
+ def validate_hash(self, password, stored_hash):
+ """Validates that self.hash(password) == stored_hash.
+
+ Args:
+ password (str): Password to hash.
+ stored_hash (str): Expected hash value.
+
+ Returns:
+ Whether the password matches the stored hash (bool).
+ """
+ return bcrypt.checkpw(password, stored_hash)
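
Centralising bcrypt behind ``hash``/``validate_hash`` lets callers (and the test helper further down) swap the scheme without touching call sites. A minimal round trip of the contract the pair is meant to satisfy — note the explicit ``encode`` calls are an assumption for newer bcrypt releases, which require bytes, whereas the handler above passes ``str``:

```python
import bcrypt


def hash_password(password):
    # gensalt() embeds a fresh salt in the hash, so two hashes of the
    # same password differ; only checkpw can compare them.
    return bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt())


def validate_hash(password, stored_hash):
    return bcrypt.checkpw(password.encode("utf-8"), stored_hash)


stored = hash_password("correct horse battery staple")
assert validate_hash("correct horse battery staple", stored)
assert not validate_hash("wrong password", stored)
```

That salting is also why the docstring speaks of the password *matching* the stored hash rather than literal equality of two ``hash()`` results.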
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 56d125f753..ef4081e3fe 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -25,7 +25,6 @@ import synapse.util.stringutils as stringutils
from synapse.util.async import run_on_reactor
from synapse.http.client import CaptchaServerHttpClient
-import bcrypt
import logging
import urllib
@@ -82,7 +81,7 @@ class RegistrationHandler(BaseHandler):
yield run_on_reactor()
password_hash = None
if password:
- password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
+ password_hash = self.auth_handler().hash(password)
if localpart:
yield self.check_username(localpart)
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 8ec272fd5f..795ef27182 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -56,11 +56,6 @@ def github_link(project, version, egg):
return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)
DEPENDENCY_LINKS = {
- "matrix-angular-sdk": github_link(
- project="matrix-org/matrix-angular-sdk",
- version="v0.6.6",
- egg="matrix_angular_sdk-0.6.6",
- ),
}
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index ce71389f02..495ef087c9 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -167,7 +167,7 @@ class SQLBaseStore(object):
self._get_event_cache = Cache("*getEvent*", keylen=3, lru=True,
max_entries=hs.config.event_cache_size)
- self._state_group_cache = DictionaryCache("*stateGroupCache*", 100000)
+ self._state_group_cache = DictionaryCache("*stateGroupCache*", 2000)
self._event_fetch_lock = threading.Condition()
self._event_fetch_list = []
diff --git a/tests/utils.py b/tests/utils.py
index 3766a994f2..dd19a16fc7 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -27,6 +27,7 @@ from twisted.enterprise.adbapi import ConnectionPool
from collections import namedtuple
from mock import patch, Mock
+import hashlib
import urllib
import urlparse
@@ -67,6 +68,18 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
**kargs
)
+ # bcrypt is far too slow to use in unit tests
+ def swap_out_hash_for_testing(old_build_handlers):
+ def build_handlers():
+ handlers = old_build_handlers()
+ auth_handler = handlers.auth_handler
+ auth_handler.hash = lambda p: hashlib.md5(p).hexdigest()
+ auth_handler.validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h
+ return handlers
+ return build_handlers
+
+ hs.build_handlers = swap_out_hash_for_testing(hs.build_handlers)
+
defer.returnValue(hs)
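
The helper above wraps the handler factory so tests get an MD5 stand-in for bcrypt. The same wrap-a-factory pattern in isolation, with toy stand-ins (every name below is made up; ``encode`` is added so the sketch runs on Python 3, while the patch targets Python 2 and hashes ``str`` directly):

```python
import hashlib


class FakeAuthHandler(object):
    def hash(self, password):
        raise RuntimeError("expensive bcrypt call")


class FakeHandlers(object):
    def __init__(self):
        self.auth_handler = FakeAuthHandler()


def build_handlers():
    return FakeHandlers()


def swap_out_hash_for_testing(old_build_handlers):
    # Wrap the existing factory, then monkey-patch the handler it builds
    # with a cheap hash -- the same shape as the patch above.
    def wrapped_build_handlers():
        handlers = old_build_handlers()
        auth = handlers.auth_handler
        auth.hash = lambda p: hashlib.md5(p.encode("utf-8")).hexdigest()
        auth.validate_hash = (
            lambda p, h: hashlib.md5(p.encode("utf-8")).hexdigest() == h
        )
        return handlers
    return wrapped_build_handlers


build_handlers = swap_out_hash_for_testing(build_handlers)
handlers = build_handlers()
digest = handlers.auth_handler.hash("hunter2")
assert handlers.auth_handler.validate_hash("hunter2", digest)
```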
diff --git a/tox.ini b/tox.ini
index 3f0649b628..a69948484f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,8 +11,6 @@ setenv =
commands =
coverage run --source=synapse {envbindir}/trial {posargs:tests}
coverage report -m
-install_command =
- pip install --process-dependency-links --pre {opts} {packages}
[testenv:packaging]
deps =
|