From 455579ca90dd5479dae785b5a1b9bdd201654ea6 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 20 Mar 2015 10:55:55 +0000
Subject: Make database selection configurable

---
 synapse/app/homeserver.py | 44 +++++++++++++++++++++++++++++++++-----------
 1 file changed, 33 insertions(+), 11 deletions(-)

diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 15c454af76..a2fca2e024 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -61,6 +61,7 @@ import resource
 import subprocess
 import sqlite3
 import syweb
+import yaml

 logger = logging.getLogger(__name__)

@@ -108,14 +109,14 @@ class SynapseHomeServer(HomeServer):
         return None

     def build_db_pool(self):
-        return adbapi.ConnectionPool(
-            "sqlite3", self.get_db_name(),
-            check_same_thread=False,
-            cp_min=1,
-            cp_max=1,
-            cp_openfun=prepare_database,  # Prepare the database for each conn
-                                          # so that :memory: sqlite works
-        )
+        name = self.db_config.pop("name", None)
+        if name == "MySQLdb":
+            return adbapi.ConnectionPool(
+                name,
+                **self.db_config
+            )
+
+        raise RuntimeError("Unsupported database type")

     def create_resource_tree(self, redirect_root_to_web_client):
         """Create the resource tree for this Home Server.
@@ -357,11 +358,29 @@ def setup(config_options):

     tls_context_factory = context_factory.ServerContextFactory(config)

+    if config.database_config:
+        with open(config.database_config, 'r') as f:
+            db_config = yaml.safe_load(f)
+
+        name = db_config.get("name", None)
+        if name == "MySQLdb":
+            db_config.update({
+                "sql_mode": "TRADITIONAL",
+                "charset": "utf8",
+                "use_unicode": True,
+            })
+    else:
+        db_config = {
+            "name": "sqlite3",
+            "database": config.database_path,
+        }
+
     hs = SynapseHomeServer(
         config.server_name,
         domain_with_port=domain_with_port,
         upload_dir=os.path.abspath("uploads"),
         db_name=config.database_path,
+        db_config=db_config,
         tls_context_factory=tls_context_factory,
         config=config,
         content_addr=config.content_addr,
@@ -377,9 +396,12 @@ def setup(config_options):
     logger.info("Preparing database: %s...", db_name)

     try:
-        with sqlite3.connect(db_name) as db_conn:
-            prepare_sqlite3_database(db_conn)
-            prepare_database(db_conn)
+        # with sqlite3.connect(db_name) as db_conn:
+        #     prepare_sqlite3_database(db_conn)
+        #     prepare_database(db_conn)
+        import MySQLdb
+        db_conn = MySQLdb.connect(**db_config)
+        prepare_database(db_conn)
     except UpgradeDatabaseException:
         sys.stderr.write(
             "\nFailed to upgrade database.\n"
--
cgit 1.4.1
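Note: the new config.database_config option points at a YAML file whose "name" key selects the DB-API module; every remaining key is handed straight to the driver via adbapi.ConnectionPool. A minimal sketch of the idea — the keys other than "name" here are illustrative MySQLdb connect() arguments, not taken from the patch:

    import yaml
    from twisted.enterprise import adbapi

    EXAMPLE_DATABASE_YAML = """
    name: MySQLdb
    host: localhost
    user: synapse
    passwd: secret
    db: synapse
    """

    db_config = yaml.safe_load(EXAMPLE_DATABASE_YAML)
    name = db_config.pop("name", None)  # mirrors build_db_pool() above
    if name == "MySQLdb":
        pool = adbapi.ConnectionPool(name, **db_config)
    else:
        raise RuntimeError("Unsupported database type")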
From 0e8f5095c7e7075b249ad53a9f60a4d2fdeeaaed Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 25 Mar 2015 17:15:20 +0000
Subject: Fix unicode database support

---
 synapse/app/homeserver.py                        | 47 +++++++++++++---------
 synapse/handlers/login.py                        |  2 +-
 synapse/rest/client/v1/profile.py                |  7 +++-
 synapse/storage/__init__.py                      | 12 ++++--
 synapse/storage/_base.py                         |  4 ++
 synapse/storage/events.py                        |  8 ++--
 synapse/storage/keys.py                          |  4 +-
 synapse/storage/profile.py                       | 12 +++++-
 synapse/storage/registration.py                  | 18 +++++++--
 synapse/storage/room.py                          |  1 +
 .../schema/full_schemas/11/media_repository.sql  |  2 +-
 .../storage/schema/full_schemas/11/profiles.sql  |  2 +-
 .../schema/full_schemas/11/transactions.sql      |  1 -
 synapse/storage/signatures.py                    | 10 ++---
 synapse/storage/transactions.py                  |  2 +-
 15 files changed, 88 insertions(+), 44 deletions(-)

diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 394e93e6c2..beab6ffc7a 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -110,14 +110,12 @@ class SynapseHomeServer(HomeServer):
         return None

     def build_db_pool(self):
-        name = self.db_config.pop("name", None)
-        if name == "MySQLdb":
-            return adbapi.ConnectionPool(
-                name,
-                **self.db_config
-            )
+        name = self.db_config["name"]

-        raise RuntimeError("Unsupported database type")
+        return adbapi.ConnectionPool(
+            name,
+            **self.db_config.get("args", {})
+        )

     def create_resource_tree(self, redirect_root_to_web_client):
         """Create the resource tree for this Home Server.
@@ -323,7 +321,7 @@ def change_resource_limit(soft_file_no):
         resource.setrlimit(resource.RLIMIT_NOFILE, (soft_file_no, hard))
         logger.info("Set file limit to: %d", soft_file_no)
-    except (ValueError, resource.error) as e:
+    except ( ValueError, resource.error) as e:
         logger.warn("Failed to set file limit: %s", e)

@@ -363,20 +361,33 @@ def setup(config_options):
     if config.database_config:
         with open(config.database_config, 'r') as f:
             db_config = yaml.safe_load(f)
-
-        name = db_config.get("name", None)
-        if name == "MySQLdb":
-            db_config.update({
-                "sql_mode": "TRADITIONAL",
-                "charset": "utf8",
-                "use_unicode": True,
-            })
     else:
         db_config = {
             "name": "sqlite3",
             "database": config.database_path,
         }

+    db_config = {
+        k: v for k, v in db_config.items()
+        if not k.startswith("cp_")
+    }
+
+    name = db_config.get("name", None)
+    if name in ["MySQLdb", "mysql.connector"]:
+        db_config.setdefault("args", {}).update({
+            "sql_mode": "TRADITIONAL",
+            "charset": "utf8",
+            "use_unicode": True,
+        })
+    elif name == "sqlite3":
+        db_config.setdefault("args", {}).update({
+            "cp_min": 1,
+            "cp_max": 1,
+            "cp_openfun": prepare_database,
+        })
+    else:
+        raise RuntimeError("Unsupported database type '%s'" % (name,))
+
     hs = SynapseHomeServer(
         config.server_name,
         domain_with_port=domain_with_port,
@@ -401,8 +412,8 @@ def setup(config_options):
         # with sqlite3.connect(db_name) as db_conn:
         #     prepare_sqlite3_database(db_conn)
         #     prepare_database(db_conn)
-        import MySQLdb
-        db_conn = MySQLdb.connect(**db_config)
+        import mysql.connector
+        db_conn = mysql.connector.connect(**db_config.get("args", {}))
         prepare_database(db_conn)
     except UpgradeDatabaseException:
         sys.stderr.write(
diff --git a/synapse/handlers/login.py b/synapse/handlers/login.py
index 7447800460..76647c7941 100644
--- a/synapse/handlers/login.py
+++ b/synapse/handlers/login.py
@@ -57,7 +57,7 @@ class LoginHandler(BaseHandler):
             logger.warn("Attempted to login as %s but they do not exist", user)
             raise LoginError(403, "", errcode=Codes.FORBIDDEN)

-        stored_hash = user_info[0]["password_hash"]
+        stored_hash = user_info["password_hash"]
         if bcrypt.checkpw(password, stored_hash):
             # generate an access token and store it.
             token = self.reg_handler._generate_token(user)
diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py
index 1e77eb49cf..7387b4adb9 100644
--- a/synapse/rest/client/v1/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -19,9 +19,13 @@ from twisted.internet import defer
 from .base import ClientV1RestServlet, client_path_pattern
 from synapse.types import UserID

+import logging
 import simplejson as json


+logger = logging.getLogger(__name__)
+
+
 class ProfileDisplaynameRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)/displayname")
@@ -47,7 +51,8 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
             defer.returnValue((400, "Unable to parse name"))

         yield self.handlers.profile_handler.set_displayname(
-            user, auth_user, new_name)
+            user, auth_user, new_name
+        )

         defer.returnValue((200, {}))
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index e03d55b00d..abde7d0df5 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -410,10 +410,14 @@ def executescript(txn, schema_path):


 def _get_or_create_schema_state(txn):
-    schema_path = os.path.join(
-        dir_path, "schema", "schema_version.sql",
-    )
-    executescript(txn, schema_path)
+    try:
+        # Bluntly try creating the schema_version tables.
+        schema_path = os.path.join(
+            dir_path, "schema", "schema_version.sql",
+        )
+        executescript(txn, schema_path)
+    except:
+        pass

     txn.execute("SELECT version, upgraded FROM schema_version")
     row = txn.fetchone()
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 1ea39bc0ad..76ec3ee93f 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -755,6 +755,8 @@ class SQLBaseStore(object):
             return None

         internal_metadata, js, redacted, rejected_reason = res
+        js = js.decode("utf8")
+        internal_metadata = internal_metadata.decode("utf8")

         start_time = update_counter("select_event", start_time)
@@ -779,9 +781,11 @@ class SQLBaseStore(object):
                 sql_getevents_timer.inc_by(curr_time - last_time, desc)
                 return curr_time

+        logger.debug("Got js: %r", js)
         d = json.loads(js)
         start_time = update_counter("decode_json", start_time)

+        logger.debug("Got internal_metadata: %r", internal_metadata)
         internal_metadata = json.loads(internal_metadata)
         start_time = update_counter("decode_internal", start_time)
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 4d636d3f46..69f598967e 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -294,15 +294,17 @@ class EventsStore(SQLBaseStore):
         )

         if is_new_state and not context.rejected:
-            self._simple_insert_txn(
+            self._simple_upsert_txn(
                 txn,
                 "current_state_events",
-                {
-                    "event_id": event.event_id,
+                keyvalues={
                     "room_id": event.room_id,
                     "type": event.type,
                     "state_key": event.state_key,
                 },
+                values={
+                    "event_id": event.event_id,
+                }
             )

         for e_id, h in event.prev_state:
diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py
index 25fef79434..e6975a945b 100644
--- a/synapse/storage/keys.py
+++ b/synapse/storage/keys.py
@@ -64,7 +64,7 @@ class KeyStore(SQLBaseStore):
                 "fingerprint": fingerprint,
                 "from_server": from_server,
                 "ts_added_ms": time_now_ms,
-                "tls_certificate": buffer(tls_certificate_bytes),
+                "tls_certificate": tls_certificate_bytes,
             },
         )

@@ -113,6 +113,6 @@ class KeyStore(SQLBaseStore):
                 "key_id": "%s:%s" % (verify_key.alg, verify_key.version),
                 "from_server": from_server,
                 "ts_added_ms": time_now_ms,
-                "verify_key": buffer(verify_key.encode()),
+                "verify_key": verify_key.encode(),
             },
         )
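Note: the decode("utf8") calls added above compensate for a driver difference: MySQL drivers can return TEXT/BLOB columns as byte strings, whereas sqlite3 hands back unicode directly. A minimal sketch of the normalisation pattern (the helper name is illustrative, not from the patch; Python 2 string semantics assumed):

    def to_unicode(value):
        # MySQL drivers may return `str` (bytes) for TEXT columns;
        # sqlite3 already returns `unicode`. Normalise on the way out.
        if isinstance(value, str):
            return value.decode("utf8")
        return value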
diff --git a/synapse/storage/profile.py b/synapse/storage/profile.py
index a6e52cb248..09778045bf 100644
--- a/synapse/storage/profile.py
+++ b/synapse/storage/profile.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from twisted.internet import defer
+
 from ._base import SQLBaseStore


@@ -24,19 +26,25 @@ class ProfileStore(SQLBaseStore):
             desc="create_profile",
         )

+    @defer.inlineCallbacks
     def get_profile_displayname(self, user_localpart):
-        return self._simple_select_one_onecol(
+        name = yield self._simple_select_one_onecol(
             table="profiles",
             keyvalues={"user_id": user_localpart},
             retcol="displayname",
             desc="get_profile_displayname",
         )

+        if name:
+            name = name.decode("utf8")
+
+        defer.returnValue(name)
+
     def set_profile_displayname(self, user_localpart, new_displayname):
         return self._simple_update_one(
             table="profiles",
             keyvalues={"user_id": user_localpart},
-            updatevalues={"displayname": new_displayname},
+            updatevalues={"displayname": new_displayname.encode("utf8")},
             desc="set_profile_displayname",
         )
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index fe26d6d62f..7258f7b2a5 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -81,13 +81,23 @@ class RegistrationStore(SQLBaseStore):
         txn.execute("INSERT INTO access_tokens(user_id, token) " +
                     "VALUES (?,?)", [user_id, token])

+    @defer.inlineCallbacks
     def get_user_by_id(self, user_id):
-        query = ("SELECT users.name, users.password_hash FROM users"
-                 " WHERE users.name = ?")
-        return self._execute(
-            "get_user_by_id", self.cursor_to_dict, query, user_id
+        user_info = yield self._simple_select_one(
+            table="users",
+            keyvalues={
+                "name": user_id,
+            },
+            retcols=["name", "password_hash"],
+            allow_none=True,
         )

+        if user_info:
+            user_info["password_hash"] = user_info["password_hash"].decode("utf8")
+
+        defer.returnValue(user_info)
+
     @cached()
     # TODO(paul): Currently there's no code to invalidate this cache. That
     # means if/when we ever add internal ways to invalidate access tokens or
diff --git a/synapse/storage/room.py b/synapse/storage/room.py
index 501e947ad7..a1a76280fe 100644
--- a/synapse/storage/room.py
+++ b/synapse/storage/room.py
@@ -72,6 +72,7 @@ class RoomStore(SQLBaseStore):
             keyvalues={"room_id": room_id},
             retcols=RoomsTable.fields,
             desc="get_room",
+            allow_none=True,
         )

     @defer.inlineCallbacks
diff --git a/synapse/storage/schema/full_schemas/11/media_repository.sql b/synapse/storage/schema/full_schemas/11/media_repository.sql
index 8bc84dc24d..d9559f5902 100644
--- a/synapse/storage/schema/full_schemas/11/media_repository.sql
+++ b/synapse/storage/schema/full_schemas/11/media_repository.sql
@@ -65,4 +65,4 @@ CREATE TABLE IF NOT EXISTS remote_media_cache_thumbnails (
 ) ENGINE = INNODB;

 CREATE INDEX IF NOT EXISTS remote_media_cache_thumbnails_media_id
-    ON local_media_repository_thumbnails (media_id);
+    ON remote_media_cache_thumbnails (media_id);
diff --git a/synapse/storage/schema/full_schemas/11/profiles.sql b/synapse/storage/schema/full_schemas/11/profiles.sql
index 32defe2f79..552645c56f 100644
--- a/synapse/storage/schema/full_schemas/11/profiles.sql
+++ b/synapse/storage/schema/full_schemas/11/profiles.sql
@@ -14,7 +14,7 @@
 */
 CREATE TABLE IF NOT EXISTS profiles(
     user_id VARCHAR(255) NOT NULL,
-    displayname VARCHAR(255),
+    displayname VARBINARY(255),
     avatar_url VARCHAR(255),
     UNIQUE(user_id)
 ) ENGINE = INNODB;
diff --git a/synapse/storage/schema/full_schemas/11/transactions.sql b/synapse/storage/schema/full_schemas/11/transactions.sql
index 0570bf95d9..bd13bba8c2 100644
--- a/synapse/storage/schema/full_schemas/11/transactions.sql
+++ b/synapse/storage/schema/full_schemas/11/transactions.sql
@@ -38,7 +38,6 @@ CREATE TABLE IF NOT EXISTS sent_transactions(
 ) ENGINE = INNODB;

 CREATE INDEX IF NOT EXISTS sent_transaction_dest ON sent_transactions(destination);
-CREATE INDEX IF NOT EXISTS sent_transaction_dest_referenced ON sent_transactions(destination);
 CREATE INDEX IF NOT EXISTS sent_transaction_txn_id ON sent_transactions(transaction_id);
 -- So that we can do an efficient look up of all transactions that have yet to be successfully
 -- sent.
diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py
index 13ce335101..35bba854f9 100644
--- a/synapse/storage/signatures.py
+++ b/synapse/storage/signatures.py
@@ -54,7 +54,7 @@ class SignatureStore(SQLBaseStore):
             {
                 "event_id": event_id,
                 "algorithm": algorithm,
-                "hash": buffer(hash_bytes),
+                "hash": hash_bytes,
             },
         )

@@ -99,7 +99,7 @@ class SignatureStore(SQLBaseStore):
             " WHERE event_id = ?"
         )
         txn.execute(query, (event_id, ))
-        return dict(txn.fetchall())
+        return {k: v for k, v in txn.fetchall()}

     def _store_event_reference_hash_txn(self, txn, event_id, algorithm,
                                         hash_bytes):
@@ -116,7 +116,7 @@ class SignatureStore(SQLBaseStore):
             {
                 "event_id": event_id,
                 "algorithm": algorithm,
-                "hash": buffer(hash_bytes),
+                "hash": hash_bytes,
             },
         )

@@ -160,7 +160,7 @@ class SignatureStore(SQLBaseStore):
                 "event_id": event_id,
                 "signature_name": signature_name,
                 "key_id": key_id,
-                "signature": buffer(signature_bytes),
+                "signature": signature_bytes,
             },
         )

@@ -193,6 +193,6 @@ class SignatureStore(SQLBaseStore):
                 "event_id": event_id,
                 "prev_event_id": prev_event_id,
                 "algorithm": algorithm,
-                "hash": buffer(hash_bytes),
+                "hash": hash_bytes,
             },
         )
diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py
index 03e1e3b808..e3e484fb2d 100644
--- a/synapse/storage/transactions.py
+++ b/synapse/storage/transactions.py
@@ -282,7 +282,7 @@ class TransactionStore(SQLBaseStore):
         query = (
             "UPDATE destinations"
             " SET retry_last_ts = ?, retry_interval = ?"
-            " WHERE destinations = ?"
+            " WHERE destination = ?"
         )

         txn.execute(
--
cgit 1.4.1
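Note: the switch from _simple_insert_txn to _simple_upsert_txn for current_state_events replaces a plain INSERT that hits a duplicate-key error whenever a room's state for a given (room_id, type, state_key) is set more than once. The helper itself is Synapse-internal; a minimal sketch of the update-then-insert pattern it implements, under Python 2 semantics:

    def simple_upsert_txn(txn, table, keyvalues, values):
        # Try an UPDATE keyed on `keyvalues` first; INSERT if nothing matched.
        update_sql = "UPDATE %s SET %s WHERE %s" % (
            table,
            ", ".join("%s = ?" % k for k in values),
            " AND ".join("%s = ?" % k for k in keyvalues),
        )
        txn.execute(update_sql, values.values() + keyvalues.values())
        if txn.rowcount == 0:
            allvalues = dict(keyvalues)
            allvalues.update(values)
            insert_sql = "INSERT INTO %s (%s) VALUES (%s)" % (
                table,
                ", ".join(allvalues),
                ", ".join("?" for _ in allvalues),
            )
            txn.execute(insert_sql, allvalues.values())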
From e7887e37a86adbdc2dcb5bd3fbaabf836b168bd8 Mon Sep 17 00:00:00 2001
From: Kegan Dougal
Date: Tue, 31 Mar 2015 09:32:40 +0100
Subject: Remove appservice REST servlets

---
 synapse/app/homeserver.py              |  5 --
 synapse/rest/appservice/__init__.py    | 14 -----
 synapse/rest/appservice/v1/__init__.py | 29 ----------
 synapse/rest/appservice/v1/base.py     | 48 -----------------
 synapse/rest/appservice/v1/register.py | 99 ----------------------------------
 synapse/server.py                      |  1 -
 6 files changed, 196 deletions(-)
 delete mode 100644 synapse/rest/appservice/__init__.py
 delete mode 100644 synapse/rest/appservice/v1/__init__.py
 delete mode 100644 synapse/rest/appservice/v1/base.py
 delete mode 100644 synapse/rest/appservice/v1/register.py

diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 500cae05fb..29ca720d5e 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -32,7 +32,6 @@ from twisted.web.resource import Resource
 from twisted.web.static import File
 from twisted.web.server import Site
 from synapse.http.server import JsonResource, RootRedirect
-from synapse.rest.appservice.v1 import AppServiceRestResource
 from synapse.rest.media.v0.content_repository import ContentRepoResource
 from synapse.rest.media.v1.media_repository import MediaRepositoryResource
 from synapse.http.server_key_resource import LocalKey
@@ -78,9 +77,6 @@ class SynapseHomeServer(HomeServer):
     def build_resource_for_federation(self):
         return JsonResource(self)

-    def build_resource_for_app_services(self):
-        return AppServiceRestResource(self)
-
     def build_resource_for_web_client(self):
         import syweb
         syweb_path = os.path.dirname(syweb.__file__)
@@ -141,7 +137,6 @@ class SynapseHomeServer(HomeServer):
             (CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()),
             (SERVER_KEY_PREFIX, self.get_resource_for_server_key()),
             (MEDIA_PREFIX, self.get_resource_for_media_repository()),
-            (APP_SERVICE_PREFIX, self.get_resource_for_app_services()),
             (STATIC_PREFIX, self.get_resource_for_static_content()),
         ]
diff --git a/synapse/rest/appservice/__init__.py b/synapse/rest/appservice/__init__.py
deleted file mode 100644
index 1a84d94cd9..0000000000
--- a/synapse/rest/appservice/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/synapse/rest/appservice/v1/__init__.py b/synapse/rest/appservice/v1/__init__.py
deleted file mode 100644
index a7877609ad..0000000000
--- a/synapse/rest/appservice/v1/__init__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from . import register
-
-from synapse.http.server import JsonResource
-
-
-class AppServiceRestResource(JsonResource):
-    """A resource for version 1 of the matrix application service API."""
-
-    def __init__(self, hs):
-        JsonResource.__init__(self, hs)
-        self.register_servlets(self, hs)
-
-    @staticmethod
-    def register_servlets(appservice_resource, hs):
-        register.register_servlets(hs, appservice_resource)
diff --git a/synapse/rest/appservice/v1/base.py b/synapse/rest/appservice/v1/base.py
deleted file mode 100644
index 65d5bcf9be..0000000000
--- a/synapse/rest/appservice/v1/base.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This module contains base REST classes for constructing client v1 servlets.
-"""
-
-from synapse.http.servlet import RestServlet
-from synapse.api.urls import APP_SERVICE_PREFIX
-import re
-
-import logging
-
-
-logger = logging.getLogger(__name__)
-
-
-def as_path_pattern(path_regex):
-    """Creates a regex compiled appservice path with the correct path
-    prefix.
-
-    Args:
-        path_regex (str): The regex string to match. This should NOT have a ^
-        as this will be prefixed.
-    Returns:
-        SRE_Pattern
-    """
-    return re.compile("^" + APP_SERVICE_PREFIX + path_regex)
-
-
-class AppServiceRestServlet(RestServlet):
-    """A base Synapse REST Servlet for the application services version 1 API.
-    """
-
-    def __init__(self, hs):
-        self.hs = hs
-        self.handler = hs.get_handlers().appservice_handler
diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py
deleted file mode 100644
index ea24d88f79..0000000000
--- a/synapse/rest/appservice/v1/register.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2015 OpenMarket Ltd
-#
-# Licensensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This module contains REST servlets to do with registration: /register"""
-from twisted.internet import defer
-
-from base import AppServiceRestServlet, as_path_pattern
-from synapse.api.errors import CodeMessageException, SynapseError
-from synapse.storage.appservice import ApplicationService
-
-import json
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class RegisterRestServlet(AppServiceRestServlet):
-    """Handles AS registration with the home server.
-    """
-
-    PATTERN = as_path_pattern("/register$")
-
-    @defer.inlineCallbacks
-    def on_POST(self, request):
-        params = _parse_json(request)
-
-        # sanity check required params
-        try:
-            as_token = params["as_token"]
-            as_url = params["url"]
-            if (not isinstance(as_token, basestring) or
-                    not isinstance(as_url, basestring)):
-                raise ValueError
-        except (KeyError, ValueError):
-            raise SynapseError(
-                400, "Missed required keys: as_token(str) / url(str)."
-            )
-
-        try:
-            app_service = ApplicationService(
-                as_token, as_url, params["namespaces"]
-            )
-        except ValueError as e:
-            raise SynapseError(400, e.message)
-
-        app_service = yield self.handler.register(app_service)
-        hs_token = app_service.hs_token
-
-        defer.returnValue((200, {
-            "hs_token": hs_token
-        }))
-
-
-class UnregisterRestServlet(AppServiceRestServlet):
-    """Handles AS registration with the home server.
-    """
-
-    PATTERN = as_path_pattern("/unregister$")
-
-    def on_POST(self, request):
-        params = _parse_json(request)
-        try:
-            as_token = params["as_token"]
-            if not isinstance(as_token, basestring):
-                raise ValueError
-        except (KeyError, ValueError):
-            raise SynapseError(400, "Missing required key: as_token(str)")
-
-        yield self.handler.unregister(as_token)
-
-        raise CodeMessageException(500, "Not implemented")
-
-
-def _parse_json(request):
-    try:
-        content = json.loads(request.content.read())
-        if type(content) != dict:
-            raise SynapseError(400, "Content must be a JSON object.")
-        return content
-    except ValueError as e:
-        logger.warn(e)
-        raise SynapseError(400, "Content not JSON.")
-
-
-def register_servlets(hs, http_server):
-    RegisterRestServlet(hs).register(http_server)
-    UnregisterRestServlet(hs).register(http_server)
diff --git a/synapse/server.py b/synapse/server.py
index c7772244ba..0bd87bdd77 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -79,7 +79,6 @@ class BaseHomeServer(object):
         'resource_for_content_repo',
         'resource_for_server_key',
         'resource_for_media_repository',
-        'resource_for_app_services',
         'resource_for_metrics',
         'event_sources',
         'ratelimiter',
--
cgit 1.4.1

From 3470cb36a81052d4968d109f99ecbad210b0c820 Mon Sep 17 00:00:00 2001
From: Kegan Dougal
Date: Tue, 31 Mar 2015 13:03:31 +0100
Subject: Pyflakes

---
 synapse/app/homeserver.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 29ca720d5e..afb46d2e23 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -38,8 +38,7 @@ from synapse.http.server_key_resource import LocalKey
 from synapse.http.matrixfederationclient import MatrixFederationHttpClient
 from synapse.api.urls import (
     CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
-    SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, APP_SERVICE_PREFIX,
-    STATIC_PREFIX
+    SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX
 )
 from synapse.config.homeserver import HomeServerConfig
 from synapse.crypto import context_factory
--
cgit 1.4.1
From 9236136f3a4f0d8119d4a6333f37378f8e259e4a Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 1 Apr 2015 14:12:33 +0100
Subject: Make work in both Maria and SQLite. Fix tests

---
 synapse/app/homeserver.py                          | 20 ++++++----
 synapse/storage/__init__.py                        | 43 +++++++++++-----------
 synapse/storage/_base.py                           | 30 ++++++++++-----
 synapse/storage/engines/__init__.py                | 35 ++++++++++++++++++
 synapse/storage/engines/maria.py                   | 30 +++++++++++++++
 synapse/storage/engines/sqlite3.py                 | 25 +++++++++++++
 synapse/storage/keys.py                            |  4 +-
 synapse/storage/registration.py                    |  8 +++-
 synapse/storage/schema/delta/12/v12.sql            |  8 ++--
 synapse/storage/schema/delta/13/v13.sql            |  4 +-
 synapse/storage/schema/delta/14/v14.sql            |  2 +-
 .../storage/schema/full_schemas/11/event_edges.sql | 14 +++----
 .../schema/full_schemas/11/event_signatures.sql    |  8 ++--
 synapse/storage/schema/full_schemas/11/im.sql      | 18 ++++-----
 synapse/storage/schema/full_schemas/11/keys.sql    |  4 +-
 .../schema/full_schemas/11/media_repository.sql    |  8 ++--
 .../storage/schema/full_schemas/11/presence.sql    |  6 +--
 .../storage/schema/full_schemas/11/profiles.sql    |  4 +-
 .../storage/schema/full_schemas/11/redactions.sql  |  2 +-
 .../schema/full_schemas/11/room_aliases.sql        |  8 ++--
 synapse/storage/schema/full_schemas/11/state.sql   |  6 +--
 .../schema/full_schemas/11/transactions.sql        |  8 ++--
 synapse/storage/schema/full_schemas/11/users.sql   | 10 ++---
 synapse/storage/signatures.py                      |  8 ++--
 synapse/storage/stream.py                          |  6 ---
 synapse/util/retryutils.py                         |  2 +-
 tests/federation/test_federation.py                | 10 +++--
 tests/handlers/test_federation.py                  |  9 +++++
 tests/handlers/test_presence.py                    |  7 +++-
 tests/handlers/test_typing.py                      |  7 +++-
 tests/rest/client/v1/test_events.py                |  9 -----
 tests/storage/test_appservice.py                   | 12 ++----
 tests/storage/test_base.py                         | 20 +++++-----
 tests/storage/test_registration.py                 | 36 +++++++++++------
 tests/storage/test_roommember.py                   | 14 +++----
 tests/utils.py                                     | 11 +++++-
 36 files changed, 296 insertions(+), 160 deletions(-)
 create mode 100644 synapse/storage/engines/__init__.py
 create mode 100644 synapse/storage/engines/maria.py
 create mode 100644 synapse/storage/engines/sqlite3.py

diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index beab6ffc7a..b185b2f569 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -20,6 +20,7 @@ sys.dont_write_bytecode = True
 from synapse.storage import (
     prepare_database, prepare_sqlite3_database, UpgradeDatabaseException,
 )
+from synapse.storage.engines import create_engine

 from synapse.server import HomeServer

@@ -376,7 +377,7 @@ def setup(config_options):
     if name in ["MySQLdb", "mysql.connector"]:
         db_config.setdefault("args", {}).update({
             "sql_mode": "TRADITIONAL",
-            "charset": "utf8",
+            "charset": "utf8mb4",
             "use_unicode": True,
         })
     elif name == "sqlite3":
@@ -388,6 +389,8 @@ def setup(config_options):
     else:
         raise RuntimeError("Unsupported database type '%s'" % (name,))

+    database_engine = create_engine(name)
+
     hs = SynapseHomeServer(
         config.server_name,
         domain_with_port=domain_with_port,
@@ -398,6 +401,7 @@ def setup(config_options):
         config=config,
         content_addr=config.content_addr,
         version_string=version_string,
+        database_engine=database_engine,
     )

     hs.create_resource_tree(
@@ -409,12 +413,14 @@ def setup(config_options):
     logger.info("Preparing database: %s...", db_name)

     try:
-        # with sqlite3.connect(db_name) as db_conn:
-        #     prepare_sqlite3_database(db_conn)
-        #     prepare_database(db_conn)
-        import mysql.connector
-        db_conn = mysql.connector.connect(**db_config.get("args", {}))
-        prepare_database(db_conn)
+        db_conn = database_engine.module.connect(**db_config.get("args", {}))
+
+        if name == "sqlite3":
+            prepare_sqlite3_database(db_conn)
+
+        prepare_database(db_conn, database_engine)
+
+        db_conn.commit()
     except UpgradeDatabaseException:
         sys.stderr.write(
             "\nFailed to upgrade database.\n"
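Note: for sqlite3 the injected cp_* keys are consumed by Twisted's adbapi.ConnectionPool itself rather than passed to sqlite3.connect(): cp_min=1/cp_max=1 pin everything to a single connection so a :memory: database survives between queries, and cp_openfun runs against each freshly opened connection. A sketch of the same arrangement in isolation, with a stand-in prepare function:

    from twisted.enterprise import adbapi

    def prepare(conn):
        # cp_openfun: runs once per new connection in the pool.
        conn.execute("CREATE TABLE IF NOT EXISTS example (id INTEGER)")

    pool = adbapi.ConnectionPool(
        "sqlite3", ":memory:",
        check_same_thread=False,  # queries run on Twisted's threadpool
        cp_min=1, cp_max=1,       # a single shared connection
        cp_openfun=prepare,
    )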
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index abde7d0df5..f8053484cf 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -77,9 +77,6 @@ class DataStore(RoomMemberStore, RoomStore,
         self.min_token_deferred = self._get_min_token()
         self.min_token = None

-        self._next_stream_id_lock = threading.Lock()
-        self._next_stream_id = int(hs.get_clock().time_msec()) * 1000
-
     def insert_client_ip(self, user, access_token, device_id, ip, user_agent):
         return self._simple_upsert(
             "user_ips",
@@ -127,19 +124,21 @@ class UpgradeDatabaseException(PrepareDatabaseException):
     pass


-def prepare_database(db_conn):
+def prepare_database(db_conn, database_engine):
     """Prepares a database for usage. Will either create all necessary tables
     or upgrade from an older schema version.
     """
     try:
         cur = db_conn.cursor()
-        version_info = _get_or_create_schema_state(cur)
+        version_info = _get_or_create_schema_state(cur, database_engine)

         if version_info:
             user_version, delta_files, upgraded = version_info
-            _upgrade_existing_database(cur, user_version, delta_files, upgraded)
+            _upgrade_existing_database(
+                cur, user_version, delta_files, upgraded, database_engine
+            )
         else:
-            _setup_new_database(cur)
+            _setup_new_database(cur, database_engine)

         # cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,))

@@ -150,7 +149,7 @@ def prepare_database(db_conn):
         raise


-def _setup_new_database(cur):
+def _setup_new_database(cur, database_engine):
     """Sets up the database by finding a base set of "full schemas" and
     then applying any necessary deltas.
@@ -210,7 +209,7 @@ def _setup_new_database(cur):
         executescript(cur, sql_loc)

     cur.execute(
-        _convert_param_style(
+        database_engine.convert_param_style(
            "REPLACE INTO schema_version (version, upgraded)"
            " VALUES (?,?)"
        ),
@@ -221,12 +220,13 @@ def _setup_new_database(cur):
         cur,
         current_version=max_current_ver,
         applied_delta_files=[],
-        upgraded=False
+        upgraded=False,
+        database_engine=database_engine,
     )


 def _upgrade_existing_database(cur, current_version, applied_delta_files,
-                               upgraded):
+                               upgraded, database_engine):
     """Upgrades an existing database.

     Delta files can either be SQL stored in *.sql files, or python modules
@@ -335,26 +335,22 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files,

             # Mark as done.
             cur.execute(
-                _convert_param_style(
+                database_engine.convert_param_style(
                     "INSERT INTO applied_schema_deltas (version, file)"
-                    " VALUES (?,?)"
+                    " VALUES (?,?)",
                 ),
                 (v, relative_path)
             )

             cur.execute(
-                _convert_param_style(
+                database_engine.convert_param_style(
                     "REPLACE INTO schema_version (version, upgraded)"
-                    " VALUES (?,?)"
+                    " VALUES (?,?)",
                 ),
                 (v, True)
             )


-def _convert_param_style(sql):
-    return sql.replace("?", "%s")
-
-
 def get_statements(f):
     statement_buffer = ""
     in_comment = False  # If we're in a /* ... */ style comment
@@ -409,7 +405,7 @@ def executescript(txn, schema_path):
         txn.execute(statement)


-def _get_or_create_schema_state(txn):
+def _get_or_create_schema_state(txn, database_engine):
     try:
         # Bluntly try creating the schema_version tables.
         schema_path = os.path.join(
@@ -426,7 +422,7 @@ def _get_or_create_schema_state(txn, database_engine):
     if current_version:
         txn.execute(
-            _convert_param_style(
+            database_engine.convert_param_style(
                 "SELECT file FROM applied_schema_deltas WHERE version >= ?"
             ),
             (current_version,)
@@ -446,6 +442,8 @@ def prepare_sqlite3_database(db_conn):
     new. This only affects sqlite databases since they were the only ones
     supported at the time.
     """
+    import sqlite3
+
     with db_conn:
         schema_path = os.path.join(
             dir_path, "schema", "schema_version.sql",
@@ -466,7 +464,8 @@ def prepare_sqlite3_database(db_conn):
             db_conn.execute(
                 _convert_param_style(
                     "REPLACE INTO schema_version (version, upgraded)"
-                    " VALUES (?,?)"
+                    " VALUES (?,?)",
+                    sqlite3
                 ),
                 (row[0], False)
             )
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 76ec3ee93f..047d100f46 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -29,6 +29,7 @@ import functools
 import simplejson as json
 import sys
 import time
+import threading

 logger = logging.getLogger(__name__)

@@ -118,19 +119,16 @@ def cached(max_entries=1000, num_args=1):
     return wrap


-def _convert_param_style(sql):
-    return sql.replace("?", "%s")
-
-
 class LoggingTransaction(object):
     """An object that almost-transparently proxies for the 'txn' object
     passed to the constructor. Adds logging and metrics to the .execute()
     method."""
-    __slots__ = ["txn", "name"]
+    __slots__ = ["txn", "name", "database_engine"]

-    def __init__(self, txn, name):
+    def __init__(self, txn, name, database_engine):
         object.__setattr__(self, "txn", txn)
         object.__setattr__(self, "name", name)
+        object.__setattr__(self, "database_engine", database_engine)

     def __getattr__(self, name):
         return getattr(self.txn, name)
@@ -142,7 +140,7 @@ class LoggingTransaction(object):
         # TODO(paul): Maybe use 'info' and 'debug' for values?
         sql_logger.debug("[SQL] {%s} %s", self.name, sql)

-        sql = _convert_param_style(sql)
+        sql = self.database_engine.convert_param_style(sql)

         try:
             if args and args[0]:
@@ -227,9 +225,14 @@ class SQLBaseStore(object):

         self._get_event_cache = LruCache(hs.config.event_cache_size)

+        self.database_engine = hs.database_engine
+
         # Pretend the getEventCache is just another named cache
         caches_by_name["*getEvent*"] = self._get_event_cache

+        self._next_stream_id_lock = threading.Lock()
+        self._next_stream_id = int(hs.get_clock().time_msec()) * 1000
+
     def start_profiling(self):
         self._previous_loop_ts = self._clock.time_msec()

@@ -281,7 +284,10 @@ class SQLBaseStore(object):
             sql_scheduling_timer.inc_by(time.time() * 1000 - start_time)
             transaction_logger.debug("[TXN START] {%s}", name)
             try:
-                return func(LoggingTransaction(txn, name), *args, **kwargs)
+                return func(
+                    LoggingTransaction(txn, name, self.database_engine),
+                    *args, **kwargs
+                )
             except:
                 logger.exception("[TXN FAIL] {%s}", name)
                 raise
@@ -588,7 +594,7 @@ class SQLBaseStore(object):
         select_sql = "SELECT %s FROM %s WHERE %s" % (
             ", ".join(retcols),
             table,
             " AND ".join("%s = ?" % (k,) for k in keyvalues)
         )

         txn.execute(select_sql, keyvalues.values())
@@ -836,6 +842,12 @@ class SQLBaseStore(object):
         result = txn.fetchone()
         return result[0] if result else None

+    def get_next_stream_id(self):
+        with self._next_stream_id_lock:
+            i = self._next_stream_id
+            self._next_stream_id += 1
+            return i
+

 class _RollbackButIsFineException(Exception):
     """ This exception is used to rollback a transaction without implying
diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py
new file mode 100644
index 0000000000..709b6f88ac
--- /dev/null
+++ b/synapse/storage/engines/__init__.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .maria import MariaEngine
+from .sqlite3 import Sqlite3Engine
+
+
+SUPPORTED_MODULE = {
+    "sqlite3": Sqlite3Engine,
+    "mysql.connector": MariaEngine,
+}
+
+
+def create_engine(name):
+    engine_class = SUPPORTED_MODULE.get(name, None)
+
+    if engine_class:
+        module = __import__(name)
+        return engine_class(module)
+
+    raise RuntimeError(
+        "Unsupported database engine '%s'" % (name,)
+    )
diff --git a/synapse/storage/engines/maria.py b/synapse/storage/engines/maria.py
new file mode 100644
index 0000000000..df47763647
--- /dev/null
+++ b/synapse/storage/engines/maria.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import types
+
+
+class MariaEngine(object):
+    def __init__(self, database_module):
+        self.module = database_module
+
+    def convert_param_style(self, sql):
+        return sql.replace("?", "%s")
+
+    def encode_parameter(self, param):
+        if isinstance(param, types.BufferType):
+            return str(param)
+        return param
diff --git a/synapse/storage/engines/sqlite3.py b/synapse/storage/engines/sqlite3.py
new file mode 100644
index 0000000000..639cdea41d
--- /dev/null
+++ b/synapse/storage/engines/sqlite3.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class Sqlite3Engine(object):
+    def __init__(self, database_module):
+        self.module = database_module
+
+    def convert_param_style(self, sql):
+        return sql
+
+    def encode_parameter(self, param):
+        return param
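Note: with the engines package in place, callers stop importing drivers directly: create_engine() imports the module named in the config and wraps it, so connections come from engine.module and SQL is normalised through engine.convert_param_style(). A usage sketch (sqlite3 shown, since it needs no external driver):

    from synapse.storage.engines import create_engine

    engine = create_engine("sqlite3")
    conn = engine.module.connect(":memory:")

    cur = conn.cursor()
    cur.execute("CREATE TABLE users (name TEXT)")

    # Qmark-style SQL works unchanged here; the MariaEngine would have
    # rewritten "?" to "%s" before execution.
    sql = engine.convert_param_style("INSERT INTO users VALUES (?)")
    cur.execute(sql, ("@alice:example.com",))
    conn.commit()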
diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py
index e6975a945b..25fef79434 100644
--- a/synapse/storage/keys.py
+++ b/synapse/storage/keys.py
@@ -64,7 +64,7 @@ class KeyStore(SQLBaseStore):
                 "fingerprint": fingerprint,
                 "from_server": from_server,
                 "ts_added_ms": time_now_ms,
-                "tls_certificate": tls_certificate_bytes,
+                "tls_certificate": buffer(tls_certificate_bytes),
             },
         )

@@ -113,6 +113,6 @@ class KeyStore(SQLBaseStore):
                 "key_id": "%s:%s" % (verify_key.alg, verify_key.version),
                 "from_server": from_server,
                 "ts_added_ms": time_now_ms,
-                "verify_key": verify_key.encode(),
+                "verify_key": buffer(verify_key.encode()),
             },
         )
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 7258f7b2a5..0c785ec989 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -42,6 +42,7 @@ class RegistrationStore(SQLBaseStore):
         yield self._simple_insert(
             "access_tokens",
             {
+                "id": self.get_next_stream_id(),
                 "user_id": user_id,
                 "token": token
             },
@@ -78,8 +79,11 @@ class RegistrationStore(SQLBaseStore):

         # it's possible for this to get a conflict, but only for a single user
         # since tokens are namespaced based on their user ID
-        txn.execute("INSERT INTO access_tokens(user_id, token) " +
-                    "VALUES (?,?)", [user_id, token])
+        txn.execute(
+            "INSERT INTO access_tokens(id, user_id, token)"
+            " VALUES (?,?,?)",
+            (self.get_next_stream_id(), user_id, token,)
+        )
diff --git a/synapse/storage/schema/delta/12/v12.sql b/synapse/storage/schema/delta/12/v12.sql
index b526109e6e..90ac474859 100644
--- a/synapse/storage/schema/delta/12/v12.sql
+++ b/synapse/storage/schema/delta/12/v12.sql
@@ -18,7 +18,7 @@ CREATE TABLE IF NOT EXISTS rejections(
     reason VARCHAR(255) NOT NULL,
     last_check VARCHAR(255) NOT NULL,
     UNIQUE (event_id)
-) ENGINE = INNODB;
+) ;

 -- Push notification endpoints that users have configured
 CREATE TABLE IF NOT EXISTS pushers (
@@ -37,7 +37,7 @@ CREATE TABLE IF NOT EXISTS pushers (
   last_success BIGINT,
   failing_since BIGINT,
   UNIQUE (app_id, pushkey)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS push_rules (
   id BIGINT PRIMARY KEY,
@@ -48,7 +48,7 @@ CREATE TABLE IF NOT EXISTS push_rules (
   conditions VARCHAR(255) NOT NULL,
   actions VARCHAR(255) NOT NULL,
   UNIQUE(user_name, rule_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS push_rules_user_name on push_rules (user_name);

@@ -56,7 +56,7 @@ CREATE TABLE IF NOT EXISTS user_filters(
   user_id VARCHAR(255),
   filter_id BIGINT,
   filter_json BLOB
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS user_filters_by_user_id_filter_id ON user_filters(
     user_id, filter_id
diff --git a/synapse/storage/schema/delta/13/v13.sql b/synapse/storage/schema/delta/13/v13.sql
index f0a5daf445..4953b6323c 100644
--- a/synapse/storage/schema/delta/13/v13.sql
+++ b/synapse/storage/schema/delta/13/v13.sql
@@ -20,7 +20,7 @@ CREATE TABLE IF NOT EXISTS application_services(
     hs_token VARCHAR(255),
     sender VARCHAR(255),
     UNIQUE(token)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS application_services_regex(
     id BIGINT PRIMARY KEY,
@@ -28,4 +28,4 @@ CREATE TABLE IF NOT EXISTS application_services_regex(
     namespace INTEGER,  /* enum[room_id|room_alias|user_id] */
     regex VARCHAR(255),
     FOREIGN KEY(as_id) REFERENCES application_services(id)
-) ENGINE = INNODB;
+) ;
diff --git a/synapse/storage/schema/delta/14/v14.sql b/synapse/storage/schema/delta/14/v14.sql
index a1260c5c1f..3bda073c94 100644
--- a/synapse/storage/schema/delta/14/v14.sql
+++ b/synapse/storage/schema/delta/14/v14.sql
@@ -4,6 +4,6 @@ CREATE TABLE IF NOT EXISTS push_rules_enable (
   rule_id VARCHAR(255) NOT NULL,
   enabled TINYINT,
   UNIQUE(user_name, rule_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS push_rules_enable_user_name on push_rules_enable (user_name);
diff --git a/synapse/storage/schema/full_schemas/11/event_edges.sql b/synapse/storage/schema/full_schemas/11/event_edges.sql
index 0f53488e92..336cd563df 100644
--- a/synapse/storage/schema/full_schemas/11/event_edges.sql
+++ b/synapse/storage/schema/full_schemas/11/event_edges.sql
@@ -17,7 +17,7 @@ CREATE TABLE IF NOT EXISTS event_forward_extremities(
     event_id VARCHAR(255) NOT NULL,
     room_id VARCHAR(255) NOT NULL,
     UNIQUE (event_id, room_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS ev_extrem_room ON event_forward_extremities(room_id);
 CREATE INDEX IF NOT EXISTS ev_extrem_id ON event_forward_extremities(event_id);
@@ -27,7 +27,7 @@ CREATE TABLE IF NOT EXISTS event_backward_extremities(
     event_id VARCHAR(255) NOT NULL,
     room_id VARCHAR(255) NOT NULL,
     UNIQUE (event_id, room_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS ev_b_extrem_room ON event_backward_extremities(room_id);
 CREATE INDEX IF NOT EXISTS ev_b_extrem_id ON event_backward_extremities(event_id);
@@ -39,7 +39,7 @@ CREATE TABLE IF NOT EXISTS event_edges(
     room_id VARCHAR(255) NOT NULL,
     is_state BOOL NOT NULL,
     UNIQUE (event_id, prev_event_id, room_id, is_state)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS ev_edges_id ON event_edges(event_id);
 CREATE INDEX IF NOT EXISTS ev_edges_prev_id ON event_edges(prev_event_id);
@@ -49,7 +49,7 @@ CREATE TABLE IF NOT EXISTS room_depth(
     room_id VARCHAR(255) NOT NULL,
     min_depth INTEGER NOT NULL,
     UNIQUE (room_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS room_depth_room ON room_depth(room_id);
@@ -59,7 +59,7 @@ create TABLE IF NOT EXISTS event_destinations(
     destination VARCHAR(255) NOT NULL,
     delivered_ts BIGINT DEFAULT 0, -- or 0 if not delivered
     UNIQUE (event_id, destination)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS event_destinations_id ON event_destinations(event_id);
@@ -70,7 +70,7 @@ CREATE TABLE IF NOT EXISTS state_forward_extremities(
     type VARCHAR(255) NOT NULL,
     state_key VARCHAR(255) NOT NULL,
     UNIQUE (event_id, room_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS st_extrem_keys ON state_forward_extremities(
     room_id, type, state_key
@@ -83,7 +83,7 @@ CREATE TABLE IF NOT EXISTS event_auth(
     auth_id VARCHAR(255) NOT NULL,
     room_id VARCHAR(255) NOT NULL,
     UNIQUE (event_id, auth_id, room_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS evauth_edges_id ON event_auth(event_id);
 CREATE INDEX IF NOT EXISTS evauth_edges_auth_id ON event_auth(auth_id);
diff --git a/synapse/storage/schema/full_schemas/11/event_signatures.sql b/synapse/storage/schema/full_schemas/11/event_signatures.sql
index 334d7c8680..11e611598b 100644
--- a/synapse/storage/schema/full_schemas/11/event_signatures.sql
+++ b/synapse/storage/schema/full_schemas/11/event_signatures.sql
@@ -18,7 +18,7 @@ CREATE TABLE IF NOT EXISTS event_content_hashes (
     algorithm VARCHAR(255),
     hash BLOB,
     UNIQUE (event_id, algorithm)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS event_content_hashes_id ON event_content_hashes(event_id);
@@ -28,7 +28,7 @@ CREATE TABLE IF NOT EXISTS event_reference_hashes (
     algorithm VARCHAR(255),
     hash BLOB,
     UNIQUE (event_id, algorithm)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS event_reference_hashes_id ON event_reference_hashes(event_id);
@@ -39,7 +39,7 @@ CREATE TABLE IF NOT EXISTS event_signatures (
     key_id VARCHAR(255),
     signature BLOB,
     UNIQUE (event_id, signature_name, key_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS event_signatures_id ON event_signatures(event_id);
@@ -50,6 +50,6 @@ CREATE TABLE IF NOT EXISTS event_edge_hashes(
     algorithm VARCHAR(255),
     hash BLOB,
     UNIQUE (event_id, prev_event_id, algorithm)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS event_edge_hashes_id ON event_edge_hashes(event_id);
diff --git a/synapse/storage/schema/full_schemas/11/im.sql b/synapse/storage/schema/full_schemas/11/im.sql
index 9849e969be..a0fb337629 100644
--- a/synapse/storage/schema/full_schemas/11/im.sql
+++ b/synapse/storage/schema/full_schemas/11/im.sql
@@ -25,7 +25,7 @@ CREATE TABLE IF NOT EXISTS events(
     outlier BOOL NOT NULL,
     depth BIGINT DEFAULT 0 NOT NULL,
     UNIQUE (event_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS events_stream_ordering ON events (stream_ordering);
 CREATE INDEX IF NOT EXISTS events_topological_ordering ON events (topological_ordering);
@@ -38,7 +38,7 @@ CREATE TABLE IF NOT EXISTS event_json(
     internal_metadata BLOB NOT NULL,
     json BLOB NOT NULL,
     UNIQUE (event_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS event_json_room_id ON event_json(room_id);
@@ -50,7 +50,7 @@ CREATE TABLE IF NOT EXISTS state_events(
     state_key VARCHAR(255) NOT NULL,
     prev_state VARCHAR(255),
     UNIQUE (event_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS state_events_room_id ON state_events (room_id);
 CREATE INDEX IF NOT EXISTS state_events_type ON state_events (type);
@@ -64,7 +64,7 @@ CREATE TABLE IF NOT EXISTS current_state_events(
     state_key VARCHAR(255) NOT NULL,
     UNIQUE (event_id),
     UNIQUE (room_id, type, state_key)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS current_state_events_room_id ON current_state_events (room_id);
 CREATE INDEX IF NOT EXISTS current_state_events_type ON current_state_events (type);
@@ -77,7 +77,7 @@ CREATE TABLE IF NOT EXISTS room_memberships(
     room_id VARCHAR(255) NOT NULL,
     membership VARCHAR(255) NOT NULL,
     UNIQUE (event_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS room_memberships_room_id ON room_memberships (room_id);
 CREATE INDEX IF NOT EXISTS room_memberships_user_id ON room_memberships (user_id);
@@ -89,14 +89,14 @@ CREATE TABLE IF NOT EXISTS feedback(
     sender VARCHAR(255),
     room_id VARCHAR(255),
     UNIQUE (event_id)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS topics(
     event_id VARCHAR(255) NOT NULL,
     room_id VARCHAR(255) NOT NULL,
     topic VARCHAR(255) NOT NULL,
     UNIQUE (event_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS topics_room_id ON topics(room_id);
@@ -113,12 +113,12 @@ CREATE TABLE IF NOT EXISTS rooms(
     room_id VARCHAR(255) PRIMARY KEY NOT NULL,
     is_public BOOL,
     creator VARCHAR(255)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS room_hosts(
     room_id VARCHAR(255) NOT NULL,
     host VARCHAR(255) NOT NULL,
     UNIQUE (room_id, host)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS room_hosts_room_id ON room_hosts (room_id);
diff --git a/synapse/storage/schema/full_schemas/11/keys.sql b/synapse/storage/schema/full_schemas/11/keys.sql
index c0f2ec29bb..a785cdb4c5 100644
--- a/synapse/storage/schema/full_schemas/11/keys.sql
+++ b/synapse/storage/schema/full_schemas/11/keys.sql
@@ -19,7 +19,7 @@ CREATE TABLE IF NOT EXISTS server_tls_certificates(
   ts_added_ms BIGINT, -- When the certifcate was added.
   tls_certificate BLOB, -- DER encoded x509 certificate.
   UNIQUE (server_name, fingerprint)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS server_signature_keys(
   server_name VARCHAR(255), -- Server name.
@@ -28,4 +28,4 @@ CREATE TABLE IF NOT EXISTS server_signature_keys(
   ts_added_ms BIGINT, -- When the key was added.
   verify_key BLOB, -- NACL verification key.
   UNIQUE (server_name, key_id)
-) ENGINE = INNODB;
+) ;
diff --git a/synapse/storage/schema/full_schemas/11/media_repository.sql b/synapse/storage/schema/full_schemas/11/media_repository.sql
index d9559f5902..27fe297af6 100644
--- a/synapse/storage/schema/full_schemas/11/media_repository.sql
+++ b/synapse/storage/schema/full_schemas/11/media_repository.sql
@@ -21,7 +21,7 @@ CREATE TABLE IF NOT EXISTS local_media_repository (
     upload_name VARCHAR(255), -- The name the media was uploaded with.
     user_id VARCHAR(255), -- The user who uploaded the file.
     UNIQUE (media_id)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS local_media_repository_thumbnails (
     media_id VARCHAR(255), -- The id used to refer to the media.
@@ -33,7 +33,7 @@ CREATE TABLE IF NOT EXISTS local_media_repository_thumbnails (
     UNIQUE (
         media_id, thumbnail_width, thumbnail_height, thumbnail_type
     )
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS local_media_repository_thumbnails_media_id
     ON local_media_repository_thumbnails (media_id);
@@ -47,7 +47,7 @@ CREATE TABLE IF NOT EXISTS remote_media_cache (
     media_length INTEGER, -- Length of the media in bytes.
     filesystem_id VARCHAR(255), -- The name used to store the media on disk.
     UNIQUE (media_origin, media_id)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS remote_media_cache_thumbnails (
     media_origin VARCHAR(255), -- The remote HS the media came from.
@@ -62,7 +62,7 @@ CREATE TABLE IF NOT EXISTS remote_media_cache_thumbnails (
         media_origin, media_id, thumbnail_width, thumbnail_height,
         thumbnail_type
     )
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS remote_media_cache_thumbnails_media_id
     ON remote_media_cache_thumbnails (media_id);
diff --git a/synapse/storage/schema/full_schemas/11/presence.sql b/synapse/storage/schema/full_schemas/11/presence.sql
index 8031321083..b48b110ae9 100644
--- a/synapse/storage/schema/full_schemas/11/presence.sql
+++ b/synapse/storage/schema/full_schemas/11/presence.sql
@@ -18,7 +18,7 @@ CREATE TABLE IF NOT EXISTS presence(
   status_msg VARCHAR(255),
   mtime BIGINT, -- miliseconds since last state change
   UNIQUE(user_id)
-) ENGINE = INNODB;
+) ;

 -- For each of /my/ users which possibly-remote users are allowed to see their
 -- presence state
@@ -26,7 +26,7 @@ CREATE TABLE IF NOT EXISTS presence_allow_inbound(
   observed_user_id VARCHAR(255) NOT NULL,
   observer_user_id VARCHAR(255), -- a UserID,
   UNIQUE(observed_user_id)
-) ENGINE = INNODB;
+) ;

 -- For each of /my/ users (watcher), which possibly-remote users are they
 -- watching?
@@ -35,4 +35,4 @@ CREATE TABLE IF NOT EXISTS presence_list(
   observed_user_id VARCHAR(255), -- a UserID,
   accepted BOOLEAN,
   UNIQUE(user_id)
-) ENGINE = INNODB;
+) ;
diff --git a/synapse/storage/schema/full_schemas/11/profiles.sql b/synapse/storage/schema/full_schemas/11/profiles.sql
index 552645c56f..92da48f97e 100644
--- a/synapse/storage/schema/full_schemas/11/profiles.sql
+++ b/synapse/storage/schema/full_schemas/11/profiles.sql
@@ -14,7 +14,7 @@
 */
 CREATE TABLE IF NOT EXISTS profiles(
     user_id VARCHAR(255) NOT NULL,
-    displayname VARBINARY(255),
+    displayname VARCHAR(255),
     avatar_url VARCHAR(255),
     UNIQUE(user_id)
-) ENGINE = INNODB;
+) ;
diff --git a/synapse/storage/schema/full_schemas/11/redactions.sql b/synapse/storage/schema/full_schemas/11/redactions.sql
index ba93e860f6..9b52a2012a 100644
--- a/synapse/storage/schema/full_schemas/11/redactions.sql
+++ b/synapse/storage/schema/full_schemas/11/redactions.sql
@@ -16,7 +16,7 @@ CREATE TABLE IF NOT EXISTS redactions (
     event_id VARCHAR(255) NOT NULL,
     redacts VARCHAR(255) NOT NULL,
     UNIQUE (event_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS redactions_event_id ON redactions (event_id);
 CREATE INDEX IF NOT EXISTS redactions_redacts ON redactions (redacts);
diff --git a/synapse/storage/schema/full_schemas/11/room_aliases.sql b/synapse/storage/schema/full_schemas/11/room_aliases.sql
index 1e706aac2b..220df87573 100644
--- a/synapse/storage/schema/full_schemas/11/room_aliases.sql
+++ b/synapse/storage/schema/full_schemas/11/room_aliases.sql
@@ -14,12 +14,12 @@
 */

 CREATE TABLE IF NOT EXISTS room_aliases(
-    room_alias VARCHAR(255) NOT NULL,
+    room_alias VARBINARY(255) NOT NULL,
     room_id VARCHAR(255) NOT NULL,
     UNIQUE (room_alias)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS room_alias_servers(
-    room_alias VARCHAR(255) NOT NULL,
+    room_alias VARBINARY(255) NOT NULL,
     server VARCHAR(255) NOT NULL
-) ENGINE = INNODB;
+) ;
diff --git a/synapse/storage/schema/full_schemas/11/state.sql b/synapse/storage/schema/full_schemas/11/state.sql
index be9dc2920d..40584a325f 100644
--- a/synapse/storage/schema/full_schemas/11/state.sql
+++ b/synapse/storage/schema/full_schemas/11/state.sql
@@ -17,7 +17,7 @@ CREATE TABLE IF NOT EXISTS state_groups(
     id VARCHAR(20) PRIMARY KEY,
     room_id VARCHAR(255) NOT NULL,
     event_id VARCHAR(255) NOT NULL
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS state_groups_state(
     state_group VARCHAR(20) NOT NULL,
@@ -25,13 +25,13 @@ CREATE TABLE IF NOT EXISTS state_groups_state(
     type VARCHAR(255) NOT NULL,
     state_key VARCHAR(255) NOT NULL,
     event_id VARCHAR(255) NOT NULL
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS event_to_state_groups(
     event_id VARCHAR(255) NOT NULL,
     state_group VARCHAR(255) NOT NULL,
     UNIQUE (event_id)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS state_groups_id ON state_groups(id);
diff --git a/synapse/storage/schema/full_schemas/11/transactions.sql b/synapse/storage/schema/full_schemas/11/transactions.sql
index bd13bba8c2..d33bdfb301 100644
--- a/synapse/storage/schema/full_schemas/11/transactions.sql
+++ b/synapse/storage/schema/full_schemas/11/transactions.sql
@@ -21,7 +21,7 @@ CREATE TABLE IF NOT EXISTS received_transactions(
     response_json BLOB,
     has_been_referenced BOOL default 0, -- Whether thishas been referenced by a prev_tx
     UNIQUE (transaction_id, origin)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS transactions_have_ref ON received_transactions(origin, has_been_referenced);-- WHERE has_been_referenced = 0;
@@ -35,7 +35,7 @@ CREATE TABLE IF NOT EXISTS sent_transactions(
     response_code INTEGER DEFAULT 0,
     response_json BLOB,
     ts BIGINT
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS sent_transaction_dest ON sent_transactions(destination);
 CREATE INDEX IF NOT EXISTS sent_transaction_txn_id ON sent_transactions(transaction_id);
 -- So that we can do an efficient look up of all transactions that have yet to be successfully
 -- sent.
@@ -51,7 +51,7 @@ CREATE TABLE IF NOT EXISTS transaction_id_to_pdu(
     pdu_id VARCHAR(255),
     pdu_origin VARCHAR(255),
     UNIQUE (transaction_id, destination)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS transaction_id_to_pdu_dest ON transaction_id_to_pdu(destination);
@@ -60,4 +60,4 @@ CREATE TABLE IF NOT EXISTS destinations(
     destination VARCHAR(255) PRIMARY KEY,
     retry_last_ts BIGINT,
     retry_interval INTEGER
-) ENGINE = INNODB;
+) ;
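Note: replacing "ENGINE = INNODB" with a bare ") ;" is what lets one set of schema files load on both databases: sqlite rejects the MySQL-only table option, while MySQL simply falls back to its default storage engine. A quick sqlite syntax check for DDL like the above, as a sketch:

    import sqlite3

    DDL = """
    CREATE TABLE IF NOT EXISTS profiles(
        user_id VARCHAR(255) NOT NULL,
        displayname VARCHAR(255),
        avatar_url VARCHAR(255),
        UNIQUE(user_id)
    ) ;
    """

    conn = sqlite3.connect(":memory:")
    conn.executescript(DDL)  # raises sqlite3.OperationalError on bad syntax
    conn.close()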
diff --git a/synapse/storage/schema/full_schemas/11/users.sql b/synapse/storage/schema/full_schemas/11/users.sql
index 55bffb22f3..28909f5805 100644
--- a/synapse/storage/schema/full_schemas/11/users.sql
+++ b/synapse/storage/schema/full_schemas/11/users.sql
@@ -14,20 +14,20 @@
 */
 CREATE TABLE IF NOT EXISTS users(
     name VARCHAR(255),
-    password_hash VARBINARY(255),
+    password_hash VARCHAR(255),
     creation_ts BIGINT,
     admin BOOL DEFAULT 0 NOT NULL,
     UNIQUE(name)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS access_tokens(
-    id INTEGER PRIMARY KEY AUTO_INCREMENT,
+    id BIGINT PRIMARY KEY,
     user_id VARCHAR(255) NOT NULL,
     device_id VARCHAR(255),
     token VARCHAR(255) NOT NULL,
     last_used BIGINT,
     UNIQUE(token)
-) ENGINE = INNODB;
+) ;

 CREATE TABLE IF NOT EXISTS user_ips (
     user VARCHAR(255) NOT NULL,
@@ -37,6 +37,6 @@ CREATE TABLE IF NOT EXISTS user_ips (
     user_agent VARCHAR(255) NOT NULL,
     last_seen BIGINT NOT NULL,
     UNIQUE (user, access_token, ip, user_agent)
-) ENGINE = INNODB;
+) ;

 CREATE INDEX IF NOT EXISTS user_ips_user ON user_ips(user);
diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py
index 35bba854f9..f051828630 100644
--- a/synapse/storage/signatures.py
+++ b/synapse/storage/signatures.py
@@ -54,7 +54,7 @@ class SignatureStore(SQLBaseStore):
             {
                 "event_id": event_id,
                 "algorithm": algorithm,
-                "hash": hash_bytes,
+                "hash": buffer(hash_bytes),
             },
         )

@@ -116,7 +116,7 @@ class SignatureStore(SQLBaseStore):
             {
                 "event_id": event_id,
                 "algorithm": algorithm,
-                "hash": hash_bytes,
+                "hash": buffer(hash_bytes),
             },
         )

@@ -160,7 +160,7 @@ class SignatureStore(SQLBaseStore):
                 "event_id": event_id,
                 "signature_name": signature_name,
                 "key_id": key_id,
-                "signature": signature_bytes,
+                "signature": buffer(signature_bytes),
             },
         )

@@ -193,6 +193,6 @@ class SignatureStore(SQLBaseStore):
                 "event_id": event_id,
                 "prev_event_id": prev_event_id,
                 "algorithm": algorithm,
-                "hash": hash_bytes,
+                "hash": buffer(hash_bytes),
             },
         )
diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py
index 3a310cd003..e6bb5a8077 100644
--- a/synapse/storage/stream.py
+++ b/synapse/storage/stream.py
@@ -433,12 +433,6 @@ class StreamStore(SQLBaseStore):

         defer.returnValue(self.min_token)

-    def get_next_stream_id(self):
-        with self._next_stream_id_lock:
-            i = self._next_stream_id
-            self._next_stream_id += 1
-            return i
-
     def _get_room_events_max_id_txn(self, txn):
         txn.execute(
             "SELECT MAX(stream_ordering) as m FROM events"
diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py
index 4e82232796..a42138f556 100644
--- a/synapse/util/retryutils.py
+++ b/synapse/util/retryutils.py
@@ -60,7 +60,7 @@ def get_retry_limiter(destination, clock, store, **kwargs):

     if retry_timings:
         retry_last_ts, retry_interval = (
-            retry_timings.retry_last_ts, retry_timings.retry_interval
+            retry_timings["retry_last_ts"], retry_timings["retry_interval"]
         )

         now = int(clock.time_msec())
diff --git a/tests/federation/test_federation.py b/tests/federation/test_federation.py
index 2ecd00d2ad..a4ef60b911 100644
--- a/tests/federation/test_federation.py
+++ b/tests/federation/test_federation.py
@@ -24,8 +24,6 @@ from ..utils import MockHttpResource, MockClock, setup_test_homeserver
 from synapse.federation import initialize_http_replication
 from synapse.events import FrozenEvent

-from synapse.storage.transactions import DestinationsTable
-
 def make_pdu(prev_pdus=[], **kwargs):
     """Provide some default fields for making a PduTuple."""
@@ -57,8 +55,14 @@ class FederationTestCase(unittest.TestCase):
         self.mock_persistence.get_received_txn_response.return_value = (
             defer.succeed(None)
         )
+
+        retry_timings_res = {
+            "destination": "",
+            "retry_last_ts": 0,
+            "retry_interval": 0,
+        }
         self.mock_persistence.get_destination_retry_timings.return_value = (
-            defer.succeed(DestinationsTable.EntryType("", 0, 0))
+            defer.succeed(retry_timings_res)
         )
         self.mock_persistence.get_auth_chain.return_value = []
         self.clock = MockClock()
diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py
index c13ade3286..08d2404b6c 100644
--- a/tests/handlers/test_federation.py
+++ b/tests/handlers/test_federation.py
@@ -87,6 +87,15 @@ class FederationTestCase(unittest.TestCase):
         self.datastore.get_room.return_value = defer.succeed(True)
         self.auth.check_host_in_room.return_value = defer.succeed(True)

+        retry_timings_res = {
+            "destination": "",
+            "retry_last_ts": 0,
+            "retry_interval": 0,
+        }
+        self.datastore.get_destination_retry_timings.return_value = (
+            defer.succeed(retry_timings_res)
+        )
+
         def have_events(event_ids):
             return defer.succeed({})
         self.datastore.have_events.side_effect = have_events
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 04eba4289e..9b0e606918 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -194,8 +194,13 @@ class MockedDatastorePresenceTestCase(PresenceTestCase):
         return datastore

     def setUp_datastore_federation_mocks(self, datastore):
+        retry_timings_res = {
+            "destination": "",
+            "retry_last_ts": 0,
+            "retry_interval": 0,
+        }
         datastore.get_destination_retry_timings.return_value = (
-            defer.succeed(DestinationsTable.EntryType("", 0, 0))
+            defer.succeed(retry_timings_res)
         )

         def get_received_txn_response(*args):
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py
index bf34b7ccbd..2d76b23564 100644
--- a/tests/handlers/test_typing.py
+++ b/tests/handlers/test_typing.py
@@ -96,8 +96,13 @@ class TypingNotificationsTestCase(unittest.TestCase):
         self.event_source = hs.get_event_sources().sources["typing"]

         self.datastore = hs.get_datastore()
+        retry_timings_res = {
+            "destination": "",
+            "retry_last_ts": 0,
+            "retry_interval": 0,
+        }
         self.datastore.get_destination_retry_timings.return_value = (
-            defer.succeed(DestinationsTable.EntryType("", 0, 0))
+            defer.succeed(retry_timings_res)
         )

         def get_received_txn_response(*args):
"send_message", ]), @@ -132,9 +126,6 @@ class EventStreamPermissionsTestCase(RestTestCase): hs.get_handlers().federation_handler = Mock() - hs.get_clock().time_msec.return_value = 1000000 - hs.get_clock().time.return_value = 1000 - synapse.rest.client.v1.register.register_servlets(hs, self.mock_resource) synapse.rest.client.v1.events.register_servlets(hs, self.mock_resource) synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource) diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index ca5b92ec85..2ad55c8462 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -16,22 +16,18 @@ from tests import unittest from twisted.internet import defer from synapse.appservice import ApplicationService -from synapse.server import HomeServer from synapse.storage.appservice import ApplicationServiceStore -from mock import Mock -from tests.utils import SQLiteMemoryDbPool, MockClock +from tests.utils import setup_test_homeserver class ApplicationServiceStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - db_pool = SQLiteMemoryDbPool() - yield db_pool.prepare() - hs = HomeServer( - "test", db_pool=db_pool, clock=MockClock(), config=Mock() - ) + hs = yield setup_test_homeserver() + db_pool = hs.get_db_pool() + self.as_token = "token1" db_pool.runQuery( "INSERT INTO application_services(token) VALUES(?)", diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py index 7f5845cf0c..5c17d30148 100644 --- a/tests/storage/test_base.py +++ b/tests/storage/test_base.py @@ -24,6 +24,7 @@ from collections import OrderedDict from synapse.server import HomeServer from synapse.storage._base import SQLBaseStore +from synapse.storage.engines import create_engine class SQLBaseStoreTestCase(unittest.TestCase): @@ -40,7 +41,12 @@ class SQLBaseStoreTestCase(unittest.TestCase): config = Mock() config.event_cache_size = 1 - hs = HomeServer("test", db_pool=self.db_pool, config=config) + hs = HomeServer( + "test", + db_pool=self.db_pool, + config=config, + database_engine=create_engine("sqlite3"), + ) self.datastore = SQLBaseStore(hs) @@ -86,8 +92,7 @@ class SQLBaseStoreTestCase(unittest.TestCase): self.assertEquals("Value", value) self.mock_txn.execute.assert_called_with( - "SELECT retcol FROM tablename WHERE keycol = ? " - "ORDER BY rowid asc", + "SELECT retcol FROM tablename WHERE keycol = ?", ["TheKey"] ) @@ -104,8 +109,7 @@ class SQLBaseStoreTestCase(unittest.TestCase): self.assertEquals({"colA": 1, "colB": 2, "colC": 3}, ret) self.mock_txn.execute.assert_called_with( - "SELECT colA, colB, colC FROM tablename WHERE keycol = ? " - "ORDER BY rowid asc", + "SELECT colA, colB, colC FROM tablename WHERE keycol = ?", ["TheKey"] ) @@ -139,8 +143,7 @@ class SQLBaseStoreTestCase(unittest.TestCase): self.assertEquals([{"colA": 1}, {"colA": 2}, {"colA": 3}], ret) self.mock_txn.execute.assert_called_with( - "SELECT colA FROM tablename WHERE keycol = ? " - "ORDER BY rowid asc", + "SELECT colA FROM tablename WHERE keycol = ?", ["A set"] ) @@ -189,8 +192,7 @@ class SQLBaseStoreTestCase(unittest.TestCase): self.assertEquals({"columname": "Old Value"}, ret) self.mock_txn.execute.assert_has_calls([ - call('SELECT columname FROM tablename WHERE keycol = ? ' - 'ORDER BY rowid asc', + call('SELECT columname FROM tablename WHERE keycol = ?', ['TheKey']), call("UPDATE tablename SET columname = ? 
WHERE keycol = ?", ["New Value", "TheKey"]) diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py index e0b81f2b57..78f6004204 100644 --- a/tests/storage/test_registration.py +++ b/tests/storage/test_registration.py @@ -42,28 +42,38 @@ class RegistrationStoreTestCase(unittest.TestCase): self.assertEquals( # TODO(paul): Surely this field should be 'user_id', not 'name' # Additionally surely it shouldn't come in a 1-element list - [{"name": self.user_id, "password_hash": self.pwhash}], + {"name": self.user_id, "password_hash": self.pwhash}, (yield self.store.get_user_by_id(self.user_id)) ) - self.assertEquals( - {"admin": 0, - "device_id": None, - "name": self.user_id, - "token_id": 1}, - (yield self.store.get_user_by_token(self.tokens[0])) + result = yield self.store.get_user_by_token(self.tokens[1]) + + self.assertDictContainsSubset( + { + "admin": 0, + "device_id": None, + "name": self.user_id, + }, + result ) + self.assertTrue("token_id" in result) + @defer.inlineCallbacks def test_add_tokens(self): yield self.store.register(self.user_id, self.tokens[0], self.pwhash) yield self.store.add_access_token_to_user(self.user_id, self.tokens[1]) - self.assertEquals( - {"admin": 0, - "device_id": None, - "name": self.user_id, - "token_id": 2}, - (yield self.store.get_user_by_token(self.tokens[1])) + result = yield self.store.get_user_by_token(self.tokens[1]) + + self.assertDictContainsSubset( + { + "admin": 0, + "device_id": None, + "name": self.user_id, + }, + result ) + self.assertTrue("token_id" in result) + diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index 811fea544b..785953cc89 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -119,7 +119,7 @@ class RoomMemberStoreTestCase(unittest.TestCase): yield self.inject_room_member(self.room, self.u_alice, Membership.JOIN) self.assertEquals( - ["test"], + {"test"}, (yield self.store.get_joined_hosts_for_room(self.room.to_string())) ) @@ -127,7 +127,7 @@ class RoomMemberStoreTestCase(unittest.TestCase): yield self.inject_room_member(self.room, self.u_bob, Membership.JOIN) self.assertEquals( - ["test"], + {"test"}, (yield self.store.get_joined_hosts_for_room(self.room.to_string())) ) @@ -136,9 +136,9 @@ class RoomMemberStoreTestCase(unittest.TestCase): self.assertEquals( {"test", "elsewhere"}, - set((yield + (yield self.store.get_joined_hosts_for_room(self.room.to_string()) - )) + ) ) # Should still have both hosts @@ -146,15 +146,15 @@ class RoomMemberStoreTestCase(unittest.TestCase): self.assertEquals( {"test", "elsewhere"}, - set((yield + (yield self.store.get_joined_hosts_for_room(self.room.to_string()) - )) + ) ) # Should have only one host after other leaves yield self.inject_room_member(self.room, self.u_charlie, Membership.LEAVE) self.assertEquals( - ["test"], + {"test"}, (yield self.store.get_joined_hosts_for_room(self.room.to_string())) ) diff --git a/tests/utils.py b/tests/utils.py index 81e82a80df..cc038fecf1 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -17,6 +17,7 @@ from synapse.http.server import HttpServer from synapse.api.errors import cs_error, CodeMessageException, StoreError from synapse.api.constants import EventTypes from synapse.storage import prepare_database +from synapse.storage.engines import create_engine from synapse.server import HomeServer from synapse.util.logcontext import LoggingContext @@ -44,18 +45,23 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs): config.event_cache_size = 
1 config.disable_registration = False + if "clock" not in kargs: + kargs["clock"] = MockClock() + if datastore is None: db_pool = SQLiteMemoryDbPool() yield db_pool.prepare() hs = HomeServer( name, db_pool=db_pool, config=config, version_string="Synapse/tests", + database_engine=create_engine("sqlite3"), **kargs ) else: hs = HomeServer( name, db_pool=None, datastore=datastore, config=config, version_string="Synapse/tests", + database_engine=create_engine("sqlite3"), **kargs ) @@ -227,7 +233,10 @@ class SQLiteMemoryDbPool(ConnectionPool, object): ) def prepare(self): - return self.runWithConnection(prepare_database) + engine = create_engine("sqlite3") + return self.runWithConnection( + lambda conn: prepare_database(conn, engine) + ) class MemoryDataStore(object): -- cgit 1.4.1 From e24c32e6f3c0d7c75529d05762645fe613085bec Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 1 Apr 2015 15:09:51 +0100 Subject: Fix SQLite support --- synapse/app/homeserver.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index b185b2f569..1ab6effd5f 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -365,7 +365,9 @@ def setup(config_options): else: db_config = { "name": "sqlite3", - "database": config.database_path, + "args": { + "database": config.database_path, + }, } db_config = { @@ -381,10 +383,12 @@ def setup(config_options): "use_unicode": True, }) elif name == "sqlite3": + def open_fun(conn): + prepare_database(conn, database_engine) db_config.setdefault("args", {}).update({ "cp_min": 1, "cp_max": 1, - "cp_openfun": prepare_database, + "cp_openfun": open_fun, }) else: raise RuntimeError("Unsupported database type '%s'" % (name,)) @@ -413,7 +417,12 @@ def setup(config_options): logger.info("Preparing database: %s...", db_name) try: - db_conn = database_engine.module.connect(**db_config.get("args", {})) + db_conn = database_engine.module.connect( + **{ + k: v for k, v in db_config.get("args", {}).items() + if not k.startswith("cp_") + } + ) if name == "sqlite3": prepare_sqlite3_database(db_conn) -- cgit 1.4.1 From 779f7b0f443bfab351702161763b62b7bb6a27d8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 2 Apr 2015 10:06:22 +0100 Subject: Fix unicode support --- synapse/app/homeserver.py | 12 ++-- synapse/storage/_base.py | 22 +++---- synapse/storage/engines/__init__.py | 4 +- synapse/storage/engines/maria.py | 12 ++++ synapse/storage/engines/sqlite3.py | 9 +++ synapse/storage/keys.py | 14 +++-- synapse/storage/schema/delta/12/v12.sql | 18 +++--- synapse/storage/schema/delta/13/v13.sql | 10 ++-- synapse/storage/schema/delta/14/v14.sql | 4 +- .../storage/schema/full_schemas/11/event_edges.sql | 34 +++++------ .../schema/full_schemas/11/event_signatures.sql | 20 +++---- synapse/storage/schema/full_schemas/11/im.sql | 68 +++++++++++----------- synapse/storage/schema/full_schemas/11/keys.sql | 12 ++-- .../schema/full_schemas/11/media_repository.sql | 34 +++++------ .../storage/schema/full_schemas/11/presence.sql | 16 +++-- .../storage/schema/full_schemas/11/profiles.sql | 6 +- .../storage/schema/full_schemas/11/redactions.sql | 4 +- .../schema/full_schemas/11/room_aliases.sql | 8 +-- synapse/storage/schema/full_schemas/11/state.sql | 16 ++--- .../schema/full_schemas/11/transactions.sql | 16 ++--- synapse/storage/schema/full_schemas/11/users.sql | 20 +++---- synapse/storage/schema/schema_version.sql | 2 +- 22 files changed, 193 insertions(+), 168 deletions(-) 
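This commit routes all driver-specific behaviour through a small per-engine object. As a minimal sketch of the contract implied by the diffs below (method names are taken from the MariaEngine and Sqlite3Engine classes in this series; the "?" to "%s" placeholder rewrite in convert_param_style is an assumption based on those drivers' format-style paramstyle, not code shown in these patches):

class DatabaseEngine(object):
    # Sketch only: the surface that _base.py and homeserver.py call into.
    def __init__(self, database_module):
        self.module = database_module  # e.g. sqlite3 or mysql.connector

    def convert_param_style(self, sql):
        # Assumed rewrite of "?" placeholders for format-style drivers;
        # a sqlite3 engine can simply return sql unchanged.
        return sql.replace("?", "%s")

    def encode_parameter(self, param):
        # MariaEngine coerces buffer values to str; sqlite3 passes through.
        return param

    def on_new_connection(self, db_conn):
        # Per-connection hook; setup() wires this up as cp_openfun.
        pass

    def prepare_database(self, db_conn):
        # Create or upgrade the schema on this connection.
        pass

Everything engine-specific (paramstyle, unicode handling, schema preparation) then lives behind this object, which is what lets setup() treat sqlite3 and MySQL uniformly.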
(limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 1ab6effd5f..9d7a58080d 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -18,7 +18,7 @@ import sys sys.dont_write_bytecode = True from synapse.storage import ( - prepare_database, prepare_sqlite3_database, UpgradeDatabaseException, + prepare_database, UpgradeDatabaseException, ) from synapse.storage.engines import create_engine @@ -381,19 +381,18 @@ def setup(config_options): "sql_mode": "TRADITIONAL", "charset": "utf8mb4", "use_unicode": True, + "collation": "utf8mb4_general_ci", }) elif name == "sqlite3": - def open_fun(conn): - prepare_database(conn, database_engine) db_config.setdefault("args", {}).update({ "cp_min": 1, "cp_max": 1, - "cp_openfun": open_fun, }) else: raise RuntimeError("Unsupported database type '%s'" % (name,)) database_engine = create_engine(name) + db_config["args"]["cp_openfun"] = database_engine.on_new_connection hs = SynapseHomeServer( config.server_name, @@ -424,10 +423,7 @@ def setup(config_options): } ) - if name == "sqlite3": - prepare_sqlite3_database(db_conn) - - prepare_database(db_conn, database_engine) + database_engine.prepare_database(db_conn) db_conn.commit() except UpgradeDatabaseException: diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 047d100f46..de4f661973 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -142,19 +142,23 @@ class LoggingTransaction(object): sql = self.database_engine.convert_param_style(sql) - try: - if args and args[0]: - values = args[0] + if args and args[0]: + args = list(args) + args[0] = [ + self.database_engine.encode_parameter(a) for a in args[0] + ] + try: sql_logger.debug( - "[SQL values] {%s} " + ", ".join(("<%r>",) * len(values)), + "[SQL values] {%s} " + ", ".join(("<%r>",) * len(args[0])), self.name, - *values + *args[0] ) - except: - # Don't let logging failures stop SQL from working - pass + except: + # Don't let logging failures stop SQL from working + pass start = time.time() * 1000 + try: return self.txn.execute( sql, *args, **kwargs @@ -761,8 +765,6 @@ class SQLBaseStore(object): return None internal_metadata, js, redacted, rejected_reason = res - js = js.decode("utf8") - internal_metadata = internal_metadata.decode("utf8") start_time = update_counter("select_event", start_time) diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py index 709b6f88ac..29702be923 100644 --- a/synapse/storage/engines/__init__.py +++ b/synapse/storage/engines/__init__.py @@ -16,6 +16,8 @@ from .maria import MariaEngine from .sqlite3 import Sqlite3Engine +import importlib + SUPPORTED_MODULE = { "sqlite3": Sqlite3Engine, @@ -27,7 +29,7 @@ def create_engine(name): engine_class = SUPPORTED_MODULE.get(name, None) if engine_class: - module = __import__(name) + module = importlib.import_module(name) return engine_class(module) raise RuntimeError( diff --git a/synapse/storage/engines/maria.py b/synapse/storage/engines/maria.py index df47763647..7fcb706a60 100644 --- a/synapse/storage/engines/maria.py +++ b/synapse/storage/engines/maria.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from synapse.storage import prepare_database import types @@ -28,3 +29,14 @@ class MariaEngine(object): if isinstance(param, types.BufferType): return str(param) return param + + def on_new_connection(self, db_conn): + pass + + def prepare_database(self, db_conn): + cur = db_conn.cursor() + cur.execute( + "ALTER DATABASE CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci" + ) + db_conn.commit() + prepare_database(db_conn, self) diff --git a/synapse/storage/engines/sqlite3.py b/synapse/storage/engines/sqlite3.py index 639cdea41d..e802b5d5fd 100644 --- a/synapse/storage/engines/sqlite3.py +++ b/synapse/storage/engines/sqlite3.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from synapse.storage import prepare_database, prepare_sqlite3_database + class Sqlite3Engine(object): def __init__(self, database_module): @@ -23,3 +25,10 @@ class Sqlite3Engine(object): def encode_parameter(self, param): return param + + def on_new_connection(self, db_conn): + self.prepare_database(db_conn) + + def prepare_database(self, db_conn): + prepare_sqlite3_database(db_conn) + prepare_database(db_conn, self) diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index 25fef79434..d3b9b38664 100644 --- a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -57,15 +57,18 @@ class KeyStore(SQLBaseStore): OpenSSL.crypto.FILETYPE_ASN1, tls_certificate ) fingerprint = hashlib.sha256(tls_certificate_bytes).hexdigest() - return self._simple_insert( + return self._simple_upsert( table="server_tls_certificates", - values={ + keyvalues={ "server_name": server_name, "fingerprint": fingerprint, + }, + values={ "from_server": from_server, "ts_added_ms": time_now_ms, "tls_certificate": buffer(tls_certificate_bytes), }, + desc="store_server_certificate", ) @defer.inlineCallbacks @@ -106,13 +109,16 @@ class KeyStore(SQLBaseStore): ts_now_ms (int): The time now in milliseconds verification_key (VerifyKey): The NACL verify key. 
""" - return self._simple_insert( + return self._simple_upsert( table="server_signature_keys", - values={ + keyvalues={ "server_name": server_name, "key_id": "%s:%s" % (verify_key.alg, verify_key.version), + }, + values={ "from_server": from_server, "ts_added_ms": time_now_ms, "verify_key": buffer(verify_key.encode()), }, + desc="store_server_verify_key", ) diff --git a/synapse/storage/schema/delta/12/v12.sql b/synapse/storage/schema/delta/12/v12.sql index 90ac474859..717d289f78 100644 --- a/synapse/storage/schema/delta/12/v12.sql +++ b/synapse/storage/schema/delta/12/v12.sql @@ -14,16 +14,16 @@ */ CREATE TABLE IF NOT EXISTS rejections( - event_id VARCHAR(255) NOT NULL, - reason VARCHAR(255) NOT NULL, - last_check VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + reason VARCHAR(150) NOT NULL, + last_check VARCHAR(150) NOT NULL, UNIQUE (event_id) ) ; -- Push notification endpoints that users have configured CREATE TABLE IF NOT EXISTS pushers ( id BIGINT PRIMARY KEY, - user_name VARCHAR(255) NOT NULL, + user_name VARCHAR(150) NOT NULL, profile_tag VARCHAR(32) NOT NULL, kind VARCHAR(8) NOT NULL, app_id VARCHAR(64) NOT NULL, @@ -41,19 +41,19 @@ CREATE TABLE IF NOT EXISTS pushers ( CREATE TABLE IF NOT EXISTS push_rules ( id BIGINT PRIMARY KEY, - user_name VARCHAR(255) NOT NULL, - rule_id VARCHAR(255) NOT NULL, + user_name VARCHAR(150) NOT NULL, + rule_id VARCHAR(150) NOT NULL, priority_class TINYINT NOT NULL, priority INTEGER NOT NULL DEFAULT 0, - conditions VARCHAR(255) NOT NULL, - actions VARCHAR(255) NOT NULL, + conditions VARCHAR(150) NOT NULL, + actions VARCHAR(150) NOT NULL, UNIQUE(user_name, rule_id) ) ; CREATE INDEX IF NOT EXISTS push_rules_user_name on push_rules (user_name); CREATE TABLE IF NOT EXISTS user_filters( - user_id VARCHAR(255), + user_id VARCHAR(150), filter_id BIGINT, filter_json BLOB ) ; diff --git a/synapse/storage/schema/delta/13/v13.sql b/synapse/storage/schema/delta/13/v13.sql index 4953b6323c..f5275a59b6 100644 --- a/synapse/storage/schema/delta/13/v13.sql +++ b/synapse/storage/schema/delta/13/v13.sql @@ -15,10 +15,10 @@ CREATE TABLE IF NOT EXISTS application_services( id BIGINT PRIMARY KEY, - url VARCHAR(255), - token VARCHAR(255), - hs_token VARCHAR(255), - sender VARCHAR(255), + url VARCHAR(150), + token VARCHAR(150), + hs_token VARCHAR(150), + sender VARCHAR(150), UNIQUE(token) ) ; @@ -26,6 +26,6 @@ CREATE TABLE IF NOT EXISTS application_services_regex( id BIGINT PRIMARY KEY, as_id BIGINT NOT NULL, namespace INTEGER, /* enum[room_id|room_alias|user_id] */ - regex VARCHAR(255), + regex VARCHAR(150), FOREIGN KEY(as_id) REFERENCES application_services(id) ) ; diff --git a/synapse/storage/schema/delta/14/v14.sql b/synapse/storage/schema/delta/14/v14.sql index 3bda073c94..1d582cc626 100644 --- a/synapse/storage/schema/delta/14/v14.sql +++ b/synapse/storage/schema/delta/14/v14.sql @@ -1,7 +1,7 @@ CREATE TABLE IF NOT EXISTS push_rules_enable ( id BIGINT PRIMARY KEY, - user_name VARCHAR(255) NOT NULL, - rule_id VARCHAR(255) NOT NULL, + user_name VARCHAR(150) NOT NULL, + rule_id VARCHAR(150) NOT NULL, enabled TINYINT, UNIQUE(user_name, rule_id) ) ; diff --git a/synapse/storage/schema/full_schemas/11/event_edges.sql b/synapse/storage/schema/full_schemas/11/event_edges.sql index 336cd563df..124c9a9bdf 100644 --- a/synapse/storage/schema/full_schemas/11/event_edges.sql +++ b/synapse/storage/schema/full_schemas/11/event_edges.sql @@ -14,8 +14,8 @@ */ CREATE TABLE IF NOT EXISTS event_forward_extremities( - event_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT 
NULL, + event_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, UNIQUE (event_id, room_id) ) ; @@ -24,8 +24,8 @@ CREATE INDEX IF NOT EXISTS ev_extrem_id ON event_forward_extremities(event_id); CREATE TABLE IF NOT EXISTS event_backward_extremities( - event_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, UNIQUE (event_id, room_id) ) ; @@ -34,9 +34,9 @@ CREATE INDEX IF NOT EXISTS ev_b_extrem_id ON event_backward_extremities(event_id CREATE TABLE IF NOT EXISTS event_edges( - event_id VARCHAR(255) NOT NULL, - prev_event_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + prev_event_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, is_state BOOL NOT NULL, UNIQUE (event_id, prev_event_id, room_id, is_state) ) ; @@ -46,7 +46,7 @@ CREATE INDEX IF NOT EXISTS ev_edges_prev_id ON event_edges(prev_event_id); CREATE TABLE IF NOT EXISTS room_depth( - room_id VARCHAR(255) NOT NULL, + room_id VARCHAR(150) NOT NULL, min_depth INTEGER NOT NULL, UNIQUE (room_id) ) ; @@ -55,8 +55,8 @@ CREATE INDEX IF NOT EXISTS room_depth_room ON room_depth(room_id); create TABLE IF NOT EXISTS event_destinations( - event_id VARCHAR(255) NOT NULL, - destination VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + destination VARCHAR(150) NOT NULL, delivered_ts BIGINT DEFAULT 0, -- or 0 if not delivered UNIQUE (event_id, destination) ) ; @@ -65,10 +65,10 @@ CREATE INDEX IF NOT EXISTS event_destinations_id ON event_destinations(event_id) CREATE TABLE IF NOT EXISTS state_forward_extremities( - event_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, - type VARCHAR(255) NOT NULL, - state_key VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, + type VARCHAR(150) NOT NULL, + state_key VARCHAR(150) NOT NULL, UNIQUE (event_id, room_id) ) ; @@ -79,9 +79,9 @@ CREATE INDEX IF NOT EXISTS st_extrem_id ON state_forward_extremities(event_id); CREATE TABLE IF NOT EXISTS event_auth( - event_id VARCHAR(255) NOT NULL, - auth_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + auth_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, UNIQUE (event_id, auth_id, room_id) ) ; diff --git a/synapse/storage/schema/full_schemas/11/event_signatures.sql b/synapse/storage/schema/full_schemas/11/event_signatures.sql index 11e611598b..30e3f71c5f 100644 --- a/synapse/storage/schema/full_schemas/11/event_signatures.sql +++ b/synapse/storage/schema/full_schemas/11/event_signatures.sql @@ -14,8 +14,8 @@ */ CREATE TABLE IF NOT EXISTS event_content_hashes ( - event_id VARCHAR(255), - algorithm VARCHAR(255), + event_id VARCHAR(150), + algorithm VARCHAR(150), hash BLOB, UNIQUE (event_id, algorithm) ) ; @@ -24,8 +24,8 @@ CREATE INDEX IF NOT EXISTS event_content_hashes_id ON event_content_hashes(event CREATE TABLE IF NOT EXISTS event_reference_hashes ( - event_id VARCHAR(255), - algorithm VARCHAR(255), + event_id VARCHAR(150), + algorithm VARCHAR(150), hash BLOB, UNIQUE (event_id, algorithm) ) ; @@ -34,9 +34,9 @@ CREATE INDEX IF NOT EXISTS event_reference_hashes_id ON event_reference_hashes(e CREATE TABLE IF NOT EXISTS event_signatures ( - event_id VARCHAR(255), - signature_name VARCHAR(255), - key_id VARCHAR(255), + event_id VARCHAR(150), + signature_name VARCHAR(150), + key_id VARCHAR(150), signature BLOB, UNIQUE (event_id, signature_name, key_id) ) ; @@ -45,9 +45,9 @@ CREATE INDEX IF NOT EXISTS event_signatures_id ON 
event_signatures(event_id); CREATE TABLE IF NOT EXISTS event_edge_hashes( - event_id VARCHAR(255), - prev_event_id VARCHAR(255), - algorithm VARCHAR(255), + event_id VARCHAR(150), + prev_event_id VARCHAR(150), + algorithm VARCHAR(150), hash BLOB, UNIQUE (event_id, prev_event_id, algorithm) ) ; diff --git a/synapse/storage/schema/full_schemas/11/im.sql b/synapse/storage/schema/full_schemas/11/im.sql index a0fb337629..7cb8f802e1 100644 --- a/synapse/storage/schema/full_schemas/11/im.sql +++ b/synapse/storage/schema/full_schemas/11/im.sql @@ -16,9 +16,9 @@ CREATE TABLE IF NOT EXISTS events( stream_ordering BIGINT PRIMARY KEY, topological_ordering BIGINT NOT NULL, - event_id VARCHAR(255) NOT NULL, - type VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + type VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, content BLOB NOT NULL, unrecognized_keys BLOB, processed BOOL NOT NULL, @@ -33,8 +33,8 @@ CREATE INDEX IF NOT EXISTS events_room_id ON events (room_id); CREATE TABLE IF NOT EXISTS event_json( - event_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, internal_metadata BLOB NOT NULL, json BLOB NOT NULL, UNIQUE (event_id) @@ -44,11 +44,11 @@ CREATE INDEX IF NOT EXISTS event_json_room_id ON event_json(room_id); CREATE TABLE IF NOT EXISTS state_events( - event_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, - type VARCHAR(255) NOT NULL, - state_key VARCHAR(255) NOT NULL, - prev_state VARCHAR(255), + event_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, + type VARCHAR(150) NOT NULL, + state_key VARCHAR(150) NOT NULL, + prev_state VARCHAR(150), UNIQUE (event_id) ) ; @@ -58,10 +58,10 @@ CREATE INDEX IF NOT EXISTS state_events_state_key ON state_events (state_key); CREATE TABLE IF NOT EXISTS current_state_events( - event_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, - type VARCHAR(255) NOT NULL, - state_key VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, + type VARCHAR(150) NOT NULL, + state_key VARCHAR(150) NOT NULL, UNIQUE (event_id), UNIQUE (room_id, type, state_key) ) ; @@ -71,11 +71,11 @@ CREATE INDEX IF NOT EXISTS current_state_events_type ON current_state_events (ty CREATE INDEX IF NOT EXISTS current_state_events_state_key ON current_state_events (state_key); CREATE TABLE IF NOT EXISTS room_memberships( - event_id VARCHAR(255) NOT NULL, - user_id VARCHAR(255) NOT NULL, - sender VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, - membership VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + user_id VARCHAR(150) NOT NULL, + sender VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, + membership VARCHAR(150) NOT NULL, UNIQUE (event_id) ) ; @@ -83,41 +83,41 @@ CREATE INDEX IF NOT EXISTS room_memberships_room_id ON room_memberships (room_id CREATE INDEX IF NOT EXISTS room_memberships_user_id ON room_memberships (user_id); CREATE TABLE IF NOT EXISTS feedback( - event_id VARCHAR(255) NOT NULL, - feedback_type VARCHAR(255), - target_event_id VARCHAR(255), - sender VARCHAR(255), - room_id VARCHAR(255), + event_id VARCHAR(150) NOT NULL, + feedback_type VARCHAR(150), + target_event_id VARCHAR(150), + sender VARCHAR(150), + room_id VARCHAR(150), UNIQUE (event_id) ) ; CREATE TABLE IF NOT EXISTS topics( - event_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, - topic VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, + topic VARCHAR(150) NOT 
NULL, UNIQUE (event_id) ) ; CREATE INDEX IF NOT EXISTS topics_room_id ON topics(room_id); CREATE TABLE IF NOT EXISTS room_names( - event_id VARCHAR(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, - name VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, + name VARCHAR(150) NOT NULL, UNIQUE (event_id) ); CREATE INDEX IF NOT EXISTS room_names_room_id ON room_names(room_id); CREATE TABLE IF NOT EXISTS rooms( - room_id VARCHAR(255) PRIMARY KEY NOT NULL, + room_id VARCHAR(150) PRIMARY KEY NOT NULL, is_public BOOL, - creator VARCHAR(255) + creator VARCHAR(150) ) ; CREATE TABLE IF NOT EXISTS room_hosts( - room_id VARCHAR(255) NOT NULL, - host VARCHAR(255) NOT NULL, + room_id VARCHAR(150) NOT NULL, + host VARCHAR(150) NOT NULL, UNIQUE (room_id, host) ) ; diff --git a/synapse/storage/schema/full_schemas/11/keys.sql b/synapse/storage/schema/full_schemas/11/keys.sql index a785cdb4c5..062ca53fef 100644 --- a/synapse/storage/schema/full_schemas/11/keys.sql +++ b/synapse/storage/schema/full_schemas/11/keys.sql @@ -13,18 +13,18 @@ * limitations under the License. */ CREATE TABLE IF NOT EXISTS server_tls_certificates( - server_name VARCHAR(255), -- Server name. - fingerprint VARCHAR(255), -- Certificate fingerprint. - from_server VARCHAR(255), -- Which key server the certificate was fetched from. + server_name VARCHAR(150), -- Server name. + fingerprint VARCHAR(150), -- Certificate fingerprint. + from_server VARCHAR(150), -- Which key server the certificate was fetched from. ts_added_ms BIGINT, -- When the certifcate was added. tls_certificate BLOB, -- DER encoded x509 certificate. UNIQUE (server_name, fingerprint) ) ; CREATE TABLE IF NOT EXISTS server_signature_keys( - server_name VARCHAR(255), -- Server name. - key_id VARCHAR(255), -- Key version. - from_server VARCHAR(255), -- Which key server the key was fetched form. + server_name VARCHAR(150), -- Server name. + key_id VARCHAR(150), -- Key version. + from_server VARCHAR(150), -- Which key server the key was fetched form. ts_added_ms BIGINT, -- When the key was added. verify_key BLOB, -- NACL verification key. UNIQUE (server_name, key_id) diff --git a/synapse/storage/schema/full_schemas/11/media_repository.sql b/synapse/storage/schema/full_schemas/11/media_repository.sql index 27fe297af6..c8c5f1d2f0 100644 --- a/synapse/storage/schema/full_schemas/11/media_repository.sql +++ b/synapse/storage/schema/full_schemas/11/media_repository.sql @@ -14,21 +14,21 @@ */ CREATE TABLE IF NOT EXISTS local_media_repository ( - media_id VARCHAR(255), -- The id used to refer to the media. - media_type VARCHAR(255), -- The MIME-type of the media. + media_id VARCHAR(150), -- The id used to refer to the media. + media_type VARCHAR(150), -- The MIME-type of the media. media_length INTEGER, -- Length of the media in bytes. created_ts BIGINT, -- When the content was uploaded in ms. - upload_name VARCHAR(255), -- The name the media was uploaded with. - user_id VARCHAR(255), -- The user who uploaded the file. + upload_name VARCHAR(150), -- The name the media was uploaded with. + user_id VARCHAR(150), -- The user who uploaded the file. UNIQUE (media_id) ) ; CREATE TABLE IF NOT EXISTS local_media_repository_thumbnails ( - media_id VARCHAR(255), -- The id used to refer to the media. + media_id VARCHAR(150), -- The id used to refer to the media. thumbnail_width INTEGER, -- The width of the thumbnail in pixels. thumbnail_height INTEGER, -- The height of the thumbnail in pixels. - thumbnail_type VARCHAR(255), -- The MIME-type of the thumbnail. 
- thumbnail_method VARCHAR(255), -- The method used to make the thumbnail. + thumbnail_type VARCHAR(150), -- The MIME-type of the thumbnail. + thumbnail_method VARCHAR(150), -- The method used to make the thumbnail. thumbnail_length INTEGER, -- The length of the thumbnail in bytes. UNIQUE ( media_id, thumbnail_width, thumbnail_height, thumbnail_type @@ -39,25 +39,25 @@ CREATE INDEX IF NOT EXISTS local_media_repository_thumbnails_media_id ON local_media_repository_thumbnails (media_id); CREATE TABLE IF NOT EXISTS remote_media_cache ( - media_origin VARCHAR(255), -- The remote HS the media came from. - media_id VARCHAR(255), -- The id used to refer to the media on that server. - media_type VARCHAR(255), -- The MIME-type of the media. + media_origin VARCHAR(150), -- The remote HS the media came from. + media_id VARCHAR(150), -- The id used to refer to the media on that server. + media_type VARCHAR(150), -- The MIME-type of the media. created_ts BIGINT, -- When the content was uploaded in ms. - upload_name VARCHAR(255), -- The name the media was uploaded with. + upload_name VARCHAR(150), -- The name the media was uploaded with. media_length INTEGER, -- Length of the media in bytes. - filesystem_id VARCHAR(255), -- The name used to store the media on disk. + filesystem_id VARCHAR(150), -- The name used to store the media on disk. UNIQUE (media_origin, media_id) ) ; CREATE TABLE IF NOT EXISTS remote_media_cache_thumbnails ( - media_origin VARCHAR(255), -- The remote HS the media came from. - media_id VARCHAR(255), -- The id used to refer to the media. + media_origin VARCHAR(150), -- The remote HS the media came from. + media_id VARCHAR(150), -- The id used to refer to the media. thumbnail_width INTEGER, -- The width of the thumbnail in pixels. thumbnail_height INTEGER, -- The height of the thumbnail in pixels. - thumbnail_method VARCHAR(255), -- The method used to make the thumbnail - thumbnail_type VARCHAR(255), -- The MIME-type of the thumbnail. + thumbnail_method VARCHAR(150), -- The method used to make the thumbnail + thumbnail_type VARCHAR(150), -- The MIME-type of the thumbnail. thumbnail_length INTEGER, -- The length of the thumbnail in bytes. - filesystem_id VARCHAR(255), -- The name used to store the media on disk. + filesystem_id VARCHAR(150), -- The name used to store the media on disk. UNIQUE ( media_origin, media_id, thumbnail_width, thumbnail_height, thumbnail_type diff --git a/synapse/storage/schema/full_schemas/11/presence.sql b/synapse/storage/schema/full_schemas/11/presence.sql index b48b110ae9..273e61281a 100644 --- a/synapse/storage/schema/full_schemas/11/presence.sql +++ b/synapse/storage/schema/full_schemas/11/presence.sql @@ -13,9 +13,9 @@ * limitations under the License. */ CREATE TABLE IF NOT EXISTS presence( - user_id VARCHAR(255) NOT NULL, + user_id VARCHAR(150) NOT NULL, state VARCHAR(20), - status_msg VARCHAR(255), + status_msg VARCHAR(150), mtime BIGINT, -- miliseconds since last state change UNIQUE(user_id) ) ; @@ -23,16 +23,14 @@ CREATE TABLE IF NOT EXISTS presence( -- For each of /my/ users which possibly-remote users are allowed to see their -- presence state CREATE TABLE IF NOT EXISTS presence_allow_inbound( - observed_user_id VARCHAR(255) NOT NULL, - observer_user_id VARCHAR(255), -- a UserID, - UNIQUE(observed_user_id) + observed_user_id VARCHAR(150) NOT NULL, + observer_user_id VARCHAR(150) -- a UserID, ) ; -- For each of /my/ users (watcher), which possibly-remote users are they -- watching? 
CREATE TABLE IF NOT EXISTS presence_list( - user_id VARCHAR(255) NOT NULL, - observed_user_id VARCHAR(255), -- a UserID, - accepted BOOLEAN, - UNIQUE(user_id) + user_id VARCHAR(150) NOT NULL, + observed_user_id VARCHAR(150), -- a UserID, + accepted BOOLEAN ) ; diff --git a/synapse/storage/schema/full_schemas/11/profiles.sql b/synapse/storage/schema/full_schemas/11/profiles.sql index 92da48f97e..023060a54c 100644 --- a/synapse/storage/schema/full_schemas/11/profiles.sql +++ b/synapse/storage/schema/full_schemas/11/profiles.sql @@ -13,8 +13,8 @@ * limitations under the License. */ CREATE TABLE IF NOT EXISTS profiles( - user_id VARCHAR(255) NOT NULL, - displayname VARCHAR(255), - avatar_url VARCHAR(255), + user_id VARCHAR(150) NOT NULL, + displayname VARCHAR(150), + avatar_url VARCHAR(150), UNIQUE(user_id) ) ; diff --git a/synapse/storage/schema/full_schemas/11/redactions.sql b/synapse/storage/schema/full_schemas/11/redactions.sql index 9b52a2012a..5c23188d62 100644 --- a/synapse/storage/schema/full_schemas/11/redactions.sql +++ b/synapse/storage/schema/full_schemas/11/redactions.sql @@ -13,8 +13,8 @@ * limitations under the License. */ CREATE TABLE IF NOT EXISTS redactions ( - event_id VARCHAR(255) NOT NULL, - redacts VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + redacts VARCHAR(150) NOT NULL, UNIQUE (event_id) ) ; diff --git a/synapse/storage/schema/full_schemas/11/room_aliases.sql b/synapse/storage/schema/full_schemas/11/room_aliases.sql index 220df87573..63fe0f5c64 100644 --- a/synapse/storage/schema/full_schemas/11/room_aliases.sql +++ b/synapse/storage/schema/full_schemas/11/room_aliases.sql @@ -14,12 +14,12 @@ */ CREATE TABLE IF NOT EXISTS room_aliases( - room_alias VARBINARY(255) NOT NULL, - room_id VARCHAR(255) NOT NULL, + room_alias VARCHAR(150) NOT NULL, + room_id VARCHAR(150) NOT NULL, UNIQUE (room_alias) ) ; CREATE TABLE IF NOT EXISTS room_alias_servers( - room_alias VARBINARY(255) NOT NULL, - server VARCHAR(255) NOT NULL + room_alias VARCHAR(150) NOT NULL, + server VARCHAR(150) NOT NULL ) ; diff --git a/synapse/storage/schema/full_schemas/11/state.sql b/synapse/storage/schema/full_schemas/11/state.sql index 40584a325f..acfb76439b 100644 --- a/synapse/storage/schema/full_schemas/11/state.sql +++ b/synapse/storage/schema/full_schemas/11/state.sql @@ -15,21 +15,21 @@ CREATE TABLE IF NOT EXISTS state_groups( id VARCHAR(20) PRIMARY KEY, - room_id VARCHAR(255) NOT NULL, - event_id VARCHAR(255) NOT NULL + room_id VARCHAR(150) NOT NULL, + event_id VARCHAR(150) NOT NULL ) ; CREATE TABLE IF NOT EXISTS state_groups_state( state_group VARCHAR(20) NOT NULL, - room_id VARCHAR(255) NOT NULL, - type VARCHAR(255) NOT NULL, - state_key VARCHAR(255) NOT NULL, - event_id VARCHAR(255) NOT NULL + room_id VARCHAR(150) NOT NULL, + type VARCHAR(150) NOT NULL, + state_key VARCHAR(150) NOT NULL, + event_id VARCHAR(150) NOT NULL ) ; CREATE TABLE IF NOT EXISTS event_to_state_groups( - event_id VARCHAR(255) NOT NULL, - state_group VARCHAR(255) NOT NULL, + event_id VARCHAR(150) NOT NULL, + state_group VARCHAR(150) NOT NULL, UNIQUE (event_id) ) ; diff --git a/synapse/storage/schema/full_schemas/11/transactions.sql b/synapse/storage/schema/full_schemas/11/transactions.sql index d33bdfb301..43541661ce 100644 --- a/synapse/storage/schema/full_schemas/11/transactions.sql +++ b/synapse/storage/schema/full_schemas/11/transactions.sql @@ -14,8 +14,8 @@ */ -- Stores what transaction ids we have received and what our response was CREATE TABLE IF NOT EXISTS received_transactions( - transaction_id 
VARCHAR(255), - origin VARCHAR(255), + transaction_id VARCHAR(150), + origin VARCHAR(150), ts BIGINT, response_code INTEGER, response_json BLOB, @@ -30,8 +30,8 @@ CREATE INDEX IF NOT EXISTS transactions_have_ref ON received_transactions(origin -- since referenced the transaction in another outgoing transaction CREATE TABLE IF NOT EXISTS sent_transactions( id BIGINT PRIMARY KEY, -- This is used to apply insertion ordering - transaction_id VARCHAR(255), - destination VARCHAR(255), + transaction_id VARCHAR(150), + destination VARCHAR(150), response_code INTEGER DEFAULT 0, response_json BLOB, ts BIGINT @@ -47,9 +47,9 @@ CREATE INDEX IF NOT EXISTS sent_transaction_sent ON sent_transactions(response_c -- For sent transactions only. CREATE TABLE IF NOT EXISTS transaction_id_to_pdu( transaction_id INTEGER, - destination VARCHAR(255), - pdu_id VARCHAR(255), - pdu_origin VARCHAR(255), + destination VARCHAR(150), + pdu_id VARCHAR(150), + pdu_origin VARCHAR(150), UNIQUE (transaction_id, destination) ) ; @@ -57,7 +57,7 @@ CREATE INDEX IF NOT EXISTS transaction_id_to_pdu_dest ON transaction_id_to_pdu(d -- To track destination health CREATE TABLE IF NOT EXISTS destinations( - destination VARCHAR(255) PRIMARY KEY, + destination VARCHAR(150) PRIMARY KEY, retry_last_ts BIGINT, retry_interval INTEGER ) ; diff --git a/synapse/storage/schema/full_schemas/11/users.sql b/synapse/storage/schema/full_schemas/11/users.sql index 28909f5805..0271de3526 100644 --- a/synapse/storage/schema/full_schemas/11/users.sql +++ b/synapse/storage/schema/full_schemas/11/users.sql @@ -13,8 +13,8 @@ * limitations under the License. */ CREATE TABLE IF NOT EXISTS users( - name VARCHAR(255), - password_hash VARCHAR(255), + name VARCHAR(150), + password_hash VARCHAR(150), creation_ts BIGINT, admin BOOL DEFAULT 0 NOT NULL, UNIQUE(name) @@ -22,19 +22,19 @@ CREATE TABLE IF NOT EXISTS users( CREATE TABLE IF NOT EXISTS access_tokens( id BIGINT PRIMARY KEY, - user_id VARCHAR(255) NOT NULL, - device_id VARCHAR(255), - token VARCHAR(255) NOT NULL, + user_id VARCHAR(150) NOT NULL, + device_id VARCHAR(150), + token VARCHAR(150) NOT NULL, last_used BIGINT, UNIQUE(token) ) ; CREATE TABLE IF NOT EXISTS user_ips ( - user VARCHAR(255) NOT NULL, - access_token VARCHAR(255) NOT NULL, - device_id VARCHAR(255), - ip VARCHAR(255) NOT NULL, - user_agent VARCHAR(255) NOT NULL, + user VARCHAR(150) NOT NULL, + access_token VARCHAR(150) NOT NULL, + device_id VARCHAR(150), + ip VARCHAR(150) NOT NULL, + user_agent VARCHAR(150) NOT NULL, last_seen BIGINT NOT NULL, UNIQUE (user, access_token, ip, user_agent) ) ; diff --git a/synapse/storage/schema/schema_version.sql b/synapse/storage/schema/schema_version.sql index 28762861e5..e7fa6fe569 100644 --- a/synapse/storage/schema/schema_version.sql +++ b/synapse/storage/schema/schema_version.sql @@ -22,7 +22,7 @@ CREATE TABLE IF NOT EXISTS schema_version( CREATE TABLE IF NOT EXISTS applied_schema_deltas( `version` INTEGER NOT NULL, - `file` VARCHAR(255) NOT NULL, + `file` VARCHAR(150) NOT NULL, UNIQUE(version, file) ); -- cgit 1.4.1 From d0e444a648ec0e4168673665ff37d09a1c2c2292 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 7 Apr 2015 12:04:02 +0100 Subject: Explicitly name the __main__ module logger --- synapse/app/homeserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 9d7a58080d..95190c1d75 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -64,7 +64,7 @@ import sqlite3 import 
yaml -logger = logging.getLogger(__name__) +logger = logging.getLogger("synapse.app.homeserver") class SynapseHomeServer(HomeServer): -- cgit 1.4.1 From 0bfa78b39bf95ee24e78166c9545f59b34fd1d81 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 7 Apr 2015 12:16:05 +0100 Subject: PEP8 --- synapse/app/homeserver.py | 7 ++----- synapse/storage/__init__.py | 4 ---- synapse/storage/registration.py | 1 - synapse/storage/roommember.py | 1 - synapse/storage/state.py | 1 - synapse/storage/transactions.py | 1 - 6 files changed, 2 insertions(+), 13 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 95190c1d75..f7c724c4b4 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -17,9 +17,7 @@ import sys sys.dont_write_bytecode = True -from synapse.storage import ( - prepare_database, UpgradeDatabaseException, -) +from synapse.storage import UpgradeDatabaseException from synapse.storage.engines import create_engine from synapse.server import HomeServer @@ -60,7 +58,6 @@ import os import re import resource import subprocess -import sqlite3 import yaml @@ -322,7 +319,7 @@ def change_resource_limit(soft_file_no): resource.setrlimit(resource.RLIMIT_NOFILE, (soft_file_no, hard)) logger.info("Set file limit to: %d", soft_file_no) - except ( ValueError, resource.error) as e: + except (ValueError, resource.error) as e: logger.warn("Failed to set file limit: %s", e) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index e16414d736..231ec8169f 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer - from .appservice import ApplicationServiceStore from .directory import DirectoryStore from .events import EventsStore @@ -43,8 +41,6 @@ import logging import os import re -import threading - logger = logging.getLogger(__name__) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index b62b4a3414..0f9d898e5d 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -105,7 +105,6 @@ class RegistrationStore(SQLBaseStore): defer.returnValue(user_info) - @cached() # TODO(paul): Currently there's no code to invalidate this cache. 
That # means if/when we ever add internal ways to invalidate access tokens or diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index df707f8124..8ea5756d61 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -40,7 +40,6 @@ class RoomMemberStore(SQLBaseStore): """ try: target_user_id = event.state_key - domain = UserID.from_string(target_user_id).domain except: logger.exception( "Failed to parse target_user_id=%s", target_user_id diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 3e55cb81bf..4994bacd6c 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -159,4 +159,3 @@ class StateStore(SQLBaseStore): def _make_group_id(clock): return str(int(clock.time_msec())) + random_string(5) - diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index 9594fe1f2b..4c3dc58662 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -375,4 +375,3 @@ class DestinationsTable(object): "retry_last_ts", "retry_interval", ] - -- cgit 1.4.1 From c5365dee564ffcf46300d1f979212e0ebaf98692 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 10 Apr 2015 18:46:33 +0100 Subject: Use case sensitive collations --- synapse/app/homeserver.py | 3 +-- synapse/storage/engines/maria.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index fbc9a43d66..a47e548d66 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -363,7 +363,6 @@ def setup(config_options): db_config = { k: v for k, v in db_config.items() - if not k.startswith("cp_") } name = db_config.get("name", None) @@ -372,7 +371,7 @@ def setup(config_options): "sql_mode": "TRADITIONAL", "charset": "utf8mb4", "use_unicode": True, - "collation": "utf8mb4_general_ci", + "collation": "utf8mb4_bin", }) elif name == "sqlite3": db_config.setdefault("args", {}).update({ diff --git a/synapse/storage/engines/maria.py b/synapse/storage/engines/maria.py index c934376930..90165f6849 100644 --- a/synapse/storage/engines/maria.py +++ b/synapse/storage/engines/maria.py @@ -36,7 +36,7 @@ class MariaEngine(object): def prepare_database(self, db_conn): cur = db_conn.cursor() cur.execute( - "ALTER DATABASE CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci" + "ALTER DATABASE CHARACTER SET utf8mb4 COLLATE utf8mb4_bin" ) db_conn.commit() prepare_database(db_conn, self) -- cgit 1.4.1 From 3cbc286d062977e192a10525040a5e713d4c97e0 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Tue, 14 Apr 2015 13:28:11 +0100 Subject: Move server key api into rest/key/v1 --- synapse/app/homeserver.py | 2 +- synapse/http/server_key_resource.py | 93 ------------------------------ synapse/rest/key/__init__.py | 14 +++++ synapse/rest/key/v1/__init__.py | 14 +++++ synapse/rest/key/v1/server_key_resource.py | 93 ++++++++++++++++++++++++++++++ 5 files changed, 122 insertions(+), 94 deletions(-) delete mode 100644 synapse/http/server_key_resource.py create mode 100644 synapse/rest/key/__init__.py create mode 100644 synapse/rest/key/v1/__init__.py create mode 100644 synapse/rest/key/v1/server_key_resource.py (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index afb46d2e23..27e53a9e56 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -34,7 +34,7 @@ from twisted.web.server import Site from synapse.http.server import JsonResource, RootRedirect from 
synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.rest.media.v1.media_repository import MediaRepositoryResource -from synapse.http.server_key_resource import LocalKey +from synapse.rest.key.v1.server_key_resource import LocalKey from synapse.http.matrixfederationclient import MatrixFederationHttpClient from synapse.api.urls import ( CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX, diff --git a/synapse/http/server_key_resource.py b/synapse/http/server_key_resource.py deleted file mode 100644 index 71e9a51f5c..0000000000 --- a/synapse/http/server_key_resource.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from twisted.web.resource import Resource -from synapse.http.server import respond_with_json_bytes -from syutil.crypto.jsonsign import sign_json -from syutil.base64util import encode_base64 -from syutil.jsonutil import encode_canonical_json -from OpenSSL import crypto -import logging - - -logger = logging.getLogger(__name__) - - -class LocalKey(Resource): - """HTTP resource containing encoding the TLS X.509 certificate and NACL - signature verification keys for this server:: - - GET /key HTTP/1.1 - - HTTP/1.1 200 OK - Content-Type: application/json - { - "server_name": "this.server.example.com" - "verify_keys": { - "algorithm:version": # base64 encoded NACL verification key. - }, - "tls_certificate": # base64 ASN.1 DER encoded X.509 tls cert. - "signatures": { - "this.server.example.com": { - "algorithm:version": # NACL signature for this server. 
- } - } - } - """ - - def __init__(self, hs): - self.hs = hs - self.version_string = hs.version_string - self.response_body = encode_canonical_json( - self.response_json_object(hs.config) - ) - Resource.__init__(self) - - @staticmethod - def response_json_object(server_config): - verify_keys = {} - for key in server_config.signing_key: - verify_key_bytes = key.verify_key.encode() - key_id = "%s:%s" % (key.alg, key.version) - verify_keys[key_id] = encode_base64(verify_key_bytes) - - x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, - server_config.tls_certificate - ) - json_object = { - u"server_name": server_config.server_name, - u"verify_keys": verify_keys, - u"tls_certificate": encode_base64(x509_certificate_bytes) - } - for key in server_config.signing_key: - json_object = sign_json( - json_object, - server_config.server_name, - key, - ) - - return json_object - - def render_GET(self, request): - return respond_with_json_bytes( - request, 200, self.response_body, - version_string=self.version_string - ) - - def getChild(self, name, request): - if name == '': - return self diff --git a/synapse/rest/key/__init__.py b/synapse/rest/key/__init__.py new file mode 100644 index 0000000000..1a84d94cd9 --- /dev/null +++ b/synapse/rest/key/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/synapse/rest/key/v1/__init__.py b/synapse/rest/key/v1/__init__.py new file mode 100644 index 0000000000..1a84d94cd9 --- /dev/null +++ b/synapse/rest/key/v1/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/synapse/rest/key/v1/server_key_resource.py b/synapse/rest/key/v1/server_key_resource.py new file mode 100644 index 0000000000..71e9a51f5c --- /dev/null +++ b/synapse/rest/key/v1/server_key_resource.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2014, 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +from twisted.web.resource import Resource +from synapse.http.server import respond_with_json_bytes +from syutil.crypto.jsonsign import sign_json +from syutil.base64util import encode_base64 +from syutil.jsonutil import encode_canonical_json +from OpenSSL import crypto +import logging + + +logger = logging.getLogger(__name__) + + +class LocalKey(Resource): + """HTTP resource containing encoding the TLS X.509 certificate and NACL + signature verification keys for this server:: + + GET /key HTTP/1.1 + + HTTP/1.1 200 OK + Content-Type: application/json + { + "server_name": "this.server.example.com" + "verify_keys": { + "algorithm:version": # base64 encoded NACL verification key. + }, + "tls_certificate": # base64 ASN.1 DER encoded X.509 tls cert. + "signatures": { + "this.server.example.com": { + "algorithm:version": # NACL signature for this server. + } + } + } + """ + + def __init__(self, hs): + self.hs = hs + self.version_string = hs.version_string + self.response_body = encode_canonical_json( + self.response_json_object(hs.config) + ) + Resource.__init__(self) + + @staticmethod + def response_json_object(server_config): + verify_keys = {} + for key in server_config.signing_key: + verify_key_bytes = key.verify_key.encode() + key_id = "%s:%s" % (key.alg, key.version) + verify_keys[key_id] = encode_base64(verify_key_bytes) + + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, + server_config.tls_certificate + ) + json_object = { + u"server_name": server_config.server_name, + u"verify_keys": verify_keys, + u"tls_certificate": encode_base64(x509_certificate_bytes) + } + for key in server_config.signing_key: + json_object = sign_json( + json_object, + server_config.server_name, + key, + ) + + return json_object + + def render_GET(self, request): + return respond_with_json_bytes( + request, 200, self.response_body, + version_string=self.version_string + ) + + def getChild(self, name, request): + if name == '': + return self -- cgit 1.4.1 From 58d83399663a080c123d2f112b4f4d84accbc638 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 14 Apr 2015 13:53:20 +0100 Subject: Add support for postgres instead of mysql. Change sql accourdingly. blob + varbinary -> bytea. No support for UNSIGNED or CREATE INDEX IF NOT EXISTS. 
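With the setup() changes below, db_config can now take one of three shapes. A minimal sketch of each, reconstructed from the handling in this series; the psycopg2 and mysql.connector connection values (database, user, host) are ordinary connect() keyword arguments chosen for illustration, not values appearing anywhere in these patches:

# SQLite: the default built when no database_config file is supplied.
db_config = {
    "name": "sqlite3",
    "args": {"database": "/path/to/homeserver.db"},
}

# MySQL/MariaDB via mysql.connector; setup() layers sql_mode, charset
# and collation on top of whatever args the config file provides.
db_config = {
    "name": "mysql.connector",
    "args": {"database": "synapse"},  # assumed connector kwargs
}

# PostgreSQL, newly accepted by this commit; args are passed straight
# to database_engine.module.connect(**args), minus any cp_* pool keys.
db_config = {
    "name": "psycopg2",
    "args": {
        "database": "synapse",       # assumed psycopg2 kwargs
        "user": "synapse_user",
        "host": "localhost",
    },
}

Keeping the pool options (cp_min, cp_max, cp_openfun) inside the same args dict and filtering them out before the raw connect keeps a single config surface for both the connection pool and the driver.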
--- synapse/app/homeserver.py | 2 + synapse/storage/__init__.py | 15 +++----- synapse/storage/_base.py | 2 +- synapse/storage/engines/__init__.py | 2 + synapse/storage/event_federation.py | 10 ++--- synapse/storage/events.py | 4 +- synapse/storage/room.py | 34 ++++++++++++----- .../full_schemas/16/application_services.sql | 10 ++--- .../storage/schema/full_schemas/16/event_edges.sql | 26 ++++++------- .../schema/full_schemas/16/event_signatures.sql | 16 ++++---- synapse/storage/schema/full_schemas/16/im.sql | 44 +++++++++++----------- synapse/storage/schema/full_schemas/16/keys.sql | 8 ++-- .../schema/full_schemas/16/media_repository.sql | 8 ++-- .../storage/schema/full_schemas/16/presence.sql | 4 +- synapse/storage/schema/full_schemas/16/push.sql | 30 +++++++-------- .../storage/schema/full_schemas/16/redactions.sql | 4 +- synapse/storage/schema/full_schemas/16/state.sql | 10 ++--- .../schema/full_schemas/16/transactions.sql | 24 ++++++------ synapse/storage/schema/full_schemas/16/users.sql | 10 ++--- synapse/storage/schema/schema_version.sql | 14 +++---- synapse/storage/stream.py | 16 ++++---- 21 files changed, 153 insertions(+), 140 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index a47e548d66..033011e1d7 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -373,6 +373,8 @@ def setup(config_options): "use_unicode": True, "collation": "utf8mb4_bin", }) + elif name == "psycopg2": + pass elif name == "sqlite3": db_config.setdefault("args", {}).update({ "cp_min": 1, diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index b46cafd25e..272420194d 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -236,7 +236,7 @@ def _setup_new_database(cur, database_engine): cur.execute( database_engine.convert_param_style( - "REPLACE INTO schema_version (version, upgraded)" + "INSERT INTO schema_version (version, upgraded)" " VALUES (?,?)" ), (max_current_ver, False,) @@ -432,14 +432,11 @@ def executescript(txn, schema_path): def _get_or_create_schema_state(txn, database_engine): - try: - # Bluntly try creating the schema_version tables. - schema_path = os.path.join( - dir_path, "schema", "schema_version.sql", - ) - executescript(txn, schema_path) - except: - pass + # Bluntly try creating the schema_version tables. + schema_path = os.path.join( + dir_path, "schema", "schema_version.sql", + ) + executescript(txn, schema_path) txn.execute("SELECT version, upgraded FROM schema_version") row = txn.fetchone() diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index e30514cd5e..fa5199104a 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -330,7 +330,7 @@ class SQLBaseStore(object): continue raise except Exception as e: - logger.debug("[TXN FAIL] {%s}", name, e) + logger.debug("[TXN FAIL] {%s} %s", name, e) raise finally: end = time.time() * 1000 diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py index 29702be923..548d4e1b42 100644 --- a/synapse/storage/engines/__init__.py +++ b/synapse/storage/engines/__init__.py @@ -14,6 +14,7 @@ # limitations under the License. 
from .maria import MariaEngine +from .postgres import PostgresEngine from .sqlite3 import Sqlite3Engine import importlib @@ -22,6 +23,7 @@ import importlib SUPPORTED_MODULE = { "sqlite3": Sqlite3Engine, "mysql.connector": MariaEngine, + "psycopg2": PostgresEngine, } diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 79ad5ddc9c..54a3c9d805 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -153,7 +153,7 @@ class EventFederationStore(SQLBaseStore): results = self._get_prev_events_and_state( txn, event_id, - is_state=1, + is_state=True, ) return [(e_id, h, ) for e_id, h, _ in results] @@ -164,7 +164,7 @@ class EventFederationStore(SQLBaseStore): } if is_state is not None: - keyvalues["is_state"] = is_state + keyvalues["is_state"] = bool(is_state) res = self._simple_select_list_txn( txn, @@ -259,7 +259,7 @@ class EventFederationStore(SQLBaseStore): "event_id": event_id, "prev_event_id": e_id, "room_id": room_id, - "is_state": 0, + "is_state": False, }, ) @@ -397,7 +397,7 @@ class EventFederationStore(SQLBaseStore): query = ( "SELECT prev_event_id FROM event_edges " - "WHERE room_id = ? AND event_id = ? AND is_state = 0 " + "WHERE room_id = ? AND event_id = ? AND is_state = ? " "LIMIT ?" ) @@ -406,7 +406,7 @@ class EventFederationStore(SQLBaseStore): for event_id in front: txn.execute( query, - (room_id, event_id, limit - len(event_results)) + (room_id, event_id, False, limit - len(event_results)) ) for e_id, in txn.fetchall(): diff --git a/synapse/storage/events.py b/synapse/storage/events.py index a2e87c27ce..9fe2effb4b 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -188,12 +188,12 @@ class EventsStore(SQLBaseStore): ) sql = ( - "UPDATE events SET outlier = 0" + "UPDATE events SET outlier = ?" " WHERE event_id = ?" ) txn.execute( sql, - (event.event_id,) + (False, event.event_id,) ) return diff --git a/synapse/storage/room.py b/synapse/storage/room.py index a1a76280fe..48ebb33057 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -105,14 +105,12 @@ class RoomStore(SQLBaseStore): # We use non printing ascii character US (\x1F) as a separator sql = ( - "SELECT r.room_id, n.name, t.topic, " - "group_concat(a.room_alias, '\x1F') " - "FROM rooms AS r " - "LEFT JOIN (%(topic)s) AS t ON t.room_id = r.room_id " - "LEFT JOIN (%(name)s) AS n ON n.room_id = r.room_id " - "INNER JOIN room_aliases AS a ON a.room_id = r.room_id " - "WHERE r.is_public = ? " - "GROUP BY r.room_id " + "SELECT r.room_id, max(n.name), max(t.topic)" + " FROM rooms AS r" + " LEFT JOIN (%(topic)s) AS t ON t.room_id = r.room_id" + " LEFT JOIN (%(name)s) AS n ON n.room_id = r.room_id" + " WHERE r.is_public = ?" + " GROUP BY r.room_id" ) % { "topic": topic_subquery, "name": name_subquery, @@ -120,7 +118,22 @@ class RoomStore(SQLBaseStore): txn.execute(sql, (is_public,)) - return txn.fetchall() + rows = txn.fetchall() + + for i, row in enumerate(rows): + room_id = row[0] + aliases = self._simple_select_onecol_txn( + txn, + table="room_aliases", + keyvalues={ + "room_id": room_id + }, + retcol="room_alias", + ) + + rows[i] = list(row) + [aliases] + + return rows rows = yield self.runInteraction( "get_rooms", f @@ -131,9 +144,10 @@ class RoomStore(SQLBaseStore): "room_id": r[0], "name": r[1], "topic": r[2], - "aliases": r[3].split("\x1F"), + "aliases": r[3], } for r in rows + if r[3] # We only return rooms that have at least one alias. 
] defer.returnValue(ret) diff --git a/synapse/storage/schema/full_schemas/16/application_services.sql b/synapse/storage/schema/full_schemas/16/application_services.sql index bc709df92d..f08c5bcf76 100644 --- a/synapse/storage/schema/full_schemas/16/application_services.sql +++ b/synapse/storage/schema/full_schemas/16/application_services.sql @@ -14,7 +14,7 @@ */ CREATE TABLE IF NOT EXISTS application_services( - id BIGINT UNSIGNED PRIMARY KEY, + id BIGINT PRIMARY KEY, url VARCHAR(150), token VARCHAR(150), hs_token VARCHAR(150), @@ -23,8 +23,8 @@ CREATE TABLE IF NOT EXISTS application_services( ); CREATE TABLE IF NOT EXISTS application_services_regex( - id BIGINT UNSIGNED PRIMARY KEY, - as_id BIGINT UNSIGNED NOT NULL, + id BIGINT PRIMARY KEY, + as_id BIGINT NOT NULL, namespace INTEGER, /* enum[room_id|room_alias|user_id] */ regex VARCHAR(150), FOREIGN KEY(as_id) REFERENCES application_services(id) @@ -39,10 +39,10 @@ CREATE TABLE IF NOT EXISTS application_services_state( CREATE TABLE IF NOT EXISTS application_services_txns( as_id VARCHAR(150) NOT NULL, txn_id INTEGER NOT NULL, - event_ids LONGBLOB NOT NULL, + event_ids bytea NOT NULL, UNIQUE(as_id, txn_id) ); -CREATE INDEX IF NOT EXISTS application_services_txns_id ON application_services_txns ( +CREATE INDEX application_services_txns_id ON application_services_txns ( as_id ); diff --git a/synapse/storage/schema/full_schemas/16/event_edges.sql b/synapse/storage/schema/full_schemas/16/event_edges.sql index bdb1109094..05d0874f0d 100644 --- a/synapse/storage/schema/full_schemas/16/event_edges.sql +++ b/synapse/storage/schema/full_schemas/16/event_edges.sql @@ -19,8 +19,8 @@ CREATE TABLE IF NOT EXISTS event_forward_extremities( UNIQUE (event_id, room_id) ); -CREATE INDEX IF NOT EXISTS ev_extrem_room ON event_forward_extremities(room_id); -CREATE INDEX IF NOT EXISTS ev_extrem_id ON event_forward_extremities(event_id); +CREATE INDEX ev_extrem_room ON event_forward_extremities(room_id); +CREATE INDEX ev_extrem_id ON event_forward_extremities(event_id); CREATE TABLE IF NOT EXISTS event_backward_extremities( @@ -29,8 +29,8 @@ CREATE TABLE IF NOT EXISTS event_backward_extremities( UNIQUE (event_id, room_id) ); -CREATE INDEX IF NOT EXISTS ev_b_extrem_room ON event_backward_extremities(room_id); -CREATE INDEX IF NOT EXISTS ev_b_extrem_id ON event_backward_extremities(event_id); +CREATE INDEX ev_b_extrem_room ON event_backward_extremities(room_id); +CREATE INDEX ev_b_extrem_id ON event_backward_extremities(event_id); CREATE TABLE IF NOT EXISTS event_edges( @@ -41,8 +41,8 @@ CREATE TABLE IF NOT EXISTS event_edges( UNIQUE (event_id, prev_event_id, room_id, is_state) ); -CREATE INDEX IF NOT EXISTS ev_edges_id ON event_edges(event_id); -CREATE INDEX IF NOT EXISTS ev_edges_prev_id ON event_edges(prev_event_id); +CREATE INDEX ev_edges_id ON event_edges(event_id); +CREATE INDEX ev_edges_prev_id ON event_edges(prev_event_id); CREATE TABLE IF NOT EXISTS room_depth( @@ -51,17 +51,17 @@ CREATE TABLE IF NOT EXISTS room_depth( UNIQUE (room_id) ); -CREATE INDEX IF NOT EXISTS room_depth_room ON room_depth(room_id); +CREATE INDEX room_depth_room ON room_depth(room_id); create TABLE IF NOT EXISTS event_destinations( event_id VARCHAR(150) NOT NULL, destination VARCHAR(150) NOT NULL, - delivered_ts BIGINT UNSIGNED DEFAULT 0, -- or 0 if not delivered + delivered_ts BIGINT DEFAULT 0, -- or 0 if not delivered UNIQUE (event_id, destination) ); -CREATE INDEX IF NOT EXISTS event_destinations_id ON event_destinations(event_id); +CREATE INDEX event_destinations_id ON 
event_destinations(event_id); CREATE TABLE IF NOT EXISTS state_forward_extremities( @@ -72,10 +72,10 @@ CREATE TABLE IF NOT EXISTS state_forward_extremities( UNIQUE (event_id, room_id) ); -CREATE INDEX IF NOT EXISTS st_extrem_keys ON state_forward_extremities( +CREATE INDEX st_extrem_keys ON state_forward_extremities( room_id, type, state_key ); -CREATE INDEX IF NOT EXISTS st_extrem_id ON state_forward_extremities(event_id); +CREATE INDEX st_extrem_id ON state_forward_extremities(event_id); CREATE TABLE IF NOT EXISTS event_auth( @@ -85,5 +85,5 @@ CREATE TABLE IF NOT EXISTS event_auth( UNIQUE (event_id, auth_id, room_id) ); -CREATE INDEX IF NOT EXISTS evauth_edges_id ON event_auth(event_id); -CREATE INDEX IF NOT EXISTS evauth_edges_auth_id ON event_auth(auth_id); +CREATE INDEX evauth_edges_id ON event_auth(event_id); +CREATE INDEX evauth_edges_auth_id ON event_auth(auth_id); diff --git a/synapse/storage/schema/full_schemas/16/event_signatures.sql b/synapse/storage/schema/full_schemas/16/event_signatures.sql index 09886f607c..4291827368 100644 --- a/synapse/storage/schema/full_schemas/16/event_signatures.sql +++ b/synapse/storage/schema/full_schemas/16/event_signatures.sql @@ -16,40 +16,40 @@ CREATE TABLE IF NOT EXISTS event_content_hashes ( event_id VARCHAR(150), algorithm VARCHAR(150), - hash LONGBLOB, + hash bytea, UNIQUE (event_id, algorithm) ); -CREATE INDEX IF NOT EXISTS event_content_hashes_id ON event_content_hashes(event_id); +CREATE INDEX event_content_hashes_id ON event_content_hashes(event_id); CREATE TABLE IF NOT EXISTS event_reference_hashes ( event_id VARCHAR(150), algorithm VARCHAR(150), - hash LONGBLOB, + hash bytea, UNIQUE (event_id, algorithm) ); -CREATE INDEX IF NOT EXISTS event_reference_hashes_id ON event_reference_hashes(event_id); +CREATE INDEX event_reference_hashes_id ON event_reference_hashes(event_id); CREATE TABLE IF NOT EXISTS event_signatures ( event_id VARCHAR(150), signature_name VARCHAR(150), key_id VARCHAR(150), - signature LONGBLOB, + signature bytea, UNIQUE (event_id, signature_name, key_id) ); -CREATE INDEX IF NOT EXISTS event_signatures_id ON event_signatures(event_id); +CREATE INDEX event_signatures_id ON event_signatures(event_id); CREATE TABLE IF NOT EXISTS event_edge_hashes( event_id VARCHAR(150), prev_event_id VARCHAR(150), algorithm VARCHAR(150), - hash LONGBLOB, + hash bytea, UNIQUE (event_id, prev_event_id, algorithm) ); -CREATE INDEX IF NOT EXISTS event_edge_hashes_id ON event_edge_hashes(event_id); +CREATE INDEX event_edge_hashes_id ON event_edge_hashes(event_id); diff --git a/synapse/storage/schema/full_schemas/16/im.sql b/synapse/storage/schema/full_schemas/16/im.sql index 19f0f34143..a661fc160c 100644 --- a/synapse/storage/schema/full_schemas/16/im.sql +++ b/synapse/storage/schema/full_schemas/16/im.sql @@ -14,33 +14,33 @@ */ CREATE TABLE IF NOT EXISTS events( - stream_ordering BIGINT UNSIGNED PRIMARY KEY, - topological_ordering BIGINT UNSIGNED NOT NULL, + stream_ordering BIGINT PRIMARY KEY, + topological_ordering BIGINT NOT NULL, event_id VARCHAR(150) NOT NULL, type VARCHAR(150) NOT NULL, room_id VARCHAR(150) NOT NULL, - content LONGBLOB NOT NULL, - unrecognized_keys LONGBLOB, + content bytea NOT NULL, + unrecognized_keys bytea, processed BOOL NOT NULL, outlier BOOL NOT NULL, - depth BIGINT UNSIGNED DEFAULT 0 NOT NULL, + depth BIGINT DEFAULT 0 NOT NULL, UNIQUE (event_id) ); -CREATE INDEX IF NOT EXISTS events_stream_ordering ON events (stream_ordering); -CREATE INDEX IF NOT EXISTS events_topological_ordering ON events 
(topological_ordering); -CREATE INDEX IF NOT EXISTS events_room_id ON events (room_id); +CREATE INDEX events_stream_ordering ON events (stream_ordering); +CREATE INDEX events_topological_ordering ON events (topological_ordering); +CREATE INDEX events_room_id ON events (room_id); CREATE TABLE IF NOT EXISTS event_json( event_id VARCHAR(150) NOT NULL, room_id VARCHAR(150) NOT NULL, - internal_metadata LONGBLOB NOT NULL, - json LONGBLOB NOT NULL, + internal_metadata bytea NOT NULL, + json bytea NOT NULL, UNIQUE (event_id) ); -CREATE INDEX IF NOT EXISTS event_json_room_id ON event_json(room_id); +CREATE INDEX event_json_room_id ON event_json(room_id); CREATE TABLE IF NOT EXISTS state_events( @@ -52,9 +52,9 @@ CREATE TABLE IF NOT EXISTS state_events( UNIQUE (event_id) ); -CREATE INDEX IF NOT EXISTS state_events_room_id ON state_events (room_id); -CREATE INDEX IF NOT EXISTS state_events_type ON state_events (type); -CREATE INDEX IF NOT EXISTS state_events_state_key ON state_events (state_key); +CREATE INDEX state_events_room_id ON state_events (room_id); +CREATE INDEX state_events_type ON state_events (type); +CREATE INDEX state_events_state_key ON state_events (state_key); CREATE TABLE IF NOT EXISTS current_state_events( @@ -66,9 +66,9 @@ CREATE TABLE IF NOT EXISTS current_state_events( UNIQUE (room_id, type, state_key) ); -CREATE INDEX IF NOT EXISTS current_state_events_room_id ON current_state_events (room_id); -CREATE INDEX IF NOT EXISTS current_state_events_type ON current_state_events (type); -CREATE INDEX IF NOT EXISTS current_state_events_state_key ON current_state_events (state_key); +CREATE INDEX current_state_events_room_id ON current_state_events (room_id); +CREATE INDEX current_state_events_type ON current_state_events (type); +CREATE INDEX current_state_events_state_key ON current_state_events (state_key); CREATE TABLE IF NOT EXISTS room_memberships( event_id VARCHAR(150) NOT NULL, @@ -79,8 +79,8 @@ CREATE TABLE IF NOT EXISTS room_memberships( UNIQUE (event_id) ); -CREATE INDEX IF NOT EXISTS room_memberships_room_id ON room_memberships (room_id); -CREATE INDEX IF NOT EXISTS room_memberships_user_id ON room_memberships (user_id); +CREATE INDEX room_memberships_room_id ON room_memberships (room_id); +CREATE INDEX room_memberships_user_id ON room_memberships (user_id); CREATE TABLE IF NOT EXISTS feedback( event_id VARCHAR(150) NOT NULL, @@ -98,7 +98,7 @@ CREATE TABLE IF NOT EXISTS topics( UNIQUE (event_id) ); -CREATE INDEX IF NOT EXISTS topics_room_id ON topics(room_id); +CREATE INDEX topics_room_id ON topics(room_id); CREATE TABLE IF NOT EXISTS room_names( event_id VARCHAR(150) NOT NULL, @@ -107,7 +107,7 @@ CREATE TABLE IF NOT EXISTS room_names( UNIQUE (event_id) ); -CREATE INDEX IF NOT EXISTS room_names_room_id ON room_names(room_id); +CREATE INDEX room_names_room_id ON room_names(room_id); CREATE TABLE IF NOT EXISTS rooms( room_id VARCHAR(150) PRIMARY KEY NOT NULL, @@ -121,4 +121,4 @@ CREATE TABLE IF NOT EXISTS room_hosts( UNIQUE (room_id, host) ); -CREATE INDEX IF NOT EXISTS room_hosts_room_id ON room_hosts (room_id); +CREATE INDEX room_hosts_room_id ON room_hosts (room_id); diff --git a/synapse/storage/schema/full_schemas/16/keys.sql b/synapse/storage/schema/full_schemas/16/keys.sql index 35f141c288..459b510427 100644 --- a/synapse/storage/schema/full_schemas/16/keys.sql +++ b/synapse/storage/schema/full_schemas/16/keys.sql @@ -16,8 +16,8 @@ CREATE TABLE IF NOT EXISTS server_tls_certificates( server_name VARCHAR(150), -- Server name. 
    fingerprint VARCHAR(150), -- Certificate fingerprint.
    from_server VARCHAR(150), -- Which key server the certificate was fetched from.
-    ts_added_ms BIGINT UNSIGNED, -- When the certifcate was added.
-    tls_certificate LONGBLOB, -- DER encoded x509 certificate.
+    ts_added_ms BIGINT, -- When the certificate was added.
+    tls_certificate bytea, -- DER encoded x509 certificate.
    UNIQUE (server_name, fingerprint)
);

@@ -25,7 +25,7 @@ CREATE TABLE IF NOT EXISTS server_signature_keys(
    server_name VARCHAR(150), -- Server name.
    key_id VARCHAR(150), -- Key version.
    from_server VARCHAR(150), -- Which key server the key was fetched from.
-    ts_added_ms BIGINT UNSIGNED, -- When the key was added.
-    verify_key LONGBLOB, -- NACL verification key.
+    ts_added_ms BIGINT, -- When the key was added.
+    verify_key bytea, -- NACL verification key.
    UNIQUE (server_name, key_id)
);
diff --git a/synapse/storage/schema/full_schemas/16/media_repository.sql b/synapse/storage/schema/full_schemas/16/media_repository.sql
index 014bce4aeb..0e819fca38 100644
--- a/synapse/storage/schema/full_schemas/16/media_repository.sql
+++ b/synapse/storage/schema/full_schemas/16/media_repository.sql
@@ -17,7 +17,7 @@ CREATE TABLE IF NOT EXISTS local_media_repository (
    media_id VARCHAR(150), -- The id used to refer to the media.
    media_type VARCHAR(150), -- The MIME-type of the media.
    media_length INTEGER, -- Length of the media in bytes.
-    created_ts BIGINT UNSIGNED, -- When the content was uploaded in ms.
+    created_ts BIGINT, -- When the content was uploaded in ms.
    upload_name VARCHAR(150), -- The name the media was uploaded with.
    user_id VARCHAR(150), -- The user who uploaded the file.
    UNIQUE (media_id)
@@ -35,14 +35,14 @@ CREATE TABLE IF NOT EXISTS local_media_repository_thumbnails (
    )
);

-CREATE INDEX IF NOT EXISTS local_media_repository_thumbnails_media_id
+CREATE INDEX local_media_repository_thumbnails_media_id
    ON local_media_repository_thumbnails (media_id);

CREATE TABLE IF NOT EXISTS remote_media_cache (
    media_origin VARCHAR(150), -- The remote HS the media came from.
    media_id VARCHAR(150), -- The id used to refer to the media on that server.
    media_type VARCHAR(150), -- The MIME-type of the media.
-    created_ts BIGINT UNSIGNED, -- When the content was uploaded in ms.
+    created_ts BIGINT, -- When the content was uploaded in ms.
    upload_name VARCHAR(150), -- The name the media was uploaded with.
    media_length INTEGER, -- Length of the media in bytes.
    filesystem_id VARCHAR(150), -- The name used to store the media on disk.
@@ -64,5 +64,5 @@ CREATE TABLE IF NOT EXISTS remote_media_cache_thumbnails (
    )
);

-CREATE INDEX IF NOT EXISTS remote_media_cache_thumbnails_media_id
+CREATE INDEX remote_media_cache_thumbnails_media_id
    ON remote_media_cache_thumbnails (media_id);
diff --git a/synapse/storage/schema/full_schemas/16/presence.sql b/synapse/storage/schema/full_schemas/16/presence.sql
index fbe5b0af6c..9c41be296e 100644
--- a/synapse/storage/schema/full_schemas/16/presence.sql
+++ b/synapse/storage/schema/full_schemas/16/presence.sql
@@ -16,7 +16,7 @@ CREATE TABLE IF NOT EXISTS presence(
    user_id VARCHAR(150) NOT NULL,
    state VARCHAR(20),
    status_msg VARCHAR(150),
-    mtime BIGINT UNSIGNED, -- miliseconds since last state change
+    mtime BIGINT, -- milliseconds since last state change
    UNIQUE (user_id)
);

@@ -37,4 +37,4 @@ CREATE TABLE IF NOT EXISTS presence_list(
    UNIQUE (user_id, observed_user_id)
);

-CREATE INDEX IF NOT EXISTS presence_list_user_id ON presence_list (user_id);
+CREATE INDEX presence_list_user_id ON presence_list (user_id);
diff --git a/synapse/storage/schema/full_schemas/16/push.sql b/synapse/storage/schema/full_schemas/16/push.sql
index 33300736f9..5c0c7bc201 100644
--- a/synapse/storage/schema/full_schemas/16/push.sql
+++ b/synapse/storage/schema/full_schemas/16/push.sql
@@ -22,52 +22,52 @@ CREATE TABLE IF NOT EXISTS rejections(

-- Push notification endpoints that users have configured
CREATE TABLE IF NOT EXISTS pushers (
-  id BIGINT UNSIGNED PRIMARY KEY,
+  id BIGINT PRIMARY KEY,
  user_name VARCHAR(150) NOT NULL,
  profile_tag VARCHAR(32) NOT NULL,
  kind VARCHAR(8) NOT NULL,
  app_id VARCHAR(64) NOT NULL,
  app_display_name VARCHAR(64) NOT NULL,
  device_display_name VARCHAR(128) NOT NULL,
-  pushkey VARBINARY(512) NOT NULL,
-  ts BIGINT UNSIGNED NOT NULL,
+  pushkey bytea NOT NULL,
+  ts BIGINT NOT NULL,
  lang VARCHAR(8),
-  data LONGBLOB,
+  data bytea,
  last_token TEXT,
-  last_success BIGINT UNSIGNED,
-  failing_since BIGINT UNSIGNED,
+  last_success BIGINT,
+  failing_since BIGINT,
  UNIQUE (app_id, pushkey)
);

CREATE TABLE IF NOT EXISTS push_rules (
-  id BIGINT UNSIGNED PRIMARY KEY,
+  id BIGINT PRIMARY KEY,
  user_name VARCHAR(150) NOT NULL,
  rule_id VARCHAR(150) NOT NULL,
-  priority_class TINYINT NOT NULL,
+  priority_class SMALLINT NOT NULL,
  priority INTEGER NOT NULL DEFAULT 0,
  conditions VARCHAR(150) NOT NULL,
  actions VARCHAR(150) NOT NULL,
  UNIQUE(user_name, rule_id)
);

-CREATE INDEX IF NOT EXISTS push_rules_user_name on push_rules (user_name);
+CREATE INDEX push_rules_user_name on push_rules (user_name);

CREATE TABLE IF NOT EXISTS user_filters(
  user_id VARCHAR(150),
-  filter_id BIGINT UNSIGNED,
-  filter_json LONGBLOB
+  filter_id BIGINT,
+  filter_json bytea
);

-CREATE INDEX IF NOT EXISTS user_filters_by_user_id_filter_id ON user_filters(
+CREATE INDEX user_filters_by_user_id_filter_id ON user_filters(
  user_id, filter_id
);

CREATE TABLE IF NOT EXISTS push_rules_enable (
-  id BIGINT UNSIGNED PRIMARY KEY,
+  id BIGINT PRIMARY KEY,
  user_name VARCHAR(150) NOT NULL,
  rule_id VARCHAR(150) NOT NULL,
-  enabled TINYINT,
+  enabled SMALLINT,
  UNIQUE(user_name, rule_id)
);

-CREATE INDEX IF NOT EXISTS push_rules_enable_user_name on push_rules_enable (user_name);
+CREATE INDEX push_rules_enable_user_name on push_rules_enable (user_name);
diff --git a/synapse/storage/schema/full_schemas/16/redactions.sql b/synapse/storage/schema/full_schemas/16/redactions.sql
index b81451eab4..492fd22033 100644
--- a/synapse/storage/schema/full_schemas/16/redactions.sql
+++ b/synapse/storage/schema/full_schemas/16/redactions.sql
@@ -18,5 +18,5 @@ CREATE TABLE IF NOT EXISTS redactions (
    UNIQUE (event_id)
);

-CREATE INDEX IF NOT EXISTS redactions_event_id ON redactions (event_id);
-CREATE INDEX IF NOT EXISTS redactions_redacts ON redactions (redacts);
+CREATE INDEX redactions_event_id ON redactions (event_id);
+CREATE INDEX redactions_redacts ON redactions (redacts);
diff --git a/synapse/storage/schema/full_schemas/16/state.sql b/synapse/storage/schema/full_schemas/16/state.sql
index 8c51610396..3c54595e64 100644
--- a/synapse/storage/schema/full_schemas/16/state.sql
+++ b/synapse/storage/schema/full_schemas/16/state.sql
@@ -14,7 +14,7 @@
 */

CREATE TABLE IF NOT EXISTS state_groups(
-    id BIGINT UNSIGNED PRIMARY KEY,
+    id BIGINT PRIMARY KEY,
    room_id VARCHAR(150) NOT NULL,
    event_id VARCHAR(150) NOT NULL
);
@@ -33,8 +33,8 @@ CREATE TABLE IF NOT EXISTS event_to_state_groups(
    UNIQUE (event_id)
);

-CREATE INDEX IF NOT EXISTS state_groups_id ON state_groups(id);
+CREATE INDEX state_groups_id ON state_groups(id);

-CREATE INDEX IF NOT EXISTS state_groups_state_id ON state_groups_state(state_group);
-CREATE INDEX IF NOT EXISTS state_groups_state_tuple ON state_groups_state(room_id, type, state_key);
-CREATE INDEX IF NOT EXISTS event_to_state_groups_id ON event_to_state_groups(event_id);
+CREATE INDEX state_groups_state_id ON state_groups_state(state_group);
+CREATE INDEX state_groups_state_tuple ON state_groups_state(room_id, type, state_key);
+CREATE INDEX event_to_state_groups_id ON event_to_state_groups(event_id);
diff --git a/synapse/storage/schema/full_schemas/16/transactions.sql b/synapse/storage/schema/full_schemas/16/transactions.sql
index f381e67603..bc64064936 100644
--- a/synapse/storage/schema/full_schemas/16/transactions.sql
+++ b/synapse/storage/schema/full_schemas/16/transactions.sql
@@ -16,32 +16,32 @@ CREATE TABLE IF NOT EXISTS received_transactions(
    transaction_id VARCHAR(150),
    origin VARCHAR(150),
-    ts BIGINT UNSIGNED,
+    ts BIGINT,
    response_code INTEGER,
-    response_json LONGBLOB,
-    has_been_referenced BOOL default 0, -- Whether thishas been referenced by a prev_tx
+    response_json bytea,
+    has_been_referenced smallint default 0, -- Whether this has been referenced by a prev_tx
    UNIQUE (transaction_id, origin)
);

-CREATE INDEX IF NOT EXISTS transactions_have_ref ON received_transactions(origin, has_been_referenced);-- WHERE has_been_referenced = 0;
+CREATE INDEX transactions_have_ref ON received_transactions(origin, has_been_referenced);-- WHERE has_been_referenced = 0;

-- Stores what transactions we've sent, what their response was (if we got one) and whether we have
-- since referenced the transaction in another outgoing transaction
CREATE TABLE IF NOT EXISTS sent_transactions(
-    id BIGINT UNSIGNED PRIMARY KEY, -- This is used to apply insertion ordering
+    id BIGINT PRIMARY KEY, -- This is used to apply insertion ordering
    transaction_id VARCHAR(150),
    destination VARCHAR(150),
    response_code INTEGER DEFAULT 0,
-    response_json LONGBLOB,
-    ts BIGINT UNSIGNED
+    response_json bytea,
+    ts BIGINT
);

-CREATE INDEX IF NOT EXISTS sent_transaction_dest ON sent_transactions(destination);
-CREATE INDEX IF NOT EXISTS sent_transaction_txn_id ON sent_transactions(transaction_id);
+CREATE INDEX sent_transaction_dest ON sent_transactions(destination);
+CREATE INDEX sent_transaction_txn_id ON sent_transactions(transaction_id);
-- So that we can do an efficient look up of all transactions that have yet to be successfully
-- sent.
-CREATE INDEX IF NOT EXISTS sent_transaction_sent ON sent_transactions(response_code); +CREATE INDEX sent_transaction_sent ON sent_transactions(response_code); -- For sent transactions only. @@ -53,11 +53,11 @@ CREATE TABLE IF NOT EXISTS transaction_id_to_pdu( UNIQUE (transaction_id, destination) ); -CREATE INDEX IF NOT EXISTS transaction_id_to_pdu_dest ON transaction_id_to_pdu(destination); +CREATE INDEX transaction_id_to_pdu_dest ON transaction_id_to_pdu(destination); -- To track destination health CREATE TABLE IF NOT EXISTS destinations( destination VARCHAR(150) PRIMARY KEY, - retry_last_ts BIGINT UNSIGNED, + retry_last_ts BIGINT, retry_interval INTEGER ); diff --git a/synapse/storage/schema/full_schemas/16/users.sql b/synapse/storage/schema/full_schemas/16/users.sql index d0011c04b4..267284d07d 100644 --- a/synapse/storage/schema/full_schemas/16/users.sql +++ b/synapse/storage/schema/full_schemas/16/users.sql @@ -15,17 +15,17 @@ CREATE TABLE IF NOT EXISTS users( name VARCHAR(150), password_hash VARCHAR(150), - creation_ts BIGINT UNSIGNED, - admin BOOL DEFAULT 0 NOT NULL, + creation_ts BIGINT, + admin SMALLINT DEFAULT 0 NOT NULL, UNIQUE(name) ); CREATE TABLE IF NOT EXISTS access_tokens( - id BIGINT UNSIGNED PRIMARY KEY, + id BIGINT PRIMARY KEY, user_id VARCHAR(150) NOT NULL, device_id VARCHAR(150), token VARCHAR(150) NOT NULL, - last_used BIGINT UNSIGNED, + last_used BIGINT, UNIQUE(token) ); @@ -35,7 +35,7 @@ CREATE TABLE IF NOT EXISTS user_ips ( device_id VARCHAR(150), ip VARCHAR(150) NOT NULL, user_agent VARCHAR(150) NOT NULL, - last_seen BIGINT UNSIGNED NOT NULL + last_seen BIGINT NOT NULL ); CREATE INDEX IF NOT EXISTS user_ips_user ON user_ips(user); diff --git a/synapse/storage/schema/schema_version.sql b/synapse/storage/schema/schema_version.sql index e7fa6fe569..d9494611e0 100644 --- a/synapse/storage/schema/schema_version.sql +++ b/synapse/storage/schema/schema_version.sql @@ -14,16 +14,14 @@ */ CREATE TABLE IF NOT EXISTS schema_version( - `Lock` CHAR(1) NOT NULL DEFAULT 'X' UNIQUE, -- Makes sure this table only has one row. - `version` INTEGER NOT NULL, - `upgraded` BOOL NOT NULL, -- Whether we reached this version from an upgrade or an initial schema. - CHECK (`Lock`='X') + Lock CHAR(1) NOT NULL DEFAULT 'X' UNIQUE, -- Makes sure this table only has one row. + version INTEGER NOT NULL, + upgraded BOOL NOT NULL, -- Whether we reached this version from an upgrade or an initial schema. + CHECK (Lock='X') ); CREATE TABLE IF NOT EXISTS applied_schema_deltas( - `version` INTEGER NOT NULL, - `file` VARCHAR(150) NOT NULL, + version INTEGER NOT NULL, + file VARCHAR(150) NOT NULL, UNIQUE(version, file) ); - -CREATE INDEX IF NOT EXISTS schema_deltas_ver ON applied_schema_deltas(version); diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index 57c2e4dfeb..df6de7cbcd 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -240,7 +240,7 @@ class StreamStore(SQLBaseStore): sql = ( "SELECT e.event_id, e.stream_ordering FROM events AS e WHERE " - "(e.outlier = 0 AND (room_id IN (%(current)s)) OR " + "(e.outlier = ? AND (room_id IN (%(current)s)) OR " "(event_id IN (%(invites)s))) " "AND e.stream_ordering > ? AND e.stream_ordering <= ? 
" "ORDER BY stream_ordering ASC LIMIT %(limit)d " @@ -251,7 +251,7 @@ class StreamStore(SQLBaseStore): } def f(txn): - txn.execute(sql, (user_id, user_id, from_id.stream, to_id.stream,)) + txn.execute(sql, (False, user_id, user_id, from_id.stream, to_id.stream,)) rows = self.cursor_to_dict(txn) @@ -283,7 +283,7 @@ class StreamStore(SQLBaseStore): # Tokens really represent positions between elements, but we use # the convention of pointing to the event before the gap. Hence # we have a bit of asymmetry when it comes to equalities. - args = [room_id] + args = [False, room_id] if direction == 'b': order = "DESC" bounds = _StreamToken.parse(from_key).upper_bound() @@ -307,7 +307,7 @@ class StreamStore(SQLBaseStore): sql = ( "SELECT * FROM events" - " WHERE outlier = 0 AND room_id = ? AND %(bounds)s" + " WHERE outlier = ? AND room_id = ? AND %(bounds)s" " ORDER BY topological_ordering %(order)s," " stream_ordering %(order)s %(limit)s" ) % { @@ -358,7 +358,7 @@ class StreamStore(SQLBaseStore): sql = ( "SELECT stream_ordering, topological_ordering, event_id" " FROM events" - " WHERE room_id = ? AND stream_ordering <= ? AND outlier = 0" + " WHERE room_id = ? AND stream_ordering <= ? AND outlier = ?" " ORDER BY topological_ordering DESC, stream_ordering DESC" " LIMIT ?" ) @@ -368,17 +368,17 @@ class StreamStore(SQLBaseStore): "SELECT stream_ordering, topological_ordering, event_id" " FROM events" " WHERE room_id = ? AND stream_ordering > ?" - " AND stream_ordering <= ? AND outlier = 0" + " AND stream_ordering <= ? AND outlier = ?" " ORDER BY topological_ordering DESC, stream_ordering DESC" " LIMIT ?" ) def get_recent_events_for_room_txn(txn): if from_token is None: - txn.execute(sql, (room_id, end_token.stream, limit,)) + txn.execute(sql, (room_id, end_token.stream, False, limit,)) else: txn.execute(sql, ( - room_id, from_token.stream, end_token.stream, limit + room_id, from_token.stream, end_token.stream, False, limit )) rows = self.cursor_to_dict(txn) -- cgit 1.4.1 From d488463fa38ac91d30c008fb9c595140f9785b42 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Tue, 14 Apr 2015 16:04:52 +0100 Subject: Add a version 2 of the key server api --- synapse/api/urls.py | 1 + synapse/app/homeserver.py | 8 +- synapse/config/server.py | 34 ++++++++- synapse/rest/key/v2/__init__.py | 19 +++++ synapse/rest/key/v2/local_key_resource.py | 118 ++++++++++++++++++++++++++++++ synapse/server.py | 1 + 6 files changed, 179 insertions(+), 2 deletions(-) create mode 100644 synapse/rest/key/v2/__init__.py create mode 100644 synapse/rest/key/v2/local_key_resource.py (limited to 'synapse/app') diff --git a/synapse/api/urls.py b/synapse/api/urls.py index 3d43674625..15c8558ea7 100644 --- a/synapse/api/urls.py +++ b/synapse/api/urls.py @@ -22,5 +22,6 @@ STATIC_PREFIX = "/_matrix/static" WEB_CLIENT_PREFIX = "/_matrix/client" CONTENT_REPO_PREFIX = "/_matrix/content" SERVER_KEY_PREFIX = "/_matrix/key/v1" +SERVER_KEY_V2_PREFIX = "/_matrix/key/v2" MEDIA_PREFIX = "/_matrix/media/v1" APP_SERVICE_PREFIX = "/_matrix/appservice/v1" diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 27e53a9e56..e681941612 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -35,10 +35,12 @@ from synapse.http.server import JsonResource, RootRedirect from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.rest.media.v1.media_repository import MediaRepositoryResource from synapse.rest.key.v1.server_key_resource import LocalKey +from synapse.rest.key.v2 import KeyApiV2Resource from 
synapse.http.matrixfederationclient import MatrixFederationHttpClient
 from synapse.api.urls import (
     CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
-    SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX
+    SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX,
+    SERVER_KEY_V2_PREFIX,
 )
 from synapse.config.homeserver import HomeServerConfig
 from synapse.crypto import context_factory
@@ -96,6 +98,9 @@ class SynapseHomeServer(HomeServer):
     def build_resource_for_server_key(self):
         return LocalKey(self)

+    def build_resource_for_server_key_v2(self):
+        return KeyApiV2Resource(self)
+
     def build_resource_for_metrics(self):
         if self.get_config().enable_metrics:
             return MetricsResource(self)
@@ -135,6 +140,7 @@ class SynapseHomeServer(HomeServer):
             (FEDERATION_PREFIX, self.get_resource_for_federation()),
             (CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()),
             (SERVER_KEY_PREFIX, self.get_resource_for_server_key()),
+            (SERVER_KEY_V2_PREFIX, self.get_resource_for_server_key_v2()),
             (MEDIA_PREFIX, self.get_resource_for_media_repository()),
             (STATIC_PREFIX, self.get_resource_for_static_content()),
         ]
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 58a828cc4c..050ab90403 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -23,6 +23,9 @@ class ServerConfig(Config):
         super(ServerConfig, self).__init__(args)
         self.server_name = args.server_name
         self.signing_key = self.read_signing_key(args.signing_key_path)
+        self.old_signing_keys = self.read_old_signing_keys(
+            args.old_signing_key_path
+        )
         self.bind_port = args.bind_port
         self.bind_host = args.bind_host
         self.unsecure_port = args.unsecure_port
@@ -31,6 +34,7 @@ class ServerConfig(Config):
         self.web_client = args.web_client
         self.manhole = args.manhole
         self.soft_file_limit = args.soft_file_limit
+        self.key_refresh_interval = args.key_refresh_interval

         if not args.content_addr:
             host = args.server_name
@@ -55,6 +59,14 @@ class ServerConfig(Config):
         )
         server_group.add_argument("--signing-key-path",
                                   help="The signing key to sign messages with")
+        server_group.add_argument("--old-signing-key-path",
+                                  help="The old signing keys")
+        server_group.add_argument("--key-refresh-interval", type=int,
+                                  default=24 * 60 * 60 * 1000,  # 1 Day
+                                  help="How long a key response is valid for."
+                                       " Used to set the expiry in /key/v2/."
+                                       " Controls how frequently servers will"
+                                       " query what keys are still valid")
         server_group.add_argument("-p", "--bind-port", metavar="PORT",
                                   type=int, help="https port to listen on",
                                   default=8448)
@@ -96,6 +108,19 @@ class ServerConfig(Config):
                 " Try running again with --generate-config"
             )

+    def read_old_signing_keys(self, old_signing_key_path):
+        old_signing_keys = self.read_file(
+            old_signing_key_path, "old_signing_key"
+        )
+        try:
+            return syutil.crypto.signing_key.read_old_signing_keys(
+                old_signing_keys.splitlines(True)
+            )
+        except Exception:
+            raise ConfigError(
+                "Error reading old signing keys."
+            )
+
     @classmethod
     def generate_config(cls, args, config_dir_path):
         super(ServerConfig, cls).generate_config(args, config_dir_path)
@@ -110,7 +135,7 @@ class ServerConfig(Config):
         with open(args.signing_key_path, "w") as signing_key_file:
             syutil.crypto.signing_key.write_signing_keys(
                 signing_key_file,
-                (syutil.crypto.signing_key.generate_singing_key("auto"),),
+                (syutil.crypto.signing_key.generate_signing_key("auto"),),
             )
         else:
             signing_keys = cls.read_file(args.signing_key_path, "signing_key")
@@ -126,3 +151,10 @@ class ServerConfig(Config):
                 signing_key_file, (key,),
             )
+
+        if not args.old_signing_key_path:
+            args.old_signing_key_path = base_key_name + ".old.signing.keys"
+
+        if not os.path.exists(args.old_signing_key_path):
+            with open(args.old_signing_key_path, "w") as old_signing_key_file:
+                pass
diff --git a/synapse/rest/key/v2/__init__.py b/synapse/rest/key/v2/__init__.py
new file mode 100644
index 0000000000..b79ed02590
--- /dev/null
+++ b/synapse/rest/key/v2/__init__.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .local_key_resource import LocalKey
+
+class KeyApiV2Resource(LocalKey):
+    pass
diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py
new file mode 100644
index 0000000000..5c77f308df
--- /dev/null
+++ b/synapse/rest/key/v2/local_key_resource.py
@@ -0,0 +1,118 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014, 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from twisted.web.resource import Resource
+from synapse.http.server import respond_with_json_bytes
+from syutil.crypto.jsonsign import sign_json
+from syutil.base64util import encode_base64
+from syutil.jsonutil import encode_canonical_json
+from OpenSSL import crypto
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class LocalKey(Resource):
+    """HTTP resource containing the encoding of the TLS X.509 certificate and
+    NACL signature verification keys for this server::
+
+        GET /_matrix/key/v2/ HTTP/1.1
+
+        HTTP/1.1 200 OK
+        Content-Type: application/json
+        {
+            "expires": # integer posix timestamp when this result expires.
+            "server_name": "this.server.example.com"
+            "verify_keys": {
+                "algorithm:version": # base64 encoded NACL verification key.
+            },
+            "old_verify_keys": {
+                "algorithm:version": {
+                    "expired": # integer posix timestamp when the key expired.
+                    "key": # base64 encoded NACL verification key.
+                }
+            }
+            "tls_certificate": # base64 ASN.1 DER encoded X.509 tls cert.
+            "signatures": {
+                "this.server.example.com": {
+                   "algorithm:version": # NACL signature for this server
+                }
+            }
+        }
+    """
+
+    def __init__(self, hs):
+        self.version_string = hs.version_string
+        self.config = hs.config
+        self.clock = hs.clock
+        self.update_response_body(self.clock.time_msec())
+        Resource.__init__(self)
+
+    def update_response_body(self, time_now_msec):
+        refresh_interval = self.config.key_refresh_interval
+        self.expires = int(time_now_msec + refresh_interval)
+        self.response_body = encode_canonical_json(self.response_json_object())
+
+    def response_json_object(self):
+        verify_keys = {}
+        for key in self.config.signing_key:
+            verify_key_bytes = key.verify_key.encode()
+            key_id = "%s:%s" % (key.alg, key.version)
+            verify_keys[key_id] = encode_base64(verify_key_bytes)
+
+        old_verify_keys = {}
+        for key in self.config.old_signing_keys:
+            key_id = "%s:%s" % (key.alg, key.version)
+            verify_key_bytes = key.encode()
+            old_verify_keys[key_id] = {
+                u"key": encode_base64(verify_key_bytes),
+                u"expired": key.expired,
+            }
+
+        x509_certificate_bytes = crypto.dump_certificate(
+            crypto.FILETYPE_ASN1,
+            self.config.tls_certificate
+        )
+        json_object = {
+            u"expires": self.expires,
+            u"server_name": self.config.server_name,
+            u"verify_keys": verify_keys,
+            u"old_verify_keys": old_verify_keys,
+            u"tls_certificate": encode_base64(x509_certificate_bytes)
+        }
+        for key in self.config.signing_key:
+            json_object = sign_json(
+                json_object,
+                self.config.server_name,
+                key,
+            )
+        return json_object
+
+    def render_GET(self, request):
+        time_now = self.clock.time_msec()
+        # Update the expiry time if less than half the interval remains.
+        if time_now + self.config.key_refresh_interval / 2 > self.expires:
+            self.update_response_body(time_now)
+        return respond_with_json_bytes(
+            request, 200, self.response_body,
+            version_string=self.version_string
+        )
+
+    def getChild(self, name, request):
+        if name == '':
+            return self
diff --git a/synapse/server.py b/synapse/server.py
index 0bd87bdd77..a602b425e3 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -78,6 +78,7 @@ class BaseHomeServer(object):
         'resource_for_web_client',
         'resource_for_content_repo',
         'resource_for_server_key',
+        'resource_for_server_key_v2',
         'resource_for_media_repository',
         'resource_for_metrics',
         'event_sources',
--
cgit 1.4.1


From ffad75bd6284873c27efb2cfdfdcf9f909eb9db3 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 15 Apr 2015 17:00:50 +0100
Subject: Remove mysql/maria support

---
 scripts/port_from_sqlite_to_postgres.py            | 373 ++++++++++++++++++++
 scripts/port_to_maria.py                           | 383 ---------------------
 synapse/app/homeserver.py                          |   9 +-
 synapse/storage/engines/__init__.py                |   2 -
 synapse/storage/engines/maria.py                   |  50 ---
 .../storage/schema/delta/16/unique_constraints.sql |   2 +-
 6 files changed, 375 insertions(+), 444 deletions(-)
 create mode 100644 scripts/port_from_sqlite_to_postgres.py
 delete mode 100644 scripts/port_to_maria.py
 delete mode 100644 synapse/storage/engines/maria.py
(limited to 'synapse/app')

diff --git a/scripts/port_from_sqlite_to_postgres.py b/scripts/port_from_sqlite_to_postgres.py
new file mode 100644
index 0000000000..4b3fd9e529
--- /dev/null
+++ b/scripts/port_from_sqlite_to_postgres.py
@@ -0,0 +1,373 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer, reactor
+from twisted.enterprise import adbapi
+
+from synapse.storage._base import LoggingTransaction, SQLBaseStore
+from synapse.storage.engines import create_engine
+
+import argparse
+import itertools
+import logging
+import types
+import yaml
+
+
+logger = logging.getLogger("port_from_sqlite_to_postgres")
+
+
+BINARY_COLUMNS = {
+    "event_content_hashes": ["hash"],
+    "event_reference_hashes": ["hash"],
+    "event_signatures": ["signature"],
+    "event_edge_hashes": ["hash"],
+    "events": ["content", "unrecognized_keys"],
+    "event_json": ["internal_metadata", "json"],
+    "application_services_txns": ["event_ids"],
+    "received_transactions": ["response_json"],
+    "sent_transactions": ["response_json"],
+    "server_tls_certificates": ["tls_certificate"],
+    "server_signature_keys": ["verify_key"],
+    "pushers": ["pushkey", "data"],
+    "user_filters": ["filter_json"],
+}
+
+UNICODE_COLUMNS = {
+    "events": ["content", "unrecognized_keys"],
+    "event_json": ["internal_metadata", "json"],
+    "users": ["password_hash"],
+}
+
+
+BOOLEAN_COLUMNS = {
+    "events": ["processed", "outlier"],
+    "rooms": ["is_public"],
+    "event_edges": ["is_state"],
+    "presence_list": ["accepted"],
+}
+
+
+APPEND_ONLY_TABLES = [
+    "event_content_hashes",
+    "event_reference_hashes",
+    "event_signatures",
+    "event_edge_hashes",
+    "events",
+    "event_json",
+    "state_events",
+    "room_memberships",
+    "feedback",
+    "topics",
+    "room_names",
+    "rooms",
+    "local_media_repository",
+    "local_media_repository_thumbnails",
+    "remote_media_cache",
+    "remote_media_cache_thumbnails",
+    "redactions",
+    "event_edges",
+    "event_auth",
+    "received_transactions",
+    "sent_transactions",
+    "transaction_id_to_pdu",
+    "users",
+    "state_groups",
+    "state_groups_state",
+    "event_to_state_groups",
+    "rejections",
+]
+
+
+class Store(object):
+    def __init__(self, db_pool, engine):
+        self.db_pool = db_pool
+        self.database_engine = engine
+
+    _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
+    _simple_insert = SQLBaseStore.__dict__["_simple_insert"]
+
+    _simple_select_onecol_txn = SQLBaseStore.__dict__["_simple_select_onecol_txn"]
+    _simple_select_onecol = SQLBaseStore.__dict__["_simple_select_onecol"]
+    _simple_select_one_onecol = SQLBaseStore.__dict__["_simple_select_one_onecol"]
+    _simple_select_one_onecol_txn = SQLBaseStore.__dict__["_simple_select_one_onecol_txn"]
+
+    _simple_update_one = SQLBaseStore.__dict__["_simple_update_one"]
+    _simple_update_one_txn = SQLBaseStore.__dict__["_simple_update_one_txn"]
+
+    _execute_and_decode = SQLBaseStore.__dict__["_execute_and_decode"]
+
+    def runInteraction(self, desc, func, *args, **kwargs):
+        def r(conn):
+            try:
+                i = 0
+                N = 5
+                while True:
+                    try:
+                        txn = conn.cursor()
+                        return func(
+                            LoggingTransaction(txn, desc, self.database_engine),
+                            *args, **kwargs
+                        )
+                    except self.database_engine.module.DatabaseError as e:
+                        if self.database_engine.is_deadlock(e):
+                            logger.warn("[TXN DEADLOCK] {%s} %d/%d", desc, i, N)
+                            if i < N:
+                                i += 1
+                                conn.rollback()
+                                continue
+                        raise
+            except Exception as e:
+                logger.debug("[TXN FAIL] {%s} %s", desc, e)
+                raise
+
+        return self.db_pool.runWithConnection(r)
+
+    def insert_many_txn(self, txn, table, headers, rows):
+        sql = "INSERT INTO %s (%s) VALUES (%s)" % (
+            table,
+            ", ".join(k for k in headers),
+            ", ".join("%s" for _ in headers)
+        )
+
+        try:
+            txn.executemany(sql, rows)
+        except:
+            logger.exception(
+                "Failed to insert: %s",
+                table,
+            )
+            raise
+
+
+
+def chunks(n):
+    for i in itertools.count(0, n):
+        yield range(i, i+n)
+
+
+@defer.inlineCallbacks
+def handle_table(table, sqlite_store, postgres_store):
+    if table in APPEND_ONLY_TABLES:
+        # It's safe to just carry on inserting.
+        next_chunk = yield postgres_store._simple_select_one_onecol(
+            table="port_from_sqlite3",
+            keyvalues={"table_name": table},
+            retcol="rowid",
+            allow_none=True,
+        )
+
+        if next_chunk is None:
+            yield postgres_store._simple_insert(
+                table="port_from_sqlite3",
+                values={"table_name": table, "rowid": 0}
+            )
+
+            next_chunk = 0
+    else:
+        def delete_all(txn):
+            txn.execute(
+                "DELETE FROM port_from_sqlite3 WHERE table_name = %s",
+                (table,)
+            )
+            txn.execute("TRUNCATE %s CASCADE" % (table,))
+            postgres_store._simple_insert_txn(
+                txn,
+                table="port_from_sqlite3",
+                values={"table_name": table, "rowid": 0}
+            )
+
+        yield postgres_store.runInteraction(
+            "delete_non_append_only", delete_all
+        )
+
+        next_chunk = 0
+
+    logger.info("next_chunk for %s: %d", table, next_chunk)
+
+    N = 5000
+
+    select = "SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?" % (table,)
+
+    uni_col_names = UNICODE_COLUMNS.get(table, [])
+    bool_col_names = BOOLEAN_COLUMNS.get(table, [])
+    bin_col_names = BINARY_COLUMNS.get(table, [])
+
+    while True:
+        def r(txn):
+            txn.execute(select, (next_chunk, N,))
+            rows = txn.fetchall()
+            headers = [column[0] for column in txn.description]
+
+            return headers, rows
+
+        headers, rows = yield sqlite_store.runInteraction("select", r)
+
+        logger.info("Got %d rows for %s", len(rows), table)
+
+        if rows:
+            uni_cols = [i for i, h in enumerate(headers) if h in uni_col_names]
+            bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]
+            bin_cols = [i for i, h in enumerate(headers) if h in bin_col_names]
+            next_chunk = rows[-1][0] + 1
+
+            def conv(j, col):
+                if j in uni_cols:
+                    col = sqlite_store.database_engine.load_unicode(col)
+                if j in bool_cols:
+                    return bool(col)
+
+                if j in bin_cols:
+                    if isinstance(col, types.UnicodeType):
+                        col = buffer(col.encode("utf8"))
+
+                return col
+
+            for i, row in enumerate(rows):
+                rows[i] = tuple(
+                    postgres_store.database_engine.encode_parameter(
+                        conv(j, col)
+                    )
+                    for j, col in enumerate(row)
+                    if j > 0
+                )
+
+            def ins(txn):
+                postgres_store.insert_many_txn(txn, table, headers[1:], rows)
+
+                postgres_store._simple_update_one_txn(
+                    txn,
+                    table="port_from_sqlite3",
+                    keyvalues={"table_name": table},
+                    updatevalues={"rowid": next_chunk},
+                )
+
+            yield postgres_store.runInteraction("insert_many", ins)
+        else:
+            return
+
+
+def setup_db(db_config, database_engine):
+    db_conn = database_engine.module.connect(
+        **{
+            k: v for k, v in db_config.get("args", {}).items()
+            if not k.startswith("cp_")
+        }
+    )
+
+    database_engine.prepare_database(db_conn)
+
+    db_conn.commit()
+
+
+@defer.inlineCallbacks
+def main(sqlite_config, postgres_config):
+    try:
+        sqlite_db_pool = adbapi.ConnectionPool(
+            sqlite_config["name"],
+            **sqlite_config["args"]
+        )
+
+        postgres_db_pool = adbapi.ConnectionPool(
+            postgres_config["name"],
+            **postgres_config["args"]
+        )
+
+        sqlite_engine = create_engine("sqlite3")
+        postgres_engine = create_engine("psycopg2")
+
+        sqlite_store = Store(sqlite_db_pool, sqlite_engine)
+        postgres_store = Store(postgres_db_pool, postgres_engine)
+
+        # Step 1. Set up databases.
+        logger.info("Preparing sqlite database...")
+        setup_db(sqlite_config, sqlite_engine)
+
+        logger.info("Preparing postgres database...")
+        setup_db(postgres_config, postgres_engine)
+
+        # Step 2. Get tables.
+        logger.info("Fetching tables...")
+        tables = yield sqlite_store._simple_select_onecol(
+            table="sqlite_master",
+            keyvalues={
+                "type": "table",
+            },
+            retcol="name",
+        )
+
+        logger.info("Found %d tables", len(tables))
+
+        def create_port_table(txn):
+            txn.execute(
+                "CREATE TABLE port_from_sqlite3 ("
+                " table_name varchar(100) NOT NULL UNIQUE,"
+                " rowid bigint NOT NULL"
+                ")"
+            )
+
+        try:
+            yield postgres_store.runInteraction(
+                "create_port_table", create_port_table
+            )
+        except Exception as e:
+            logger.info("Failed to create port table: %s", e)
+
+        # Process tables.
+        yield defer.gatherResults(
+            [
+                handle_table(table, sqlite_store, postgres_store)
+                for table in tables
+                if table not in ["schema_version", "applied_schema_deltas"]
+                and not table.startswith("sqlite_")
+            ],
+            consumeErrors=True,
+        )
+
+    except:
+        logger.exception("")
+    finally:
+        reactor.stop()
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--sqlite-database")
+    parser.add_argument(
+        "--postgres-config", type=argparse.FileType('r'),
+    )
+
+    args = parser.parse_args()
+    logging.basicConfig(level=logging.INFO)
+
+    sqlite_config = {
+        "name": "sqlite3",
+        "args": {
+            "database": args.sqlite_database,
+            "cp_min": 1,
+            "cp_max": 1,
+            "check_same_thread": False,
+        },
+    }
+
+    postgres_config = yaml.safe_load(args.postgres_config)
+
+    reactor.callWhenRunning(
+        main,
+        sqlite_config=sqlite_config,
+        postgres_config=postgres_config,
+    )
+
+    reactor.run()
diff --git a/scripts/port_to_maria.py b/scripts/port_to_maria.py
deleted file mode 100644
index 0d7ba92357..0000000000
--- a/scripts/port_to_maria.py
+++ /dev/null
@@ -1,383 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -from twisted.internet import defer, reactor -from twisted.enterprise import adbapi - -from synapse.storage._base import LoggingTransaction, SQLBaseStore -from synapse.storage.engines import create_engine - -import argparse -import itertools -import logging -import types -import yaml - - -logger = logging.getLogger("port_to_maria") - - -BINARY_COLUMNS = { - "event_content_hashes": ["hash"], - "event_reference_hashes": ["hash"], - "event_signatures": ["signature"], - "event_edge_hashes": ["hash"], - "events": ["content", "unrecognized_keys"], - "event_json": ["internal_metadata", "json"], - "application_services_txns": ["event_ids"], - "received_transactions": ["response_json"], - "sent_transactions": ["response_json"], - "server_tls_certificates": ["tls_certificate"], - "server_signature_keys": ["verify_key"], - "pushers": ["pushkey", "data"], - "user_filters": ["filter_json"], -} - -UNICODE_COLUMNS = { - "events": ["content", "unrecognized_keys"], - "event_json": ["internal_metadata", "json"], - "users": ["password_hash"], -} - - -BOOLEAN_COLUMNS = { - "events": ["processed", "outlier"], - "rooms": ["is_public"], - "event_edges": ["is_state"], - "presence_list": ["accepted"], -} - - -APPEND_ONLY_TABLES = [ - "event_content_hashes", - "event_reference_hashes", - "event_signatures", - "event_edge_hashes", - "events", - "event_json", - "state_events", - "room_memberships", - "feedback", - "topics", - "room_names", - "rooms", - "local_media_repository", - "local_media_repository_thumbnails", - "remote_media_cache", - "remote_media_cache_thumbnails", - "redactions", - "event_edges", - "event_auth", - "received_transactions", - "sent_transactions", - "transaction_id_to_pdu", - "users", - "state_groups", - "state_groups_state", - "event_to_state_groups", - "rejections", -] - - -class Store(object): - def __init__(self, db_pool, engine): - self.db_pool = db_pool - self.database_engine = engine - - _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"] - _simple_insert = SQLBaseStore.__dict__["_simple_insert"] - - _simple_select_onecol_txn = SQLBaseStore.__dict__["_simple_select_onecol_txn"] - _simple_select_onecol = SQLBaseStore.__dict__["_simple_select_onecol"] - _simple_select_one_onecol = SQLBaseStore.__dict__["_simple_select_one_onecol"] - _simple_select_one_onecol_txn = SQLBaseStore.__dict__["_simple_select_one_onecol_txn"] - - _simple_update_one = SQLBaseStore.__dict__["_simple_update_one"] - _simple_update_one_txn = SQLBaseStore.__dict__["_simple_update_one_txn"] - - _execute_and_decode = SQLBaseStore.__dict__["_execute_and_decode"] - - def runInteraction(self, desc, func, *args, **kwargs): - def r(conn): - try: - i = 0 - N = 5 - while True: - try: - txn = conn.cursor() - return func( - LoggingTransaction(txn, desc, self.database_engine), - *args, **kwargs - ) - except self.database_engine.module.DatabaseError as e: - if self.database_engine.is_deadlock(e): - logger.warn("[TXN DEADLOCK] {%s} %d/%d", desc, i, N) - if i < N: - i += 1 - conn.rollback() - continue - raise - except Exception as e: - logger.debug("[TXN FAIL] {%s}", desc, e) - raise - - return self.db_pool.runWithConnection(r) - - def insert_many_txn(self, txn, table, headers, rows): - sql = "INSERT INTO %s (%s) VALUES (%s)" % ( - table, - ", ".join(k for k in headers), - ", ".join("%s" for _ in headers) - ) - - try: - txn.executemany(sql, rows) - except: - logger.exception( - "Failed to insert: %s", - table, - ) - raise - - - -def chunks(n): - for i in itertools.count(0, n): - yield range(i, i+n) - - 
-@defer.inlineCallbacks -def handle_table(table, sqlite_store, mysql_store): - if table in APPEND_ONLY_TABLES: - # It's safe to just carry on inserting. - next_chunk = yield mysql_store._simple_select_one_onecol( - table="port_from_sqlite3", - keyvalues={"table_name": table}, - retcol="rowid", - allow_none=True, - ) - - if next_chunk is None: - yield mysql_store._simple_insert( - table="port_from_sqlite3", - values={"table_name": table, "rowid": 0} - ) - - next_chunk = 0 - else: - def delete_all(txn): - txn.execute( - "DELETE FROM port_from_sqlite3 WHERE table_name = %s", - (table,) - ) - txn.execute("TRUNCATE %s CASCADE" % (table,)) - mysql_store._simple_insert_txn( - txn, - table="port_from_sqlite3", - values={"table_name": table, "rowid": 0} - ) - - yield mysql_store.runInteraction( - "delete_non_append_only", delete_all - ) - - next_chunk = 0 - - logger.info("next_chunk for %s: %d", table, next_chunk) - - N = 5000 - - select = "SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?" % (table,) - - uni_col_names = UNICODE_COLUMNS.get(table, []) - bool_col_names = BOOLEAN_COLUMNS.get(table, []) - bin_col_names = BINARY_COLUMNS.get(table, []) - - while True: - def r(txn): - txn.execute(select, (next_chunk, N,)) - rows = txn.fetchall() - headers = [column[0] for column in txn.description] - - return headers, rows - - headers, rows = yield sqlite_store.runInteraction("select", r) - - logger.info("Got %d rows for %s", len(rows), table) - - if rows: - uni_cols = [i for i, h in enumerate(headers) if h in uni_col_names] - bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names] - bin_cols = [i for i, h in enumerate(headers) if h in bin_col_names] - next_chunk = rows[-1][0] + 1 - - def conv(j, col): - if j in uni_cols: - col = sqlite_store.database_engine.load_unicode(col) - if j in bool_cols: - return bool(col) - - if j in bin_cols: - if isinstance(col, types.UnicodeType): - col = buffer(col.encode("utf8")) - - return col - - for i, row in enumerate(rows): - rows[i] = tuple( - mysql_store.database_engine.encode_parameter( - conv(j, col) - ) - for j, col in enumerate(row) - if j > 0 - ) - - def ins(txn): - mysql_store.insert_many_txn(txn, table, headers[1:], rows) - - mysql_store._simple_update_one_txn( - txn, - table="port_from_sqlite3", - keyvalues={"table_name": table}, - updatevalues={"rowid": next_chunk}, - ) - - yield mysql_store.runInteraction("insert_many", ins) - else: - return - - -def setup_db(db_config, database_engine): - db_conn = database_engine.module.connect( - **{ - k: v for k, v in db_config.get("args", {}).items() - if not k.startswith("cp_") - } - ) - - database_engine.prepare_database(db_conn) - - db_conn.commit() - - -@defer.inlineCallbacks -def main(sqlite_config, mysql_config): - try: - sqlite_db_pool = adbapi.ConnectionPool( - sqlite_config["name"], - **sqlite_config["args"] - ) - - mysql_db_pool = adbapi.ConnectionPool( - mysql_config["name"], - **mysql_config["args"] - ) - - sqlite_engine = create_engine("sqlite3") - mysql_engine = create_engine("psycopg2") - - sqlite_store = Store(sqlite_db_pool, sqlite_engine) - mysql_store = Store(mysql_db_pool, mysql_engine) - - # Step 1. Set up mysql database. - logger.info("Preparing sqlite database...") - setup_db(sqlite_config, sqlite_engine) - - logger.info("Preparing mysql database...") - setup_db(mysql_config, mysql_engine) - - # Step 2. Get tables. 
- logger.info("Fetching tables...") - tables = yield sqlite_store._simple_select_onecol( - table="sqlite_master", - keyvalues={ - "type": "table", - }, - retcol="name", - ) - - logger.info("Found %d tables", len(tables)) - - def create_port_table(txn): - txn.execute( - "CREATE TABLE port_from_sqlite3 (" - " table_name varchar(100) NOT NULL UNIQUE," - " rowid bigint NOT NULL" - ")" - ) - - try: - yield mysql_store.runInteraction( - "create_port_table", create_port_table - ) - except Exception as e: - logger.info("Failed to create port table: %s", e) - - # Process tables. - yield defer.gatherResults( - [ - handle_table(table, sqlite_store, mysql_store) - for table in tables - if table not in ["schema_version", "applied_schema_deltas"] - and not table.startswith("sqlite_") - ], - consumeErrors=True, - ) - - # for table in ["current_state_events"]: # tables: - # if table not in ["schema_version", "applied_schema_deltas"]: - # if not table.startswith("sqlite_"): - # yield handle_table(table, sqlite_store, mysql_store) - except: - logger.exception("") - finally: - reactor.stop() - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("--sqlite-database") - parser.add_argument( - "--mysql-config", type=argparse.FileType('r'), - ) - - args = parser.parse_args() - logging.basicConfig(level=logging.INFO) - - sqlite_config = { - "name": "sqlite3", - "args": { - "database": args.sqlite_database, - "cp_min": 1, - "cp_max": 1, - "check_same_thread": False, - }, - } - - mysql_config = yaml.safe_load(args.mysql_config) - # mysql_config["args"].update({ - # "sql_mode": "TRADITIONAL", - # "charset": "utf8mb4", - # "use_unicode": True, - # "collation": "utf8mb4_bin", - # }) - - reactor.callWhenRunning( - main, - sqlite_config=sqlite_config, - mysql_config=mysql_config, - ) - - reactor.run() diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f8a33120b5..93500dd791 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -366,14 +366,7 @@ def setup(config_options): } name = db_config.get("name", None) - if name in ["MySQLdb", "mysql.connector"]: - db_config.setdefault("args", {}).update({ - "sql_mode": "TRADITIONAL", - "charset": "utf8mb4", - "use_unicode": True, - "collation": "utf8mb4_bin", - }) - elif name == "psycopg2": + if name == "psycopg2": pass elif name == "sqlite3": db_config.setdefault("args", {}).update({ diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py index 548d4e1b42..eb76df7f01 100644 --- a/synapse/storage/engines/__init__.py +++ b/synapse/storage/engines/__init__.py @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .maria import MariaEngine from .postgres import PostgresEngine from .sqlite3 import Sqlite3Engine @@ -22,7 +21,6 @@ import importlib SUPPORTED_MODULE = { "sqlite3": Sqlite3Engine, - "mysql.connector": MariaEngine, "psycopg2": PostgresEngine, } diff --git a/synapse/storage/engines/maria.py b/synapse/storage/engines/maria.py deleted file mode 100644 index 90165f6849..0000000000 --- a/synapse/storage/engines/maria.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2015 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from synapse.storage import prepare_database
-
-import types
-
-
-class MariaEngine(object):
-    def __init__(self, database_module):
-        self.module = database_module
-
-    def convert_param_style(self, sql):
-        return sql.replace("?", "%s")
-
-    def encode_parameter(self, param):
-        if isinstance(param, types.BufferType):
-            return bytes(param)
-        return param
-
-    def on_new_connection(self, db_conn):
-        pass
-
-    def prepare_database(self, db_conn):
-        cur = db_conn.cursor()
-        cur.execute(
-            "ALTER DATABASE CHARACTER SET utf8mb4 COLLATE utf8mb4_bin"
-        )
-        db_conn.commit()
-        prepare_database(db_conn, self)
-
-    def is_deadlock(self, error):
-        if isinstance(error, self.module.DatabaseError):
-            return error.sqlstate == "40001" and error.errno == 1213
-        return False
-
-    def load_unicode(self, v):
-        return bytes(v).decode("UTF8")
diff --git a/synapse/storage/schema/delta/16/unique_constraints.sql b/synapse/storage/schema/delta/16/unique_constraints.sql
index f9fbb6b448..3604ea8427 100644
--- a/synapse/storage/schema/delta/16/unique_constraints.sql
+++ b/synapse/storage/schema/delta/16/unique_constraints.sql
@@ -1,5 +1,5 @@
 
--- We can use SQLite features here, since mysql support was only added in v16
+-- We can use SQLite features here, since other db support was only added in v16
 --
 DELETE FROM current_state_events WHERE rowid not in (
-- 
cgit 1.4.1


From a7b51f4539af90460d47efe0bae2692de285cd26 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 24 Apr 2015 18:11:21 +0100
Subject: Check users in our table aren't on a different domain to the one
 we're configured with, to try & fix SYN-266

---
 synapse/app/homeserver.py       | 19 +++++++++++++++++++
 synapse/storage/registration.py | 18 ++++++++++++++++++
 2 files changed, 37 insertions(+)

(limited to 'synapse/app')

diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 27e53a9e56..5f6cf4deeb 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -26,6 +26,7 @@ from synapse.server import HomeServer
 from synapse.python_dependencies import check_requirements
 
 from twisted.internet import reactor
+from twisted.internet import defer
 from twisted.application import service
 from twisted.enterprise import adbapi
 from twisted.web.resource import Resource
@@ -241,6 +242,22 @@ class SynapseHomeServer(HomeServer):
             )
             logger.info("Metrics now running on 127.0.0.1 port %d", config.metrics_port)
 
+    @defer.inlineCallbacks
+    def post_startup_check(self):
+        all_users_native = yield self.get_datastore().all_users_on_domain(
+            self.hostname
+        )
+        if not all_users_native:
+            sys.stderr.write(
+                "\n"
+                "******************************************************\n"
+                "Found users in database not native to %s!\n"
+                "You cannot change a synapse server_name after it's been configured\n"
+                "******************************************************\n"
+                "\n"
+            )
+            reactor.stop()
+
 
 def get_version_string():
     try:
@@ -399,6 +416,8 @@ def setup(config_options):
     hs.get_datastore().start_profiling()
     hs.get_replication_layer().start_get_pdu_cache()
 
+    reactor.callWhenRunning(hs.post_startup_check)
+
     return hs
 
 
diff --git a/synapse/storage/registration.py
b/synapse/storage/registration.py index f24154f146..c2efc3fd32 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -144,3 +144,21 @@ class RegistrationStore(SQLBaseStore): return rows[0] raise StoreError(404, "Token not found.") + + @defer.inlineCallbacks + def all_users_on_domain(self, domain): + res = yield self.runInteraction( + "all_users_on_domain", + self._all_users_on_domain_txn, + domain + ) + defer.returnValue(res) + + def _all_users_on_domain_txn(self, txn, domain): + sql = "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?" + pat = "%:"+domain + cursor = txn.execute(sql, (pat,)) + num_not_matching = cursor.fetchall()[0][0] + if num_not_matching == 0: + return True + return False \ No newline at end of file -- cgit 1.4.1 From f8152f2708cc0c476f5e1ec028a63ca632927eff Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 27 Apr 2015 10:16:26 +0100 Subject: rename db method to be more informative --- synapse/app/homeserver.py | 2 +- synapse/storage/registration.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5f6cf4deeb..8da1a4bafc 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -244,7 +244,7 @@ class SynapseHomeServer(HomeServer): @defer.inlineCallbacks def post_startup_check(self): - all_users_native = yield self.get_datastore().all_users_on_domain( + all_users_native = yield self.get_datastore().are_all_users_on_domain( self.hostname ) if not all_users_native: diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 697d487126..65ae58a39c 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -146,15 +146,15 @@ class RegistrationStore(SQLBaseStore): raise StoreError(404, "Token not found.") @defer.inlineCallbacks - def all_users_on_domain(self, domain): + def are_all_users_on_domain(self, domain): res = yield self.runInteraction( - "all_users_on_domain", - self._all_users_on_domain_txn, + "are_all_users_on_domain", + self._are_all_users_on_domain_txn, domain ) defer.returnValue(res) - def _all_users_on_domain_txn(self, txn, domain): + def _are_all_users_on_domain_txn(self, txn, domain): sql = "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?" pat = "%:" + domain cursor = txn.execute(sql, (pat,)) -- cgit 1.4.1 From b02e1006b9d7282cdc9983d52ac478d4670a8361 Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 27 Apr 2015 11:46:00 +0100 Subject: Run database check before daemonizing, at the cost of database hygiene. 
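The domain check itself is a single aggregate query over the users table.
As a rough standalone sketch (not Synapse code; the database path and
domain below are made up), the same check run against a plain DB-API
connection looks like this:

    import sqlite3

    def are_all_users_on_domain(cursor, domain):
        # User IDs look like "@localpart:domain", so any row whose name
        # does not end in ":<domain>" belongs to a different server.
        cursor.execute(
            "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?",
            ("%:" + domain,),
        )
        return cursor.fetchone()[0] == 0

    conn = sqlite3.connect("homeserver.db")  # hypothetical path
    print(are_all_users_on_domain(conn.cursor(), "example.com"))

Using a plain connection rather than the datastore's runInteraction is
what lets the check run synchronously at startup, before the process
daemonizes, as the diff below does.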
---
 synapse/app/homeserver.py       | 17 ++++++++---------
 synapse/storage/__init__.py     | 10 ++++++++++
 synapse/storage/registration.py | 18 ------------------
 3 files changed, 18 insertions(+), 27 deletions(-)

(limited to 'synapse/app')

diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 8da1a4bafc..8a00b21aa5 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -18,7 +18,8 @@ import sys
 sys.dont_write_bytecode = True
 
 from synapse.storage import (
-    prepare_database, prepare_sqlite3_database, UpgradeDatabaseException,
+    prepare_database, prepare_sqlite3_database, are_all_users_on_domain,
+    UpgradeDatabaseException,
 )
 
 from synapse.server import HomeServer
@@ -242,10 +243,9 @@ class SynapseHomeServer(HomeServer):
             )
             logger.info("Metrics now running on 127.0.0.1 port %d", config.metrics_port)
 
-    @defer.inlineCallbacks
-    def post_startup_check(self):
-        all_users_native = yield self.get_datastore().are_all_users_on_domain(
-            self.hostname
+    def run_startup_checks(self, db_conn):
+        all_users_native = are_all_users_on_domain(
+            db_conn, self.hostname
         )
         if not all_users_native:
             sys.stderr.write(
@@ -254,9 +254,9 @@
                 "Found users in database not native to %s!\n"
                 "You cannot change a synapse server_name after it's been configured\n"
                 "******************************************************\n"
-                "\n"
+                "\n" % (self.hostname,)
             )
-            reactor.stop()
+            sys.exit(1)
 
 
 def get_version_string():
@@ -392,6 +392,7 @@ def setup(config_options):
         with sqlite3.connect(db_name) as db_conn:
             prepare_sqlite3_database(db_conn)
             prepare_database(db_conn)
+            hs.run_startup_checks(db_conn)
 
     except UpgradeDatabaseException:
         sys.stderr.write(
             "\nFailed to upgrade database.\n"
@@ -416,8 +417,6 @@
     hs.get_datastore().start_profiling()
     hs.get_replication_layer().start_get_pdu_cache()
 
-    reactor.callWhenRunning(hs.post_startup_check)
-
     return hs
 
 
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index f4dec70393..0c47443689 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -421,3 +421,13 @@ def prepare_sqlite3_database(db_conn):
                 " VALUES (?,?)",
                 (row[0], False)
             )
+
+
+def are_all_users_on_domain(txn, domain):
+    sql = "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?"
+    pat = "%:" + domain
+    cursor = txn.execute(sql, (pat,))
+    num_not_matching = cursor.fetchall()[0][0]
+    if num_not_matching == 0:
+        return True
+    return False
\ No newline at end of file
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 65ae58a39c..f24154f146 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -144,21 +144,3 @@ class RegistrationStore(SQLBaseStore):
                 return rows[0]
 
         raise StoreError(404, "Token not found.")
-
-    @defer.inlineCallbacks
-    def are_all_users_on_domain(self, domain):
-        res = yield self.runInteraction(
-            "are_all_users_on_domain",
-            self._are_all_users_on_domain_txn,
-            domain
-        )
-        defer.returnValue(res)
-
-    def _are_all_users_on_domain_txn(self, txn, domain):
-        sql = "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?"
- pat = "%:" + domain - cursor = txn.execute(sql, (pat,)) - num_not_matching = cursor.fetchall()[0][0] - if num_not_matching == 0: - return True - return False -- cgit 1.4.1 From df7591479115b6ea73f774d2dcd2b071c92d7a37 Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 27 Apr 2015 11:48:33 +0100 Subject: pep8 --- synapse/app/homeserver.py | 1 - 1 file changed, 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 8a00b21aa5..541059b209 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -27,7 +27,6 @@ from synapse.server import HomeServer from synapse.python_dependencies import check_requirements from twisted.internet import reactor -from twisted.internet import defer from twisted.application import service from twisted.enterprise import adbapi from twisted.web.resource import Resource -- cgit 1.4.1 From 416a3e6c4f40c94cf859a07dbc4341c907aac091 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 15:44:30 +0100 Subject: Ensure check_same_thread is enabled for sqlite3 --- synapse/app/homeserver.py | 1 + 1 file changed, 1 insertion(+) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 93500dd791..3709cd7bf9 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -372,6 +372,7 @@ def setup(config_options): db_config.setdefault("args", {}).update({ "cp_min": 1, "cp_max": 1, + "check_same_thread": False, }) else: raise RuntimeError("Unsupported database type '%s'" % (name,)) -- cgit 1.4.1 From 1ef66cc3bd541ee1e4a017cfdd008eacaec5bcf8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 15:57:43 +0100 Subject: Move database configuration into config module --- synapse/app/homeserver.py | 35 ++++------------------------------- synapse/config/database.py | 23 +++++++++++++++++++++-- 2 files changed, 25 insertions(+), 33 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3709cd7bf9..f29f9d702e 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -350,42 +350,15 @@ def setup(config_options): tls_context_factory = context_factory.ServerContextFactory(config) - if config.database_config: - with open(config.database_config, 'r') as f: - db_config = yaml.safe_load(f) - else: - db_config = { - "name": "sqlite3", - "args": { - "database": config.database_path, - }, - } - - db_config = { - k: v for k, v in db_config.items() - } - - name = db_config.get("name", None) - if name == "psycopg2": - pass - elif name == "sqlite3": - db_config.setdefault("args", {}).update({ - "cp_min": 1, - "cp_max": 1, - "check_same_thread": False, - }) - else: - raise RuntimeError("Unsupported database type '%s'" % (name,)) - - database_engine = create_engine(name) - db_config["args"]["cp_openfun"] = database_engine.on_new_connection + database_engine = create_engine(config.database_config["name"]) + config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection hs = SynapseHomeServer( config.server_name, domain_with_port=domain_with_port, upload_dir=os.path.abspath("uploads"), db_name=config.database_path, - db_config=db_config, + db_config=config.database_config, tls_context_factory=tls_context_factory, config=config, content_addr=config.content_addr, @@ -404,7 +377,7 @@ def setup(config_options): try: db_conn = database_engine.module.connect( **{ - k: v for k, v in db_config.get("args", {}).items() + k: v for k, v in 
config.database_config.get("args", {}).items() if not k.startswith("cp_") } ) diff --git a/synapse/config/database.py b/synapse/config/database.py index f3d0898c09..190d119df4 100644 --- a/synapse/config/database.py +++ b/synapse/config/database.py @@ -15,6 +15,7 @@ from ._base import Config import os +import yaml class DatabaseConfig(Config): @@ -27,9 +28,27 @@ class DatabaseConfig(Config): self.event_cache_size = self.parse_size(args.event_cache_size) if args.database_config: - self.database_config = self.abspath(args.database_config) + with open(args.database_config) as f: + self.database_config = yaml.safe_load(f) else: - self.database_config = None + self.database_config = { + "name": "sqlite3", + "args": { + "database": self.database_path, + }, + } + + name = self.database_config.get("name", None) + if name == "psycopg2": + pass + elif name == "sqlite3": + self.database_config.setdefault("args", {}).update({ + "cp_min": 1, + "cp_max": 1, + "check_same_thread": False, + }) + else: + raise RuntimeError("Unsupported database type '%s'" % (name,)) @classmethod def add_arguments(cls, parser): -- cgit 1.4.1 From 42b7139deced5d9614bcb487625f4ce5d99e560b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 15:59:56 +0100 Subject: Remove unused import --- synapse/app/homeserver.py | 1 - 1 file changed, 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f29f9d702e..110566e33f 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -56,7 +56,6 @@ import os import re import resource import subprocess -import yaml logger = logging.getLogger("synapse.app.homeserver") -- cgit 1.4.1 From 073b891ec13e8964759d6f0498cf385fe77729ee Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 28 Apr 2015 13:44:23 +0100 Subject: Remove unused imports --- synapse/app/homeserver.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 43ce12af72..694a0125ad 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -19,8 +19,7 @@ sys.dont_write_bytecode = True from synapse.storage.engines import create_engine from synapse.storage import ( - prepare_database, prepare_sqlite3_database, are_all_users_on_domain, - UpgradeDatabaseException, + are_all_users_on_domain, UpgradeDatabaseException, ) from synapse.server import HomeServer -- cgit 1.4.1 From 204132a998ec3be7069a9f2dada323bcdb217908 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 11:42:28 +0100 Subject: Check that postgres database has correct charset set --- scripts/port_from_sqlite_to_postgres.py | 4 ++++ synapse/app/homeserver.py | 11 ++++++++++- synapse/storage/engines/__init__.py | 1 + synapse/storage/engines/_base.py | 18 ++++++++++++++++++ synapse/storage/engines/postgres.py | 11 +++++++++++ synapse/storage/engines/sqlite3.py | 3 +++ 6 files changed, 47 insertions(+), 1 deletion(-) create mode 100644 synapse/storage/engines/_base.py (limited to 'synapse/app') diff --git a/scripts/port_from_sqlite_to_postgres.py b/scripts/port_from_sqlite_to_postgres.py index 3296f1f54f..da760af087 100644 --- a/scripts/port_from_sqlite_to_postgres.py +++ b/scripts/port_from_sqlite_to_postgres.py @@ -283,6 +283,10 @@ class Porter(object): self.sqlite_store = Store(sqlite_db_pool, sqlite_engine) self.postgres_store = Store(postgres_db_pool, postgres_engine) + yield self.postgres_store.execute( + postgres_engine.check_database + ) + # Step 1. 
Set up databases. self.progress.set_state("Preparing SQLite3") self.setup_db(sqlite_config, sqlite_engine) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 694a0125ad..cbd295c0a5 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -17,7 +17,7 @@ import sys sys.dont_write_bytecode = True -from synapse.storage.engines import create_engine +from synapse.storage.engines import create_engine, IncorrectDatabaseSetup from synapse.storage import ( are_all_users_on_domain, UpgradeDatabaseException, ) @@ -255,6 +255,15 @@ class SynapseHomeServer(HomeServer): ) sys.exit(1) + try: + database_engine.check_database(db_conn.cursor()) + except IncorrectDatabaseSetup as e: + sys.stderr.write("*" * len(e.message) + '\n') + sys.stderr.write(e.message) + sys.stderr.write('\n') + sys.stderr.write("*" * len(e.message) + '\n') + sys.exit(2) + def get_version_string(): try: diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py index eb76df7f01..ab070f3428 100644 --- a/synapse/storage/engines/__init__.py +++ b/synapse/storage/engines/__init__.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from ._base import IncorrectDatabaseSetup from .postgres import PostgresEngine from .sqlite3 import Sqlite3Engine diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py new file mode 100644 index 0000000000..0b549d314b --- /dev/null +++ b/synapse/storage/engines/_base.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+
+class IncorrectDatabaseSetup(RuntimeError):
+    pass
diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py
index b8cca9b187..ca858daee9 100644
--- a/synapse/storage/engines/postgres.py
+++ b/synapse/storage/engines/postgres.py
@@ -15,12 +15,23 @@
 
 from synapse.storage import prepare_database
 
+from ._base import IncorrectDatabaseSetup
+
 
 class PostgresEngine(object):
     def __init__(self, database_module):
         self.module = database_module
         self.module.extensions.register_type(self.module.extensions.UNICODE)
 
+    def check_database(self, txn):
+        txn.execute("SHOW SERVER_ENCODING")
+        rows = txn.fetchall()
+        if rows and rows[0][0] != "UTF8":
+            raise IncorrectDatabaseSetup(
+                "Database has incorrect encoding: '%s' instead of 'UTF8'"
+                % (rows[0][0],)
+            )
+
     def convert_param_style(self, sql):
         return sql.replace("?", "%s")
 
diff --git a/synapse/storage/engines/sqlite3.py b/synapse/storage/engines/sqlite3.py
index f62d5d1205..7b49157cbd 100644
--- a/synapse/storage/engines/sqlite3.py
+++ b/synapse/storage/engines/sqlite3.py
@@ -20,6 +20,9 @@ class Sqlite3Engine(object):
     def __init__(self, database_module):
         self.module = database_module
 
+    def check_database(self, txn):
+        pass
+
     def convert_param_style(self, sql):
         return sql
 
-- 
cgit 1.4.1


From cd0864121bce3e1ab41ce432670cc87c4ffd54cd Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 29 Apr 2015 12:12:18 +0100
Subject: Make postgres database error slightly more helpful

---
 synapse/app/homeserver.py           | 27 +++++++++++++++------------
 synapse/storage/engines/postgres.py |  3 ++-
 2 files changed, 17 insertions(+), 13 deletions(-)

(limited to 'synapse/app')

diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index cbd295c0a5..29a1bf1d70 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -245,24 +245,27 @@ class SynapseHomeServer(HomeServer):
             db_conn.cursor(), database_engine, self.hostname
         )
         if not all_users_native:
-            sys.stderr.write(
-                "\n"
-                "******************************************************\n"
+            quit_with_error(
                 "Found users in database not native to %s!\n"
-                "You cannot change a synapse server_name after it's been configured\n"
-                "******************************************************\n"
-                "\n" % (self.hostname,)
+                "You cannot change a synapse server_name after it's been configured"
+                % (self.hostname,)
             )
-            sys.exit(1)
 
         try:
             database_engine.check_database(db_conn.cursor())
         except IncorrectDatabaseSetup as e:
-            sys.stderr.write("*" * len(e.message) + '\n')
-            sys.stderr.write(e.message)
-            sys.stderr.write('\n')
-            sys.stderr.write("*" * len(e.message) + '\n')
-            sys.exit(2)
+            quit_with_error(e.message)
+
+
+def quit_with_error(error_string):
+    message_lines = error_string.split("\n")
+    line_length = max([len(l) for l in message_lines]) + 2
+    sys.stderr.write("*" * line_length + '\n')
+    for line in message_lines:
+        if line.strip():
+            sys.stderr.write(" %s\n" % (line.strip(),))
+    sys.stderr.write("*" * line_length + '\n')
+    sys.exit(1)
 
 
 def get_version_string():
diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py
index ca858daee9..7125f66f01 100644
--- a/synapse/storage/engines/postgres.py
+++ b/synapse/storage/engines/postgres.py
@@ -28,7 +28,8 @@ class PostgresEngine(object):
         rows = txn.fetchall()
         if rows and rows[0][0] != "UTF8":
             raise IncorrectDatabaseSetup(
-                "Database has incorrect encoding: '%s' instead of 'UTF8'"
+                "Database has incorrect encoding: '%s' instead of 'UTF8'\n"
+                "See docs/postgres.rst for more information."
% (rows[0][0],) ) -- cgit 1.4.1 From 845b0b2c971e699a3f39b96ae418c6501f8a78b9 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Wed, 29 Apr 2015 14:52:42 +0100 Subject: Check requirements before doing anything else when running the homeserver --- synapse/app/homeserver.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index bc67e2a8e1..6f963cee22 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -16,6 +16,10 @@ import sys sys.dont_write_bytecode = True +from synapse.python_dependencies import check_requirements + +if __name__=='__main__': + check_requirements() from synapse.storage.engines import create_engine, IncorrectDatabaseSetup from synapse.storage import ( @@ -24,7 +28,6 @@ from synapse.storage import ( from synapse.server import HomeServer -from synapse.python_dependencies import check_requirements from twisted.internet import reactor from twisted.application import service -- cgit 1.4.1 From f2d90d5c02de56509ddbce7fc336751b3ad9a5ef Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Wed, 29 Apr 2015 14:53:23 +0100 Subject: Fix whitespace --- synapse/app/homeserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 6f963cee22..5c6812f473 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -18,7 +18,7 @@ import sys sys.dont_write_bytecode = True from synapse.python_dependencies import check_requirements -if __name__=='__main__': +if __name__ == '__main__': check_requirements() from synapse.storage.engines import create_engine, IncorrectDatabaseSetup -- cgit 1.4.1 From c27d6ad6b5eaed43f1f20530a51f48ca2931063f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 18:25:24 +0100 Subject: Only start pushers when synapse has fully started --- synapse/app/homeserver.py | 6 +++++- synapse/push/pusherpool.py | 3 --- 2 files changed, 5 insertions(+), 4 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5c6812f473..0987a76cfc 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -442,11 +442,15 @@ def setup(config_options): hs.start_listening() - hs.get_pusherpool().start() hs.get_state_handler().start_caching() hs.get_datastore().start_profiling() hs.get_replication_layer().start_get_pdu_cache() + def on_start(): + hs.get_pusherpool().start() + + reactor.callWhenRunning(on_start) + return hs diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 2e58f70ffa..571d8b2f82 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -19,8 +19,6 @@ from twisted.internet import defer from httppusher import HttpPusher from synapse.push import PusherConfigException -from synapse.util.async import run_on_reactor - import logging logger = logging.getLogger(__name__) @@ -50,7 +48,6 @@ class PusherPool: @defer.inlineCallbacks def start(self): - yield run_on_reactor() pushers = yield self.store.get_all_pushers() self._start_pushers(pushers) -- cgit 1.4.1 From e7a6edb0ee34a72ce5781802744ff6149ff6a940 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Apr 2015 18:37:30 +0100 Subject: Revert previous --- synapse/app/homeserver.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 0987a76cfc..5c6812f473 100755 --- 
a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -442,15 +442,11 @@ def setup(config_options):
 
     hs.start_listening()
 
+    hs.get_pusherpool().start()
     hs.get_state_handler().start_caching()
     hs.get_datastore().start_profiling()
     hs.get_replication_layer().start_get_pdu_cache()
 
-    def on_start():
-        hs.get_pusherpool().start()
-
-    reactor.callWhenRunning(on_start)
-
     return hs
 
 
-- 
cgit 1.4.1


From d624e2a6383bbb179132b79eec80fa516e747bd6 Mon Sep 17 00:00:00 2001
From: Mark Haines
Date: Thu, 30 Apr 2015 04:24:44 +0100
Subject: Manually generate the default config yaml, remove most of the
 commandline arguments for synapse, anticipating that people will use the
 yaml instead. Simplify implementing config options by not requiring the
 classes to hit the super class

---
 demo/start.sh                  |  27 ++++----
 synapse/app/homeserver.py      |   6 +-
 synapse/config/_base.py        | 141 ++++++++++++++++++++-------------
 synapse/config/appservice.py   |  18 ++----
 synapse/config/captcha.py      |  64 +++++++++----------
 synapse/config/database.py     |  73 +++++++++++----------
 synapse/config/homeserver.py   |   4 +-
 synapse/config/key.py          | 139 ++++++++++++++++++----------------------
 synapse/config/logger.py       |  35 +++++++---
 synapse/config/metrics.py      |  29 ++++-----
 synapse/config/ratelimiting.py |  78 ++++++++++-------------
 synapse/config/registration.py |  47 ++++----------
 synapse/config/repository.py   |  35 +++++-----
 synapse/config/server.py       | 117 ++++++++++++++++++++--------------
 synapse/config/tls.py          |  78 +++++++++++------------
 synapse/config/voip.py         |  43 ++++++-------
 synapse/server.py              |   1 -
 17 files changed, 457 insertions(+), 478 deletions(-)

(limited to 'synapse/app')

diff --git a/demo/start.sh b/demo/start.sh
index 0485be8053..941eccd668 100755
--- a/demo/start.sh
+++ b/demo/start.sh
@@ -16,30 +16,29 @@ if [ $# -eq 1 ]; then
     fi
 fi
 
+export PYTHONPATH=$(readlink -f $(pwd))
+
+
+echo $PYTHONPATH
+
 for port in 8080 8081 8082; do
     echo "Starting server on port $port...
" https_port=$((port + 400)) + mkdir -p demo/$port + pushd demo/$port + rm $DIR/etc/$port.config python -m synapse.app.homeserver \ - --generate-config \ - --config-path "demo/etc/$port.config" \ - -p "$https_port" \ - --unsecure-port "$port" \ - -H "localhost:$https_port" \ - -f "$DIR/$port.log" \ - -d "$DIR/$port.db" \ - -D --pid-file "$DIR/$port.pid" \ - --manhole $((port + 1000)) \ - --tls-dh-params-path "demo/demo.tls.dh" \ - --media-store-path "demo/media_store.$port" \ - $PARAMS $SYNAPSE_PARAMS \ - --enable-registration + --generate-config "localhost:$https_port" \ + --config-path "$DIR/etc/$port.config" \ python -m synapse.app.homeserver \ - --config-path "demo/etc/$port.config" \ + --config-path "$DIR/etc/$port.config" \ + -D \ -vv \ + popd done cd "$CWD" diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5c6812f473..c16dd8acc3 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -394,7 +394,6 @@ def setup(config_options): config.server_name, domain_with_port=domain_with_port, upload_dir=os.path.abspath("uploads"), - db_name=config.database_path, db_config=config.database_config, tls_context_factory=tls_context_factory, config=config, @@ -407,9 +406,8 @@ def setup(config_options): redirect_root_to_web_client=True, ) - db_name = hs.get_db_name() - logger.info("Preparing database: %s...", db_name) + logger.info("Preparing database: %r...", config.database_config) try: db_conn = database_engine.module.connect( @@ -431,7 +429,7 @@ def setup(config_options): ) sys.exit(1) - logger.info("Database prepared in %s.", db_name) + logger.info("Database prepared in %r.", config.database_config) if config.manhole: f = twisted.manhole.telnet.ShellFactory() diff --git a/synapse/config/_base.py b/synapse/config/_base.py index b59f4e45e2..9f5da70948 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -14,9 +14,10 @@ # limitations under the License. 
import argparse -import sys import os import yaml +import sys +from textwrap import dedent class ConfigError(Exception): @@ -24,8 +25,6 @@ class ConfigError(Exception): class Config(object): - def __init__(self, args): - pass @staticmethod def parse_size(string): @@ -37,6 +36,22 @@ class Config(object): size = sizes[suffix] return int(string) * size + @staticmethod + def parse_duration(string): + second = 1000 + hour = 60 * 60 * second + day = 24 * hour + week = 7 * day + year = 365 * day + + sizes = {"s": second, "h": hour, "d": day, "w": week, "y": year} + size = 1 + suffix = string[-1] + if suffix in sizes: + string = string[:-1] + size = sizes[suffix] + return int(string) * size + @staticmethod def abspath(file_path): return os.path.abspath(file_path) if file_path else file_path @@ -77,17 +92,6 @@ class Config(object): with open(file_path) as file_stream: return file_stream.read() - @classmethod - def read_yaml_file(cls, file_path, config_name): - cls.check_file(file_path, config_name) - with open(file_path) as file_stream: - try: - return yaml.load(file_stream) - except: - raise ConfigError( - "Error parsing yaml in file %r" % (file_path,) - ) - @staticmethod def default_path(name): return os.path.abspath(os.path.join(os.path.curdir, name)) @@ -97,16 +101,33 @@ class Config(object): with open(file_path) as file_stream: return yaml.load(file_stream) - @classmethod - def add_arguments(cls, parser): - pass + def invoke_all(self, name, *args, **kargs): + results = [] + for cls in type(self).mro(): + if name in cls.__dict__: + results.append(getattr(cls, name)(self, *args, **kargs)) + return results - @classmethod - def generate_config(cls, args, config_dir_path): - pass + def generate_config(self, config_dir_path, server_name): + default_config = "# vim:ft=yaml\n" + + default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all( + "default_config", config_dir_path, server_name + )) + + config = yaml.load(default_config) + + if not os.path.exists(config_dir_path): + os.makedirs(config_dir_path) + + self.invoke_all("generate_keys", config) + + return default_config @classmethod def load_config(cls, description, argv, generate_section=None): + result = cls() + config_parser = argparse.ArgumentParser(add_help=False) config_parser.add_argument( "-c", "--config-path", @@ -115,66 +136,56 @@ class Config(object): ) config_parser.add_argument( "--generate-config", - action="store_true", - help="Generate config file" + metavar="SERVER_NAME", + help="Generate a config file for the server name" ) config_args, remaining_args = config_parser.parse_known_args(argv) - if config_args.generate_config: - if not config_args.config_path: - config_parser.error( - "Must specify where to generate the config file" - ) - config_dir_path = os.path.dirname(config_args.config_path) - if os.path.exists(config_args.config_path): - defaults = cls.read_config_file(config_args.config_path) - else: - defaults = {} - else: - if config_args.config_path: - defaults = cls.read_config_file(config_args.config_path) - else: - defaults = {} - - parser = argparse.ArgumentParser( - parents=[config_parser], - description=description, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - cls.add_arguments(parser) - parser.set_defaults(**defaults) - - args = parser.parse_args(remaining_args) + if not config_args.config_path: + config_parser.error( + "Must supply a config file.\nA config file can be automatically" + " generated using \"--generate-config SERVER_NAME" + " -c CONFIG-FILE\"" + ) if 
config_args.generate_config: + server_name = config_args.generate_config + config_path = config_args.config_path + if os.path.exists(config_path): + print "Config file %r already exists. Not overwriting" % ( + config_args.config_path + ) + sys.exit(0) config_dir_path = os.path.dirname(config_args.config_path) config_dir_path = os.path.abspath(config_dir_path) - if not os.path.exists(config_dir_path): - os.makedirs(config_dir_path) - cls.generate_config(args, config_dir_path) - config = {} - for key, value in vars(args).items(): - if (key not in set(["config_path", "generate_config"]) - and value is not None): - config[key] = value - with open(config_args.config_path, "w") as config_file: - # TODO(mark/paul) We might want to output emacs-style mode - # markers as well as vim-style mode markers into the file, - # to further hint to people this is a YAML file. - config_file.write("# vim:ft=yaml\n") - yaml.dump(config, config_file, default_flow_style=False) + with open(config_path, "wb") as config_file: + config_file.write( + result.generate_config(config_dir_path, server_name) + ) print ( "A config file has been generated in %s for server name" " '%s' with corresponding SSL keys and self-signed" " certificates. Please review this file and customise it to" " your needs." - ) % ( - config_args.config_path, config['server_name'] - ) + ) % (config_path, server_name) print ( "If this server name is incorrect, you will need to regenerate" " the SSL certificates" ) sys.exit(0) - return cls(args) + config = cls.read_config_file(config_args.config_path) + result.invoke_all("read_config", config) + + parser = argparse.ArgumentParser( + parents=[config_parser], + description=description, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + + result.invoke_all("add_arguments", parser) + args = parser.parse_args(remaining_args) + + result.invoke_all("read_arguments", args) + + return result diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 399a716d80..38f41933b7 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -17,15 +17,11 @@ from ._base import Config class AppServiceConfig(Config): - def __init__(self, args): - super(AppServiceConfig, self).__init__(args) - self.app_service_config_files = args.app_service_config_files + def read_config(self, config): + self.app_service_config_files = config.get("app_service_config_files", []) - @classmethod - def add_arguments(cls, parser): - super(AppServiceConfig, cls).add_arguments(parser) - group = parser.add_argument_group("appservice") - group.add_argument( - "--app-service-config-files", type=str, nargs='+', - help="A list of application service config files to use." 
-        )
+    def default_config(cls, config_dir_path, server_name):
+        return """\
+        # A list of application service config files to use
+        app_service_config_files: []
+        """
diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py
index 07fbfadc0f..ba7037aeb1 100644
--- a/synapse/config/captcha.py
+++ b/synapse/config/captcha.py
@@ -17,40 +17,34 @@ from ._base import Config
 
 
 class CaptchaConfig(Config):
-    def __init__(self, args):
-        super(CaptchaConfig, self).__init__(args)
-        self.recaptcha_private_key = args.recaptcha_private_key
-        self.recaptcha_public_key = args.recaptcha_public_key
-        self.enable_registration_captcha = args.enable_registration_captcha
+    def read_config(self, config):
+        self.recaptcha_private_key = config["recaptcha_private_key"]
+        self.recaptcha_public_key = config["recaptcha_public_key"]
+        self.enable_registration_captcha = config["enable_registration_captcha"]
         self.captcha_ip_origin_is_x_forwarded = (
-            args.captcha_ip_origin_is_x_forwarded
-        )
-        self.captcha_bypass_secret = args.captcha_bypass_secret
-
-    @classmethod
-    def add_arguments(cls, parser):
-        super(CaptchaConfig, cls).add_arguments(parser)
-        group = parser.add_argument_group("recaptcha")
-        group.add_argument(
-            "--recaptcha-public-key", type=str, default="YOUR_PUBLIC_KEY",
-            help="This Home Server's ReCAPTCHA public key."
-        )
-        group.add_argument(
-            "--recaptcha-private-key", type=str, default="YOUR_PRIVATE_KEY",
-            help="This Home Server's ReCAPTCHA private key."
-        )
-        group.add_argument(
-            "--enable-registration-captcha", type=bool, default=False,
-            help="Enables ReCaptcha checks when registering, preventing signup"
-            + " unless a captcha is answered. Requires a valid ReCaptcha "
-            + "public/private key."
-        )
-        group.add_argument(
-            "--captcha_ip_origin_is_x_forwarded", type=bool, default=False,
-            help="When checking captchas, use the X-Forwarded-For (XFF) header"
-            + " as the client IP and not the actual client IP."
-        )
-        group.add_argument(
-            "--captcha_bypass_secret", type=str,
-            help="A secret key used to bypass the captcha test entirely."
+            config["captcha_ip_origin_is_x_forwarded"]
         )
+        self.captcha_bypass_secret = config.get("captcha_bypass_secret")
+
+    def default_config(self, config_dir_path, server_name):
+        return """\
+        ## Captcha ##
+
+        # This Home Server's ReCAPTCHA public key.
+        recaptcha_public_key: "YOUR_PUBLIC_KEY"
+
+        # This Home Server's ReCAPTCHA private key.
+        recaptcha_private_key: "YOUR_PRIVATE_KEY"
+
+        # Enables ReCaptcha checks when registering, preventing signup
+        # unless a captcha is answered. Requires a valid ReCaptcha
+        # public/private key.
+        enable_registration_captcha: False
+
+        # When checking captchas, use the X-Forwarded-For (XFF) header
+        # as the client IP and not the actual client IP.
+        captcha_ip_origin_is_x_forwarded: False
+
+        # A secret key used to bypass the captcha test entirely.
+        captcha_bypass_secret: ~
+        """
diff --git a/synapse/config/database.py b/synapse/config/database.py
index 190d119df4..ccd96c4f0f 100644
--- a/synapse/config/database.py
+++ b/synapse/config/database.py
@@ -14,28 +14,21 @@
 # limitations under the License.
 from ._base import Config
 
 
 class DatabaseConfig(Config):
-    def __init__(self, args):
-        super(DatabaseConfig, self).__init__(args)
-        if args.database_path == ":memory:":
-            self.database_path = ":memory:"
-        else:
-            self.database_path = self.abspath(args.database_path)
-        self.event_cache_size = self.parse_size(args.event_cache_size)
-        if args.database_config:
-            with open(args.database_config) as f:
-                self.database_config = yaml.safe_load(f)
-        else:
+    def read_config(self, config):
+        self.event_cache_size = self.parse_size(
+            config.get("event_cache_size", "10K")
+        )
+
+        self.database_config = config.get("database")
+
+        if self.database_config is None:
             self.database_config = {
                 "name": "sqlite3",
-                "args": {
-                    "database": self.database_path,
-                },
+                "args": {},
             }
 
         name = self.database_config.get("name", None)
@@ -50,24 +43,36 @@ class DatabaseConfig(Config):
         else:
             raise RuntimeError("Unsupported database type '%s'" % (name,))
 
-    @classmethod
-    def add_arguments(cls, parser):
-        super(DatabaseConfig, cls).add_arguments(parser)
+        self.set_databasepath(config.get("database_path"))
+
+    def default_config(self, config_dir_path, server_name):
+        database_path = self.abspath("homeserver.db")
+        return """\
+        # Database configuration
+        database:
+            # The database engine name
+            name: "sqlite3"
+            # Arguments to pass to the engine
+            args:
+                # Path to the database
+                database: "%(database_path)s"
+        # Number of events to cache in memory.
+        event_cache_size: "10K"
+        """ % locals()
+
+    def read_arguments(self, args):
+        self.set_databasepath(args.database_path)
+
+    def set_databasepath(self, database_path):
+        if database_path != ":memory:":
+            database_path = self.abspath(database_path)
+        if self.database_config.get("name", None) == "sqlite3":
+            if database_path is not None:
+                self.database_config["database"] = database_path
+
+    def add_arguments(self, parser):
         db_group = parser.add_argument_group("database")
         db_group.add_argument(
-            "-d", "--database-path", default="homeserver.db",
-            metavar="SQLITE_DATABASE_PATH", help="The database name."
-        )
-        db_group.add_argument(
-            "--event-cache-size", default="100K",
-            help="Number of events to cache in memory."
+            "-d", "--database-path", metavar="SQLITE_DATABASE_PATH",
+            help="The path to a sqlite database to use."
         )
-        db_group.add_argument(
-            "--database-config", default=None,
-            help="Location of the database configuration file."
- ) - - @classmethod - def generate_config(cls, args, config_dir_path): - super(DatabaseConfig, cls).generate_config(args, config_dir_path) - args.database_path = os.path.abspath(args.database_path) diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 1c8ff38465..f9b4807a35 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -36,4 +36,6 @@ class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, if __name__ == '__main__': import sys - HomeServerConfig.load_config("Generate config", sys.argv[1:], "HomeServer") + sys.stdout.write( + HomeServerConfig().generate_config(sys.argv[1], sys.argv[2]) + ) diff --git a/synapse/config/key.py b/synapse/config/key.py index a2de6d5c17..a63f7d841b 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -24,44 +24,53 @@ from syutil.base64util import decode_base64 class KeyConfig(Config): - def __init__(self, args): - super(KeyConfig, self).__init__(args) - self.signing_key = self.read_signing_key(args.signing_key_path) + def read_config(self, config): + self.signing_key = self.read_signing_key(config["signing_key_path"]) self.old_signing_keys = self.read_old_signing_keys( - args.old_signing_key_path + config["old_signing_keys"] + ) + self.key_refresh_interval = self.parse_duration( + config["key_refresh_interval"] ) - self.key_refresh_interval = args.key_refresh_interval self.perspectives = self.read_perspectives( - args.perspectives_config_path + config["perspectives"] ) - @classmethod - def add_arguments(cls, parser): - super(KeyConfig, cls).add_arguments(parser) - key_group = parser.add_argument_group("keys") - key_group.add_argument("--signing-key-path", - help="The signing key to sign messages with") - key_group.add_argument("--old-signing-key-path", - help="The keys that the server used to sign" - " sign messages with but won't use" - " to sign new messages. E.g. it has" - " lost its private key") - key_group.add_argument("--key-refresh-interval", - default=24 * 60 * 60 * 1000, # 1 Day - help="How long a key response is valid for." - " Used to set the exipiry in /key/v2/." - " Controls how frequently servers will" - " query what keys are still valid") - key_group.add_argument("--perspectives-config-path", - help="The trusted servers to download signing" - " keys from") - - def read_perspectives(self, perspectives_config_path): - config = self.read_yaml_file( - perspectives_config_path, "perspectives_config_path" - ) + def default_config(self, config_dir_path, server_name): + base_key_name = os.path.join(config_dir_path, server_name) + return """\ + ## Signing Keys ## + + # Path to the signing key to sign messages with + signing_key_path: "%(base_key_name)s.signing.key" + + # The keys that the server used to sign messages with but won't use + # to sign new messages. E.g. it has lost its private key + old_signing_keys: {} + # "ed25519:auto": + # # Base64 encoded public key + # key: "The public part of your old signing key." + # # Millisecond POSIX timestamp when the key expired. + # expired_ts: 123456789123 + + # How long key response published by this server is valid for. + # Used to set the valid_until_ts in /key/v2 APIs. + # Determines how quickly servers will query to check which keys + # are still valid. + key_refresh_interval: "1d" # 1 Day. + + # The trusted servers to download signing keys from. 
+ perspectives: + servers: + "matrix.org": + verify_keys: + "ed25519:auto": + key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw" + """ % locals() + + def read_perspectives(self, perspectives_config): servers = {} - for server_name, server_config in config["servers"].items(): + for server_name, server_config in perspectives_config["servers"].items(): for key_id, key_data in server_config["verify_keys"].items(): if is_signing_algorithm_supported(key_id): key_base64 = key_data["key"] @@ -82,37 +91,31 @@ class KeyConfig(Config): " Try running again with --generate-config" ) - def read_old_signing_keys(self, old_signing_key_path): - old_signing_keys = self.read_file( - old_signing_key_path, "old_signing_key" - ) - try: - return syutil.crypto.signing_key.read_old_signing_keys( - old_signing_keys.splitlines(True) - ) - except Exception: - raise ConfigError( - "Error reading old signing keys." - ) - - @classmethod - def generate_config(cls, args, config_dir_path): - super(KeyConfig, cls).generate_config(args, config_dir_path) - base_key_name = os.path.join(config_dir_path, args.server_name) - - args.pid_file = os.path.abspath(args.pid_file) - - if not args.signing_key_path: - args.signing_key_path = base_key_name + ".signing.key" + def read_old_signing_keys(self, old_signing_keys): + keys = {} + for key_id, key_data in old_signing_keys.items(): + if is_signing_algorithm_supported(key_id): + key_base64 = key_data["key"] + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_key.expired_ts = key_data["expired_ts"] + keys[key_id] = verify_key + else: + raise ConfigError( + "Unsupported signing algorithm for old key: %r" % (key_id,) + ) + return keys - if not os.path.exists(args.signing_key_path): - with open(args.signing_key_path, "w") as signing_key_file: + def generate_keys(self, config): + signing_key_path = config["signing_key_path"] + if not os.path.exists(signing_key_path): + with open(signing_key_path, "w") as signing_key_file: syutil.crypto.signing_key.write_signing_keys( signing_key_file, (syutil.crypto.signing_key.generate_signing_key("auto"),), ) else: - signing_keys = cls.read_file(args.signing_key_path, "signing_key") + signing_keys = self.read_file(signing_key_path, "signing_key") if len(signing_keys.split("\n")[0].split()) == 1: # handle keys in the old format. 
key = syutil.crypto.signing_key.decode_signing_key_base64( @@ -120,28 +123,8 @@ class KeyConfig(Config): "auto", signing_keys.split("\n")[0] ) - with open(args.signing_key_path, "w") as signing_key_file: + with open(signing_key_path, "w") as signing_key_file: syutil.crypto.signing_key.write_signing_keys( signing_key_file, (key,), ) - - if not args.old_signing_key_path: - args.old_signing_key_path = base_key_name + ".old.signing.keys" - - if not os.path.exists(args.old_signing_key_path): - with open(args.old_signing_key_path, "w"): - pass - - if not args.perspectives_config_path: - args.perspectives_config_path = base_key_name + ".perspectives" - - if not os.path.exists(args.perspectives_config_path): - with open(args.perspectives_config_path, "w") as perspectives_file: - perspectives_file.write( - 'servers:\n' - ' matrix.org:\n' - ' verify_keys:\n' - ' "ed25519:auto":\n' - ' key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"\n' - ) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 247b324816..37b3d5342c 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -22,22 +22,41 @@ import yaml class LoggingConfig(Config): - def __init__(self, args): - super(LoggingConfig, self).__init__(args) - self.verbosity = int(args.verbose) if args.verbose else None - self.log_config = self.abspath(args.log_config) - self.log_file = self.abspath(args.log_file) - @classmethod + def read_config(self, config): + self.verbosity = config.get("verbose", 0) + self.log_config = self.abspath(config.get("log_config")) + self.log_file = self.abspath(config.get("log_file")) + + def default_config(self, config_dir_path, server_name): + log_file = self.abspath("homeserver.log") + return """ + # Logging verbosity level. + verbose: 0 + + # File to write logging to + log_file: "%(log_file)s" + + # A yaml python logging config file + #log_config: "your.log.config.yaml" + """ % locals() + + def read_arguments(self, args): + if args.verbose is not None: + self.verbosity = args.verbose + if args.log_config is not None: + self.log_config = args.log_config + if args.log_file is not None: + self.log_file = args.log_file + def add_arguments(cls, parser): - super(LoggingConfig, cls).add_arguments(parser) logging_group = parser.add_argument_group("logging") logging_group.add_argument( '-v', '--verbose', dest="verbose", action='count', help="The verbosity level." ) logging_group.add_argument( - '-f', '--log-file', dest="log_file", default="homeserver.log", + '-f', '--log-file', dest="log_file", help="File to log to." 
) logging_group.add_argument( diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py index 901a429c76..06e2e7ccff 100644 --- a/synapse/config/metrics.py +++ b/synapse/config/metrics.py @@ -17,20 +17,17 @@ from ._base import Config class MetricsConfig(Config): - def __init__(self, args): - super(MetricsConfig, self).__init__(args) - self.enable_metrics = args.enable_metrics - self.metrics_port = args.metrics_port + def read_config(self, config): + self.enable_metrics = config["enable_metrics"] + self.metrics_port = config["metrics_port"] - @classmethod - def add_arguments(cls, parser): - super(MetricsConfig, cls).add_arguments(parser) - metrics_group = parser.add_argument_group("metrics") - metrics_group.add_argument( - '--enable-metrics', dest="enable_metrics", action="store_true", - help="Enable collection and rendering of performance metrics" - ) - metrics_group.add_argument( - '--metrics-port', metavar="PORT", type=int, - help="Separate port to accept metrics requests on (on localhost)" - ) + def default_config(self, config_dir_path, server_name): + return """\ + ## Metrics ### + + # Enable collection and rendering of performance metrics + enable_metrics: False + + # Separate port to accept metrics requests on (on localhost) + metrics_port: ~ + """ diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py index 862c07ef8c..76d9970e5b 100644 --- a/synapse/config/ratelimiting.py +++ b/synapse/config/ratelimiting.py @@ -17,56 +17,42 @@ from ._base import Config class RatelimitConfig(Config): - def __init__(self, args): - super(RatelimitConfig, self).__init__(args) - self.rc_messages_per_second = args.rc_messages_per_second - self.rc_message_burst_count = args.rc_message_burst_count + def read_config(self, config): + self.rc_messages_per_second = config["rc_messages_per_second"] + self.rc_message_burst_count = config["rc_message_burst_count"] - self.federation_rc_window_size = args.federation_rc_window_size - self.federation_rc_sleep_limit = args.federation_rc_sleep_limit - self.federation_rc_sleep_delay = args.federation_rc_sleep_delay - self.federation_rc_reject_limit = args.federation_rc_reject_limit - self.federation_rc_concurrent = args.federation_rc_concurrent + self.federation_rc_window_size = config["federation_rc_window_size"] + self.federation_rc_sleep_limit = config["federation_rc_sleep_limit"] + self.federation_rc_sleep_delay = config["federation_rc_sleep_delay"] + self.federation_rc_reject_limit = config["federation_rc_reject_limit"] + self.federation_rc_concurrent = config["federation_rc_concurrent"] - @classmethod - def add_arguments(cls, parser): - super(RatelimitConfig, cls).add_arguments(parser) - rc_group = parser.add_argument_group("ratelimiting") - rc_group.add_argument( - "--rc-messages-per-second", type=float, default=0.2, - help="number of messages a client can send per second" - ) - rc_group.add_argument( - "--rc-message-burst-count", type=float, default=10, - help="number of message a client can send before being throttled" - ) + def default_config(self, config_dir_path, server_name): + return """\ + ## Ratelimiting ## - rc_group.add_argument( - "--federation-rc-window-size", type=int, default=10000, - help="The federation window size in milliseconds", - ) + # Number of messages a client can send per second + rc_messages_per_second: 0.2 - rc_group.add_argument( - "--federation-rc-sleep-limit", type=int, default=10, - help="The number of federation requests from a single server" - " in a window before the server will delay processing 
the"
-            " request.",
-        )
+            # Number of messages a client can send before being throttled
+            rc_message_burst_count: 10.0
 
-        rc_group.add_argument(
-            "--federation-rc-sleep-delay", type=int, default=500,
-            help="The duration in milliseconds to delay processing events from"
-            " remote servers by if they go over the sleep limit.",
-        )
+            # The federation window size in milliseconds
+            federation_rc_window_size: 1000
 
-        rc_group.add_argument(
-            "--federation-rc-reject-limit", type=int, default=50,
-            help="The maximum number of concurrent federation requests allowed"
-            " from a single server",
-        )
+            # The number of federation requests from a single server in a window
+            # before the server will delay processing the request.
+            federation_rc_sleep_limit: 10
 
-        rc_group.add_argument(
-            "--federation-rc-concurrent", type=int, default=3,
-            help="The number of federation requests to concurrently process"
-            " from a single server",
-        )
+            # The duration in milliseconds to delay processing events from
+            # remote servers by if they go over the sleep limit.
+            federation_rc_sleep_delay: 500
+
+            # The maximum number of concurrent federation requests allowed
+            # from a single server
+            federation_rc_reject_limit: 50
+
+            # The number of federation requests to concurrently process from a
+            # single server
+            federation_rc_concurrent: 3
+        """
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index f412a72f59..82684e4dc9 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -22,40 +22,21 @@ import distutils.util
 
 
 class RegistrationConfig(Config):
-    def __init__(self, args):
-        super(RegistrationConfig, self).__init__(args)
-
-        # `args.enable_registration` may either be a bool or a string depending
-        # on if the option was given a value (e.g. --enable-registration=true
-        # would set `args.enable_registration` to "true" not True.)
+    def read_config(self, config):
         self.disable_registration = not bool(
-            distutils.util.strtobool(str(args.enable_registration))
+            distutils.util.strtobool(str(config["enable_registration"]))
         )
-        self.registration_shared_secret = args.registration_shared_secret
-
-    @classmethod
-    def add_arguments(cls, parser):
-        super(RegistrationConfig, cls).add_arguments(parser)
-        reg_group = parser.add_argument_group("registration")
-
-        reg_group.add_argument(
-            "--enable-registration",
-            const=True,
-            default=False,
-            nargs='?',
-            help="Enable registration for new users.",
-        )
-        reg_group.add_argument(
-            "--registration-shared-secret", type=str,
-            help="If set, allows registration by anyone who also has the shared"
-            " secret, even if registration is otherwise disabled.",
+        self.registration_shared_secret = config.get("registration_shared_secret")
+
+    def default_config(self, config_dir, server_name):
+        registration_shared_secret = random_string_with_symbols(50)
+        return """\
+        ## Registration ##
 
-    @classmethod
-    def generate_config(cls, args, config_dir_path):
-        super(RegistrationConfig, cls).generate_config(args, config_dir_path)
-        if args.enable_registration is None:
-            args.enable_registration = False
+        # Enable registration for new users.
+        enable_registration: True
 
-        if args.registration_shared_secret is None:
-            args.registration_shared_secret = random_string_with_symbols(50)
+        # If set, allows registration by anyone who also has the shared
+        # secret, even if registration is otherwise disabled.
+ registration_shared_secret: "%(registration_shared_secret)s" + """ % locals() diff --git a/synapse/config/repository.py b/synapse/config/repository.py index e1827f05e4..bf727285d7 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -17,11 +17,10 @@ from ._base import Config class ContentRepositoryConfig(Config): - def __init__(self, args): - super(ContentRepositoryConfig, self).__init__(args) - self.max_upload_size = self.parse_size(args.max_upload_size) - self.max_image_pixels = self.parse_size(args.max_image_pixels) - self.media_store_path = self.ensure_directory(args.media_store_path) + def read_config(self, config): + self.max_upload_size = self.parse_size(config["max_upload_size"]) + self.max_image_pixels = self.parse_size(config["max_image_pixels"]) + self.media_store_path = self.ensure_directory(config["media_store_path"]) def parse_size(self, string): sizes = {"K": 1024, "M": 1024 * 1024} @@ -32,17 +31,15 @@ class ContentRepositoryConfig(Config): size = sizes[suffix] return int(string) * size - @classmethod - def add_arguments(cls, parser): - super(ContentRepositoryConfig, cls).add_arguments(parser) - db_group = parser.add_argument_group("content_repository") - db_group.add_argument( - "--max-upload-size", default="10M" - ) - db_group.add_argument( - "--media-store-path", default=cls.default_path("media_store") - ) - db_group.add_argument( - "--max-image-pixels", default="32M", - help="Maximum number of pixels that will be thumbnailed" - ) + def default_config(self, config_dir_path, server_name): + media_store = self.default_path("media_store") + return """ + # Directory where uploaded images and attachments are stored. + media_store_path: "%(media_store)s" + + # The largest allowed upload size in bytes + max_upload_size: "10M" + + # Maximum number of pixels that will be thumbnailed + max_image_pixels: "32M" + """ % locals() diff --git a/synapse/config/server.py b/synapse/config/server.py index c25feb4c58..fe1b63469a 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -17,64 +17,85 @@ from ._base import Config class ServerConfig(Config): - def __init__(self, args): - super(ServerConfig, self).__init__(args) - self.server_name = args.server_name - self.bind_port = args.bind_port - self.bind_host = args.bind_host - self.unsecure_port = args.unsecure_port - self.daemonize = args.daemonize - self.pid_file = self.abspath(args.pid_file) - self.web_client = args.web_client - self.manhole = args.manhole - self.soft_file_limit = args.soft_file_limit - if not args.content_addr: - host = args.server_name + def read_config(self, config): + self.server_name = config["server_name"] + self.bind_port = config["bind_port"] + self.bind_host = config["bind_host"] + self.unsecure_port = config["unsecure_port"] + self.manhole = config["manhole"] + self.pid_file = self.abspath(config.get("pid_file")) + self.web_client = config["web_client"] + self.soft_file_limit = config["soft_file_limit"] + + # Attempt to guess the content_addr for the v0 content repostitory + content_addr = config.get("content_addr") + if not content_addr: + host = self.server_name if ':' not in host: - host = "%s:%d" % (host, args.unsecure_port) + host = "%s:%d" % (host, self.unsecure_port) else: host = host.split(':')[0] - host = "%s:%d" % (host, args.unsecure_port) - args.content_addr = "http://%s" % (host,) + host = "%s:%d" % (host, self.unsecure_port) + content_addr = "http://%s" % (host,) + + self.content_addr = content_addr + + def default_config(self, config_dir_path, 
server_name): + if ":" in server_name: + bind_port = int(server_name.split(":")[1]) + unsecure_port = bind_port - 400 + else: + bind_port = 8448 + unsecure_port = 8008 + + pid_file = self.abspath("homeserver.pid") + return """\ + ## Server ## + + # The domain name of the server, with optional explicit port. + # This is used by remote servers to connect to this server, + # e.g. matrix.org, localhost:8080, etc. + server_name: "%(server_name)s" + + # The port to listen for HTTPS requests on. + # For when matrix traffic is sent directly to synapse. + bind_port: %(bind_port)s - self.content_addr = args.content_addr + # The port to listen for HTTP requests on. + # For when matrix traffic passes through loadbalancer that unwraps TLS. + unsecure_port: %(unsecure_port)s + + # Local interface to listen on. + # The empty string will cause synapse to listen on all interfaces. + bind_host: "" + + # When running as a daemon, the file to store the pid in + pid_file: %(pid_file)s + + # Whether to serve a web client from the HTTP/HTTPS root resource. + web_client: True + + # Set the soft limit on the number of file descriptors synapse can use + # Zero is used to indicate synapse should set the soft limit to the + # hard limit. + soft_file_limit: 0 + + # Turn on the twisted telnet manhole service on localhost on the given + # port. + manhole: ~ + """ % locals() + + def read_arguments(self, args): + if args.manhole is not None: + self.manhole = args.manhole + self.daemonize = args.daemonize - @classmethod - def add_arguments(cls, parser): - super(ServerConfig, cls).add_arguments(parser) + def add_arguments(self, parser): server_group = parser.add_argument_group("server") - server_group.add_argument( - "-H", "--server-name", default="localhost", - help="The domain name of the server, with optional explicit port. " - "This is used by remote servers to connect to this server, " - "e.g. matrix.org, localhost:8080, etc." - ) - server_group.add_argument("-p", "--bind-port", metavar="PORT", - type=int, help="https port to listen on", - default=8448) - server_group.add_argument("--unsecure-port", metavar="PORT", - type=int, help="http port to listen on", - default=8008) - server_group.add_argument("--bind-host", default="", - help="Local interface to listen on") server_group.add_argument("-D", "--daemonize", action='store_true', help="Daemonize the home server") - server_group.add_argument('--pid-file', default="homeserver.pid", - help="When running as a daemon, the file to" - " store the pid in") - server_group.add_argument('--web_client', default=True, type=bool, - help="Whether or not to serve a web client") server_group.add_argument("--manhole", metavar="PORT", dest="manhole", type=int, help="Turn on the twisted telnet manhole" " service on the given port.") - server_group.add_argument("--content-addr", default=None, - help="The host and scheme to use for the " - "content repository") - server_group.add_argument("--soft-file-limit", type=int, default=0, - help="Set the soft limit on the number of " - "file descriptors synapse can use. 
" - "Zero is used to indicate synapse " - "should set the soft limit to the hard" - "limit.") diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 034f9a7bf0..e70bc1cd2c 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -23,37 +23,44 @@ GENERATE_DH_PARAMS = False class TlsConfig(Config): - def __init__(self, args): - super(TlsConfig, self).__init__(args) + def read_config(self, config): self.tls_certificate = self.read_tls_certificate( - args.tls_certificate_path + config.get("tls_certificate_path") ) - self.no_tls = args.no_tls + self.no_tls = config.get("no_tls", False) if self.no_tls: self.tls_private_key = None else: self.tls_private_key = self.read_tls_private_key( - args.tls_private_key_path + config.get("tls_private_key_path") ) self.tls_dh_params_path = self.check_file( - args.tls_dh_params_path, "tls_dh_params" + config.get("tls_dh_params_path"), "tls_dh_params" ) - @classmethod - def add_arguments(cls, parser): - super(TlsConfig, cls).add_arguments(parser) - tls_group = parser.add_argument_group("tls") - tls_group.add_argument("--tls-certificate-path", - help="PEM encoded X509 certificate for TLS") - tls_group.add_argument("--tls-private-key-path", - help="PEM encoded private key for TLS") - tls_group.add_argument("--tls-dh-params-path", - help="PEM dh parameters for ephemeral keys") - tls_group.add_argument("--no-tls", action='store_true', - help="Don't bind to the https port.") + def default_config(self, config_dir_path, server_name): + base_key_name = os.path.join(config_dir_path, server_name) + + tls_certificate_path = base_key_name + ".tls.crt" + tls_private_key_path = base_key_name + ".tls.key" + tls_dh_params_path = base_key_name + ".tls.dh" + + return """\ + # PEM encoded X509 certificate for TLS + tls_certificate_path: "%(tls_certificate_path)s" + + # PEM encoded private key for TLS + tls_private_key_path: "%(tls_private_key_path)s" + + # PEM dh parameters for ephemeral keys + tls_dh_params_path: "%(tls_dh_params_path)s" + + # Don't bind to the https port + no_tls: False + """ % locals() def read_tls_certificate(self, cert_path): cert_pem = self.read_file(cert_path, "tls_certificate") @@ -63,22 +70,13 @@ class TlsConfig(Config): private_key_pem = self.read_file(private_key_path, "tls_private_key") return crypto.load_privatekey(crypto.FILETYPE_PEM, private_key_pem) - @classmethod - def generate_config(cls, args, config_dir_path): - super(TlsConfig, cls).generate_config(args, config_dir_path) - base_key_name = os.path.join(config_dir_path, args.server_name) - - if args.tls_certificate_path is None: - args.tls_certificate_path = base_key_name + ".tls.crt" - - if args.tls_private_key_path is None: - args.tls_private_key_path = base_key_name + ".tls.key" - - if args.tls_dh_params_path is None: - args.tls_dh_params_path = base_key_name + ".tls.dh" + def generate_keys(self, config): + tls_certificate_path = config["tls_certificate_path"] + tls_private_key_path = config["tls_private_key_path"] + tls_dh_params_path = config["tls_dh_params_path"] - if not os.path.exists(args.tls_private_key_path): - with open(args.tls_private_key_path, "w") as private_key_file: + if not os.path.exists(tls_private_key_path): + with open(tls_private_key_path, "w") as private_key_file: tls_private_key = crypto.PKey() tls_private_key.generate_key(crypto.TYPE_RSA, 2048) private_key_pem = crypto.dump_privatekey( @@ -86,17 +84,17 @@ class TlsConfig(Config): ) private_key_file.write(private_key_pem) else: - with open(args.tls_private_key_path) as private_key_file: + with 
open(tls_private_key_path) as private_key_file: private_key_pem = private_key_file.read() tls_private_key = crypto.load_privatekey( crypto.FILETYPE_PEM, private_key_pem ) - if not os.path.exists(args.tls_certificate_path): - with open(args.tls_certificate_path, "w") as certifcate_file: + if not os.path.exists(tls_certificate_path): + with open(tls_certificate_path, "w") as certifcate_file: cert = crypto.X509() subject = cert.get_subject() - subject.CN = args.server_name + subject.CN = config["server_name"] cert.set_serial_number(1000) cert.gmtime_adj_notBefore(0) @@ -110,16 +108,16 @@ class TlsConfig(Config): certifcate_file.write(cert_pem) - if not os.path.exists(args.tls_dh_params_path): + if not os.path.exists(tls_dh_params_path): if GENERATE_DH_PARAMS: subprocess.check_call([ "openssl", "dhparam", "-outform", "PEM", - "-out", args.tls_dh_params_path, + "-out", tls_dh_params_path, "2048" ]) else: - with open(args.tls_dh_params_path, "w") as dh_params_file: + with open(tls_dh_params_path, "w") as dh_params_file: dh_params_file.write( "2048-bit DH parameters taken from rfc3526\n" "-----BEGIN DH PARAMETERS-----\n" diff --git a/synapse/config/voip.py b/synapse/config/voip.py index 65162d21b7..a1707223d3 100644 --- a/synapse/config/voip.py +++ b/synapse/config/voip.py @@ -17,28 +17,21 @@ from ._base import Config class VoipConfig(Config): - def __init__(self, args): - super(VoipConfig, self).__init__(args) - self.turn_uris = args.turn_uris - self.turn_shared_secret = args.turn_shared_secret - self.turn_user_lifetime = args.turn_user_lifetime - - @classmethod - def add_arguments(cls, parser): - super(VoipConfig, cls).add_arguments(parser) - group = parser.add_argument_group("voip") - group.add_argument( - "--turn-uris", type=str, default=None, action='append', - help="The public URIs of the TURN server to give to clients" - ) - group.add_argument( - "--turn-shared-secret", type=str, default=None, - help=( - "The shared secret used to compute passwords for the TURN" - " server" - ) - ) - group.add_argument( - "--turn-user-lifetime", type=int, default=(1000 * 60 * 60), - help="How long generated TURN credentials last, in ms" - ) + def read_config(self, config): + self.turn_uris = config.get("turn_uris", []) + self.turn_shared_secret = config["turn_shared_secret"] + self.turn_user_lifetime = self.parse_duration(config["turn_user_lifetime"]) + + def default_config(self, config_dir_path, server_name): + return """\ + ## Turn ## + + # The public URIs of the TURN server to give to clients + turn_uris: [] + + # The shared secret used to compute passwords for the TURN server + turn_shared_secret: "YOUR_SHARED_SECRET" + + # How long generated TURN credentials last + turn_user_lifetime: "1h" + """ diff --git a/synapse/server.py b/synapse/server.py index d61a228c36..8b3dc675cc 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -59,7 +59,6 @@ class BaseHomeServer(object): 'config', 'clock', 'http_client', - 'db_name', 'db_pool', 'persistence_service', 'replication_layer', -- cgit 1.4.1 From 6b69ddd17a9fe75544ce32b402042f2d50826874 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Thu, 30 Apr 2015 04:26:29 +0100 Subject: remove duplicate parse_size method --- synapse/app/homeserver.py | 1 - synapse/config/repository.py | 9 --------- 2 files changed, 10 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index c16dd8acc3..e6a34561c1 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -406,7 +406,6 @@ def 
setup(config_options): redirect_root_to_web_client=True, ) - logger.info("Preparing database: %r...", config.database_config) try: diff --git a/synapse/config/repository.py b/synapse/config/repository.py index bf727285d7..adaf4e4bb2 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -22,15 +22,6 @@ class ContentRepositoryConfig(Config): self.max_image_pixels = self.parse_size(config["max_image_pixels"]) self.media_store_path = self.ensure_directory(config["media_store_path"]) - def parse_size(self, string): - sizes = {"K": 1024, "M": 1024 * 1024} - size = 1 - suffix = string[-1] - if suffix in sizes: - string = string[:-1] - size = sizes[suffix] - return int(string) * size - def default_config(self, config_dir_path, server_name): media_store = self.default_path("media_store") return """ -- cgit 1.4.1 From d89a9f72833acfd472ec557a8fe3927320efea93 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 30 Apr 2015 13:58:13 +0100 Subject: Add an access_log SYN-161 #resolve --- synapse/app/homeserver.py | 25 ++++++++++++++++++++++--- synapse/config/captcha.py | 2 ++ synapse/config/logger.py | 5 +++++ 3 files changed, 29 insertions(+), 3 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5c6812f473..0aa5c34c81 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -35,6 +35,7 @@ from twisted.enterprise import adbapi from twisted.web.resource import Resource from twisted.web.static import File from twisted.web.server import Site +from twisted.web.http import proxiedLogFormatter from synapse.http.server import JsonResource, RootRedirect from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.rest.media.v1.media_repository import MediaRepositoryResource @@ -225,10 +226,18 @@ class SynapseHomeServer(HomeServer): def start_listening(self): config = self.get_config() + log_formatter = None + if config.captcha_ip_origin_is_x_forwarded: + log_formatter = proxiedLogFormatter + if not config.no_tls and config.bind_port is not None: reactor.listenSSL( config.bind_port, - Site(self.root_resource), + Site( + self.root_resource, + logPath=config.access_log_file, + logFormatter=log_formatter, + ), self.tls_context_factory, interface=config.bind_host ) @@ -237,7 +246,11 @@ class SynapseHomeServer(HomeServer): if config.unsecure_port is not None: reactor.listenTCP( config.unsecure_port, - Site(self.root_resource), + Site( + self.root_resource, + logPath=config.access_log_file, + logFormatter=log_formatter, + ), interface=config.bind_host ) logger.info("Synapse now listening on port %d", config.unsecure_port) @@ -245,7 +258,13 @@ class SynapseHomeServer(HomeServer): metrics_resource = self.get_resource_for_metrics() if metrics_resource and config.metrics_port is not None: reactor.listenTCP( - config.metrics_port, Site(metrics_resource), interface="127.0.0.1", + config.metrics_port, + Site( + metrics_resource, + logPath=config.access_log_file, + logFormatter=log_formatter, + ), + interface="127.0.0.1", ) logger.info("Metrics now running on 127.0.0.1 port %d", config.metrics_port) diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py index 07fbfadc0f..456ce9c632 100644 --- a/synapse/config/captcha.py +++ b/synapse/config/captcha.py @@ -22,6 +22,8 @@ class CaptchaConfig(Config): self.recaptcha_private_key = args.recaptcha_private_key self.recaptcha_public_key = args.recaptcha_public_key self.enable_registration_captcha = args.enable_registration_captcha + + # XXX: 
This is used for more than just captcha self.captcha_ip_origin_is_x_forwarded = ( args.captcha_ip_origin_is_x_forwarded ) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 247b324816..559cbe7963 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -27,6 +27,7 @@ class LoggingConfig(Config): self.verbosity = int(args.verbose) if args.verbose else None self.log_config = self.abspath(args.log_config) self.log_file = self.abspath(args.log_file) + self.access_log_file = self.abspath(args.access_log_file) @classmethod def add_arguments(cls, parser): @@ -44,6 +45,10 @@ class LoggingConfig(Config): '--log-config', dest="log_config", default=None, help="Python logging config file" ) + logging_group.add_argument( + '--access-log-file', dest="access_log_file", default="access.log", + help="File to log server access to" + ) def setup_logging(self): log_format = ( -- cgit 1.4.1 From 054aa0d58c22ae76d3e094fc2fd6495456ffd2cf Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 30 Apr 2015 16:17:27 +0100 Subject: Do access log using python's logging stuff, just under a separate logger name --- synapse/app/homeserver.py | 42 ++++++++++++++++++++++++++++-------------- synapse/config/logger.py | 11 +++++++++++ 2 files changed, 39 insertions(+), 14 deletions(-) (limited to 'synapse/app') diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 0aa5c34c81..3ce5fa4a43 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -35,7 +35,7 @@ from twisted.enterprise import adbapi from twisted.web.resource import Resource from twisted.web.static import File from twisted.web.server import Site -from twisted.web.http import proxiedLogFormatter +from twisted.web.http import proxiedLogFormatter, combinedLogFormatter from synapse.http.server import JsonResource, RootRedirect from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.rest.media.v1.media_repository import MediaRepositoryResource @@ -226,17 +226,13 @@ class SynapseHomeServer(HomeServer): def start_listening(self): config = self.get_config() - log_formatter = None - if config.captcha_ip_origin_is_x_forwarded: - log_formatter = proxiedLogFormatter - if not config.no_tls and config.bind_port is not None: reactor.listenSSL( config.bind_port, - Site( + SynapseSite( + "synapse.access.https", + config, self.root_resource, - logPath=config.access_log_file, - logFormatter=log_formatter, ), self.tls_context_factory, interface=config.bind_host @@ -246,10 +242,10 @@ class SynapseHomeServer(HomeServer): if config.unsecure_port is not None: reactor.listenTCP( config.unsecure_port, - Site( + SynapseSite( + "synapse.access.http", + config, self.root_resource, - logPath=config.access_log_file, - logFormatter=log_formatter, ), interface=config.bind_host ) @@ -259,10 +255,10 @@ class SynapseHomeServer(HomeServer): if metrics_resource and config.metrics_port is not None: reactor.listenTCP( config.metrics_port, - Site( + SynapseSite( + "synapse.access.metrics", + config, metrics_resource, - logPath=config.access_log_file, - logFormatter=log_formatter, ), interface="127.0.0.1", ) @@ -484,6 +480,24 @@ class SynapseService(service.Service): return self._port.stopListening() +class SynapseSite(Site): + """ + Subclass of a twisted http Site that does access logging with python's + standard logging + """ + def __init__(self, logger_name, config, resource, *args, **kwargs): + Site.__init__(self, resource, *args, **kwargs) + if config.captcha_ip_origin_is_x_forwarded: + 
self._log_formatter = proxiedLogFormatter + else: + self._log_formatter = combinedLogFormatter + self.access_logger = logging.getLogger(logger_name) + + def log(self, request): + line = self._log_formatter(self._logDateTime, request) + self.access_logger.info(line) + + def run(hs): def in_thread(): diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 559cbe7963..077f20497a 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -83,6 +83,17 @@ class LoggingConfig(Config): handler.addFilter(LoggingContextFilter(request="")) logger.addHandler(handler) + + if self.access_log_file: + access_logger = logging.getLogger('synapse.access') + # we log to both files by default + access_logger.propagate = 1 + access_log_handler = logging.handlers.RotatingFileHandler( + self.access_log_file, maxBytes=(1000 * 1000 * 100), backupCount=3 + ) + access_log_formatter = logging.Formatter('%(message)s') + access_log_handler.setFormatter(access_log_formatter) + access_logger.addHandler(access_log_handler) else: with open(self.log_config, 'r') as f: logging.config.dictConfig(yaml.load(f)) -- cgit 1.4.1 From c95480963eed26af347c0d9a0ca47438b227e6ab Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Thu, 30 Apr 2015 17:12:15 +0100 Subject: read the pid_file from the config file in synctl --- README.rst | 2 +- synapse/app/synctl.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) (limited to 'synapse/app') diff --git a/README.rst b/README.rst index aad74585a4..c434262252 100644 --- a/README.rst +++ b/README.rst @@ -318,7 +318,7 @@ ArchLinux If running `$ synctl start` fails with 'returned non-zero exit status 1', you will need to explicitly call Python2.7 - either running as:: - $ python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml --pid-file homeserver.pid + $ python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml ...or by editing synctl with the correct python executable. diff --git a/synapse/app/synctl.py b/synapse/app/synctl.py index 3a70a248dc..462dfb7d73 100755 --- a/synapse/app/synctl.py +++ b/synapse/app/synctl.py @@ -22,11 +22,12 @@ import signal SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"] CONFIGFILE = "homeserver.yaml" -PIDFILE = "homeserver.pid" GREEN = "\x1b[1;32m" NORMAL = "\x1b[m" +CONFIG = yaml.load(open(CONFIGFILE)) +PIDFILE = CONFIG["pid_file"] def start(): if not os.path.exists(CONFIGFILE): @@ -40,7 +41,7 @@ def start(): sys.exit(1) print "Starting ...", args = SYNAPSE - args.extend(["--daemonize", "-c", CONFIGFILE, "--pid-file", PIDFILE]) + args.extend(["--daemonize", "-c", CONFIGFILE]) subprocess.check_call(args) print GREEN + "started" + NORMAL -- cgit 1.4.1 From 6ea9cf58be88678f6164f05b2eae5b3bdbc6c9f6 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Thu, 30 Apr 2015 17:21:21 +0100 Subject: missing import --- synapse/app/synctl.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'synapse/app') diff --git a/synapse/app/synctl.py b/synapse/app/synctl.py index 462dfb7d73..0a2b0d6fcd 100755 --- a/synapse/app/synctl.py +++ b/synapse/app/synctl.py @@ -18,6 +18,7 @@ import sys import os import subprocess import signal +import yaml SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"] @@ -29,6 +30,7 @@ NORMAL = "\x1b[m" CONFIG = yaml.load(open(CONFIGFILE)) PIDFILE = CONFIG["pid_file"] + def start(): if not os.path.exists(CONFIGFILE): sys.stderr.write( -- cgit 1.4.1
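
The config commits above all converge on the same two-method shape: read_config() pulls validated values out of the parsed YAML dict, and default_config() returns the commented YAML stanza written out when generating a fresh config file. A minimal self-contained sketch of that pattern follows, using the metrics keys from the patch above; DemoConfig itself is illustrative, not synapse's code, whose real base class lives in synapse/config/_base.py and does considerably more.

    import yaml


    class DemoConfig(object):
        """Illustrative config class following the read_config/default_config shape."""

        def read_config(self, config):
            # Required keys are indexed directly so a missing key fails loudly;
            # optional keys go through .get() with a default.
            self.enable_metrics = config["enable_metrics"]
            self.metrics_port = config.get("metrics_port")

        def default_config(self, config_dir_path, server_name):
            # The %(...)s / locals() substitution is how the real classes inject
            # per-server values (paths, generated secrets) into the YAML template.
            metrics_port = 9090
            return """\
            # Enable collection and rendering of performance metrics
            enable_metrics: False

            # Separate port to accept metrics requests on (on localhost)
            metrics_port: %(metrics_port)s
            """ % locals()


    # Generated defaults should round-trip straight back through read_config:
    cfg = DemoConfig()
    cfg.read_config(yaml.safe_load(cfg.default_config("/etc/demo", "example.com")))
    print("enable_metrics=%r metrics_port=%r" % (cfg.enable_metrics, cfg.metrics_port))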
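
VoipConfig above starts feeding turn_user_lifetime ("1h") through self.parse_duration(), a helper these patches never show. A guess at its shape, by analogy with the parse_size() suffix-multiplier method visible in repository.py, might be:

    def parse_duration(value):
        # Accepts a bare number of milliseconds or an s/m/h/d suffix,
        # e.g. "500", "10s", "1h"; returns milliseconds. This mirrors the
        # K/M multiplier trick parse_size uses, so "1h" -> 3600000.
        units = {
            "s": 1000,
            "m": 60 * 1000,
            "h": 60 * 60 * 1000,
            "d": 24 * 60 * 60 * 1000,
        }
        value = str(value)
        suffix = value[-1]
        if suffix in units:
            return int(value[:-1]) * units[suffix]
        return int(value)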
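
The generate_keys() hunk in tls.py elides the middle of the certificate set-up (the context between gmtime_adj_notBefore and writing cert_pem is cut from the diff). Filled in with the conventional pyOpenSSL calls, the whole self-signed flow looks roughly like the sketch below; treat the issuer/sign steps as the standard recipe rather than a quote of synapse's exact code.

    from OpenSSL import crypto


    def make_self_signed(server_name, key_path, cert_path):
        # 2048-bit RSA key, matching tls_private_key.generate_key() above.
        key = crypto.PKey()
        key.generate_key(crypto.TYPE_RSA, 2048)

        cert = crypto.X509()
        cert.get_subject().CN = server_name    # subject.CN = config["server_name"]
        cert.set_serial_number(1000)
        cert.gmtime_adj_notBefore(0)
        cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)  # valid ~10 years
        cert.set_issuer(cert.get_subject())    # self-signed: issuer == subject
        cert.set_pubkey(key)
        cert.sign(key, "sha256")

        with open(key_path, "w") as f:
            f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key))
        with open(cert_path, "w") as f:
            f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))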
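
Pulling the two access-log commits together: twisted's Site normally writes its own log file via logPath, but subclassing it and overriding log() routes the lines through the stdlib logging tree instead, so the RotatingFileHandler installed on the 'synapse.access' logger in logger.py takes care of rotation. Stripped of homeserver specifics, the subclass reduces to the sketch below; behind_proxy stands in for the captcha_ip_origin_is_x_forwarded flag the patch overloads.

    import logging

    from twisted.web.server import Site
    from twisted.web.http import proxiedLogFormatter, combinedLogFormatter


    class LoggingSite(Site):
        """Site that emits combined-format access logs via stdlib logging."""

        def __init__(self, logger_name, behind_proxy, resource, *args, **kwargs):
            Site.__init__(self, resource, *args, **kwargs)
            # proxiedLogFormatter trusts X-Forwarded-For, so it is only safe
            # when a reverse proxy is actually in front of the server.
            if behind_proxy:
                self._log_formatter = proxiedLogFormatter
            else:
                self._log_formatter = combinedLogFormatter
            self.access_logger = logging.getLogger(logger_name)

        def log(self, request):
            # HTTPFactory keeps self._logDateTime ticking; the formatter turns
            # (timestamp, request) into one combined-log-format line.
            self.access_logger.info(self._log_formatter(self._logDateTime, request))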
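
With pid_file now read out of homeserver.yaml, synctl's stop path (which these hunks don't show) only needs the YAML and a signal. Roughly, and using yaml.safe_load rather than the bare yaml.load the patch itself uses:

    import os
    import signal

    import yaml

    CONFIGFILE = "homeserver.yaml"


    def stop():
        # pid_file comes from the same config the server itself reads,
        # so synctl and homeserver can no longer disagree about its location.
        with open(CONFIGFILE) as f:
            pid_file = yaml.safe_load(f)["pid_file"]
        with open(pid_file) as f:
            pid = int(f.read())
        os.kill(pid, signal.SIGTERM)  # ask the daemonized homeserver to exit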