diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py
index 1100c67714..371600eebb 100644
--- a/synapse/storage/appservice.py
+++ b/synapse/storage/appservice.py
@@ -34,8 +34,8 @@ class ApplicationServiceStore(SQLBaseStore):
def __init__(self, hs):
super(ApplicationServiceStore, self).__init__(hs)
self.hostname = hs.hostname
- self.services_cache = []
- self._populate_appservice_cache(
+ self.services_cache = ApplicationServiceStore.load_appservices(
+ hs.hostname,
hs.config.app_service_config_files
)
@@ -144,21 +144,23 @@ class ApplicationServiceStore(SQLBaseStore):
return rooms_for_user_matching_user_id
- def _load_appservice(self, as_info):
+ @classmethod
+ def _load_appservice(cls, hostname, as_info, config_filename):
required_string_fields = [
- # TODO: Add id here when it's stable to release
- "url", "as_token", "hs_token", "sender_localpart"
+ "id", "url", "as_token", "hs_token", "sender_localpart"
]
for field in required_string_fields:
if not isinstance(as_info.get(field), basestring):
- raise KeyError("Required string field: '%s'", field)
+ raise KeyError("Required string field: '%s' (%s)" % (
+ field, config_filename,
+ ))
localpart = as_info["sender_localpart"]
if urllib.quote(localpart) != localpart:
raise ValueError(
"sender_localpart needs characters which are not URL encoded."
)
- user = UserID(localpart, self.hostname)
+ user = UserID(localpart, hostname)
user_id = user.to_string()
# namespace checks
@@ -188,25 +190,30 @@ class ApplicationServiceStore(SQLBaseStore):
namespaces=as_info["namespaces"],
hs_token=as_info["hs_token"],
sender=user_id,
- id=as_info["id"] if "id" in as_info else as_info["as_token"],
+ id=as_info["id"],
)
- def _populate_appservice_cache(self, config_files):
- """Populates a cache of Application Services from the config files."""
+ @classmethod
+ def load_appservices(cls, hostname, config_files):
+ """Returns a list of Application Services from the config files."""
if not isinstance(config_files, list):
logger.warning(
"Expected %s to be a list of AS config files.", config_files
)
- return
+ return []
# Dicts of value -> filename
seen_as_tokens = {}
seen_ids = {}
+ appservices = []
+
for config_file in config_files:
try:
with open(config_file, 'r') as f:
- appservice = self._load_appservice(yaml.load(f))
+ appservice = ApplicationServiceStore._load_appservice(
+ hostname, yaml.load(f), config_file
+ )
if appservice.id in seen_ids:
raise ConfigError(
"Cannot reuse ID across application services: "
@@ -226,11 +233,12 @@ class ApplicationServiceStore(SQLBaseStore):
)
seen_as_tokens[appservice.token] = config_file
logger.info("Loaded application service: %s", appservice)
- self.services_cache.append(appservice)
+ appservices.append(appservice)
except Exception as e:
logger.error("Failed to load appservice from '%s'", config_file)
logger.exception(e)
raise
+ return appservices
class ApplicationServiceTransactionStore(SQLBaseStore):
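The refactor above turns cache population into a classmethod, so application services can be loaded from their config files without constructing a store (and therefore without a database connection), which is what the schema delta at the end of this diff relies on. A minimal sketch of the new call, assuming a homeserver config object exposing server_name and app_service_config_files as the delta script does:

from synapse.storage.appservice import ApplicationServiceStore

# Load application services straight from config; no store instance or DB needed.
appservices = ApplicationServiceStore.load_appservices(
    config.server_name, config.app_service_config_files
)
for appservice in appservices:
    print("%s -> %s" % (appservice.id, appservice.url))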
diff --git a/synapse/storage/directory.py b/synapse/storage/directory.py
index 1556619d5e..012a0b414a 100644
--- a/synapse/storage/directory.py
+++ b/synapse/storage/directory.py
@@ -70,13 +70,14 @@ class DirectoryStore(SQLBaseStore):
)
@defer.inlineCallbacks
- def create_room_alias_association(self, room_alias, room_id, servers):
+    def create_room_alias_association(self, room_alias, room_id, servers, creator=None):
         """ Creates an association between a room alias and room_id/servers
Args:
room_alias (RoomAlias)
room_id (str)
servers (list)
+ creator (str): Optional user_id of creator.
Returns:
Deferred
@@ -87,6 +88,7 @@ class DirectoryStore(SQLBaseStore):
{
"room_alias": room_alias.to_string(),
"room_id": room_id,
+ "creator": creator,
},
desc="create_room_alias_association",
)
@@ -107,6 +109,17 @@ class DirectoryStore(SQLBaseStore):
)
self.get_aliases_for_room.invalidate((room_id,))
+ def get_room_alias_creator(self, room_alias):
+ return self._simple_select_one_onecol(
+ table="room_aliases",
+ keyvalues={
+ "room_alias": room_alias,
+ },
+ retcol="creator",
+ desc="get_room_alias_creator",
+ allow_none=True
+ )
+
@defer.inlineCallbacks
def delete_room_alias(self, room_alias):
room_id = yield self.runInteraction(
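With the new creator column, callers can record which user created an alias and look it up later (for example, to decide who may delete it). A hypothetical caller, sketched with an assumed store variable:

from twisted.internet import defer

@defer.inlineCallbacks
def record_and_fetch_alias_creator(store, room_alias, room_id, servers, user_id):
    # Store the alias -> room mapping together with the creating user.
    yield store.create_room_alias_association(
        room_alias, room_id, servers, creator=user_id
    )
    # Read the creator back; aliases created before this change return None.
    creator = yield store.get_room_alias_creator(room_alias.to_string())
    defer.returnValue(creator)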
diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py
index 4290aea83a..a48230b93f 100644
--- a/synapse/storage/engines/__init__.py
+++ b/synapse/storage/engines/__init__.py
@@ -26,12 +26,13 @@ SUPPORTED_MODULE = {
}
-def create_engine(name):
+def create_engine(config):
+ name = config.database_config["name"]
engine_class = SUPPORTED_MODULE.get(name, None)
if engine_class:
module = importlib.import_module(name)
- return engine_class(module)
+ return engine_class(module, config=config)
raise RuntimeError(
"Unsupported database engine '%s'" % (name,)
diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py
index 17b7a9c077..a09685b4df 100644
--- a/synapse/storage/engines/postgres.py
+++ b/synapse/storage/engines/postgres.py
@@ -21,9 +21,10 @@ from ._base import IncorrectDatabaseSetup
class PostgresEngine(object):
single_threaded = False
- def __init__(self, database_module):
+ def __init__(self, database_module, config):
self.module = database_module
self.module.extensions.register_type(self.module.extensions.UNICODE)
+ self.config = config
def check_database(self, txn):
txn.execute("SHOW SERVER_ENCODING")
@@ -44,7 +45,7 @@ class PostgresEngine(object):
)
def prepare_database(self, db_conn):
- prepare_database(db_conn, self)
+ prepare_database(db_conn, self, config=self.config)
def is_deadlock(self, error):
if isinstance(error, self.module.DatabaseError):
diff --git a/synapse/storage/engines/sqlite3.py b/synapse/storage/engines/sqlite3.py
index 91fac33b8b..522b905949 100644
--- a/synapse/storage/engines/sqlite3.py
+++ b/synapse/storage/engines/sqlite3.py
@@ -23,8 +23,9 @@ import struct
class Sqlite3Engine(object):
single_threaded = True
- def __init__(self, database_module):
+ def __init__(self, database_module, config):
self.module = database_module
+ self.config = config
def check_database(self, txn):
pass
@@ -38,7 +39,7 @@ class Sqlite3Engine(object):
def prepare_database(self, db_conn):
prepare_sqlite3_database(db_conn)
- prepare_database(db_conn, self)
+ prepare_database(db_conn, self, config=self.config)
def is_deadlock(self, error):
return False
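create_engine now takes the whole homeserver config rather than a bare engine name, and both engines hold on to it so prepare_database can pass it down to upgrade scripts. A sketch of the updated call site, assuming config and db_conn already exist:

from synapse.storage.engines import create_engine

# The engine name is read from config.database_config["name"] instead of
# being passed explicitly.
engine = create_engine(config)
engine.prepare_database(db_conn)  # config now reaches run_upgrade() in delta scripts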
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index 0fd5d497ab..3f29aad1e8 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -50,7 +50,7 @@ class UpgradeDatabaseException(PrepareDatabaseException):
pass
-def prepare_database(db_conn, database_engine):
+def prepare_database(db_conn, database_engine, config):
"""Prepares a database for usage. Will either create all necessary tables
or upgrade from an older schema version.
"""
@@ -61,10 +61,10 @@ def prepare_database(db_conn, database_engine):
if version_info:
user_version, delta_files, upgraded = version_info
_upgrade_existing_database(
- cur, user_version, delta_files, upgraded, database_engine
+ cur, user_version, delta_files, upgraded, database_engine, config
)
else:
- _setup_new_database(cur, database_engine)
+ _setup_new_database(cur, database_engine, config)
# cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,))
@@ -75,7 +75,7 @@ def prepare_database(db_conn, database_engine):
raise
-def _setup_new_database(cur, database_engine):
+def _setup_new_database(cur, database_engine, config):
"""Sets up the database by finding a base set of "full schemas" and then
applying any necessary deltas.
@@ -148,11 +148,12 @@ def _setup_new_database(cur, database_engine):
applied_delta_files=[],
upgraded=False,
database_engine=database_engine,
+ config=config,
)
def _upgrade_existing_database(cur, current_version, applied_delta_files,
- upgraded, database_engine):
+ upgraded, database_engine, config):
"""Upgrades an existing database.
Delta files can either be SQL stored in *.sql files, or python modules
@@ -245,7 +246,7 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files,
module_name, absolute_path, python_file
)
logger.debug("Running script %s", relative_path)
- module.run_upgrade(cur, database_engine)
+ module.run_upgrade(cur, database_engine, config=config)
elif ext == ".pyc":
# Sometimes .pyc files turn up anyway even though we've
# disabled their generation; e.g. from distribution package
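The net effect for python delta modules is a wider run_upgrade signature: prepare_database now threads the homeserver config through as a keyword argument. A sketch of the expected shape (the as_users.py delta below is the real instance); accepting *args/**kwargs keeps a module tolerant of future extra arguments:

def run_upgrade(cur, database_engine, config, *args, **kwargs):
    # cur: DB cursor; database_engine: engine wrapper; config: homeserver
    # config passed through from prepare_database().
    cur.execute("SELECT 1")  # placeholder upgrade step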
diff --git a/synapse/storage/schema/delta/30/alias_creator.sql b/synapse/storage/schema/delta/30/alias_creator.sql
new file mode 100644
index 0000000000..c9d0dde638
--- /dev/null
+++ b/synapse/storage/schema/delta/30/alias_creator.sql
@@ -0,0 +1,16 @@
+/* Copyright 2016 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ALTER TABLE room_aliases ADD COLUMN creator TEXT;
diff --git a/synapse/storage/schema/delta/30/as_users.py b/synapse/storage/schema/delta/30/as_users.py
new file mode 100644
index 0000000000..4cf4dd0917
--- /dev/null
+++ b/synapse/storage/schema/delta/30/as_users.py
@@ -0,0 +1,59 @@
+# Copyright 2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+from synapse.storage.appservice import ApplicationServiceStore
+
+
+logger = logging.getLogger(__name__)
+
+
+def run_upgrade(cur, database_engine, config, *args, **kwargs):
+ # NULL indicates user was not registered by an appservice.
+ cur.execute("ALTER TABLE users ADD COLUMN appservice_id TEXT")
+
+ cur.execute("SELECT name FROM users")
+ rows = cur.fetchall()
+
+ config_files = []
+ try:
+ config_files = config.app_service_config_files
+ except AttributeError:
+ logger.warning("Could not get app_service_config_files from config")
+ pass
+
+ appservices = ApplicationServiceStore.load_appservices(
+ config.server_name, config_files
+ )
+
+ owned = {}
+
+ for row in rows:
+ user_id = row[0]
+ for appservice in appservices:
+ if appservice.is_exclusive_user(user_id):
+ if user_id in owned.keys():
+ logger.error(
+ "user_id %s was owned by more than one application"
+ " service (IDs %s and %s); assigning arbitrarily to %s" %
+                        (user_id, owned[user_id], appservice.id, appservice.id)
+ )
+ owned[user_id] = appservice.id
+
+ for user_id, as_id in owned.items():
+ cur.execute(
+ database_engine.convert_param_style(
+ "UPDATE users SET appservice_id = ? WHERE name = ?"
+ ),
+ (as_id, user_id)
+ )