diff --git a/jenkins-dendron-postgres.sh b/jenkins-dendron-postgres.sh
index 7e6f24aa7d..50268e0982 100755
--- a/jenkins-dendron-postgres.sh
+++ b/jenkins-dendron-postgres.sh
@@ -70,6 +70,7 @@ cd sytest
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
: ${PORT_BASE:=8000}
+: ${PORT_COUNT:=20}
./jenkins/prep_sytest_for_postgres.sh
@@ -81,6 +82,6 @@ echo >&2 "Running sytest with PostgreSQL";
--dendron $WORKSPACE/dendron/bin/dendron \
--pusher \
--synchrotron \
- --port-base $PORT_BASE
+ --port-range ${PORT_BASE}:$((PORT_BASE+PORT_COUNT-1))
cd ..
diff --git a/jenkins-postgres.sh b/jenkins-postgres.sh
index ae6b111591..2f0768fcb7 100755
--- a/jenkins-postgres.sh
+++ b/jenkins-postgres.sh
@@ -44,6 +44,7 @@ cd sytest
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
: ${PORT_BASE:=8000}
+: ${PORT_COUNT:=20}
./jenkins/prep_sytest_for_postgres.sh
@@ -51,7 +52,7 @@ echo >&2 "Running sytest with PostgreSQL";
./jenkins/install_and_run.sh --coverage \
--python $TOX_BIN/python \
--synapse-directory $WORKSPACE \
- --port-base $PORT_BASE
+ --port-range ${PORT_BASE}:$((PORT_BASE+PORT_COUNT-1))
cd ..
cp sytest/.coverage.* .
diff --git a/jenkins-sqlite.sh b/jenkins-sqlite.sh
index 9398d9db15..da603c5af8 100755
--- a/jenkins-sqlite.sh
+++ b/jenkins-sqlite.sh
@@ -41,11 +41,12 @@ cd sytest
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
-: ${PORT_BASE:=8500}
+: ${PORT_COUNT:=20}
+: ${PORT_BASE:=8000}
./jenkins/install_and_run.sh --coverage \
--python $TOX_BIN/python \
--synapse-directory $WORKSPACE \
- --port-base $PORT_BASE
+ --port-range ${PORT_BASE}:$((PORT_BASE+PORT_COUNT-1))
cd ..
cp sytest/.coverage.* .
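As a quick illustration of the port arithmetic above (a Python sketch using the default values, not part of the patch): the scripts hand sytest an inclusive range of PORT_COUNT ports starting at PORT_BASE.

    # Illustrative only: how the --port-range argument is formed from the
    # defaults set above (PORT_BASE=8000, PORT_COUNT=20).
    PORT_BASE, PORT_COUNT = 8000, 20
    port_range = "{}:{}".format(PORT_BASE, PORT_BASE + PORT_COUNT - 1)
    print(port_range)  # -> "8000:8019", i.e. 20 ports in total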
diff --git a/res/templates/notif_mail.html b/res/templates/notif_mail.html
index 8aee68b591..535bea764d 100644
--- a/res/templates/notif_mail.html
+++ b/res/templates/notif_mail.html
@@ -36,7 +36,7 @@
<div class="debug">
Sending email at {{ reason.now|format_ts("%c") }} due to activity in room {{ reason.room_name }} because
an event was received at {{ reason.received_at|format_ts("%c") }}
- which is more than {{ "%.1f"|format(reason.delay_before_mail_ms / (60*1000)) }} (delay_before_mail_ms) mins ago,
+ which is more than {{ "%.1f"|format(reason.delay_before_mail_ms / (60*1000)) }} ({{ reason.delay_before_mail_ms }}) mins ago,
{% if reason.last_sent_ts %}
and the last time we sent a mail for this room was {{ reason.last_sent_ts|format_ts("%c") }},
which is more than {{ "%.1f"|format(reason.throttle_ms / (60*1000)) }} (current throttle_ms) mins ago.
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 40ffd9bf0d..9c2dd32953 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -147,7 +147,7 @@ class SynapseHomeServer(HomeServer):
MEDIA_PREFIX: media_repo,
LEGACY_MEDIA_PREFIX: media_repo,
CONTENT_REPO_PREFIX: ContentRepoResource(
- self, self.config.uploads_path, self.auth, self.content_addr
+ self, self.config.uploads_path
),
})
@@ -301,7 +301,6 @@ def setup(config_options):
db_config=config.database_config,
tls_server_context_factory=tls_server_context_factory,
config=config,
- content_addr=config.content_addr,
version_string=version_string,
database_engine=database_engine,
)
diff --git a/synapse/config/ldap.py b/synapse/config/ldap.py
index 9c14593a99..d83c2230be 100644
--- a/synapse/config/ldap.py
+++ b/synapse/config/ldap.py
@@ -13,40 +13,88 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from ._base import Config
+from ._base import Config, ConfigError
+
+
+MISSING_LDAP3 = (
+ "Missing ldap3 library. This is required for LDAP Authentication."
+)
+
+
+class LDAPMode(object):
+ SIMPLE = "simple"
+ SEARCH = "search"
+
+ LIST = (SIMPLE, SEARCH)
class LDAPConfig(Config):
def read_config(self, config):
- ldap_config = config.get("ldap_config", None)
- if ldap_config:
- self.ldap_enabled = ldap_config.get("enabled", False)
- self.ldap_server = ldap_config["server"]
- self.ldap_port = ldap_config["port"]
- self.ldap_tls = ldap_config.get("tls", False)
- self.ldap_search_base = ldap_config["search_base"]
- self.ldap_search_property = ldap_config["search_property"]
- self.ldap_email_property = ldap_config["email_property"]
- self.ldap_full_name_property = ldap_config["full_name_property"]
- else:
- self.ldap_enabled = False
- self.ldap_server = None
- self.ldap_port = None
- self.ldap_tls = False
- self.ldap_search_base = None
- self.ldap_search_property = None
- self.ldap_email_property = None
- self.ldap_full_name_property = None
+ ldap_config = config.get("ldap_config", {})
+
+ self.ldap_enabled = ldap_config.get("enabled", False)
+
+ if self.ldap_enabled:
+ # verify dependencies are available
+ try:
+ import ldap3
+ ldap3  # reference the import so lint does not flag it as unused
+ except ImportError:
+ raise ConfigError(MISSING_LDAP3)
+
+ self.ldap_mode = LDAPMode.SIMPLE
+
+ # verify config sanity
+ self.require_keys(ldap_config, [
+ "uri",
+ "base",
+ "attributes",
+ ])
+
+ self.ldap_uri = ldap_config["uri"]
+ self.ldap_start_tls = ldap_config.get("start_tls", False)
+ self.ldap_base = ldap_config["base"]
+ self.ldap_attributes = ldap_config["attributes"]
+
+ if "bind_dn" in ldap_config:
+ self.ldap_mode = LDAPMode.SEARCH
+ self.require_keys(ldap_config, [
+ "bind_dn",
+ "bind_password",
+ ])
+
+ self.ldap_bind_dn = ldap_config["bind_dn"]
+ self.ldap_bind_password = ldap_config["bind_password"]
+ self.ldap_filter = ldap_config.get("filter", None)
+
+ # verify attribute lookup
+ self.require_keys(ldap_config['attributes'], [
+ "uid",
+ "name",
+ "mail",
+ ])
+
+ def require_keys(self, config, required):
+ missing = [key for key in required if key not in config]
+ if missing:
+ raise ConfigError(
+ "LDAP enabled but missing required config values: {}".format(
+ ", ".join(missing)
+ )
+ )
def default_config(self, **kwargs):
return """\
# ldap_config:
# enabled: true
- # server: "ldap://localhost"
- # port: 389
- # tls: false
- # search_base: "ou=Users,dc=example,dc=com"
- # search_property: "cn"
- # email_property: "email"
- # full_name_property: "givenName"
+ # uri: "ldap://ldap.example.com:389"
+ # start_tls: true
+ # base: "ou=users,dc=example,dc=com"
+ # attributes:
+ # uid: "cn"
+ # mail: "email"
+ # name: "givenName"
+ # #bind_dn:
+ # #bind_password:
+ # #filter: "(objectClass=posixAccount)"
"""
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 7840dc3ad6..51eaf423ce 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -107,26 +107,6 @@ class ServerConfig(Config):
]
})
- # Attempt to guess the content_addr for the v0 content repostitory
- content_addr = config.get("content_addr")
- if not content_addr:
- for listener in self.listeners:
- if listener["type"] == "http" and not listener.get("tls", False):
- unsecure_port = listener["port"]
- break
- else:
- raise RuntimeError("Could not determine 'content_addr'")
-
- host = self.server_name
- if ':' not in host:
- host = "%s:%d" % (host, unsecure_port)
- else:
- host = host.split(':')[0]
- host = "%s:%d" % (host, unsecure_port)
- content_addr = "http://%s" % (host,)
-
- self.content_addr = content_addr
-
def default_config(self, server_name, **kwargs):
if ":" in server_name:
bind_port = int(server_name.split(":")[1])
@@ -169,7 +149,6 @@ class ServerConfig(Config):
# room directory.
# secondary_directory_servers:
# - matrix.org
- # - vector.im
# List of ports that Synapse should listen on, their purpose and their
# configuration.
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 2a589524a4..85f5e752fe 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -49,6 +49,7 @@ class FederationServer(FederationBase):
super(FederationServer, self).__init__(hs)
self._room_pdu_linearizer = Linearizer()
+ self._server_linearizer = Linearizer()
def set_handler(self, handler):
"""Sets the handler that the replication layer will use to communicate
@@ -89,11 +90,14 @@ class FederationServer(FederationBase):
@defer.inlineCallbacks
@log_function
def on_backfill_request(self, origin, room_id, versions, limit):
- pdus = yield self.handler.on_backfill_request(
- origin, room_id, versions, limit
- )
+ with (yield self._server_linearizer.queue((origin, room_id))):
+ pdus = yield self.handler.on_backfill_request(
+ origin, room_id, versions, limit
+ )
+
+ res = self._transaction_from_pdus(pdus).get_dict()
- defer.returnValue((200, self._transaction_from_pdus(pdus).get_dict()))
+ defer.returnValue((200, res))
@defer.inlineCallbacks
@log_function
@@ -184,27 +188,28 @@ class FederationServer(FederationBase):
@defer.inlineCallbacks
@log_function
def on_context_state_request(self, origin, room_id, event_id):
- if event_id:
- pdus = yield self.handler.get_state_for_pdu(
- origin, room_id, event_id,
- )
- auth_chain = yield self.store.get_auth_chain(
- [pdu.event_id for pdu in pdus]
- )
+ with (yield self._server_linearizer.queue((origin, room_id))):
+ if event_id:
+ pdus = yield self.handler.get_state_for_pdu(
+ origin, room_id, event_id,
+ )
+ auth_chain = yield self.store.get_auth_chain(
+ [pdu.event_id for pdu in pdus]
+ )
- for event in auth_chain:
- # We sign these again because there was a bug where we
- # incorrectly signed things the first time round
- if self.hs.is_mine_id(event.event_id):
- event.signatures.update(
- compute_event_signature(
- event,
- self.hs.hostname,
- self.hs.config.signing_key[0]
+ for event in auth_chain:
+ # We sign these again because there was a bug where we
+ # incorrectly signed things the first time round
+ if self.hs.is_mine_id(event.event_id):
+ event.signatures.update(
+ compute_event_signature(
+ event,
+ self.hs.hostname,
+ self.hs.config.signing_key[0]
+ )
)
- )
- else:
- raise NotImplementedError("Specify an event")
+ else:
+ raise NotImplementedError("Specify an event")
defer.returnValue((200, {
"pdus": [pdu.get_pdu_json() for pdu in pdus],
@@ -283,14 +288,16 @@ class FederationServer(FederationBase):
@defer.inlineCallbacks
def on_event_auth(self, origin, room_id, event_id):
- time_now = self._clock.time_msec()
- auth_pdus = yield self.handler.on_event_auth(event_id)
- defer.returnValue((200, {
- "auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
- }))
+ with (yield self._server_linearizer.queue((origin, room_id))):
+ time_now = self._clock.time_msec()
+ auth_pdus = yield self.handler.on_event_auth(event_id)
+ res = {
+ "auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
+ }
+ defer.returnValue((200, res))
@defer.inlineCallbacks
- def on_query_auth_request(self, origin, content, event_id):
+ def on_query_auth_request(self, origin, content, room_id, event_id):
"""
Content is a dict with keys::
auth_chain (list): A list of events that give the auth chain.
@@ -309,32 +316,33 @@ class FederationServer(FederationBase):
Returns:
Deferred: Results in `dict` with the same format as `content`
"""
- auth_chain = [
- self.event_from_pdu_json(e)
- for e in content["auth_chain"]
- ]
-
- signed_auth = yield self._check_sigs_and_hash_and_fetch(
- origin, auth_chain, outlier=True
- )
+ with (yield self._server_linearizer.queue((origin, room_id))):
+ auth_chain = [
+ self.event_from_pdu_json(e)
+ for e in content["auth_chain"]
+ ]
+
+ signed_auth = yield self._check_sigs_and_hash_and_fetch(
+ origin, auth_chain, outlier=True
+ )
- ret = yield self.handler.on_query_auth(
- origin,
- event_id,
- signed_auth,
- content.get("rejects", []),
- content.get("missing", []),
- )
+ ret = yield self.handler.on_query_auth(
+ origin,
+ event_id,
+ signed_auth,
+ content.get("rejects", []),
+ content.get("missing", []),
+ )
- time_now = self._clock.time_msec()
- send_content = {
- "auth_chain": [
- e.get_pdu_json(time_now)
- for e in ret["auth_chain"]
- ],
- "rejects": ret.get("rejects", []),
- "missing": ret.get("missing", []),
- }
+ time_now = self._clock.time_msec()
+ send_content = {
+ "auth_chain": [
+ e.get_pdu_json(time_now)
+ for e in ret["auth_chain"]
+ ],
+ "rejects": ret.get("rejects", []),
+ "missing": ret.get("missing", []),
+ }
defer.returnValue(
(200, send_content)
@@ -386,21 +394,24 @@ class FederationServer(FederationBase):
@log_function
def on_get_missing_events(self, origin, room_id, earliest_events,
latest_events, limit, min_depth):
- logger.info(
- "on_get_missing_events: earliest_events: %r, latest_events: %r,"
- " limit: %d, min_depth: %d",
- earliest_events, latest_events, limit, min_depth
- )
- missing_events = yield self.handler.on_get_missing_events(
- origin, room_id, earliest_events, latest_events, limit, min_depth
- )
+ with (yield self._server_linearizer.queue((origin, room_id))):
+ logger.info(
+ "on_get_missing_events: earliest_events: %r, latest_events: %r,"
+ " limit: %d, min_depth: %d",
+ earliest_events, latest_events, limit, min_depth
+ )
+ missing_events = yield self.handler.on_get_missing_events(
+ origin, room_id, earliest_events, latest_events, limit, min_depth
+ )
- if len(missing_events) < 5:
- logger.info("Returning %d events: %r", len(missing_events), missing_events)
- else:
- logger.info("Returning %d events", len(missing_events))
+ if len(missing_events) < 5:
+ logger.info(
+ "Returning %d events: %r", len(missing_events), missing_events
+ )
+ else:
+ logger.info("Returning %d events", len(missing_events))
- time_now = self._clock.time_msec()
+ time_now = self._clock.time_msec()
defer.returnValue({
"events": [ev.get_pdu_json(time_now) for ev in missing_events],
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 8a1965f45a..26fa88ae84 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -388,7 +388,7 @@ class FederationQueryAuthServlet(BaseFederationServlet):
@defer.inlineCallbacks
def on_POST(self, origin, content, query, context, event_id):
new_content = yield self.handler.on_query_auth_request(
- origin, content, event_id
+ origin, content, context, event_id
)
defer.returnValue((200, new_content))
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index b38f81e999..968095c141 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -20,6 +20,7 @@ from synapse.api.constants import LoginType
from synapse.types import UserID
from synapse.api.errors import AuthError, LoginError, Codes, StoreError, SynapseError
from synapse.util.async import run_on_reactor
+from synapse.config.ldap import LDAPMode
from twisted.web.client import PartialDownloadError
@@ -28,6 +29,12 @@ import bcrypt
import pymacaroons
import simplejson
+try:
+ import ldap3
+except ImportError:
+ ldap3 = None
+ pass
+
import synapse.util.stringutils as stringutils
@@ -50,17 +57,20 @@ class AuthHandler(BaseHandler):
self.INVALID_TOKEN_HTTP_STATUS = 401
self.ldap_enabled = hs.config.ldap_enabled
- self.ldap_server = hs.config.ldap_server
- self.ldap_port = hs.config.ldap_port
- self.ldap_tls = hs.config.ldap_tls
- self.ldap_search_base = hs.config.ldap_search_base
- self.ldap_search_property = hs.config.ldap_search_property
- self.ldap_email_property = hs.config.ldap_email_property
- self.ldap_full_name_property = hs.config.ldap_full_name_property
-
- if self.ldap_enabled is True:
- import ldap
- logger.info("Import ldap version: %s", ldap.__version__)
+ if self.ldap_enabled:
+ if not ldap3:
+ raise RuntimeError(
+ 'Missing ldap3 library. This is required for LDAP Authentication.'
+ )
+ self.ldap_mode = hs.config.ldap_mode
+ self.ldap_uri = hs.config.ldap_uri
+ self.ldap_start_tls = hs.config.ldap_start_tls
+ self.ldap_base = hs.config.ldap_base
+ self.ldap_filter = hs.config.ldap_filter
+ self.ldap_attributes = hs.config.ldap_attributes
+ if self.ldap_mode == LDAPMode.SEARCH:
+ self.ldap_bind_dn = hs.config.ldap_bind_dn
+ self.ldap_bind_password = hs.config.ldap_bind_password
self.hs = hs # FIXME better possibility to access registrationHandler later?
@@ -452,40 +462,167 @@ class AuthHandler(BaseHandler):
@defer.inlineCallbacks
def _check_ldap_password(self, user_id, password):
- if not self.ldap_enabled:
- logger.debug("LDAP not configured")
+ """ Attempt to authenticate a user against an LDAP Server
+ and register an account if none exists.
+
+ Returns:
+ True if authentication against LDAP was successful
+ """
+
+ if not ldap3 or not self.ldap_enabled:
defer.returnValue(False)
- import ldap
+ if self.ldap_mode not in LDAPMode.LIST:
+ raise RuntimeError(
+ 'Invalid ldap mode specified: {mode}'.format(
+ mode=self.ldap_mode
+ )
+ )
- logger.info("Authenticating %s with LDAP" % user_id)
try:
- ldap_url = "%s:%s" % (self.ldap_server, self.ldap_port)
- logger.debug("Connecting LDAP server at %s" % ldap_url)
- l = ldap.initialize(ldap_url)
- if self.ldap_tls:
- logger.debug("Initiating TLS")
- self._connection.start_tls_s()
+ server = ldap3.Server(self.ldap_uri)
+ logger.debug(
+ "Attempting ldap connection with %s",
+ self.ldap_uri
+ )
- local_name = UserID.from_string(user_id).localpart
+ localpart = UserID.from_string(user_id).localpart
+ if self.ldap_mode == LDAPMode.SIMPLE:
+ # bind with the local user's ldap credentials
+ bind_dn = "{prop}={value},{base}".format(
+ prop=self.ldap_attributes['uid'],
+ value=localpart,
+ base=self.ldap_base
+ )
+ conn = ldap3.Connection(server, bind_dn, password)
+ logger.debug(
+ "Established ldap connection in simple mode: %s",
+ conn
+ )
- dn = "%s=%s, %s" % (
- self.ldap_search_property,
- local_name,
- self.ldap_search_base)
- logger.debug("DN for LDAP authentication: %s" % dn)
+ if self.ldap_start_tls:
+ conn.start_tls()
+ logger.debug(
+ "Upgraded ldap connection in simple mode through StartTLS: %s",
+ conn
+ )
+
+ conn.bind()
+
+ elif self.ldap_mode == LDAPMode.SEARCH:
+ # connect with preconfigured credentials and search for local user
+ conn = ldap3.Connection(
+ server,
+ self.ldap_bind_dn,
+ self.ldap_bind_password
+ )
+ logger.debug(
+ "Established ldap connection in search mode: %s",
+ conn
+ )
+
+ if self.ldap_start_tls:
+ conn.start_tls()
+ logger.debug(
+ "Upgraded ldap connection in search mode through StartTLS: %s",
+ conn
+ )
- l.simple_bind_s(dn.encode('utf-8'), password.encode('utf-8'))
+ conn.bind()
+ # find matching dn
+ query = "({prop}={value})".format(
+ prop=self.ldap_attributes['uid'],
+ value=localpart
+ )
+ if self.ldap_filter:
+ query = "(&{query}{filter})".format(
+ query=query,
+ filter=self.ldap_filter
+ )
+ logger.debug("ldap search filter: %s", query)
+ result = conn.search(self.ldap_base, query)
+
+ if result and len(conn.response) == 1:
+ # found exactly one result
+ user_dn = conn.response[0]['dn']
+ logger.debug('ldap search found dn: %s', user_dn)
+
+ # unbind and reconnect, rebind with found dn
+ conn.unbind()
+ conn = ldap3.Connection(
+ server,
+ user_dn,
+ password,
+ auto_bind=True
+ )
+ else:
+ # found 0 or > 1 results, abort!
+ logger.warn(
+ "ldap search returned unexpected (%d!=1) amount of results",
+ len(conn.response)
+ )
+ defer.returnValue(False)
+
+ logger.info(
+ "User authenticated against ldap server: %s",
+ conn
+ )
+
+ # check for an existing account; if none exists, create one
if not (yield self.does_user_exist(user_id)):
- handler = self.hs.get_handlers().registration_handler
- user_id, access_token = (
- yield handler.register(localpart=local_name)
+ # query user metadata for account creation
+ query = "({prop}={value})".format(
+ prop=self.ldap_attributes['uid'],
+ value=localpart
+ )
+
+ if self.ldap_mode == LDAPMode.SEARCH and self.ldap_filter:
+ query = "(&{filter}{user_filter})".format(
+ filter=query,
+ user_filter=self.ldap_filter
+ )
+ logger.debug("ldap registration filter: %s", query)
+
+ result = conn.search(
+ search_base=self.ldap_base,
+ search_filter=query,
+ attributes=[
+ self.ldap_attributes['name'],
+ self.ldap_attributes['mail']
+ ]
)
+ if len(conn.response) == 1:
+ attrs = conn.response[0]['attributes']
+ mail = attrs[self.ldap_attributes['mail']][0]
+ name = attrs[self.ldap_attributes['name']][0]
+
+ # create account
+ registration_handler = self.hs.get_handlers().registration_handler
+ user_id, access_token = (
+ yield registration_handler.register(localpart=localpart)
+ )
+
+ # TODO: bind email, set displayname with data from ldap directory
+
+ logger.info(
+ "ldap registration successful: %d: %s (%s, %)",
+ user_id,
+ localpart,
+ name,
+ mail
+ )
+ else:
+ logger.warn(
+ "ldap registration failed: unexpected (%d!=1) amount of results",
+ len(result)
+ )
+ defer.returnValue(False)
+
defer.returnValue(True)
- except ldap.LDAPError, e:
- logger.warn("LDAP error: %s", e)
+ except ldap3.core.exceptions.LDAPException as e:
+ logger.warn("Error during ldap authentication: %s", e)
defer.returnValue(False)
@defer.inlineCallbacks
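For reference, a small sketch of how the simple-mode bind DN is put together from the configured uid attribute and search base (illustrative values, not part of the patch):

    # With attributes.uid = "cn" and the sample base from the default config,
    # the user @alice:example.com would bind with the DN below.
    attributes = {"uid": "cn"}
    base = "ou=users,dc=example,dc=com"
    localpart = "alice"
    bind_dn = "{prop}={value},{base}".format(
        prop=attributes["uid"], value=localpart, base=base
    )
    print(bind_dn)  # -> "cn=alice,ou=users,dc=example,dc=com"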
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index 3992804845..2acc6cc214 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -38,6 +38,7 @@ class HttpPusher(object):
self.hs = hs
self.store = self.hs.get_datastore()
self.clock = self.hs.get_clock()
+ self.state_handler = self.hs.get_state_handler()
self.user_id = pusherdict['user_name']
self.app_id = pusherdict['app_id']
self.app_display_name = pusherdict['app_display_name']
@@ -237,7 +238,9 @@ class HttpPusher(object):
@defer.inlineCallbacks
def _build_notification_dict(self, event, tweaks, badge):
- ctx = yield push_tools.get_context_for_event(self.hs.get_datastore(), event)
+ ctx = yield push_tools.get_context_for_event(
+ self.state_handler, event, self.user_id
+ )
d = {
'notification': {
@@ -269,8 +272,8 @@ class HttpPusher(object):
if 'content' in event:
d['notification']['content'] = event.content
- if len(ctx['aliases']):
- d['notification']['room_alias'] = ctx['aliases'][0]
+ # We no longer send aliases separately; instead we send the
+ # human-readable name of the room, which may be an alias.
if 'sender_display_name' in ctx and len(ctx['sender_display_name']) > 0:
d['notification']['sender_display_name'] = ctx['sender_display_name']
if 'name' in ctx and len(ctx['name']) > 0:
diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py
index 89a3b5e90a..6f2d1ad57d 100644
--- a/synapse/push/push_tools.py
+++ b/synapse/push/push_tools.py
@@ -14,6 +14,9 @@
# limitations under the License.
from twisted.internet import defer
+from synapse.util.presentable_names import (
+ calculate_room_name, name_from_member_event
+)
@defer.inlineCallbacks
@@ -45,24 +48,21 @@ def get_badge_count(store, user_id):
@defer.inlineCallbacks
-def get_context_for_event(store, ev):
- name_aliases = yield store.get_room_name_and_aliases(
- ev.room_id
- )
+def get_context_for_event(state_handler, ev, user_id):
+ ctx = {}
- ctx = {'aliases': name_aliases[1]}
- if name_aliases[0] is not None:
- ctx['name'] = name_aliases[0]
+ room_state = yield state_handler.get_current_state(ev.room_id)
- their_member_events_for_room = yield store.get_current_state(
- room_id=ev.room_id,
- event_type='m.room.member',
- state_key=ev.user_id
+ # we no longer bother setting room_alias, and make room_name the
+ # human-readable name instead, be that the m.room.name, an alias or
+ # a list of people in the room
+ name = calculate_room_name(
+ room_state, user_id, fallback_to_single_member=False
)
- for mev in their_member_events_for_room:
- if mev.content['membership'] == 'join' and 'displayname' in mev.content:
- dn = mev.content['displayname']
- if dn is not None:
- ctx['sender_display_name'] = dn
+ if name:
+ ctx['name'] = name
+
+ sender_state_event = room_state[("m.room.member", ev.sender)]
+ ctx['sender_display_name'] = name_from_member_event(sender_state_event)
defer.returnValue(ctx)
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index e0a7a19777..e024cec0a2 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -48,6 +48,9 @@ CONDITIONAL_REQUIREMENTS = {
"Jinja2>=2.8": ["Jinja2>=2.8"],
"bleach>=1.4.2": ["bleach>=1.4.2"],
},
+ "ldap": {
+ "ldap3>=1.0": ["ldap3>=1.0"],
+ },
}
diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py
index 877c68508c..369d839464 100644
--- a/synapse/replication/slave/storage/events.py
+++ b/synapse/replication/slave/storage/events.py
@@ -18,7 +18,6 @@ from ._slaved_id_tracker import SlavedIdTracker
from synapse.api.constants import EventTypes
from synapse.events import FrozenEvent
from synapse.storage import DataStore
-from synapse.storage.room import RoomStore
from synapse.storage.roommember import RoomMemberStore
from synapse.storage.event_federation import EventFederationStore
from synapse.storage.event_push_actions import EventPushActionsStore
@@ -64,7 +63,6 @@ class SlavedEventStore(BaseSlavedStore):
# Cached functions can't be accessed through a class instance so we need
# to reach inside the __dict__ to extract them.
- get_room_name_and_aliases = RoomStore.__dict__["get_room_name_and_aliases"]
get_rooms_for_user = RoomMemberStore.__dict__["get_rooms_for_user"]
get_users_in_room = RoomMemberStore.__dict__["get_users_in_room"]
get_latest_event_ids_in_room = EventFederationStore.__dict__[
@@ -202,7 +200,6 @@ class SlavedEventStore(BaseSlavedStore):
self.get_rooms_for_user.invalidate_all()
self.get_users_in_room.invalidate((event.room_id,))
# self.get_joined_hosts_for_room.invalidate((event.room_id,))
- self.get_room_name_and_aliases.invalidate((event.room_id,))
self._invalidate_get_event_cache(event.event_id)
@@ -246,9 +243,3 @@ class SlavedEventStore(BaseSlavedStore):
self._get_current_state_for_key.invalidate((
event.room_id, event.type, event.state_key
))
-
- if event.type in [EventTypes.Name, EventTypes.Aliases]:
- self.get_room_name_and_aliases.invalidate(
- (event.room_id,)
- )
- pass
diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py
index d9fc045fc6..956bd5da75 100644
--- a/synapse/rest/media/v0/content_repository.py
+++ b/synapse/rest/media/v0/content_repository.py
@@ -15,14 +15,12 @@
from synapse.http.server import respond_with_json_bytes, finish_request
-from synapse.util.stringutils import random_string
from synapse.api.errors import (
- cs_exception, SynapseError, CodeMessageException, Codes, cs_error
+ Codes, cs_error
)
from twisted.protocols.basic import FileSender
from twisted.web import server, resource
-from twisted.internet import defer
import base64
import simplejson as json
@@ -50,64 +48,10 @@ class ContentRepoResource(resource.Resource):
"""
isLeaf = True
- def __init__(self, hs, directory, auth, external_addr):
+ def __init__(self, hs, directory):
resource.Resource.__init__(self)
self.hs = hs
self.directory = directory
- self.auth = auth
- self.external_addr = external_addr.rstrip('/')
- self.max_upload_size = hs.config.max_upload_size
-
- if not os.path.isdir(self.directory):
- os.mkdir(self.directory)
- logger.info("ContentRepoResource : Created %s directory.",
- self.directory)
-
- @defer.inlineCallbacks
- def map_request_to_name(self, request):
- # auth the user
- requester = yield self.auth.get_user_by_req(request)
-
- # namespace all file uploads on the user
- prefix = base64.urlsafe_b64encode(
- requester.user.to_string()
- ).replace('=', '')
-
- # use a random string for the main portion
- main_part = random_string(24)
-
- # suffix with a file extension if we can make one. This is nice to
- # provide a hint to clients on the file information. We will also reuse
- # this info to spit back the content type to the client.
- suffix = ""
- if request.requestHeaders.hasHeader("Content-Type"):
- content_type = request.requestHeaders.getRawHeaders(
- "Content-Type")[0]
- suffix = "." + base64.urlsafe_b64encode(content_type)
- if (content_type.split("/")[0].lower() in
- ["image", "video", "audio"]):
- file_ext = content_type.split("/")[-1]
- # be a little paranoid and only allow a-z
- file_ext = re.sub("[^a-z]", "", file_ext)
- suffix += "." + file_ext
-
- file_name = prefix + main_part + suffix
- file_path = os.path.join(self.directory, file_name)
- logger.info("User %s is uploading a file to path %s",
- request.user.user_id.to_string(),
- file_path)
-
- # keep trying to make a non-clashing file, with a sensible max attempts
- attempts = 0
- while os.path.exists(file_path):
- main_part = random_string(24)
- file_name = prefix + main_part + suffix
- file_path = os.path.join(self.directory, file_name)
- attempts += 1
- if attempts > 25: # really? Really?
- raise SynapseError(500, "Unable to create file.")
-
- defer.returnValue(file_path)
def render_GET(self, request):
# no auth here on purpose, to allow anyone to view, even across home
@@ -155,58 +99,6 @@ class ContentRepoResource(resource.Resource):
return server.NOT_DONE_YET
- def render_POST(self, request):
- self._async_render(request)
- return server.NOT_DONE_YET
-
def render_OPTIONS(self, request):
respond_with_json_bytes(request, 200, {}, send_cors=True)
return server.NOT_DONE_YET
-
- @defer.inlineCallbacks
- def _async_render(self, request):
- try:
- # TODO: The checks here are a bit late. The content will have
- # already been uploaded to a tmp file at this point
- content_length = request.getHeader("Content-Length")
- if content_length is None:
- raise SynapseError(
- msg="Request must specify a Content-Length", code=400
- )
- if int(content_length) > self.max_upload_size:
- raise SynapseError(
- msg="Upload request body is too large",
- code=413,
- )
-
- fname = yield self.map_request_to_name(request)
-
- # TODO I have a suspicious feeling this is just going to block
- with open(fname, "wb") as f:
- f.write(request.content.read())
-
- # FIXME (erikj): These should use constants.
- file_name = os.path.basename(fname)
- # FIXME: we can't assume what the repo's public mounted path is
- # ...plus self-signed SSL won't work to remote clients anyway
- # ...and we can't assume that it's SSL anyway, as we might want to
- # serve it via the non-SSL listener...
- url = "%s/_matrix/content/%s" % (
- self.external_addr, file_name
- )
-
- respond_with_json_bytes(request, 200,
- json.dumps({"content_token": url}),
- send_cors=True)
-
- except CodeMessageException as e:
- logger.exception(e)
- respond_with_json_bytes(request, e.code,
- json.dumps(cs_exception(e)))
- except Exception as e:
- logger.error("Failed to store file: %s" % e)
- respond_with_json_bytes(
- request,
- 500,
- json.dumps({"error": "Internal server error"}),
- send_cors=True)
diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index 940e11d7a2..5f1b6f63a9 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -152,7 +152,7 @@ class EventPushActionsStore(SQLBaseStore):
if max_stream_ordering is not None:
sql += " AND ep.stream_ordering <= ?"
args.append(max_stream_ordering)
- sql += " ORDER BY ep.stream_ordering ASC LIMIT ?"
+ sql += " ORDER BY ep.stream_ordering DESC LIMIT ?"
args.append(limit)
txn.execute(sql, args)
return txn.fetchall()
@@ -176,14 +176,16 @@ class EventPushActionsStore(SQLBaseStore):
if max_stream_ordering is not None:
sql += " AND ep.stream_ordering <= ?"
args.append(max_stream_ordering)
- sql += " ORDER BY ep.stream_ordering ASC"
+ sql += " ORDER BY ep.stream_ordering DESC LIMIT ?"
+ args.append(limit)
txn.execute(sql, args)
return txn.fetchall()
no_read_receipt = yield self.runInteraction(
"get_unread_push_actions_for_user_in_range", get_no_receipt
)
- defer.returnValue([
+ # Make a list of dicts from the two sets of results.
+ notifs = [
{
"event_id": row[0],
"room_id": row[1],
@@ -191,7 +193,16 @@ class EventPushActionsStore(SQLBaseStore):
"actions": json.loads(row[3]),
"received_ts": row[4],
} for row in after_read_receipt + no_read_receipt
- ])
+ ]
+
+ # Now sort it so it's ordered correctly, since currently it will
+ # contain results from the first query, correctly ordered, followed
+ # by results from the second query, but we want them all ordered
+ # by received_ts
+ notifs.sort(key=lambda r: -(r['received_ts'] or 0))
+
+ # Now return the first `limit`
+ defer.returnValue(notifs[:limit])
@defer.inlineCallbacks
def get_time_of_last_push_action_before(self, stream_ordering):
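A toy illustration of why the final sort and truncation are needed (made-up rows, not part of the patch): each query is already capped at limit rows, newest first, but interleaving by received_ts only happens after the two result sets are concatenated.

    # Both lists are individually ordered; after concatenation they are
    # re-sorted by received_ts (missing timestamps sort last) and cut back
    # down to `limit`.
    limit = 2
    after_read_receipt = [{"event_id": "$a", "received_ts": 30}]
    no_read_receipt = [{"event_id": "$b", "received_ts": 40},
                       {"event_id": "$c", "received_ts": None}]
    notifs = after_read_receipt + no_read_receipt
    notifs.sort(key=lambda r: -(r["received_ts"] or 0))
    print(notifs[:limit])  # "$b" (40), then "$a" (30); "$c" is dropped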
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 6d978ffcd5..88a6ff7310 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -355,7 +355,6 @@ class EventsStore(SQLBaseStore):
txn.call_after(self.get_rooms_for_user.invalidate_all)
txn.call_after(self.get_users_in_room.invalidate, (event.room_id,))
txn.call_after(self.get_joined_hosts_for_room.invalidate, (event.room_id,))
- txn.call_after(self.get_room_name_and_aliases.invalidate, (event.room_id,))
# Add an entry to the current_state_resets table to record the point
# where we clobbered the current state
@@ -666,12 +665,6 @@ class EventsStore(SQLBaseStore):
(event.room_id, event.type, event.state_key,)
)
- if event.type in [EventTypes.Name, EventTypes.Aliases]:
- txn.call_after(
- self.get_room_name_and_aliases.invalidate,
- (event.room_id,)
- )
-
self._simple_upsert_txn(
txn,
"current_state_events",
diff --git a/synapse/storage/room.py b/synapse/storage/room.py
index 97f9f1929c..8251f58670 100644
--- a/synapse/storage/room.py
+++ b/synapse/storage/room.py
@@ -18,7 +18,6 @@ from twisted.internet import defer
from synapse.api.errors import StoreError
from ._base import SQLBaseStore
-from synapse.util.caches.descriptors import cachedInlineCallbacks
from .engines import PostgresEngine, Sqlite3Engine
import collections
@@ -192,49 +191,6 @@ class RoomStore(SQLBaseStore):
# This should be unreachable.
raise Exception("Unrecognized database engine")
- @cachedInlineCallbacks()
- def get_room_name_and_aliases(self, room_id):
- def get_room_name(txn):
- sql = (
- "SELECT name FROM room_names"
- " INNER JOIN current_state_events USING (room_id, event_id)"
- " WHERE room_id = ?"
- " LIMIT 1"
- )
-
- txn.execute(sql, (room_id,))
- rows = txn.fetchall()
- if rows:
- return rows[0][0]
- else:
- return None
-
- return [row[0] for row in txn.fetchall()]
-
- def get_room_aliases(txn):
- sql = (
- "SELECT content FROM current_state_events"
- " INNER JOIN events USING (room_id, event_id)"
- " WHERE room_id = ?"
- )
- txn.execute(sql, (room_id,))
- return [row[0] for row in txn.fetchall()]
-
- name = yield self.runInteraction("get_room_name", get_room_name)
- alias_contents = yield self.runInteraction("get_room_aliases", get_room_aliases)
-
- aliases = []
-
- for c in alias_contents:
- try:
- content = json.loads(c)
- except:
- continue
-
- aliases.extend(content.get('aliases', []))
-
- defer.returnValue((name, aliases))
-
def add_event_report(self, room_id, event_id, user_id, reason, content,
received_ts):
next_id = self._event_reports_id_gen.get_next()
diff --git a/synapse/util/presentable_names.py b/synapse/util/presentable_names.py
index a6866f6117..4c54812e6f 100644
--- a/synapse/util/presentable_names.py
+++ b/synapse/util/presentable_names.py
@@ -25,7 +25,8 @@ ALIAS_RE = re.compile(r"^#.*:.+$")
ALL_ALONE = "Empty Room"
-def calculate_room_name(room_state, user_id, fallback_to_members=True):
+def calculate_room_name(room_state, user_id, fallback_to_members=True,
+ fallback_to_single_member=True):
"""
Works out a user-facing name for the given room as per Matrix
spec recommendations.
@@ -129,6 +130,8 @@ def calculate_room_name(room_state, user_id, fallback_to_members=True):
return name_from_member_event(all_members[0])
else:
return ALL_ALONE
+ elif len(other_members) == 1 and not fallback_to_single_member:
+ return None
else:
return descriptor_from_member_events(other_members)
diff --git a/tests/replication/slave/storage/test_events.py b/tests/replication/slave/storage/test_events.py
index 17587fda00..f33e6f60fb 100644
--- a/tests/replication/slave/storage/test_events.py
+++ b/tests/replication/slave/storage/test_events.py
@@ -59,47 +59,6 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
[unpatch() for unpatch in self.unpatches]
@defer.inlineCallbacks
- def test_room_name_and_aliases(self):
- create = yield self.persist(type="m.room.create", key="", creator=USER_ID)
- yield self.persist(type="m.room.member", key=USER_ID, membership="join")
- yield self.persist(type="m.room.name", key="", name="name1")
- yield self.persist(
- type="m.room.aliases", key="blue", aliases=["#1:blue"]
- )
- yield self.replicate()
- yield self.check(
- "get_room_name_and_aliases", (ROOM_ID,), ("name1", ["#1:blue"])
- )
-
- # Set the room name.
- yield self.persist(type="m.room.name", key="", name="name2")
- yield self.replicate()
- yield self.check(
- "get_room_name_and_aliases", (ROOM_ID,), ("name2", ["#1:blue"])
- )
-
- # Set the room aliases.
- yield self.persist(
- type="m.room.aliases", key="blue", aliases=["#2:blue"]
- )
- yield self.replicate()
- yield self.check(
- "get_room_name_and_aliases", (ROOM_ID,), ("name2", ["#2:blue"])
- )
-
- # Leave and join the room clobbering the state.
- yield self.persist(type="m.room.member", key=USER_ID, membership="leave")
- yield self.persist(
- type="m.room.member", key=USER_ID, membership="join",
- reset_state=[create]
- )
- yield self.replicate()
-
- yield self.check(
- "get_room_name_and_aliases", (ROOM_ID,), (None, [])
- )
-
- @defer.inlineCallbacks
def test_room_members(self):
create = yield self.persist(type="m.room.create", key="", creator=USER_ID)
yield self.replicate()
diff --git a/tests/utils.py b/tests/utils.py
index 6e41ae1ff6..ed547bc39b 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -56,6 +56,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
config.use_frozen_dicts = True
config.database_config = {"name": "sqlite3"}
+ config.ldap_enabled = False
if "clock" not in kargs:
kargs["clock"] = MockClock()
|