diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 49922c6d03..d61e525e62 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -13,10 +13,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from ._base import Config
+from ._base import Config, ConfigError
from collections import namedtuple
-import sys
+
+MISSING_NETADDR = (
+ "Missing netaddr library. This is required for URL preview API."
+)
+
+MISSING_LXML = (
+ """Missing lxml library. This is required for URL preview API.
+
+ Install by running:
+ pip install lxml
+
+ Requires the libxslt1-dev system package.
+ """
+)
+
ThumbnailRequirement = namedtuple(
"ThumbnailRequirement", ["width", "height", "method", "media_type"]
@@ -62,18 +76,32 @@ class ContentRepositoryConfig(Config):
self.thumbnail_requirements = parse_thumbnail_requirements(
config["thumbnail_sizes"]
)
- self.url_preview_enabled = config["url_preview_enabled"]
+ self.url_preview_enabled = config.get("url_preview_enabled", False)
if self.url_preview_enabled:
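+ # URL previews rely on optional dependencies; check for them up front and fail with a clear error if they are missing.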
try:
+ import lxml
+ lxml # reference the import so the unused-import lint doesn't complain; we only need to know lxml is importable.
+ except ImportError:
+ raise ConfigError(MISSING_LXML)
+
+ try:
from netaddr import IPSet
- if "url_preview_ip_range_blacklist" in config:
- self.url_preview_ip_range_blacklist = IPSet(
- config["url_preview_ip_range_blacklist"]
- )
- if "url_preview_url_blacklist" in config:
- self.url_preview_url_blacklist = config["url_preview_url_blacklist"]
except ImportError:
- sys.stderr.write("\nmissing netaddr dep - disabling preview_url API\n")
+ raise ConfigError(MISSING_NETADDR)
+
+ if "url_preview_ip_range_blacklist" in config:
+ self.url_preview_ip_range_blacklist = IPSet(
+ config["url_preview_ip_range_blacklist"]
+ )
+ else:
+ raise ConfigError(
+ "For security, you must specify an explicit target IP address "
+ "blacklist in url_preview_ip_range_blacklist for url previewing "
+ "to work"
+ )
+
+ if "url_preview_url_blacklist" in config:
+ self.url_preview_url_blacklist = config["url_preview_url_blacklist"]
def default_config(self, **kwargs):
media_store = self.default_path("media_store")
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index 57f0a69e03..6950a20632 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -21,6 +21,7 @@ import logging
import push_rule_evaluator
import push_tools
+from synapse.util.logcontext import LoggingContext
from synapse.util.metrics import Measure
logger = logging.getLogger(__name__)
@@ -85,9 +86,8 @@ class HttpPusher(object):
@defer.inlineCallbacks
def on_new_notifications(self, min_stream_ordering, max_stream_ordering):
- with Measure(self.clock, "push.on_new_notifications"):
- self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering)
- yield self._process()
+ self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering)
+ yield self._process()
@defer.inlineCallbacks
def on_new_receipts(self, min_stream_id, max_stream_id):
@@ -95,16 +95,16 @@ class HttpPusher(object):
# We could check the receipts are actually m.read receipts here,
# but currently that's the only type of receipt anyway...
- with Measure(self.clock, "push.on_new_receipts"):
- badge = yield push_tools.get_badge_count(
- self.hs.get_datastore(), self.user_id
- )
- yield self.send_badge(badge)
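+ # give this work its own named logcontext so that its log lines are attributable to it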
+ with LoggingContext("push.on_new_receipts"):
+ with Measure(self.clock, "push.on_new_receipts"):
+ badge = yield push_tools.get_badge_count(
+ self.hs.get_datastore(), self.user_id
+ )
+ yield self._send_badge(badge)
@defer.inlineCallbacks
def on_timer(self):
- with Measure(self.clock, "push.on_timer"):
- yield self._process()
+ yield self._process()
def on_stop(self):
if self.timed_call:
@@ -114,20 +114,23 @@ class HttpPusher(object):
def _process(self):
if self.processing:
return
- try:
- self.processing = True
- # if the max ordering changes while we're running _unsafe_process,
- # call it again, and so on until we've caught up.
- while True:
- starting_max_ordering = self.max_stream_ordering
+
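+ # run the processing loop in its own logcontext and Measure block so that it shows up distinctly in the logs and in metrics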
+ with LoggingContext("push._process"):
+ with Measure(self.clock, "push._process"):
try:
- yield self._unsafe_process()
- except:
- logger.exception("Exception processing notifs")
- if self.max_stream_ordering == starting_max_ordering:
- break
- finally:
- self.processing = False
+ self.processing = True
+ # if the max ordering changes while we're running _unsafe_process,
+ # call it again, and so on until we've caught up.
+ while True:
+ starting_max_ordering = self.max_stream_ordering
+ try:
+ yield self._unsafe_process()
+ except Exception:
+ logger.exception("Exception processing notifs")
+ if self.max_stream_ordering == starting_max_ordering:
+ break
+ finally:
+ self.processing = False
@defer.inlineCallbacks
def _unsafe_process(self):
@@ -146,7 +149,7 @@ class HttpPusher(object):
if processed:
self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
self.last_stream_ordering = push_action['stream_ordering']
- self.store.update_pusher_last_stream_ordering_and_success(
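+ # yield on the store update so that any failure propagates instead of being silently dropped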
+ yield self.store.update_pusher_last_stream_ordering_and_success(
self.app_id, self.pushkey, self.user_id,
self.last_stream_ordering,
self.clock.time_msec()
@@ -291,7 +294,7 @@ class HttpPusher(object):
defer.returnValue(rejected)
@defer.inlineCallbacks
- def send_badge(self, badge):
+ def _send_badge(self, badge):
logger.info("Sending updated badge count %d to %r", badge, self.user_id)
d = {
'notification': {
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 1adbdd9421..b25b736493 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -43,7 +43,6 @@ CONDITIONAL_REQUIREMENTS = {
"matrix_angular_sdk>=0.6.8": ["syweb>=0.6.8"],
},
"preview_url": {
- "lxml>=3.6.0": ["lxml"],
"netaddr>=0.7.18": ["netaddr"],
},
}
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 97b7e84af9..77fb0313c5 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -80,8 +80,4 @@ class MediaRepositoryResource(Resource):
self.putChild("thumbnail", ThumbnailResource(hs, filepaths))
self.putChild("identicon", IdenticonResource())
if hs.config.url_preview_enabled:
- try:
- self.putChild("preview_url", PreviewUrlResource(hs, filepaths))
- except Exception as e:
- logger.warn("Failed to mount preview_url")
- logger.exception(e)
+ self.putChild("preview_url", PreviewUrlResource(hs, filepaths))
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index 4dd97ac0e3..8e1cf6e2fb 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -40,33 +40,11 @@ import ujson as json
import logging
logger = logging.getLogger(__name__)
-try:
- from lxml import html
-except ImportError:
- pass
-
class PreviewUrlResource(BaseMediaResource):
isLeaf = True
def __init__(self, hs, filepaths):
- try:
- if html:
- pass
- except:
- raise RuntimeError("Disabling PreviewUrlResource as lxml not available")
-
- if not hasattr(hs.config, "url_preview_ip_range_blacklist"):
- logger.warn(
- "For security, you must specify an explicit target IP address "
- "blacklist in url_preview_ip_range_blacklist for url previewing "
- "to work"
- )
- raise RuntimeError(
- "Disabling PreviewUrlResource as "
- "url_preview_ip_range_blacklist not specified"
- )
-
BaseMediaResource.__init__(self, hs, filepaths)
self.client = SpiderHttpClient(hs)
if hasattr(hs.config, "url_preview_url_blacklist"):
@@ -201,6 +179,8 @@ class PreviewUrlResource(BaseMediaResource):
elif self._is_html(media_info['media_type']):
# TODO: somehow stop a big HTML tree from exploding synapse's RAM
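+ # import lxml lazily so that this module can be imported without it installed; ContentRepositoryConfig checks it is available when URL previews are enabled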
+ from lxml import html
+
try:
tree = html.parse(media_info['filename'])
og = yield self._calc_og(tree, media_info, requester)
diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index 88737b7a6c..86a98b6f11 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -133,9 +133,10 @@ class EventPushActionsStore(SQLBaseStore):
" ep.topological_ordering > rl.topological_ordering"
" OR ("
" ep.topological_ordering = rl.topological_ordering"
- " AND ep.stream_ordering > ?"
+ " AND ep.stream_ordering > rl.stream_ordering"
" )"
" )"
+ " AND ep.stream_ordering > ?"
" AND ep.user_id = ?"
" AND ep.user_id = rl.user_id"
)
diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py
index e64c0dce0a..e5755c0aea 100644
--- a/synapse/storage/pusher.py
+++ b/synapse/storage/pusher.py
@@ -137,7 +137,11 @@ class PusherStore(SQLBaseStore):
users = yield self.get_users_in_room(room_id)
result = yield self._simple_select_many_batch(
- 'pushers', 'user_name', users, ['user_name']
+ table='pushers',
+ column='user_name',
+ iterable=users,
+ retcols=['user_name'],
+ desc='get_users_with_pushers_in_room'
)
defer.returnValue([r['user_name'] for r in result])
diff --git a/synapse/storage/schema/delta/31/pushers_index.sql b/synapse/storage/schema/delta/31/pushers_index.sql
new file mode 100644
index 0000000000..9027bccc69
--- /dev/null
+++ b/synapse/storage/schema/delta/31/pushers_index.sql
@@ -0,0 +1,18 @@
+/* Copyright 2016 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ CREATE INDEX event_push_actions_stream_ordering ON event_push_actions(
+ stream_ordering, user_id
+ );