diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 49aeb953bd..ecb79c07ef 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -54,6 +54,7 @@ def encode_json(json_object):
else:
return json.dumps(json_object, ensure_ascii=False)
+
# These values are used in the `enqueus_event` and `_do_fetch` methods to
# control how we batch/bulk fetch events from the database.
# The values are plucked out of thin air to make initial sync run faster
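
For context, the comment above refers to Synapse's event-fetch batching, where requested event IDs are queued and pulled from the database in chunks rather than one at a time. The sketch below is a self-contained illustration of that general pattern, not Synapse's actual `_do_fetch` code; the helper name and batch size are assumptions made purely for the example.

# Hypothetical stand-alone sketch of batch/bulk fetching: collect the requested
# IDs and hit the backend once per chunk instead of once per ID. The chunk size
# plays the same tuning role as the constants referenced in the comment above.
BATCH_SIZE = 100  # illustrative value only

def fetch_in_batches(event_ids, fetch_many):
    """Fetch events in chunks of BATCH_SIZE using the supplied bulk getter."""
    results = {}
    ids = list(event_ids)
    for start in range(0, len(ids), BATCH_SIZE):
        chunk = ids[start:start + BATCH_SIZE]
        results.update(fetch_many(chunk))  # one backend round-trip per chunk
    return results

# Usage with a stubbed bulk getter standing in for a database query:
events = fetch_in_batches(
    ["$a:example.com", "$b:example.com"],
    lambda chunk: {eid: {"event_id": eid} for eid in chunk},
)
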
diff --git a/synapse/storage/filtering.py b/synapse/storage/filtering.py
index 5248736816..a2ccc66ea7 100644
--- a/synapse/storage/filtering.py
+++ b/synapse/storage/filtering.py
@@ -16,6 +16,7 @@
from twisted.internet import defer
from ._base import SQLBaseStore
+from synapse.api.errors import SynapseError, Codes
from synapse.util.caches.descriptors import cachedInlineCallbacks
import simplejson as json
@@ -24,6 +25,13 @@ import simplejson as json
class FilteringStore(SQLBaseStore):
@cachedInlineCallbacks(num_args=2)
def get_user_filter(self, user_localpart, filter_id):
+ # filter_id is BIGINT UNSIGNED, so if it isn't a number, fail
+ # with a coherent error message rather than 500 M_UNKNOWN.
+ try:
+ int(filter_id)
+ except ValueError:
+ raise SynapseError(400, "Invalid filter ID", Codes.INVALID_PARAM)
+
def_json = yield self._simple_select_one_onecol(
table="user_filters",
keyvalues={
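
The hunk above validates filter_id before it reaches the database: the column is BIGINT UNSIGNED, so a non-numeric value previously surfaced as a 500 M_UNKNOWN from the storage layer, and now fails fast with a 400 and Codes.INVALID_PARAM. The snippet below is a minimal, self-contained sketch of that validate-before-query pattern; the SynapseError and Codes classes here are stand-ins mirroring synapse.api.errors, not the real ones.

class Codes(object):
    INVALID_PARAM = "M_INVALID_PARAM"  # stand-in mirroring synapse.api.errors.Codes

class SynapseError(Exception):
    """Stand-in for synapse.api.errors.SynapseError."""
    def __init__(self, code, msg, errcode):
        super(SynapseError, self).__init__(msg)
        self.code = code
        self.msg = msg
        self.errcode = errcode

def check_filter_id(filter_id):
    """Reject non-integer filter IDs up front with a coherent 400 error."""
    try:
        int(filter_id)
    except ValueError:
        raise SynapseError(400, "Invalid filter ID", Codes.INVALID_PARAM)

check_filter_id("42")       # a numeric ID passes through untouched
try:
    check_filter_id("abc")  # a malformed ID now fails before any SQL runs
except SynapseError as e:
    print(e.code, e.errcode)  # 400 M_INVALID_PARAM
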
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index 49abf0ac74..23e7ad9922 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -653,7 +653,10 @@ class StateStore(SQLBaseStore):
else:
state_dict = results[group]
- state_dict.update(group_state_dict)
+ state_dict.update({
+ (intern_string(k[0]), intern_string(k[1])): v
+ for k, v in group_state_dict.items()
+ })
self._state_group_cache.update(
cache_seq_num,
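
The state.py change above interns the two string components of every (event type, state key) pair before they are merged into the state-group cache, so identical strings that recur across many state groups share a single object instead of each holding its own copy. Below is a minimal sketch of the idea using Python 3's built-in sys.intern as a stand-in for Synapse's intern_string helper; the data values are made up for illustration.

import sys

def intern_keys(state):
    """Return a copy of `state` whose (type, state_key) tuple parts are interned."""
    return {
        (sys.intern(k[0]), sys.intern(k[1])): v
        for k, v in state.items()
    }

# Build two equal-but-distinct key strings at runtime, as happens when rows are
# read back from the database for different state groups.
type_1 = "".join(["m.room.", "member"])
type_2 = "".join(["m.room.", "member"])
assert type_1 is not type_2            # two separate string objects in memory

group_a = intern_keys({(type_1, "@alice:example.com"): "$event_a"})
group_b = intern_keys({(type_2, "@alice:example.com"): "$event_b"})
(key_a, _), = group_a.items()
(key_b, _), = group_b.items()
assert key_a[0] is key_b[0]            # after interning, one shared string
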