diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index 168eb27b03..250ba536ea 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -119,12 +119,15 @@ class DataStore(RoomMemberStore, RoomStore,
self._state_groups_id_gen = IdGenerator(db_conn, "state_groups", "id")
self._access_tokens_id_gen = IdGenerator(db_conn, "access_tokens", "id")
self._refresh_tokens_id_gen = IdGenerator(db_conn, "refresh_tokens", "id")
- self._pushers_id_gen = IdGenerator(db_conn, "pushers", "id")
self._push_rule_id_gen = IdGenerator(db_conn, "push_rules", "id")
self._push_rules_enable_id_gen = IdGenerator(db_conn, "push_rules_enable", "id")
self._push_rules_stream_id_gen = ChainedIdGenerator(
self._stream_id_gen, db_conn, "push_rules_stream", "stream_id"
)
+ self._pushers_id_gen = StreamIdGenerator(
+ db_conn, "pushers", "id",
+ extra_tables=[("deleted_pushers", "stream_id")],
+ )
events_max = self._stream_id_gen.get_max_token()
event_cache_prefill, min_event_val = self._get_cache_dict(
diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py
index 7693ab9082..29da3bbd13 100644
--- a/synapse/storage/pusher.py
+++ b/synapse/storage/pusher.py
@@ -16,8 +16,6 @@
from ._base import SQLBaseStore
from twisted.internet import defer
-from synapse.api.errors import StoreError
-
from canonicaljson import encode_canonical_json
import logging
@@ -79,12 +77,41 @@ class PusherStore(SQLBaseStore):
rows = yield self.runInteraction("get_all_pushers", get_pushers)
defer.returnValue(rows)
+ def get_pushers_stream_token(self):
+ return self._pushers_id_gen.get_max_token()
+
+ def get_all_updated_pushers(self, last_id, current_id, limit):
+ def get_all_updated_pushers_txn(txn):
+ sql = (
+ "SELECT id, user_name, access_token, profile_tag, kind,"
+ " app_id, app_display_name, device_display_name, pushkey, ts,"
+ " lang, data"
+ " FROM pushers"
+ " WHERE ? < id AND id <= ?"
+ " ORDER BY id ASC LIMIT ?"
+ )
+ txn.execute(sql, (last_id, current_id, limit))
+ updated = txn.fetchall()
+
+ sql = (
+ "SELECT stream_id, user_id, app_id, pushkey"
+ " FROM deleted_pushers"
+ " WHERE ? < stream_id AND stream_id <= ?"
+ " ORDER BY stream_id ASC LIMIT ?"
+ )
+ txn.execute(sql, (last_id, current_id, limit))
+ deleted = txn.fetchall()
+
+ return (updated, deleted)
+ return self.runInteraction(
+ "get_all_updated_pushers", get_all_updated_pushers_txn
+ )
+
@defer.inlineCallbacks
def add_pusher(self, user_id, access_token, kind, app_id,
app_display_name, device_display_name,
pushkey, pushkey_ts, lang, data, profile_tag=""):
- try:
- next_id = self._pushers_id_gen.get_next()
+ with self._pushers_id_gen.get_next() as stream_id:
yield self._simple_upsert(
"pushers",
dict(
@@ -101,23 +128,29 @@ class PusherStore(SQLBaseStore):
lang=lang,
data=encode_canonical_json(data),
profile_tag=profile_tag,
- ),
- insertion_values=dict(
- id=next_id,
+ id=stream_id,
),
desc="add_pusher",
)
- except Exception as e:
- logger.error("create_pusher with failed: %s", e)
- raise StoreError(500, "Problem creating pusher.")
@defer.inlineCallbacks
def delete_pusher_by_app_id_pushkey_user_id(self, app_id, pushkey, user_id):
- yield self._simple_delete_one(
- "pushers",
- {"app_id": app_id, "pushkey": pushkey, 'user_name': user_id},
- desc="delete_pusher_by_app_id_pushkey_user_id",
- )
+ def delete_pusher_txn(txn, stream_id):
+            self._simple_delete_one_txn(
+ txn,
+ "pushers",
+ {"app_id": app_id, "pushkey": pushkey, "user_name": user_id}
+ )
+ self._simple_upsert_txn(
+ txn,
+ "deleted_pushers",
+ {"app_id": app_id, "pushkey": pushkey, "user_id": user_id},
+                {"stream_id": stream_id},
+ )
+ with self._pushers_id_gen.get_next() as stream_id:
+ yield self.runInteraction(
+ "delete_pusher", delete_pusher_txn, stream_id
+ )
@defer.inlineCallbacks
def update_pusher_last_token(self, app_id, pushkey, user_id, last_token):
diff --git a/synapse/storage/schema/delta/30/deleted_pushers.sql b/synapse/storage/schema/delta/30/deleted_pushers.sql
new file mode 100644
index 0000000000..cdcf79ac81
--- /dev/null
+++ b/synapse/storage/schema/delta/30/deleted_pushers.sql
@@ -0,0 +1,24 @@
+/* Copyright 2016 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE IF NOT EXISTS deleted_pushers(
+ stream_id BIGINT NOT NULL,
+ app_id TEXT NOT NULL,
+ pushkey TEXT NOT NULL,
+ user_id TEXT NOT NULL,
+ UNIQUE (app_id, pushkey, user_id)
+);
+
+CREATE INDEX deleted_pushers_stream_id ON deleted_pushers (stream_id);
diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py
index 610ddad423..a02dfc7d58 100644
--- a/synapse/storage/util/id_generators.py
+++ b/synapse/storage/util/id_generators.py
@@ -49,9 +49,14 @@ class StreamIdGenerator(object):
with stream_id_gen.get_next() as stream_id:
# ... persist event ...
"""
- def __init__(self, db_conn, table, column):
+ def __init__(self, db_conn, table, column, extra_tables=[]):
self._lock = threading.Lock()
self._current_max = _load_max_id(db_conn, table, column)
+ for table, column in extra_tables:
+ self._current_max = max(
+ self._current_max,
+ _load_max_id(db_conn, table, column)
+ )
self._unfinished_ids = deque()
def get_next(self):
|