Implement cache replication stream
1 file changed, 29 insertions, 1 deletion
diff --git a/synapse/replication/slave/storage/_base.py b/synapse/replication/slave/storage/_base.py
index 46e43ce1c7..24c9946d6a 100644
--- a/synapse/replication/slave/storage/_base.py
+++ b/synapse/replication/slave/storage/_base.py
@@ -14,15 +14,43 @@
# limitations under the License.
from synapse.storage._base import SQLBaseStore
+from synapse.storage.engines import PostgresEngine
from twisted.internet import defer
+from ._slaved_id_tracker import SlavedIdTracker
+
+import logging
+
+logger = logging.getLogger(__name__)
+
class BaseSlavedStore(SQLBaseStore):
def __init__(self, db_conn, hs):
super(BaseSlavedStore, self).__init__(hs)
+ if isinstance(self.database_engine, PostgresEngine):
+ self._cache_id_gen = SlavedIdTracker(
+ db_conn, "cache_stream", "stream_id",
+ )
+ else:
+ self._cache_id_gen = None
def stream_positions(self):
- return {}
+ pos = {}
+ if self._cache_id_gen:
+ pos["caches"] = self._cache_id_gen.get_current_token()
+ return pos
def process_replication(self, result):
+ stream = result.get("caches")
+ if stream:
+ for row in stream["rows"]:
+ (
+ position, cache_func, keys, invalidation_ts,
+ ) = row
+
+ try:
+ getattr(self, cache_func).invalidate(tuple(keys))
+ except AttributeError:
+                logger.warning("Got unexpected cache_func: %r", cache_func)
+ self._cache_id_gen.advance(int(stream["position"]))
return defer.succeed(None)
|