author | Erik Johnston <erik@matrix.org> | 2019-07-05 14:08:21 +0100
---|---|---
committer | Erik Johnston <erik@matrix.org> | 2019-07-05 14:08:21 +0100
commit | 7f0d8e42885078583d1eab16df2523292ed1148d (patch) |
tree | 97d56ee3dd35549820a69b702d574d1704560773 /synapse/storage |
parent | Assume key existence. Update docstrings (diff) |
parent | remove dead transaction persist code (#5622) (diff) |
download | synapse-7f0d8e42885078583d1eab16df2523292ed1148d.tar.xz |
Merge branch 'develop' of github.com:matrix-org/synapse into erikj/admin_exfiltrate_data
Diffstat (limited to 'synapse/storage')
-rw-r--r-- | synapse/storage/_base.py | 2
-rw-r--r-- | synapse/storage/events.py | 13
-rw-r--r-- | synapse/storage/events_worker.py | 6
-rw-r--r-- | synapse/storage/registration.py | 13
-rw-r--r-- | synapse/storage/stream.py | 2
-rw-r--r-- | synapse/storage/transactions.py | 28
6 files changed, 25 insertions, 39 deletions
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 29589853c6..2f940dbae6 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -30,12 +30,12 @@ from prometheus_client import Histogram
 from twisted.internet import defer
 
 from synapse.api.errors import StoreError
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.engines import PostgresEngine, Sqlite3Engine
 from synapse.types import get_domain_from_id
 from synapse.util import batch_iter
 from synapse.util.caches.descriptors import Cache
-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
 from synapse.util.stringutils import exception_to_unicode
 
 # import a function which will return a monotonic time, in seconds
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index fefba39ea1..b486ca50eb 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -33,6 +33,8 @@ from synapse.api.constants import EventTypes
 from synapse.api.errors import SynapseError
 from synapse.events import EventBase  # noqa: F401
 from synapse.events.snapshot import EventContext  # noqa: F401
+from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
+from synapse.logging.utils import log_function
 from synapse.metrics import BucketCollector
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.state import StateResolutionStore
@@ -45,8 +47,6 @@ from synapse.util import batch_iter
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.frozenutils import frozendict_json_encoder
-from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
@@ -253,7 +253,14 @@ class EventsStore(
         )
 
         # Read the extrems every 60 minutes
-        hs.get_clock().looping_call(self._read_forward_extremities, 60 * 60 * 1000)
+        def read_forward_extremities():
+            # run as a background process to make sure that the database transactions
+            # have a logcontext to report to
+            return run_as_background_process(
+                "read_forward_extremities", self._read_forward_extremities
+            )
+
+        hs.get_clock().looping_call(read_forward_extremities, 60 * 60 * 1000)
 
     @defer.inlineCallbacks
     def _read_forward_extremities(self):
diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py
index 6d680d405a..09db872511 100644
--- a/synapse/storage/events_worker.py
+++ b/synapse/storage/events_worker.py
@@ -29,14 +29,14 @@ from synapse.api.room_versions import EventFormatVersions
 from synapse.events import FrozenEvent, event_type_from_format_version  # noqa: F401
 from synapse.events.snapshot import EventContext  # noqa: F401
 from synapse.events.utils import prune_event
-from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.types import get_domain_from_id
-from synapse.util.logcontext import (
+from synapse.logging.context import (
     LoggingContext,
     PreserveLoggingContext,
     make_deferred_yieldable,
     run_in_background,
 )
+from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.types import get_domain_from_id
 from synapse.util.metrics import Measure
 
 from ._base import SQLBaseStore
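Most of the hunks above are just the relocation of the logcontext helpers out of `synapse.util` and into the new `synapse.logging` package; the helper names themselves do not change. As a quick illustration (not part of the commit), code that previously imported these names from `synapse.util.logcontext` and `synapse.util.logutils` would afterwards import them as:

```python
# Only the module path changes; the names are exactly those shown in the diff.
from synapse.logging.context import (
    LoggingContext,
    PreserveLoggingContext,
    make_deferred_yieldable,
    run_in_background,
)
from synapse.logging.utils import log_function
```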
diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py
index 983ce13291..13a3d5208b 100644
--- a/synapse/storage/registration.py
+++ b/synapse/storage/registration.py
@@ -25,6 +25,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import UserTypes
 from synapse.api.errors import Codes, StoreError, ThreepidValidationError
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage import background_updates
 from synapse.storage._base import SQLBaseStore
 from synapse.types import UserID
@@ -619,9 +620,15 @@ class RegistrationStore(
         )
 
         # Create a background job for culling expired 3PID validity tokens
-        hs.get_clock().looping_call(
-            self.cull_expired_threepid_validation_tokens, THIRTY_MINUTES_IN_MS
-        )
+        def start_cull():
+            # run as a background process to make sure that the database transactions
+            # have a logcontext to report to
+            return run_as_background_process(
+                "cull_expired_threepid_validation_tokens",
+                self.cull_expired_threepid_validation_tokens,
+            )
+
+        hs.get_clock().looping_call(start_cull, THIRTY_MINUTES_IN_MS)
 
     @defer.inlineCallbacks
     def _backgroud_update_set_deactivated_flag(self, progress, batch_size):
diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py
index 7b5b3b8c8d..f8e3007d67 100644
--- a/synapse/storage/stream.py
+++ b/synapse/storage/stream.py
@@ -41,12 +41,12 @@ from six.moves import range
 
 from twisted.internet import defer
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.engines import PostgresEngine
 from synapse.storage.events_worker import EventsWorkerStore
 from synapse.types import RoomStreamToken
 from synapse.util.caches.stream_change_cache import StreamChangeCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py
index b1188f6bcb..fd18619178 100644
--- a/synapse/storage/transactions.py
+++ b/synapse/storage/transactions.py
@@ -133,34 +133,6 @@ class TransactionStore(SQLBaseStore):
             desc="set_received_txn_response",
         )
 
-    def prep_send_transaction(self, transaction_id, destination, origin_server_ts):
-        """Persists an outgoing transaction and calculates the values for the
-        previous transaction id list.
-
-        This should be called before sending the transaction so that it has the
-        correct value for the `prev_ids` key.
-
-        Args:
-            transaction_id (str)
-            destination (str)
-            origin_server_ts (int)
-
-        Returns:
-            list: A list of previous transaction ids.
-        """
-        return defer.succeed([])
-
-    def delivered_txn(self, transaction_id, destination, code, response_dict):
-        """Persists the response for an outgoing transaction.
-
-        Args:
-            transaction_id (str)
-            destination (str)
-            code (int)
-            response_json (str)
-        """
-        pass
-
     @defer.inlineCallbacks
     def get_destination_retry_timings(self, destination):
         """Gets the current retry timings (if any) for a given destination.
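The other substantive change, made identically in `events.py` and `registration.py`, is that a `looping_call` timer no longer invokes the storage method directly: it calls a small wrapper which hands the work to `run_as_background_process`, so that the resulting database transactions have a logcontext to report to. Below is a minimal stand-alone sketch of that wrapping pattern, not Synapse code: `run_as_background_process_stub` is a hypothetical stand-in for Synapse's helper, and Twisted's `LoopingCall` stands in for `hs.get_clock().looping_call`.

```python
import logging

from twisted.internet import defer, reactor, task

logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger(__name__)


def run_as_background_process_stub(desc, func):
    """Hypothetical stand-in for run_as_background_process: name the task,
    run it, and log when it finishes."""
    logger.info("starting background process %r", desc)
    d = defer.maybeDeferred(func)

    def finished(result):
        logger.info("finished background process %r", desc)
        return result

    d.addBoth(finished)
    return d


class EventsStoreSketch:
    def __init__(self, interval=2.0):
        # As in the diff: register a wrapper with the looping call rather than
        # the raw database method, so every run has a context to report to.
        def read_forward_extremities():
            return run_as_background_process_stub(
                "read_forward_extremities", self._read_forward_extremities
            )

        self._looping_call = task.LoopingCall(read_forward_extremities)
        self._looping_call.start(interval)  # the real code passes 60 * 60 * 1000 ms

    def _read_forward_extremities(self):
        logger.info("reading forward extremities from the database")


if __name__ == "__main__":
    EventsStoreSketch()
    reactor.callLater(5, reactor.stop)  # run a few iterations, then exit
    reactor.run()
```

The point is only the indirection: the timer never sees the storage method itself, so the background-process machinery gets a chance to set up logging and metrics around each run.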