Diffstat (limited to 'synapse')
 synapse/__init__.py                                |  2 +-
 synapse/config/saml2_config.py                     | 22 ++++++++
 synapse/metrics/__init__.py                        | 10 ++++
 synapse/rest/client/v2_alpha/account.py            | 13 +++++
 synapse/rest/client/v2_alpha/register.py           |  9 +++
 synapse/storage/database.py                        |  7 ++-
 synapse/storage/databases/main/event_federation.py | 82 ++++++++++++++----
 7 files changed, 131 insertions(+), 14 deletions(-)
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 3e1df2b035..537f2239e5 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -48,7 +48,7 @@ try:
 except ImportError:
     pass
 
-__version__ = "1.22.1"
+__version__ = "1.23.0rc1"
 
 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
     # We import here so that we don't have to install a bunch of deps when
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 778750f43b..2ff7dfb311 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -271,6 +271,28 @@ class SAML2Config(Config):
           #description: ["My awesome SP", "en"]
           #name: ["Test SP", "en"]
 
+          #ui_info:
+          #  display_name:
+          #    - lang: en
+          #      text: "Display Name is the descriptive name of your service."
+          #  description:
+          #    - lang: en
+          #      text: "Description should be a short paragraph explaining the purpose of the service."
+          #  information_url:
+          #    - lang: en
+          #      text: "https://example.com/terms-of-service"
+          #  privacy_statement_url:
+          #    - lang: en
+          #      text: "https://example.com/privacy-policy"
+          #  keywords:
+          #    - lang: en
+          #      text: ["Matrix", "Element"]
+          #  logo:
+          #    - lang: en
+          #      text: "https://example.com/logo.svg"
+          #      width: "200"
+          #      height: "80"
+
           #organization:
           #  name: Example com
           #  display_name:
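For orientation, the lang/text pairs in the sample above follow the shape used for the SAML metadata mdui:UIInfo extension, which advertises per-language names, descriptions, and logos for the SP to identity providers. A sketch of what the sample parses to as a Python dict (values are the sample's own placeholders; how Synapse nests this inside sp_config is per the surrounding config, not shown here):

    # Hypothetical parsed form of the sample ui_info YAML above. Each key
    # maps to a list of per-language entries; the logo entry additionally
    # carries string-valued width/height, as in the sample.
    ui_info = {
        "display_name": [
            {"lang": "en", "text": "Display Name is the descriptive name of your service."}
        ],
        "logo": [
            {
                "lang": "en",
                "text": "https://example.com/logo.svg",
                "width": "200",
                "height": "80",
            }
        ],
    }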
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index b8d2a8e8a9..cbf0dbb871 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -502,6 +502,16 @@ build_info.labels(
 
 last_ticked = time.time()
 
+# 3PID send info
+threepid_send_requests = Histogram(
+    "synapse_threepid_send_requests_with_tries",
+    documentation="Number of requests for a 3pid token by try count. Note if"
+    " there is a request with try count of 4, then there would have been one"
+    " each for 1, 2 and 3",
+    buckets=(1, 2, 3, 4, 5, 10),
+    labelnames=("type", "reason"),
+)
+
 
 class ReactorLastSeenMetric:
     def collect(self):
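Because each request observes its own send_attempt, an attempt count of N lands in every bucket with an upper bound of at least N, so the cumulative bucket counts directly answer "how many requests were at try count N or below". A standalone sketch using prometheus_client outside Synapse (the label values mirror the servlets changed below):

    from prometheus_client import Histogram

    threepid_send_requests = Histogram(
        "synapse_threepid_send_requests_with_tries",
        documentation="Number of requests for a 3pid token by try count",
        buckets=(1, 2, 3, 4, 5, 10),
        labelnames=("type", "reason"),
    )

    # A user who needed three attempts to validate their email during
    # registration produces three requests, one per try count:
    for send_attempt in (1, 2, 3):
        threepid_send_requests.labels(type="email", reason="register").observe(
            send_attempt
        )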
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index 17cd87f596..f5cd397493 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -39,6 +39,7 @@ from synapse.http.servlet import (
     parse_json_object_from_request,
     parse_string,
 )
+from synapse.metrics import threepid_send_requests
 from synapse.push.mailer import Mailer
 from synapse.types import UserID
 from synapse.util.msisdn import phone_number_to_msisdn
@@ -145,6 +146,10 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
             # Wrap the session id in a JSON object
             ret = {"sid": sid}
 
+        threepid_send_requests.labels(type="email", reason="password_reset").observe(
+            send_attempt
+        )
+
         return 200, ret
 
 
@@ -441,6 +446,10 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
             # Wrap the session id in a JSON object
             ret = {"sid": sid}
 
+        threepid_send_requests.labels(type="email", reason="add_threepid").observe(
+            send_attempt
+        )
+
         return 200, ret
 
 
@@ -511,6 +520,10 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
                 next_link,
             )
 
+        threepid_send_requests.labels(type="msisdn", reason="add_threepid").observe(
+            send_attempt
+        )
+
         return 200, ret
 
 
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index 0725aa4512..0e54489b5b 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -47,6 +47,7 @@ from synapse.http.servlet import (
     parse_json_object_from_request,
     parse_string,
 )
+from synapse.metrics import threepid_send_requests
 from synapse.push.mailer import Mailer
 from synapse.util.msisdn import phone_number_to_msisdn
 from synapse.util.ratelimitutils import FederationRateLimiter
@@ -165,6 +166,10 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
             # Wrap the session id in a JSON object
             ret = {"sid": sid}
 
+        threepid_send_requests.labels(type="email", reason="register").observe(
+            send_attempt
+        )
+
         return 200, ret
 
 
@@ -238,6 +243,10 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet):
                 next_link,
             )
 
+        threepid_send_requests.labels(type="msisdn", reason="register").observe(
+            send_attempt
+        )
+
         return 200, ret
 
 
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index a0572b2952..d1b5760c2c 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -88,13 +88,18 @@ def make_pool(
     """Get the connection pool for the database.
     """
 
+    # By default enable `cp_reconnect`. We need to fiddle with db_args in case
+    # someone has explicitly set `cp_reconnect`.
+    db_args = dict(db_config.config.get("args", {}))
+    db_args.setdefault("cp_reconnect", True)
+
     return adbapi.ConnectionPool(
         db_config.config["name"],
         cp_reactor=reactor,
         cp_openfun=lambda conn: engine.on_new_connection(
             LoggingDatabaseConnection(conn, engine, "on_new_connection")
         ),
-        **db_config.config.get("args", {}),
+        **db_args,
     )
 
 
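The copy-then-setdefault dance is what turns cp_reconnect on by default while still honouring an operator who explicitly disabled it in the database args; copying first also avoids leaking the injected default back into the parsed config. A minimal sketch of the semantics (the example args dicts are hypothetical):

    # setdefault only inserts the key when it is absent, so an explicit
    # operator choice always wins over the injected default.
    explicit = {"cp_min": 5, "cp_reconnect": False}  # operator opted out
    implicit = {"cp_min": 5}                         # operator said nothing

    for args in (explicit, implicit):
        db_args = dict(args)  # copy so the parsed config isn't mutated
        db_args.setdefault("cp_reconnect", True)
        print(db_args["cp_reconnect"])  # -> False, then True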
diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py
index a6279a6c13..2e07c37340 100644
--- a/synapse/storage/databases/main/event_federation.py
+++ b/synapse/storage/databases/main/event_federation.py
@@ -26,6 +26,7 @@ from synapse.storage.databases.main.events_worker import EventsWorkerStore
 from synapse.storage.databases.main.signatures import SignatureWorkerStore
 from synapse.types import Collection
 from synapse.util.caches.descriptors import cached
+from synapse.util.caches.lrucache import LruCache
 from synapse.util.iterutils import batch_iter
 
 logger = logging.getLogger(__name__)
@@ -40,6 +41,11 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
             self._delete_old_forward_extrem_cache, 60 * 60 * 1000
         )
 
+        # Cache of event ID to list of auth event IDs and their depths.
+        self._event_auth_cache = LruCache(
+            500000, "_event_auth_cache", size_callback=len
+        )  # type: LruCache[str, List[Tuple[str, int]]]
+
     async def get_auth_chain(
         self, event_ids: Collection[str], include_given: bool = False
     ) -> List[EventBase]:
@@ -84,17 +90,45 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
         else:
             results = set()
 
-        base_sql = "SELECT DISTINCT auth_id FROM event_auth WHERE "
+        # We pull out the depth simply so that we can populate the
+        # `_event_auth_cache` cache.
+        base_sql = """
+            SELECT a.event_id, auth_id, depth
+            FROM event_auth AS a
+            INNER JOIN events AS e ON (e.event_id = a.auth_id)
+            WHERE
+        """
 
         front = set(event_ids)
         while front:
             new_front = set()
             for chunk in batch_iter(front, 100):
-                clause, args = make_in_list_sql_clause(
-                    txn.database_engine, "event_id", chunk
-                )
-                txn.execute(base_sql + clause, args)
-                new_front.update(r[0] for r in txn)
+                # Pull the auth events either from the cache or DB.
+                to_fetch = []  # Event IDs to fetch from DB  # type: List[str]
+                for event_id in chunk:
+                    res = self._event_auth_cache.get(event_id)
+                    if res is None:
+                        to_fetch.append(event_id)
+                    else:
+                        new_front.update(auth_id for auth_id, depth in res)
+
+                if to_fetch:
+                    clause, args = make_in_list_sql_clause(
+                        txn.database_engine, "a.event_id", to_fetch
+                    )
+                    txn.execute(base_sql + clause, args)
+
+                    # Note we need to batch up the results by event ID before
+                    # adding to the cache.
+                    to_cache = {}
+                    for event_id, auth_event_id, auth_event_depth in txn:
+                        to_cache.setdefault(event_id, []).append(
+                            (auth_event_id, auth_event_depth)
+                        )
+                        new_front.add(auth_event_id)
+
+                    for event_id, auth_events in to_cache.items():
+                        self._event_auth_cache.set(event_id, auth_events)
 
             new_front -= results
 
@@ -213,14 +247,38 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
                 break
 
             # Fetch the auth events and their depths of the N last events we're
-            # currently walking
+            # currently walking, either from cache or DB.
             search, chunk = search[:-100], search[-100:]
 
-            clause, args = make_in_list_sql_clause(
-                txn.database_engine, "a.event_id", [e_id for _, e_id in chunk]
-            )
-            txn.execute(base_sql + clause, args)
-            for event_id, auth_event_id, auth_event_depth in txn:
+            found = []  # Results found  # type: List[Tuple[str, str, int]]
+            to_fetch = []  # Event IDs to fetch from DB  # type: List[str]
+            for _, event_id in chunk:
+                res = self._event_auth_cache.get(event_id)
+                if res is None:
+                    to_fetch.append(event_id)
+                else:
+                    found.extend((event_id, auth_id, depth) for auth_id, depth in res)
+
+            if to_fetch:
+                clause, args = make_in_list_sql_clause(
+                    txn.database_engine, "a.event_id", to_fetch
+                )
+                txn.execute(base_sql + clause, args)
+
+                # We parse the results and add them to the `found` set and the
+                # cache (note we need to batch up the results by event ID before
+                # adding to the cache).
+                to_cache = {}
+                for event_id, auth_event_id, auth_event_depth in txn:
+                    to_cache.setdefault(event_id, []).append(
+                        (auth_event_id, auth_event_depth)
+                    )
+                    found.append((event_id, auth_event_id, auth_event_depth))
+
+                for event_id, auth_events in to_cache.items():
+                    self._event_auth_cache.set(event_id, auth_events)
+
+            for event_id, auth_event_id, auth_event_depth in found:
                 event_to_auth_events.setdefault(event_id, set()).add(auth_event_id)
 
                 sets = event_to_missing_sets.get(auth_event_id)
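Both loops above follow the same cache-aside pattern: consult the per-event LRU cache, batch the misses into a single IN-list query, then group the returned rows by event ID before writing back, so each cache entry holds that event's complete auth list (caching row by row would store partial lists). A standalone sketch of the pattern, with a plain dict standing in for LruCache and a hypothetical fetch_rows callable in place of the SQL:

    from typing import Callable, Dict, Iterable, List, Tuple

    cache: Dict[str, List[Tuple[str, int]]] = {}  # event_id -> [(auth_id, depth)]

    def get_auth_events(
        event_ids: Iterable[str],
        fetch_rows: Callable[[List[str]], Iterable[Tuple[str, str, int]]],
    ) -> List[Tuple[str, str, int]]:
        """Return (event_id, auth_id, depth) triples, serving hits from cache."""
        found: List[Tuple[str, str, int]] = []
        to_fetch: List[str] = []
        for event_id in event_ids:
            res = cache.get(event_id)
            if res is None:
                to_fetch.append(event_id)
            else:
                found.extend((event_id, a, d) for a, d in res)

        if to_fetch:
            # Group rows by event ID before caching, so an entry is only
            # ever the full auth list for that event.
            to_cache: Dict[str, List[Tuple[str, int]]] = {}
            for event_id, auth_id, depth in fetch_rows(to_fetch):
                to_cache.setdefault(event_id, []).append((auth_id, depth))
                found.append((event_id, auth_id, depth))
            cache.update(to_cache)

        return found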