diff --git a/synapse/http/client.py b/synapse/http/client.py
index 3797545824..3cef747a4d 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -49,7 +49,6 @@ from synapse.http.proxyagent import ProxyAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import set_tag, start_active_span, tags
from synapse.util.async_helpers import timeout_deferred
-from synapse.util.caches import CACHE_SIZE_FACTOR
logger = logging.getLogger(__name__)
@@ -241,7 +240,10 @@ class SimpleHttpClient(object):
# tends to do so in batches, so we need to allow the pool to keep
# lots of idle connections around.
pool = HTTPConnectionPool(self.reactor)
- pool.maxPersistentPerHost = max((100 * CACHE_SIZE_FACTOR, 5))
+ # XXX: The justification for using the cache factor here is that larger instances
+ # will need both more cache and more connections.
+ # Still, this should probably be a separate dial.
+ pool.maxPersistentPerHost = max((100 * hs.config.caches.global_factor, 5))
pool.cachedConnectionTimeout = 2 * 60
self.agent = ProxyAgent(
@@ -359,6 +361,7 @@ class SimpleHttpClient(object):
actual_headers = {
b"Content-Type": [b"application/x-www-form-urlencoded"],
b"User-Agent": [self.user_agent],
+ b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers)
@@ -399,6 +402,7 @@ class SimpleHttpClient(object):
actual_headers = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
+ b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers)
@@ -434,6 +438,10 @@ class SimpleHttpClient(object):
ValueError: if the response was not JSON
"""
+ actual_headers = {b"Accept": [b"application/json"]}
+ if headers:
+ actual_headers.update(headers)
+
- body = yield self.get_raw(uri, args, headers=headers)
+ body = yield self.get_raw(uri, args, headers=actual_headers)
return json.loads(body)
@@ -467,6 +475,7 @@ class SimpleHttpClient(object):
actual_headers = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
+ b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers)
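
The Accept-header additions above all follow the same pattern: build a dict of default headers and let any caller-supplied headers override it key by key, which is also why the get_json hunk has to pass the merged actual_headers on to get_raw rather than the original headers argument. A minimal sketch of that merge under illustrative names (merge_headers and RawHeaders are not Synapse identifiers):

# Minimal sketch (not Synapse code) of the default-plus-override header merge
# used by the methods touched above.
from typing import Dict, List, Optional

RawHeaders = Dict[bytes, List[bytes]]


def merge_headers(caller_headers: Optional[RawHeaders] = None) -> RawHeaders:
    # Defaults first; anything the caller passes wins, key by key.
    actual_headers: RawHeaders = {b"Accept": [b"application/json"]}
    if caller_headers:
        actual_headers.update(caller_headers)
    return actual_headers


assert merge_headers() == {b"Accept": [b"application/json"]}
assert merge_headers({b"Accept": [b"text/html"]}) == {b"Accept": [b"text/html"]}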
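
The connection-pool change in the first hunk keeps the old sizing rule but reads the factor from hs.config.caches.global_factor instead of the CACHE_SIZE_FACTOR import dropped above. A quick sketch of the arithmetic, assuming the default global cache factor of 0.5 (the helper name is illustrative, not Synapse code):

def max_persistent_per_host(global_factor: float) -> float:
    # Scale with the global cache factor, but never drop below 5 idle
    # connections per host.
    return max(100 * global_factor, 5)


assert max_persistent_per_host(0.5) == 50  # default cache factor
assert max_persistent_per_host(0.02) == 5  # floor for very small factors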