diff --git a/synapse/http/client.py b/synapse/http/client.py
index 51765ae3c0..3cef747a4d 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -45,10 +45,10 @@ from synapse.http import (
cancelled_to_request_timed_out_error,
redact_uri,
)
+from synapse.http.proxyagent import ProxyAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import set_tag, start_active_span, tags
from synapse.util.async_helpers import timeout_deferred
-from synapse.util.caches import CACHE_SIZE_FACTOR
logger = logging.getLogger(__name__)
@@ -183,7 +183,15 @@ class SimpleHttpClient(object):
using HTTP in Matrix
"""
- def __init__(self, hs, treq_args={}, ip_whitelist=None, ip_blacklist=None):
+ def __init__(
+ self,
+ hs,
+ treq_args={},
+ ip_whitelist=None,
+ ip_blacklist=None,
+ http_proxy=None,
+ https_proxy=None,
+ ):
"""
Args:
hs (synapse.server.HomeServer)
@@ -192,6 +200,8 @@ class SimpleHttpClient(object):
we may not request.
ip_whitelist (netaddr.IPSet): The whitelisted IP addresses, that we can
request if it were otherwise caught in a blacklist.
+ http_proxy (bytes): proxy server to use for http connections. host[:port]
+ https_proxy (bytes): proxy server to use for https connections. host[:port]
"""
self.hs = hs
@@ -230,17 +240,19 @@ class SimpleHttpClient(object):
# tends to do so in batches, so we need to allow the pool to keep
# lots of idle connections around.
pool = HTTPConnectionPool(self.reactor)
- pool.maxPersistentPerHost = max((100 * CACHE_SIZE_FACTOR, 5))
+ # XXX: The justification for using the cache factor here is that larger instances
+ # will need both more cache and more connections.
+ # Still, this should probably be a separate dial
+ pool.maxPersistentPerHost = max((100 * hs.config.caches.global_factor, 5))
pool.cachedConnectionTimeout = 2 * 60
- # The default context factory in Twisted 14.0.0 (which we require) is
- # BrowserLikePolicyForHTTPS which will do regular cert validation
- # 'like a browser'
- self.agent = Agent(
+ self.agent = ProxyAgent(
self.reactor,
connectTimeout=15,
contextFactory=self.hs.get_http_client_context_factory(),
pool=pool,
+ http_proxy=http_proxy,
+ https_proxy=https_proxy,
)
if self._ip_blacklist:
@@ -327,7 +339,7 @@ class SimpleHttpClient(object):
Args:
uri (str):
args (dict[str, str|List[str]]): query params
- headers (dict[str, List[str]]|None): If not None, a map from
+ headers (dict[str|bytes, List[str|bytes]]|None): If not None, a map from
header name to a list of values for that header
Returns:
@@ -349,6 +361,7 @@ class SimpleHttpClient(object):
actual_headers = {
b"Content-Type": [b"application/x-www-form-urlencoded"],
b"User-Agent": [self.user_agent],
+ b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers)
@@ -371,7 +384,7 @@ class SimpleHttpClient(object):
Args:
uri (str):
post_json (object):
- headers (dict[str, List[str]]|None): If not None, a map from
+ headers (dict[str|bytes, List[str|bytes]]|None): If not None, a map from
header name to a list of values for that header
Returns:
@@ -389,6 +402,7 @@ class SimpleHttpClient(object):
actual_headers = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
+ b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers)
@@ -414,7 +428,7 @@ class SimpleHttpClient(object):
None.
**Note**: The value of each key is assumed to be an iterable
and *not* a string.
- headers (dict[str, List[str]]|None): If not None, a map from
+ headers (dict[str|bytes, List[str|bytes]]|None): If not None, a map from
header name to a list of values for that header
Returns:
Deferred: Succeeds when we get *any* 2xx HTTP response, with the
@@ -424,6 +438,10 @@ class SimpleHttpClient(object):
ValueError: if the response was not JSON
"""
+        actual_headers = {b"Accept": [b"application/json"]}
+        if headers:
+            actual_headers.update(headers)
+
-        body = yield self.get_raw(uri, args, headers=headers)
+        body = yield self.get_raw(uri, args, headers=actual_headers)
         return json.loads(body)
@@ -438,7 +456,7 @@ class SimpleHttpClient(object):
None.
**Note**: The value of each key is assumed to be an iterable
and *not* a string.
- headers (dict[str, List[str]]|None): If not None, a map from
+ headers (dict[str|bytes, List[str|bytes]]|None): If not None, a map from
header name to a list of values for that header
Returns:
Deferred: Succeeds when we get *any* 2xx HTTP response, with the
@@ -457,6 +475,7 @@ class SimpleHttpClient(object):
actual_headers = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
+ b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers)
@@ -482,7 +501,7 @@ class SimpleHttpClient(object):
None.
**Note**: The value of each key is assumed to be an iterable
and *not* a string.
- headers (dict[str, List[str]]|None): If not None, a map from
+ headers (dict[str|bytes, List[str|bytes]]|None): If not None, a map from
header name to a list of values for that header
Returns:
Deferred: Succeeds when we get *any* 2xx HTTP response, with the
@@ -516,7 +535,7 @@ class SimpleHttpClient(object):
Args:
url (str): The URL to GET
output_stream (file): File to write the response body to.
- headers (dict[str, List[str]]|None): If not None, a map from
+ headers (dict[str|bytes, List[str|bytes]]|None): If not None, a map from
header name to a list of values for that header
Returns:
A (int,dict,string,int) tuple of the file length, dict of the response
@@ -535,7 +554,7 @@ class SimpleHttpClient(object):
b"Content-Length" in resp_headers
and int(resp_headers[b"Content-Length"][0]) > max_size
):
- logger.warn("Requested URL is too large > %r bytes" % (self.max_size,))
+ logger.warning("Requested URL is too large > %r bytes" % (self.max_size,))
raise SynapseError(
502,
"Requested file is too large > %r bytes" % (self.max_size,),
@@ -543,7 +562,7 @@ class SimpleHttpClient(object):
)
if response.code > 299:
- logger.warn("Got %d when downloading %s" % (response.code, url))
+ logger.warning("Got %d when downloading %s" % (response.code, url))
raise SynapseError(502, "Got error %d" % (response.code,), Codes.UNKNOWN)
# TODO: if our Content-Type is HTML or something, just read the first
|