author     Tim Leung <tim95@hotmail.co.uk>    2021-02-26 17:37:57 +0000
committer  GitHub <noreply@github.com>        2021-02-26 17:37:57 +0000
commit     ddb240293a3d7e0a903f322088e937d7e4f3de68 (patch)
tree       b3b840433330217df6d031d97a962be73d482269 /synapse/http
parent     SSO: redirect to public URL before setting cookies (#9436) (diff)
download   synapse-ddb240293a3d7e0a903f322088e937d7e4f3de68.tar.xz
Add support for no_proxy and case insensitive env variables (#9372)
### Changes proposed in this PR

- Add support for the `no_proxy` and `NO_PROXY` environment variables
  - Internally rely on urllib's [`proxy_bypass_environment`](https://github.com/python/cpython/blob/bdb941be423bde8b02a5695ccf51c303d6204bed/Lib/urllib/request.py#L2519)
- Extract env variables using urllib's `getproxies`/[`getproxies_environment`](https://github.com/python/cpython/blob/bdb941be423bde8b02a5695ccf51c303d6204bed/Lib/urllib/request.py#L2488), which supports lowercase + uppercase names, preferring lowercase, except for `HTTP_PROXY` in a CGI environment (a short sketch of these helpers follows below)
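
To illustrate, here is a minimal sketch (not part of the commit) of how these urllib helpers behave; the proxy hostnames and `no_proxy` entries are made up:

```python
import os
from urllib.request import getproxies_environment, proxy_bypass_environment

# Hypothetical environment, set inline purely for illustration.
os.environ["https_proxy"] = "http://lowercase-proxy:8080"
os.environ["HTTPS_PROXY"] = "http://uppercase-proxy:8080"
os.environ["no_proxy"] = "localhost,.example.com"

proxies = getproxies_environment()

# Lowercase wins when both are set (the CGI exception only applies to HTTP_PROXY).
print(proxies["https"])  # http://lowercase-proxy:8080

# The no_proxy value is exposed under the "no" key and matched as DNS suffixes.
print(proxy_bypass_environment("matrix.example.com", proxies={"no": proxies["no"]}))  # True
print(proxy_bypass_environment("matrix.org", proxies={"no": proxies["no"]}))          # False
```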

This does contain behaviour changes for consumers, so to call them out explicitly:
- `no_proxy`/`NO_PROXY` is now respected
- lowercase `https_proxy` is now allowed and takes precedence over `HTTPS_PROXY`

Related to #9306 which also uses `ProxyAgent`
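
For consumers, opting in now looks roughly like the following sketch (the reactor wiring and request URI are illustrative, not taken from this PR):

```python
from twisted.internet import reactor

from synapse.http.proxyagent import ProxyAgent

# Previously callers passed http_proxy/https_proxy values explicitly; with this
# change they opt in and ProxyAgent reads http_proxy, https_proxy and no_proxy
# (lower- or uppercase) from the environment at construction time.
agent = ProxyAgent(reactor, use_proxy=True)

# Hosts matched by no_proxy are connected to directly; everything else is
# routed via the discovered proxy endpoint.
d = agent.request(b"GET", b"https://matrix.org/_matrix/client/versions")
```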

Signed-off-by: Timothy Leung <tim95@hotmail.co.uk>
Diffstat (limited to 'synapse/http')
-rw-r--r--  synapse/http/client.py      10
-rw-r--r--  synapse/http/proxyagent.py  37
2 files changed, 37 insertions, 10 deletions
diff --git a/synapse/http/client.py b/synapse/http/client.py
index e54d9bd213..a910548f1e 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -289,8 +289,7 @@ class SimpleHttpClient:
         treq_args: Dict[str, Any] = {},
         ip_whitelist: Optional[IPSet] = None,
         ip_blacklist: Optional[IPSet] = None,
-        http_proxy: Optional[bytes] = None,
-        https_proxy: Optional[bytes] = None,
+        use_proxy: bool = False,
     ):
         """
         Args:
@@ -300,8 +299,8 @@ class SimpleHttpClient:
                 we may not request.
             ip_whitelist: The whitelisted IP addresses, that we can
                request if it were otherwise caught in a blacklist.
-            http_proxy: proxy server to use for http connections. host[:port]
-            https_proxy: proxy server to use for https connections. host[:port]
+            use_proxy: Whether proxy settings should be discovered and used
+                from conventional environment variables.
         """
         self.hs = hs
 
@@ -345,8 +344,7 @@ class SimpleHttpClient:
             connectTimeout=15,
             contextFactory=self.hs.get_http_client_context_factory(),
             pool=pool,
-            http_proxy=http_proxy,
-            https_proxy=https_proxy,
+            use_proxy=use_proxy,
         )
 
         if self._ip_blacklist:
diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py
index b730d2c634..3d553ae236 100644
--- a/synapse/http/proxyagent.py
+++ b/synapse/http/proxyagent.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 import logging
 import re
+from urllib.request import getproxies_environment, proxy_bypass_environment
 
 from zope.interface import implementer
 
@@ -58,6 +59,9 @@ class ProxyAgent(_AgentBase):
 
         pool (HTTPConnectionPool|None): connection pool to be used. If None, a
             non-persistent pool instance will be created.
+
+        use_proxy (bool): Whether proxy settings should be discovered and used
+            from conventional environment variables.
     """
 
     def __init__(
@@ -68,8 +72,7 @@ class ProxyAgent(_AgentBase):
         connectTimeout=None,
         bindAddress=None,
         pool=None,
-        http_proxy=None,
-        https_proxy=None,
+        use_proxy=False,
     ):
         _AgentBase.__init__(self, reactor, pool)
 
@@ -84,6 +87,15 @@ class ProxyAgent(_AgentBase):
         if bindAddress is not None:
             self._endpoint_kwargs["bindAddress"] = bindAddress
 
+        http_proxy = None
+        https_proxy = None
+        no_proxy = None
+        if use_proxy:
+            proxies = getproxies_environment()
+            http_proxy = proxies["http"].encode() if "http" in proxies else None
+            https_proxy = proxies["https"].encode() if "https" in proxies else None
+            no_proxy = proxies["no"] if "no" in proxies else None
+
         self.http_proxy_endpoint = _http_proxy_endpoint(
             http_proxy, self.proxy_reactor, **self._endpoint_kwargs
         )
@@ -92,6 +104,8 @@ class ProxyAgent(_AgentBase):
             https_proxy, self.proxy_reactor, **self._endpoint_kwargs
         )
 
+        self.no_proxy = no_proxy
+
         self._policy_for_https = contextFactory
         self._reactor = reactor
 
@@ -139,13 +153,28 @@ class ProxyAgent(_AgentBase):
         pool_key = (parsed_uri.scheme, parsed_uri.host, parsed_uri.port)
         request_path = parsed_uri.originForm
 
-        if parsed_uri.scheme == b"http" and self.http_proxy_endpoint:
+        should_skip_proxy = False
+        if self.no_proxy is not None:
+            should_skip_proxy = proxy_bypass_environment(
+                parsed_uri.host.decode(),
+                proxies={"no": self.no_proxy},
+            )
+
+        if (
+            parsed_uri.scheme == b"http"
+            and self.http_proxy_endpoint
+            and not should_skip_proxy
+        ):
             # Cache *all* connections under the same key, since we are only
             # connecting to a single destination, the proxy:
             pool_key = ("http-proxy", self.http_proxy_endpoint)
             endpoint = self.http_proxy_endpoint
             request_path = uri
-        elif parsed_uri.scheme == b"https" and self.https_proxy_endpoint:
+        elif (
+            parsed_uri.scheme == b"https"
+            and self.https_proxy_endpoint
+            and not should_skip_proxy
+        ):
             endpoint = HTTPConnectProxyEndpoint(
                 self.proxy_reactor,
                 self.https_proxy_endpoint,