Diffstat (limited to 'synapse/http')
-rw-r--r--  synapse/http/client.py             |  11
-rw-r--r--  synapse/http/connectproxyclient.py |  96
-rw-r--r--  synapse/http/proxyagent.py         | 133
-rw-r--r--  synapse/http/servlet.py            |  67
4 files changed, 252 insertions(+), 55 deletions(-)
diff --git a/synapse/http/client.py b/synapse/http/client.py
index df7730078f..960d57fb27 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -289,8 +289,7 @@ class SimpleHttpClient:
         treq_args: Dict[str, Any] = {},
         ip_whitelist: Optional[IPSet] = None,
         ip_blacklist: Optional[IPSet] = None,
-        http_proxy: Optional[bytes] = None,
-        https_proxy: Optional[bytes] = None,
+        use_proxy: bool = False,
     ):
         """
         Args:
@@ -300,8 +299,8 @@ class SimpleHttpClient:
                 we may not request.
             ip_whitelist: The whitelisted IP addresses, that we can request if it were
                 otherwise caught in a blacklist.
-            http_proxy: proxy server to use for http connections. host[:port]
-            https_proxy: proxy server to use for https connections. host[:port]
+            use_proxy: Whether proxy settings should be discovered and used
+                from conventional environment variables.
         """
         self.hs = hs
 
@@ -341,11 +340,11 @@ class SimpleHttpClient:
 
         self.agent = ProxyAgent(
             self.reactor,
+            hs.get_reactor(),
             connectTimeout=15,
             contextFactory=self.hs.get_http_client_context_factory(),
             pool=pool,
-            http_proxy=http_proxy,
-            https_proxy=https_proxy,
+            use_proxy=use_proxy,
         )
 
         if self._ip_blacklist:
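The new use_proxy flag replaces the explicit http_proxy/https_proxy arguments: proxy settings are now discovered from the conventional environment variables when the client is constructed. A minimal sketch of how a caller might use it follows; the `hs` HomeServer instance and the proxy host/credentials are illustrative assumptions, not part of this patch:

    import os

    from synapse.http.client import SimpleHttpClient

    # Proxy discovery happens inside ProxyAgent at construction time, so the
    # environment must be set before the client is created.
    os.environ["https_proxy"] = "user:pass@proxy.example.com:3128"
    os.environ["no_proxy"] = "internal.example.com"

    # `hs` is assumed to be an already-initialised HomeServer (setup elided).
    client = SimpleHttpClient(hs, use_proxy=True)
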
diff --git a/synapse/http/connectproxyclient.py b/synapse/http/connectproxyclient.py
index 856e28454f..b797e3ce80 100644
--- a/synapse/http/connectproxyclient.py
+++ b/synapse/http/connectproxyclient.py
@@ -19,9 +19,10 @@
 from zope.interface import implementer
 
 from twisted.internet import defer, protocol
 from twisted.internet.error import ConnectError
-from twisted.internet.interfaces import IStreamClientEndpoint
-from twisted.internet.protocol import connectionDone
+from twisted.internet.interfaces import IReactorCore, IStreamClientEndpoint
+from twisted.internet.protocol import ClientFactory, Protocol, connectionDone
 from twisted.web import http
+from twisted.web.http_headers import Headers
 
 logger = logging.getLogger(__name__)
@@ -43,23 +44,33 @@ class HTTPConnectProxyEndpoint:
 
     Args:
         reactor: the Twisted reactor to use for the connection
-        proxy_endpoint (IStreamClientEndpoint): the endpoint to use to connect to the
-            proxy
-        host (bytes): hostname that we want to CONNECT to
-        port (int): port that we want to connect to
+        proxy_endpoint: the endpoint to use to connect to the proxy
+        host: hostname that we want to CONNECT to
+        port: port that we want to connect to
+        headers: Extra HTTP headers to include in the CONNECT request
     """
 
-    def __init__(self, reactor, proxy_endpoint, host, port):
+    def __init__(
+        self,
+        reactor: IReactorCore,
+        proxy_endpoint: IStreamClientEndpoint,
+        host: bytes,
+        port: int,
+        headers: Headers,
+    ):
         self._reactor = reactor
         self._proxy_endpoint = proxy_endpoint
         self._host = host
         self._port = port
+        self._headers = headers
 
     def __repr__(self):
         return "<HTTPConnectProxyEndpoint %s>" % (self._proxy_endpoint,)
 
-    def connect(self, protocolFactory):
-        f = HTTPProxiedClientFactory(self._host, self._port, protocolFactory)
+    def connect(self, protocolFactory: ClientFactory):
+        f = HTTPProxiedClientFactory(
+            self._host, self._port, protocolFactory, self._headers
+        )
         d = self._proxy_endpoint.connect(f)
         # once the tcp socket connects successfully, we need to wait for the
         # CONNECT to complete.
@@ -74,15 +85,23 @@ class HTTPProxiedClientFactory(protocol.ClientFactory):
     HTTP Protocol object and run the rest of the connection.
 
     Args:
-        dst_host (bytes): hostname that we want to CONNECT to
-        dst_port (int): port that we want to connect to
-        wrapped_factory (protocol.ClientFactory): The original Factory
+        dst_host: hostname that we want to CONNECT to
+        dst_port: port that we want to connect to
+        wrapped_factory: The original Factory
+        headers: Extra HTTP headers to include in the CONNECT request
     """
 
-    def __init__(self, dst_host, dst_port, wrapped_factory):
+    def __init__(
+        self,
+        dst_host: bytes,
+        dst_port: int,
+        wrapped_factory: ClientFactory,
+        headers: Headers,
+    ):
         self.dst_host = dst_host
         self.dst_port = dst_port
         self.wrapped_factory = wrapped_factory
+        self.headers = headers
         self.on_connection = defer.Deferred()
 
     def startedConnecting(self, connector):
@@ -92,7 +111,11 @@
         wrapped_protocol = self.wrapped_factory.buildProtocol(addr)
         return HTTPConnectProtocol(
-            self.dst_host, self.dst_port, wrapped_protocol, self.on_connection
+            self.dst_host,
+            self.dst_port,
+            wrapped_protocol,
+            self.on_connection,
+            self.headers,
         )
 
     def clientConnectionFailed(self, connector, reason):
@@ -112,24 +135,37 @@ class HTTPConnectProtocol(protocol.Protocol):
     """Protocol that wraps an existing Protocol to do a CONNECT handshake at connect
 
     Args:
-        host (bytes): The original HTTP(s) hostname or IPv4 or IPv6 address literal
+        host: The original HTTP(s) hostname or IPv4 or IPv6 address literal
             to put in the CONNECT request
 
-        port (int): The original HTTP(s) port to put in the CONNECT request
+        port: The original HTTP(s) port to put in the CONNECT request
 
-        wrapped_protocol (interfaces.IProtocol): the original protocol (probably
-            HTTPChannel or TLSMemoryBIOProtocol, but could be anything really)
+        wrapped_protocol: the original protocol (probably HTTPChannel or
+            TLSMemoryBIOProtocol, but could be anything really)
 
-        connected_deferred (Deferred): a Deferred which will be callbacked with
+        connected_deferred: a Deferred which will be callbacked with
             wrapped_protocol when the CONNECT completes
+
+        headers: Extra HTTP headers to include in the CONNECT request
     """
 
-    def __init__(self, host, port, wrapped_protocol, connected_deferred):
+    def __init__(
+        self,
+        host: bytes,
+        port: int,
+        wrapped_protocol: Protocol,
+        connected_deferred: defer.Deferred,
+        headers: Headers,
+    ):
         self.host = host
         self.port = port
         self.wrapped_protocol = wrapped_protocol
         self.connected_deferred = connected_deferred
-        self.http_setup_client = HTTPConnectSetupClient(self.host, self.port)
+        self.headers = headers
+
+        self.http_setup_client = HTTPConnectSetupClient(
+            self.host, self.port, self.headers
+        )
         self.http_setup_client.on_connected.addCallback(self.proxyConnected)
 
     def connectionMade(self):
@@ -154,7 +190,7 @@
         if buf:
             self.wrapped_protocol.dataReceived(buf)
 
-    def dataReceived(self, data):
+    def dataReceived(self, data: bytes):
         # if we've set up the HTTP protocol, we can send the data there
         if self.wrapped_protocol.connected:
             return self.wrapped_protocol.dataReceived(data)
@@ -168,21 +204,29 @@ class HTTPConnectSetupClient(http.HTTPClient):
     """HTTPClient protocol to send a CONNECT message for proxies and read the response.
 
     Args:
-        host (bytes): The hostname to send in the CONNECT message
-        port (int): The port to send in the CONNECT message
+        host: The hostname to send in the CONNECT message
+        port: The port to send in the CONNECT message
+        headers: Extra headers to send with the CONNECT message
     """
 
-    def __init__(self, host, port):
+    def __init__(self, host: bytes, port: int, headers: Headers):
         self.host = host
         self.port = port
+        self.headers = headers
         self.on_connected = defer.Deferred()
 
     def connectionMade(self):
         logger.debug("Connected to proxy, sending CONNECT")
         self.sendCommand(b"CONNECT", b"%s:%d" % (self.host, self.port))
+
+        # Send any additional specified headers
+        for name, values in self.headers.getAllRawHeaders():
+            for value in values:
+                self.sendHeader(name, value)
+
         self.endHeaders()
 
-    def handleStatus(self, version, status, message):
+    def handleStatus(self, version: bytes, status: bytes, message: bytes):
         logger.debug("Got Status: %s %s %s", status, message, version)
         if status != b"200":
             raise ProxyConnectError("Unexpected status on CONNECT: %s" % status)
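With these changes the CONNECT handshake can carry extra headers, which is what lets a Proxy-Authorization header reach an authenticating proxy. A rough sketch of using the endpoint on its own, outside ProxyAgent; the hostnames, port and base64 credential value are made up for illustration:

    from twisted.internet import reactor
    from twisted.internet.endpoints import HostnameEndpoint
    from twisted.web.http_headers import Headers

    from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint

    # Endpoint for the proxy itself.
    proxy_endpoint = HostnameEndpoint(reactor, b"proxy.example.com", 3128)

    # Any headers given here are written verbatim on the CONNECT request.
    connect_headers = Headers()
    connect_headers.addRawHeader(b"Proxy-Authorization", b"Basic dXNlcjpwYXNz")

    endpoint = HTTPConnectProxyEndpoint(
        reactor, proxy_endpoint, b"destination.example.com", 443, connect_headers
    )
    # endpoint.connect(some_client_factory) would then tunnel through the proxy,
    # failing with ProxyConnectError if the proxy does not answer 200.
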
diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py
index e32d3f43e0..ee65a6668b 100644
--- a/synapse/http/proxyagent.py
+++ b/synapse/http/proxyagent.py
@@ -12,9 +12,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import base64
 import logging
 import re
+from typing import Optional, Tuple
+from urllib.request import getproxies_environment, proxy_bypass_environment
 
+import attr
 from zope.interface import implementer
 
 from twisted.internet import defer
@@ -22,6 +26,7 @@ from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
 from twisted.python.failure import Failure
 from twisted.web.client import URI, BrowserLikePolicyForHTTPS, _AgentBase
 from twisted.web.error import SchemeNotSupported
+from twisted.web.http_headers import Headers
 from twisted.web.iweb import IAgent
 
 from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint
@@ -31,6 +36,22 @@ logger = logging.getLogger(__name__)
 _VALID_URI = re.compile(br"\A[\x21-\x7e]+\Z")
 
 
+@attr.s
+class ProxyCredentials:
+    username_password = attr.ib(type=bytes)
+
+    def as_proxy_authorization_value(self) -> bytes:
+        """
+        Return the value for a Proxy-Authorization header (i.e. 'Basic abdef==').
+
+        Returns:
+            A transformation of the authentication string the encoded value for
+            a Proxy-Authorization header.
+        """
+        # Encode as base64 and prepend the authorization type
+        return b"Basic " + base64.encodebytes(self.username_password)
+
+
 @implementer(IAgent)
 class ProxyAgent(_AgentBase):
     """An Agent implementation which will use an HTTP proxy if one was requested
@@ -39,6 +60,10 @@ class ProxyAgent(_AgentBase):
         reactor: twisted reactor to place outgoing
             connections.
 
+        proxy_reactor: twisted reactor to use for connections to the proxy server
+            reactor might have some blacklisting applied (i.e. for DNS queries),
+            but we need unblocked access to the proxy.
+
         contextFactory (IPolicyForHTTPS): A factory for TLS contexts, to control the
             verification parameters of OpenSSL.  The default is to use a
             `BrowserLikePolicyForHTTPS`, so unless you have special
@@ -54,34 +79,56 @@
 
         pool (HTTPConnectionPool|None): connection pool to be used. If None, a
             non-persistent pool instance will be created.
+
+        use_proxy (bool): Whether proxy settings should be discovered and used
+            from conventional environment variables.
     """
 
     def __init__(
         self,
         reactor,
+        proxy_reactor=None,
         contextFactory=BrowserLikePolicyForHTTPS(),
         connectTimeout=None,
         bindAddress=None,
         pool=None,
-        http_proxy=None,
-        https_proxy=None,
+        use_proxy=False,
    ):
         _AgentBase.__init__(self, reactor, pool)
 
+        if proxy_reactor is None:
+            self.proxy_reactor = reactor
+        else:
+            self.proxy_reactor = proxy_reactor
+
         self._endpoint_kwargs = {}
         if connectTimeout is not None:
             self._endpoint_kwargs["timeout"] = connectTimeout
         if bindAddress is not None:
             self._endpoint_kwargs["bindAddress"] = bindAddress
 
+        http_proxy = None
+        https_proxy = None
+        no_proxy = None
+        if use_proxy:
+            proxies = getproxies_environment()
+            http_proxy = proxies["http"].encode() if "http" in proxies else None
+            https_proxy = proxies["https"].encode() if "https" in proxies else None
+            no_proxy = proxies["no"] if "no" in proxies else None
+
+        # Parse credentials from https proxy connection string if present
+        self.https_proxy_creds, https_proxy = parse_username_password(https_proxy)
+
         self.http_proxy_endpoint = _http_proxy_endpoint(
-            http_proxy, reactor, **self._endpoint_kwargs
+            http_proxy, self.proxy_reactor, **self._endpoint_kwargs
         )
 
         self.https_proxy_endpoint = _http_proxy_endpoint(
-            https_proxy, reactor, **self._endpoint_kwargs
+            https_proxy, self.proxy_reactor, **self._endpoint_kwargs
         )
 
+        self.no_proxy = no_proxy
+
         self._policy_for_https = contextFactory
 
         self._reactor = reactor
@@ -129,18 +176,43 @@
         pool_key = (parsed_uri.scheme, parsed_uri.host, parsed_uri.port)
         request_path = parsed_uri.originForm
 
-        if parsed_uri.scheme == b"http" and self.http_proxy_endpoint:
+        should_skip_proxy = False
+        if self.no_proxy is not None:
+            should_skip_proxy = proxy_bypass_environment(
+                parsed_uri.host.decode(), proxies={"no": self.no_proxy},
+            )
+
+        if (
+            parsed_uri.scheme == b"http"
+            and self.http_proxy_endpoint
+            and not should_skip_proxy
+        ):
             # Cache *all* connections under the same key, since we are only
             # connecting to a single destination, the proxy:
             pool_key = ("http-proxy", self.http_proxy_endpoint)
             endpoint = self.http_proxy_endpoint
             request_path = uri
-        elif parsed_uri.scheme == b"https" and self.https_proxy_endpoint:
+        elif (
+            parsed_uri.scheme == b"https"
+            and self.https_proxy_endpoint
+            and not should_skip_proxy
+        ):
+            connect_headers = Headers()
+
+            # Determine whether we need to set Proxy-Authorization headers
+            if self.https_proxy_creds:
+                # Set a Proxy-Authorization header
+                connect_headers.addRawHeader(
+                    b"Proxy-Authorization",
+                    self.https_proxy_creds.as_proxy_authorization_value(),
+                )
+
             endpoint = HTTPConnectProxyEndpoint(
-                self._reactor,
+                self.proxy_reactor,
                 self.https_proxy_endpoint,
                 parsed_uri.host,
                 parsed_uri.port,
+                headers=connect_headers,
             )
         else:
             # not using a proxy
@@ -169,12 +241,16 @@
         )
 
 
-def _http_proxy_endpoint(proxy, reactor, **kwargs):
+def _http_proxy_endpoint(proxy: Optional[bytes], reactor, **kwargs):
     """Parses an http proxy setting and returns an endpoint for the proxy
 
     Args:
-        proxy (bytes|None): the proxy setting
+        proxy: the proxy setting in the form: [<username>:<password>@]<host>[:<port>]
+            Note that compared to other apps, this function currently lacks support
+            for specifying a protocol schema (i.e. protocol://...).
+        reactor: reactor to be used to connect to the proxy
+        kwargs: other args to be passed to HostnameEndpoint
 
     Returns:
@@ -184,16 +260,43 @@
     if proxy is None:
         return None
 
-    # currently we only support hostname:port. Some apps also support
-    # protocol://<host>[:port], which allows a way of requiring a TLS connection to the
-    # proxy.
-
+    # Parse the connection string
     host, port = parse_host_port(proxy, default_port=1080)
     return HostnameEndpoint(reactor, host, port, **kwargs)
 
 
-def parse_host_port(hostport, default_port=None):
-    # could have sworn we had one of these somewhere else...
+def parse_username_password(proxy: bytes) -> Tuple[Optional[ProxyCredentials], bytes]:
+    """
+    Parses the username and password from a proxy declaration e.g
+    username:password@hostname:port.
+
+    Args:
+        proxy: The proxy connection string.
+
+    Returns
+        An instance of ProxyCredentials and the proxy connection string with any credentials
+        stripped, i.e u:p@host:port -> host:port. If no credentials were found, the
+        ProxyCredentials instance is replaced with None.
+    """
+    if proxy and b"@" in proxy:
+        # We use rsplit here as the password could contain an @ character
+        credentials, proxy_without_credentials = proxy.rsplit(b"@", 1)
+        return ProxyCredentials(credentials), proxy_without_credentials
+
+    return None, proxy
+
+
+def parse_host_port(hostport: bytes, default_port: int = None) -> Tuple[bytes, int]:
+    """
+    Parse the hostname and port from a proxy connection byte string.
+
+    Args:
+        hostport: The proxy connection string. Must be in the form 'host[:port]'.
+        default_port: The default port to return if one is not found in `hostport`.
+
+    Returns:
+        A tuple containing the hostname and port. Uses `default_port` if one was not found.
+    """
     if b":" in hostport:
         host, port = hostport.rsplit(b":", 1)
         try:
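Taken together, ProxyAgent now reads http_proxy/https_proxy/no_proxy style environment variables, honours no_proxy bypasses, and sends a Proxy-Authorization header on the CONNECT when credentials are embedded in the https proxy string. A hedged usage sketch; the hosts, credentials and target URL below are invented for the example:

    import os

    from twisted.internet import reactor
    from twisted.web.client import readBody

    from synapse.http.proxyagent import ProxyAgent

    # The environment is inspected once, at construction time.
    os.environ["http_proxy"] = "proxy.example.com:8888"
    os.environ["https_proxy"] = "user:secret@proxy.example.com:8888"
    os.environ["no_proxy"] = "internal.example.com"

    agent = ProxyAgent(reactor, use_proxy=True)

    # internal.example.com matches no_proxy and is contacted directly; other
    # https requests are tunnelled via CONNECT with a Proxy-Authorization header.
    d = agent.request(b"GET", b"https://matrix.org/")
    d.addCallback(readBody)
    # (In real code the reactor must be running for the Deferred to fire.)
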
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
index b361b7cbaf..9bfe151b5f 100644
--- a/synapse/http/servlet.py
+++ b/synapse/http/servlet.py
@@ -14,8 +14,8 @@
 # limitations under the License.
 
 """ This module contains base REST classes for constructing REST servlets. """
-
 import logging
+from typing import Dict, List, Optional, Union
 
 from synapse.api.errors import Codes, SynapseError
 from synapse.util import json_decoder
@@ -147,16 +147,67 @@ def parse_string(
     )
 
 
+def parse_list_from_args(
+    args: Dict[bytes, List[bytes]],
+    name: Union[bytes, str],
+    encoding: Optional[str] = "ascii",
+):
+    """Parse and optionally decode a list of values from request query parameters.
+
+    Args:
+        args: A dictionary of query parameters from a request.
+        name: The name of the query parameter to extract values from. If given as bytes,
+            will be decoded as "ascii".
+        encoding: An optional encoding that is used to decode each parameter value with.
+
+    Raises:
+        KeyError: If the given `name` does not exist in `args`.
+        SynapseError: If an argument was not encoded with the specified `encoding`.
+    """
+    if not isinstance(name, bytes):
+        name = name.encode("ascii")
+    args_list = args[name]
+
+    if encoding:
+        # Decode each argument value
+        try:
+            args_list = [value.decode(encoding) for value in args_list]
+        except ValueError:
+            raise SynapseError(400, "Query parameter %r must be %s" % (name, encoding))
+
+    return args_list
+
+
 def parse_string_from_args(
-    args,
-    name,
-    default=None,
-    required=False,
-    allowed_values=None,
-    param_type="string",
-    encoding="ascii",
+    args: Dict[bytes, List[bytes]],
+    name: Union[bytes, str],
+    default: Optional[str] = None,
+    required: Optional[bool] = False,
+    allowed_values: Optional[List[bytes]] = None,
+    param_type: Optional[str] = "string",
+    encoding: Optional[str] = "ascii",
 ):
+    """Parse and optionally decode a single value from request query parameters.
+    Args:
+        args: A dictionary of query parameters from a request.
+        name: The name of the query parameter to extract values from. If given as bytes,
+            will be decoded as "ascii".
+        default: A default value to return if the given argument `name` was not found.
+        required: If this is True, no `default` is provided and the given argument `name`
+            was not found then a SynapseError is raised.
+        allowed_values: A list of allowed values. If specified and the found str is
+            not in this list, a SynapseError is raised.
+        param_type: The expected type of the query parameter's value.
+        encoding: An optional encoding that is used to decode each parameter value with.
+
+    Returns:
+        The found argument value.
+
+    Raises:
+        SynapseError: If the given name was not found in the request arguments,
+            the argument's values were encoded incorrectly or a required value was missing.
+    """
     if not isinstance(name, bytes):
         name = name.encode("ascii")
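The servlet changes add parse_list_from_args and annotate/document parse_string_from_args. A small sketch of how a request's query parameters flow through the two helpers; the args dict below mimics Twisted's request.args and is invented for the example:

    from synapse.http.servlet import parse_list_from_args, parse_string_from_args

    # ?room_id=!a:example.com&room_id=!b:example.com&sort=name arrives as:
    args = {
        b"room_id": [b"!a:example.com", b"!b:example.com"],
        b"sort": [b"name"],
    }

    # All values of a repeated parameter, decoded as ascii by default.
    room_ids = parse_list_from_args(args, "room_id")

    # A single value, with a fallback if the parameter is absent.
    sort = parse_string_from_args(args, "sort", default="joined")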