diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index 8b5ad4574f..686012dd25 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -17,6 +17,7 @@ import logging
from mock import Mock
import treq
+from netaddr import IPSet
from service_identity import VerificationError
from zope.interface import implementer
@@ -35,6 +36,7 @@ from synapse.crypto.context_factory import FederationPolicyForHTTPS
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
from synapse.http.federation.srv_resolver import Server
from synapse.http.federation.well_known_resolver import (
+ WELL_KNOWN_MAX_SIZE,
WellKnownResolver,
_cache_period_from_headers,
)
@@ -103,6 +105,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
reactor=self.reactor,
tls_client_options_factory=self.tls_factory,
user_agent="test-agent", # Note that this is unused since _well_known_resolver is provided.
+ ip_blacklist=IPSet(),
_srv_resolver=self.mock_resolver,
_well_known_resolver=self.well_known_resolver,
)
@@ -736,6 +739,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
reactor=self.reactor,
tls_client_options_factory=tls_factory,
user_agent=b"test-agent", # This is unused since _well_known_resolver is passed below.
+ ip_blacklist=IPSet(),
_srv_resolver=self.mock_resolver,
_well_known_resolver=WellKnownResolver(
self.reactor,
@@ -1091,7 +1095,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
# Expire both caches and repeat the request
self.reactor.pump((10000.0,))
- # Repated the request, this time it should fail if the lookup fails.
+        # Repeat the request; this time it should fail if the lookup fails.
fetch_d = defer.ensureDeferred(
self.well_known_resolver.get_well_known(b"testserv")
)
@@ -1104,6 +1108,32 @@ class MatrixFederationAgentTests(unittest.TestCase):
r = self.successResultOf(fetch_d)
self.assertEqual(r.delegated_server, None)
+ def test_well_known_too_large(self):
+ """A well-known query that returns a result which is too large should be rejected."""
+ self.reactor.lookups["testserv"] = "1.2.3.4"
+
+ fetch_d = defer.ensureDeferred(
+ self.well_known_resolver.get_well_known(b"testserv")
+ )
+
+ # there should be an attempt to connect on port 443 for the .well-known
+ clients = self.reactor.tcpClients
+ self.assertEqual(len(clients), 1)
+ (host, port, client_factory, _timeout, _bindAddress) = clients.pop(0)
+ self.assertEqual(host, "1.2.3.4")
+ self.assertEqual(port, 443)
+
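+        # Return a well-known response whose body is larger than WELL_KNOWN_MAX_SIZE.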
+ self._handle_well_known_connection(
+ client_factory,
+ expected_sni=b"testserv",
+ response_headers={b"Cache-Control": b"max-age=1000"},
+ content=b'{ "m.server": "' + (b"a" * WELL_KNOWN_MAX_SIZE) + b'" }',
+ )
+
+        # The request is successful, but delegation is disabled.
+ r = self.successResultOf(fetch_d)
+ self.assertIsNone(r.delegated_server)
+
def test_srv_fallbacks(self):
"""Test that other SRV results are tried if the first one fails.
"""
diff --git a/tests/http/test_additional_resource.py b/tests/http/test_additional_resource.py
index 05e9c449be..453391a5a5 100644
--- a/tests/http/test_additional_resource.py
+++ b/tests/http/test_additional_resource.py
@@ -46,16 +46,16 @@ class AdditionalResourceTests(HomeserverTestCase):
handler = _AsyncTestCustomEndpoint({}, None).handle_request
resource = AdditionalResource(self.hs, handler)
- request, channel = make_request(self.reactor, FakeSite(resource), "GET", "/")
+ channel = make_request(self.reactor, FakeSite(resource), "GET", "/")
- self.assertEqual(request.code, 200)
+ self.assertEqual(channel.code, 200)
self.assertEqual(channel.json_body, {"some_key": "some_value_async"})
def test_sync(self):
handler = _SyncTestCustomEndpoint({}, None).handle_request
resource = AdditionalResource(self.hs, handler)
- request, channel = make_request(self.reactor, FakeSite(resource), "GET", "/")
+ channel = make_request(self.reactor, FakeSite(resource), "GET", "/")
- self.assertEqual(request.code, 200)
+ self.assertEqual(channel.code, 200)
self.assertEqual(channel.json_body, {"some_key": "some_value_sync"})
diff --git a/tests/http/test_client.py b/tests/http/test_client.py
new file mode 100644
index 0000000000..f17c122e93
--- /dev/null
+++ b/tests/http/test_client.py
@@ -0,0 +1,101 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from io import BytesIO
+
+from mock import Mock
+
+from twisted.python.failure import Failure
+from twisted.web.client import ResponseDone
+
+from synapse.http.client import BodyExceededMaxSize, read_body_with_max_size
+
+from tests.unittest import TestCase
+
+
+class ReadBodyWithMaxSizeTests(TestCase):
+ def setUp(self):
+        """Start reading the body and stash the response, result and protocol on the test case."""
+ self.response = Mock()
+ self.result = BytesIO()
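+        # Cap the body at 6 bytes: the Deferred fires with the number of bytes
+        # read, or fails with BodyExceededMaxSize if that cap is exceeded.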
+ self.deferred = read_body_with_max_size(self.response, self.result, 6)
+
+ # Fish the protocol out of the response.
+ self.protocol = self.response.deliverBody.call_args[0][0]
+ self.protocol.transport = Mock()
+
+ def _cleanup_error(self):
+ """Ensure that the error in the Deferred is handled gracefully."""
+ called = [False]
+
+ def errback(f):
+ called[0] = True
+
+ self.deferred.addErrback(errback)
+ self.assertTrue(called[0])
+
+ def test_no_error(self):
+ """A response that is NOT too large."""
+
+ # Start sending data.
+ self.protocol.dataReceived(b"12345")
+ # Close the connection.
+ self.protocol.connectionLost(Failure(ResponseDone()))
+
+ self.assertEqual(self.result.getvalue(), b"12345")
+ self.assertEqual(self.deferred.result, 5)
+
+ def test_too_large(self):
+ """A response which is too large raises an exception."""
+
+ # Start sending data.
+ self.protocol.dataReceived(b"1234567890")
+ # Close the connection.
+ self.protocol.connectionLost(Failure(ResponseDone()))
+
+ self.assertEqual(self.result.getvalue(), b"1234567890")
+ self.assertIsInstance(self.deferred.result, Failure)
+ self.assertIsInstance(self.deferred.result.value, BodyExceededMaxSize)
+ self._cleanup_error()
+
+ def test_multiple_packets(self):
+        """Data should be accumulated across multiple packets."""
+
+ # Start sending data.
+ self.protocol.dataReceived(b"12")
+ self.protocol.dataReceived(b"34")
+ # Close the connection.
+ self.protocol.connectionLost(Failure(ResponseDone()))
+
+ self.assertEqual(self.result.getvalue(), b"1234")
+ self.assertEqual(self.deferred.result, 4)
+
+ def test_additional_data(self):
+        """A connection may receive more data after it has been asked to close."""
+
+ # Start sending data.
+ self.protocol.dataReceived(b"1234567890")
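+        # Exceeding the maximum size should errback the Deferred and ask the
+        # transport to close the connection.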
+ self.assertIsInstance(self.deferred.result, Failure)
+ self.assertIsInstance(self.deferred.result.value, BodyExceededMaxSize)
+ self.protocol.transport.loseConnection.assert_called_once()
+
+ # More data might have come in.
+ self.protocol.dataReceived(b"1234567890")
+ # Close the connection.
+ self.protocol.connectionLost(Failure(ResponseDone()))
+
+ self.assertEqual(self.result.getvalue(), b"1234567890")
+ self.assertIsInstance(self.deferred.result, Failure)
+ self.assertIsInstance(self.deferred.result.value, BodyExceededMaxSize)
+ self._cleanup_error()
diff --git a/tests/http/test_endpoint.py b/tests/http/test_endpoint.py
index b2e9533b07..d06ea518ce 100644
--- a/tests/http/test_endpoint.py
+++ b/tests/http/test_endpoint.py
@@ -12,7 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from synapse.http.endpoint import parse_and_validate_server_name, parse_server_name
+from synapse.util.stringutils import parse_and_validate_server_name, parse_server_name
from tests import unittest
diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py
index 212484a7fe..9c52c8fdca 100644
--- a/tests/http/test_fedclient.py
+++ b/tests/http/test_fedclient.py
@@ -560,4 +560,4 @@ class FederationClientTests(HomeserverTestCase):
self.pump()
f = self.failureResultOf(test_d)
- self.assertIsInstance(f.value, ValueError)
+ self.assertIsInstance(f.value, RequestSendFailed)
diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py
index 22abf76515..9a56e1c14a 100644
--- a/tests/http/test_proxyagent.py
+++ b/tests/http/test_proxyagent.py
@@ -15,12 +15,14 @@
import logging
import treq
+from netaddr import IPSet
from twisted.internet import interfaces # noqa: F401
from twisted.internet.protocol import Factory
from twisted.protocols.tls import TLSMemoryBIOFactory
from twisted.web.http import HTTPChannel
+from synapse.http.client import BlacklistingReactorWrapper
from synapse.http.proxyagent import ProxyAgent
from tests.http import TestServerTLSConnectionFactory, get_test_https_policy
@@ -292,6 +294,134 @@ class MatrixFederationAgentTests(TestCase):
body = self.successResultOf(treq.content(resp))
self.assertEqual(body, b"result")
+ def test_http_request_via_proxy_with_blacklist(self):
+        # The blacklist includes the configured proxy IP, but the request
+        # should still succeed.
+ agent = ProxyAgent(
+ BlacklistingReactorWrapper(
+ self.reactor, ip_whitelist=None, ip_blacklist=IPSet(["1.0.0.0/8"])
+ ),
+ self.reactor,
+ http_proxy=b"proxy.com:8888",
+ )
+
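+        # Make the proxy resolve to an IP within the blacklisted range.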
+ self.reactor.lookups["proxy.com"] = "1.2.3.5"
+ d = agent.request(b"GET", b"http://test.com")
+
+ # there should be a pending TCP connection
+ clients = self.reactor.tcpClients
+ self.assertEqual(len(clients), 1)
+ (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+ self.assertEqual(host, "1.2.3.5")
+ self.assertEqual(port, 8888)
+
+ # make a test server, and wire up the client
+ http_server = self._make_connection(
+ client_factory, _get_test_protocol_factory()
+ )
+
+ # the FakeTransport is async, so we need to pump the reactor
+ self.reactor.advance(0)
+
+ # now there should be a pending request
+ self.assertEqual(len(http_server.requests), 1)
+
+ request = http_server.requests[0]
+ self.assertEqual(request.method, b"GET")
+ self.assertEqual(request.path, b"http://test.com")
+ self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
+ request.write(b"result")
+ request.finish()
+
+ self.reactor.advance(0)
+
+ resp = self.successResultOf(d)
+ body = self.successResultOf(treq.content(resp))
+ self.assertEqual(body, b"result")
+
+ def test_https_request_via_proxy_with_blacklist(self):
+        # The blacklist includes the configured proxy IP, but the request
+        # should still succeed.
+ agent = ProxyAgent(
+ BlacklistingReactorWrapper(
+ self.reactor, ip_whitelist=None, ip_blacklist=IPSet(["1.0.0.0/8"])
+ ),
+ self.reactor,
+ contextFactory=get_test_https_policy(),
+ https_proxy=b"proxy.com",
+ )
+
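+        # Make the proxy resolve to an IP within the blacklisted range.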
+ self.reactor.lookups["proxy.com"] = "1.2.3.5"
+ d = agent.request(b"GET", b"https://test.com/abc")
+
+ # there should be a pending TCP connection
+ clients = self.reactor.tcpClients
+ self.assertEqual(len(clients), 1)
+ (host, port, client_factory, _timeout, _bindAddress) = clients[0]
+ self.assertEqual(host, "1.2.3.5")
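+        # No port was given for the HTTPS proxy, so the default of 1080 is used.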
+ self.assertEqual(port, 1080)
+
+ # make a test HTTP server, and wire up the client
+ proxy_server = self._make_connection(
+ client_factory, _get_test_protocol_factory()
+ )
+
+ # fish the transports back out so that we can do the old switcheroo
+ s2c_transport = proxy_server.transport
+ client_protocol = s2c_transport.other
+ c2s_transport = client_protocol.transport
+
+ # the FakeTransport is async, so we need to pump the reactor
+ self.reactor.advance(0)
+
+ # now there should be a pending CONNECT request
+ self.assertEqual(len(proxy_server.requests), 1)
+
+ request = proxy_server.requests[0]
+ self.assertEqual(request.method, b"CONNECT")
+ self.assertEqual(request.path, b"test.com:443")
+
+ # tell the proxy server not to close the connection
+ proxy_server.persistent = True
+
+ # this just stops the http Request trying to do a chunked response
+ # request.setHeader(b"Content-Length", b"0")
+ request.finish()
+
+ # now we can replace the proxy channel with a new, SSL-wrapped HTTP channel
+ ssl_factory = _wrap_server_factory_for_tls(_get_test_protocol_factory())
+ ssl_protocol = ssl_factory.buildProtocol(None)
+ http_server = ssl_protocol.wrappedProtocol
+
+ ssl_protocol.makeConnection(
+ FakeTransport(client_protocol, self.reactor, ssl_protocol)
+ )
+ c2s_transport.other = ssl_protocol
+
+ self.reactor.advance(0)
+
+ server_name = ssl_protocol._tlsConnection.get_servername()
+ expected_sni = b"test.com"
+ self.assertEqual(
+ server_name,
+ expected_sni,
+ "Expected SNI %s but got %s" % (expected_sni, server_name),
+ )
+
+ # now there should be a pending request
+ self.assertEqual(len(http_server.requests), 1)
+
+ request = http_server.requests[0]
+ self.assertEqual(request.method, b"GET")
+ self.assertEqual(request.path, b"/abc")
+ self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
+ request.write(b"result")
+ request.finish()
+
+ self.reactor.advance(0)
+
+ resp = self.successResultOf(d)
+ body = self.successResultOf(treq.content(resp))
+ self.assertEqual(body, b"result")
+
def _wrap_server_factory_for_tls(factory, sanlist=None):
"""Wrap an existing Protocol Factory with a test TLSMemoryBIOFactory
|