diff --git a/.circleci/config.yml b/.circleci/config.yml
index e03f01b837..5266544f3c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -9,6 +9,8 @@ jobs:
- store_artifacts:
path: ~/project/logs
destination: logs
+ - store_test_results:
+ path: logs
sytestpy2postgres:
machine: true
steps:
@@ -18,6 +20,34 @@ jobs:
- store_artifacts:
path: ~/project/logs
destination: logs
+ - store_test_results:
+ path: logs
+ sytestpy2merged:
+ machine: true
+ steps:
+ - checkout
+ - run: bash .circleci/merge_base_branch.sh
+ - run: docker pull matrixdotorg/sytest-synapsepy2
+ - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy2
+ - store_artifacts:
+ path: ~/project/logs
+ destination: logs
+ - store_test_results:
+ path: logs
+
+ sytestpy2postgresmerged:
+ machine: true
+ steps:
+ - checkout
+ - run: bash .circleci/merge_base_branch.sh
+ - run: docker pull matrixdotorg/sytest-synapsepy2
+ - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy2
+ - store_artifacts:
+ path: ~/project/logs
+ destination: logs
+ - store_test_results:
+ path: logs
+
sytestpy3:
machine: true
steps:
@@ -27,6 +57,8 @@ jobs:
- store_artifacts:
path: ~/project/logs
destination: logs
+ - store_test_results:
+ path: logs
sytestpy3postgres:
machine: true
steps:
@@ -36,6 +68,32 @@ jobs:
- store_artifacts:
path: ~/project/logs
destination: logs
+ - store_test_results:
+ path: logs
+ sytestpy3merged:
+ machine: true
+ steps:
+ - checkout
+ - run: bash .circleci/merge_base_branch.sh
+ - run: docker pull matrixdotorg/sytest-synapsepy3
+      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy3
+ - store_artifacts:
+ path: ~/project/logs
+ destination: logs
+ - store_test_results:
+ path: logs
+ sytestpy3postgresmerged:
+ machine: true
+ steps:
+ - checkout
+ - run: bash .circleci/merge_base_branch.sh
+ - run: docker pull matrixdotorg/sytest-synapsepy3
+ - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy3
+ - store_artifacts:
+ path: ~/project/logs
+ destination: logs
+ - store_test_results:
+ path: logs
workflows:
version: 2
@@ -43,6 +101,14 @@ workflows:
jobs:
- sytestpy2
- sytestpy2postgres
+ - sytestpy2merged:
+ filters:
+ branches:
+ ignore: /develop|master/
+ - sytestpy2postgresmerged:
+ filters:
+ branches:
+ ignore: /develop|master/
# Currently broken while the Python 3 port is incomplete
# - sytestpy3
# - sytestpy3postgres
diff --git a/.circleci/merge_base_branch.sh b/.circleci/merge_base_branch.sh
new file mode 100755
index 0000000000..2d700dbf11
--- /dev/null
+++ b/.circleci/merge_base_branch.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+set -e
+
+# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful.
+# In this case, we just need to do some ~shell magic~ to strip it out of the CIRCLE_PULL_REQUEST URL.
+echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> "$BASH_ENV"
+source $BASH_ENV
+
+if [[ -z "${CIRCLE_PR_NUMBER}" ]]
+then
+ echo "Can't figure out what the PR number is!"
+ exit 1
+fi
+
+# Get the reference, using the GitHub API
+GITBASE=`curl -q https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
+
+# Show what we are before
+git show -s
+
+# Fetch and merge. If it doesn't work, the script will fail immediately thanks to set -e.
+git fetch -u origin $GITBASE
+git merge --no-edit origin/$GITBASE
+
+# Show what we are after.
+git show -s
\ No newline at end of file
diff --git a/changelog.d/3704.misc b/changelog.d/3704.misc
new file mode 100644
index 0000000000..aaae0fbd63
--- /dev/null
+++ b/changelog.d/3704.misc
@@ -0,0 +1 @@
+CircleCI tests now run on the potential merge of a PR.
diff --git a/changelog.d/3822.misc b/changelog.d/3822.misc
new file mode 100644
index 0000000000..5250f31896
--- /dev/null
+++ b/changelog.d/3822.misc
@@ -0,0 +1 @@
+crypto/ is now ported to Python 3.
diff --git a/changelog.d/3823.misc b/changelog.d/3823.misc
new file mode 100644
index 0000000000..0da491ddaa
--- /dev/null
+++ b/changelog.d/3823.misc
@@ -0,0 +1 @@
+rest/ is now ported to Python 3.
diff --git a/changelog.d/3824.bugfix b/changelog.d/3824.bugfix
new file mode 100644
index 0000000000..99f199dcc6
--- /dev/null
+++ b/changelog.d/3824.bugfix
@@ -0,0 +1 @@
+Fix jwt import check
\ No newline at end of file
diff --git a/changelog.d/3826.misc b/changelog.d/3826.misc
new file mode 100644
index 0000000000..a4d9a012f9
--- /dev/null
+++ b/changelog.d/3826.misc
@@ -0,0 +1 @@
+Add some logging for the keyring queue.
diff --git a/changelog.d/3835.bugfix b/changelog.d/3835.bugfix
new file mode 100644
index 0000000000..00dbcbc8dc
--- /dev/null
+++ b/changelog.d/3835.bugfix
@@ -0,0 +1 @@
+Fix VoIP crashes under Python 3 (#3821)
diff --git a/changelog.d/3841.bugfix b/changelog.d/3841.bugfix
new file mode 100644
index 0000000000..2a48a7dd66
--- /dev/null
+++ b/changelog.d/3841.bugfix
@@ -0,0 +1 @@
+Fix manhole so that it works with latest openssh clients
diff --git a/changelog.d/3845.bugfix b/changelog.d/3845.bugfix
new file mode 100644
index 0000000000..5b7e8f1934
--- /dev/null
+++ b/changelog.d/3845.bugfix
@@ -0,0 +1 @@
+Fix outbound requests occasionally wedging, which can result in federation breaking between servers.
diff --git a/changelog.d/3847.misc b/changelog.d/3847.misc
new file mode 100644
index 0000000000..bf8b5afea4
--- /dev/null
+++ b/changelog.d/3847.misc
@@ -0,0 +1 @@
+federation/ is now ported to Python 3.
\ No newline at end of file
diff --git a/changelog.d/3851.bugfix b/changelog.d/3851.bugfix
new file mode 100644
index 0000000000..b53a9efe7b
--- /dev/null
+++ b/changelog.d/3851.bugfix
@@ -0,0 +1 @@
+Show heroes if room name/canonical alias has been deleted
diff --git a/changelog.d/3853.misc b/changelog.d/3853.misc
new file mode 100644
index 0000000000..db45d4983d
--- /dev/null
+++ b/changelog.d/3853.misc
@@ -0,0 +1 @@
+Log when we retry outbound requests
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index 2fd9c48abf..b8d5690f2b 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -21,7 +21,7 @@ from .consent_config import ConsentConfig
from .database import DatabaseConfig
from .emailconfig import EmailConfig
from .groups import GroupsConfig
-from .jwt import JWTConfig
+from .jwt_config import JWTConfig
from .key import KeyConfig
from .logger import LoggingConfig
from .metrics import MetricsConfig
diff --git a/synapse/config/jwt.py b/synapse/config/jwt_config.py
index 51e7f7e003..51e7f7e003 100644
--- a/synapse/config/jwt.py
+++ b/synapse/config/jwt_config.py
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 1a391adec1..02b76dfcfb 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -123,6 +123,6 @@ class ClientTLSOptionsFactory(object):
def get_options(self, host):
return ClientTLSOptions(
- host.decode('utf-8'),
+ host,
CertificateOptions(verify=False).getContext()
)
diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py
index e94400b8e2..57d4665e84 100644
--- a/synapse/crypto/keyclient.py
+++ b/synapse/crypto/keyclient.py
@@ -50,7 +50,7 @@ def fetch_server_key(server_name, tls_client_options_factory, path=KEY_API_V1):
defer.returnValue((server_response, server_certificate))
except SynapseKeyClientError as e:
logger.warn("Error getting key for %r: %s", server_name, e)
- if e.status.startswith("4"):
+ if e.status.startswith(b"4"):
# Don't retry for 4xx responses.
raise IOError("Cannot get key for %r" % server_name)
except (ConnectError, DomainError) as e:
@@ -82,6 +82,12 @@ class SynapseKeyClientProtocol(HTTPClient):
self._peer = self.transport.getPeer()
logger.debug("Connected to %s", self._peer)
+ if not isinstance(self.path, bytes):
+ self.path = self.path.encode('ascii')
+
+ if not isinstance(self.host, bytes):
+ self.host = self.host.encode('ascii')
+
self.sendCommand(b"GET", self.path)
if self.host:
self.sendHeader(b"Host", self.host)
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 30e2742102..d89f94c219 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -16,9 +16,10 @@
import hashlib
import logging
-import urllib
from collections import namedtuple
+from six.moves import urllib
+
from signedjson.key import (
decode_verify_key_bytes,
encode_verify_key_base64,
@@ -40,6 +41,7 @@ from synapse.api.errors import Codes, SynapseError
from synapse.crypto.keyclient import fetch_server_key
from synapse.util import logcontext, unwrapFirstError
from synapse.util.logcontext import (
+ LoggingContext,
PreserveLoggingContext,
preserve_fn,
run_in_background,
@@ -216,23 +218,34 @@ class Keyring(object):
servers have completed. Follows the synapse rules of logcontext
preservation.
"""
+ loop_count = 1
while True:
wait_on = [
- self.key_downloads[server_name]
+ (server_name, self.key_downloads[server_name])
for server_name in server_names
if server_name in self.key_downloads
]
-            if wait_on:
-                with PreserveLoggingContext():
-                    yield defer.DeferredList(wait_on)
- else:
+ if not wait_on:
break
+ logger.info(
+ "Waiting for existing lookups for %s to complete [loop %i]",
+ [w[0] for w in wait_on], loop_count,
+ )
+ with PreserveLoggingContext():
+ yield defer.DeferredList((w[1] for w in wait_on))
+
+ loop_count += 1
+
+ ctx = LoggingContext.current_context()
def rm(r, server_name_):
- self.key_downloads.pop(server_name_, None)
+ with PreserveLoggingContext(ctx):
+ logger.debug("Releasing key lookup lock on %s", server_name_)
+ self.key_downloads.pop(server_name_, None)
return r
for server_name, deferred in server_to_deferred.items():
+ logger.debug("Got key lookup lock on %s", server_name)
self.key_downloads[server_name] = deferred
deferred.addBoth(rm, server_name)
@@ -432,7 +445,7 @@ class Keyring(object):
# an incoming request.
query_response = yield self.client.post_json(
destination=perspective_name,
- path=b"/_matrix/key/v2/query",
+ path="/_matrix/key/v2/query",
data={
u"server_keys": {
server_name: {
@@ -513,8 +526,8 @@ class Keyring(object):
(response, tls_certificate) = yield fetch_server_key(
server_name, self.hs.tls_client_options_factory,
- path=(b"/_matrix/key/v2/server/%s" % (
- urllib.quote(requested_key_id),
+ path=("/_matrix/key/v2/server/%s" % (
+ urllib.parse.quote(requested_key_id),
)).encode("ascii"),
)
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index 5be8e66fb8..61782ae1c0 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -143,11 +143,31 @@ class FederationBase(object):
def callback(_, pdu):
with logcontext.PreserveLoggingContext(ctx):
if not check_event_content_hash(pdu):
-                    logger.warn(
-                        "Event content has been tampered, redacting %s: %s",
-                        pdu.event_id, pdu.get_pdu_json()
-                    )
-                    return prune_event(pdu)
+ # let's try to distinguish between failures because the event was
+ # redacted (which are somewhat expected) vs actual ball-tampering
+ # incidents.
+ #
+ # This is just a heuristic, so we just assume that if the keys are
+ # about the same between the redacted and received events, then the
+ # received event was probably a redacted copy (but we then use our
+ # *actual* redacted copy to be on the safe side.)
+ redacted_event = prune_event(pdu)
+ if (
+ set(six.iterkeys(redacted_event)) == set(six.iterkeys(pdu)) and
+ set(six.iterkeys(redacted_event.content))
+ == set(six.iterkeys(pdu.content))
+ ):
+ logger.info(
+ "Event %s seems to have been redacted; using our redacted "
+                            "copy",
+ pdu.event_id,
+ )
+ else:
+ logger.warning(
+                            "Event %s content has been tampered, redacting: %s",
+ pdu.event_id, pdu.get_pdu_json(),
+ )
+ return redacted_event
if self.spam_checker.check_event_for_spam(pdu):
logger.warn(
@@ -162,8 +182,8 @@ class FederationBase(object):
failure.trap(SynapseError)
with logcontext.PreserveLoggingContext(ctx):
logger.warn(
- "Signature check failed for %s",
- pdu.event_id,
+ "Signature check failed for %s: %s",
+ pdu.event_id, failure.getErrorMessage(),
)
return failure
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index c9f3c2d352..fe67b2ff42 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -271,10 +271,10 @@ class FederationClient(FederationBase):
event_id, destination, e,
)
except NotRetryingDestination as e:
- logger.info(e.message)
+ logger.info(str(e))
continue
except FederationDeniedError as e:
- logger.info(e.message)
+ logger.info(str(e))
continue
except Exception as e:
pdu_attempts[destination] = now
@@ -510,7 +510,7 @@ class FederationClient(FederationBase):
else:
logger.warn(
"Failed to %s via %s: %i %s",
- description, destination, e.code, e.message,
+ description, destination, e.code, e.args[0],
)
except Exception:
logger.warn(
@@ -875,7 +875,7 @@ class FederationClient(FederationBase):
except Exception as e:
logger.exception(
"Failed to send_third_party_invite via %s: %s",
- destination, e.message
+ destination, str(e)
)
raise RuntimeError("Failed to send to any server.")
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 1054441ca5..2ab973d6c8 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -15,7 +15,8 @@
# limitations under the License.
import logging
-import urllib
+
+from six.moves import urllib
from twisted.internet import defer
@@ -951,4 +952,4 @@ def _create_path(prefix, path, *args):
Returns:
str
"""
- return prefix + path % tuple(urllib.quote(arg, "") for arg in args)
+ return prefix + path % tuple(urllib.parse.quote(arg, "") for arg in args)
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 3972922ff9..2f874b4838 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -90,8 +90,8 @@ class Authenticator(object):
@defer.inlineCallbacks
def authenticate_request(self, request, content):
json_request = {
- "method": request.method,
- "uri": request.uri,
+ "method": request.method.decode('ascii'),
+ "uri": request.uri.decode('ascii'),
"destination": self.server_name,
"signatures": {},
}
@@ -252,7 +252,7 @@ class BaseFederationServlet(object):
by the callback method. None if the request has already been handled.
"""
content = None
- if request.method in ["PUT", "POST"]:
+ if request.method in [b"PUT", b"POST"]:
# TODO: Handle other method types? other content types?
content = parse_json_object_from_request(request)
@@ -386,7 +386,7 @@ class FederationStateServlet(BaseFederationServlet):
return self.handler.on_context_state_request(
origin,
context,
- query.get("event_id", [None])[0],
+ parse_string_from_args(query, "event_id", None),
)
@@ -397,7 +397,7 @@ class FederationStateIdsServlet(BaseFederationServlet):
return self.handler.on_state_ids_request(
origin,
room_id,
- query.get("event_id", [None])[0],
+ parse_string_from_args(query, "event_id", None),
)
@@ -405,14 +405,12 @@ class FederationBackfillServlet(BaseFederationServlet):
PATH = "/backfill/(?P<context>[^/]*)/"
def on_GET(self, origin, content, query, context):
- versions = query["v"]
- limits = query["limit"]
+ versions = [x.decode('ascii') for x in query[b"v"]]
+ limit = parse_integer_from_args(query, "limit", None)
- if not limits:
+ if not limit:
return defer.succeed((400, {"error": "Did not include limit param"}))
- limit = int(limits[-1])
-
return self.handler.on_backfill_request(origin, context, versions, limit)
@@ -423,7 +421,7 @@ class FederationQueryServlet(BaseFederationServlet):
def on_GET(self, origin, content, query, query_type):
return self.handler.on_query_request(
query_type,
- {k: v[0].decode("utf-8") for k, v in query.items()}
+ {k.decode('utf8'): v[0].decode("utf-8") for k, v in query.items()}
)
@@ -630,14 +628,14 @@ class OpenIdUserInfo(BaseFederationServlet):
@defer.inlineCallbacks
def on_GET(self, origin, content, query):
- token = query.get("access_token", [None])[0]
+ token = query.get(b"access_token", [None])[0]
if token is None:
defer.returnValue((401, {
"errcode": "M_MISSING_TOKEN", "error": "Access Token required"
}))
return
- user_id = yield self.handler.on_openid_userinfo(token)
+ user_id = yield self.handler.on_openid_userinfo(token.decode('ascii'))
if user_id is None:
defer.returnValue((401, {
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 23983a51ab..9bca4e7067 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -562,8 +562,21 @@ class SyncHandler(object):
details.get(Membership.INVITE, empty_ms).count
)
- if name_id or canonical_alias_id:
- defer.returnValue(summary)
+ # if the room has a name or canonical_alias set, we can skip
+ # calculating heroes. we assume that if the event has contents, it'll
+ # be a valid name or canonical_alias - i.e. we're checking that they
+ # haven't been "deleted" by blatting {} over the top.
+ if name_id:
+ name = yield self.store.get_event(name_id, allow_none=False)
+ if name and name.content:
+ defer.returnValue(summary)
+
+ if canonical_alias_id:
+ canonical_alias = yield self.store.get_event(
+ canonical_alias_id, allow_none=False,
+ )
+ if canonical_alias and canonical_alias.content:
+ defer.returnValue(summary)
joined_user_ids = [
r[0] for r in details.get(Membership.JOIN, empty_ms).members
@@ -1698,10 +1711,10 @@ class SyncHandler(object):
sync_result_builder.joined.append(room_sync)
- if batch.limited:
+ if batch.limited and since_token:
user_id = sync_result_builder.sync_config.user.to_string()
logger.info(
- "Incremental syncing room %s for user %s with %d state events" % (
+ "Incremental gappy sync of %s for user %s with %d state events" % (
room_id,
user_id,
len(state),
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 6a1fc8ca55..cf920bc041 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -177,11 +177,6 @@ class MatrixFederationHttpClient(object):
txn_id = "%s-O-%s" % (method, self._next_id)
self._next_id = (self._next_id + 1) % (MAXINT - 1)
- outbound_logger.info(
- "{%s} [%s] Sending request: %s %s",
- txn_id, destination, method, url
- )
-
# XXX: Would be much nicer to retry only at the transaction-layer
# (once we have reliable transactions in place)
if long_retries:
@@ -194,85 +189,89 @@ class MatrixFederationHttpClient(object):
).decode('ascii')
log_result = None
- try:
- while True:
- try:
- if json_callback:
- json = json_callback()
-
- if json:
- data = encode_canonical_json(json)
- headers_dict["Content-Type"] = ["application/json"]
- self.sign_request(
- destination, method, http_url, headers_dict, json
- )
- else:
- data = None
- self.sign_request(destination, method, http_url, headers_dict)
-
- request_deferred = treq.request(
- method,
- url,
- headers=Headers(headers_dict),
- data=data,
- agent=self.agent,
- )
- add_timeout_to_deferred(
- request_deferred,
- timeout / 1000. if timeout else 60,
- self.hs.get_reactor(),
- cancelled_to_request_timed_out_error,
+ while True:
+ try:
+ if json_callback:
+ json = json_callback()
+
+ if json:
+ data = encode_canonical_json(json)
+ headers_dict["Content-Type"] = ["application/json"]
+ self.sign_request(
+ destination, method, http_url, headers_dict, json
)
- response = yield make_deferred_yieldable(
- request_deferred,
- )
-
- log_result = "%d %s" % (response.code, response.phrase,)
- break
- except Exception as e:
- if not retry_on_dns_fail and isinstance(e, DNSLookupError):
- logger.warn(
- "DNS Lookup failed to %s with %s",
- destination,
- e
- )
- log_result = "DNS Lookup failed to %s with %s" % (
- destination, e
- )
- raise
-
+ else:
+ data = None
+ self.sign_request(destination, method, http_url, headers_dict)
+
+ outbound_logger.info(
+ "{%s} [%s] Sending request: %s %s",
+ txn_id, destination, method, url
+ )
+
+ request_deferred = treq.request(
+ method,
+ url,
+ headers=Headers(headers_dict),
+ data=data,
+ agent=self.agent,
+ )
+ add_timeout_to_deferred(
+ request_deferred,
+ timeout / 1000. if timeout else 60,
+ self.hs.get_reactor(),
+ cancelled_to_request_timed_out_error,
+ )
+ response = yield make_deferred_yieldable(
+ request_deferred,
+ )
+
+ log_result = "%d %s" % (response.code, response.phrase,)
+ break
+ except Exception as e:
+ if not retry_on_dns_fail and isinstance(e, DNSLookupError):
logger.warn(
- "{%s} Sending request failed to %s: %s %s: %s",
- txn_id,
+ "DNS Lookup failed to %s with %s",
destination,
- method,
- url,
- _flatten_response_never_received(e),
+ e
)
-
- log_result = _flatten_response_never_received(e)
-
- if retries_left and not timeout:
- if long_retries:
- delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
- delay = min(delay, 60)
- delay *= random.uniform(0.8, 1.4)
- else:
- delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
- delay = min(delay, 2)
- delay *= random.uniform(0.8, 1.4)
-
- yield self.clock.sleep(delay)
- retries_left -= 1
+ log_result = "DNS Lookup failed to %s with %s" % (
+ destination, e
+ )
+ raise
+
+ logger.warn(
+ "{%s} Sending request failed to %s: %s %s: %s",
+ txn_id,
+ destination,
+ method,
+ url,
+ _flatten_response_never_received(e),
+ )
+
+ log_result = _flatten_response_never_received(e)
+
+ if retries_left and not timeout:
+ if long_retries:
+ delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
+ delay = min(delay, 60)
+ delay *= random.uniform(0.8, 1.4)
else:
- raise
- finally:
- outbound_logger.info(
- "{%s} [%s] Result: %s",
- txn_id,
- destination,
- log_result,
- )
+ delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
+ delay = min(delay, 2)
+ delay *= random.uniform(0.8, 1.4)
+
+ yield self.clock.sleep(delay)
+ retries_left -= 1
+ else:
+ raise
+ finally:
+ outbound_logger.info(
+ "{%s} [%s] Result: %s",
+ txn_id,
+ destination,
+ log_result,
+ )
if 200 <= response.code < 300:
pass
@@ -280,7 +279,10 @@ class MatrixFederationHttpClient(object):
# :'(
# Update transactions table?
with logcontext.PreserveLoggingContext():
- body = yield treq.content(response)
+ body = yield self._timeout_deferred(
+ treq.content(response),
+ timeout,
+ )
raise HttpResponseException(
response.code, response.phrase, body
)
@@ -394,7 +396,10 @@ class MatrixFederationHttpClient(object):
check_content_type_is_json(response.headers)
with logcontext.PreserveLoggingContext():
- body = yield treq.json_content(response)
+ body = yield self._timeout_deferred(
+ treq.json_content(response),
+ timeout,
+ )
defer.returnValue(body)
@defer.inlineCallbacks
@@ -444,7 +449,10 @@ class MatrixFederationHttpClient(object):
check_content_type_is_json(response.headers)
with logcontext.PreserveLoggingContext():
- body = yield treq.json_content(response)
+ body = yield self._timeout_deferred(
+ treq.json_content(response),
+ timeout,
+ )
defer.returnValue(body)
@@ -496,7 +504,10 @@ class MatrixFederationHttpClient(object):
check_content_type_is_json(response.headers)
with logcontext.PreserveLoggingContext():
- body = yield treq.json_content(response)
+ body = yield self._timeout_deferred(
+ treq.json_content(response),
+ timeout,
+ )
defer.returnValue(body)
@@ -543,7 +554,10 @@ class MatrixFederationHttpClient(object):
check_content_type_is_json(response.headers)
with logcontext.PreserveLoggingContext():
- body = yield treq.json_content(response)
+ body = yield self._timeout_deferred(
+ treq.json_content(response),
+ timeout,
+ )
defer.returnValue(body)
@@ -585,8 +599,10 @@ class MatrixFederationHttpClient(object):
try:
with logcontext.PreserveLoggingContext():
- length = yield _readBodyToFile(
- response, output_stream, max_size
+ length = yield self._timeout_deferred(
+ _readBodyToFile(
+ response, output_stream, max_size
+ ),
)
except Exception:
logger.exception("Failed to download body")
@@ -594,6 +610,27 @@ class MatrixFederationHttpClient(object):
defer.returnValue((length, headers))
+ def _timeout_deferred(self, deferred, timeout_ms=None):
+ """Times the deferred out after `timeout_ms` ms
+
+ Args:
+ deferred (Deferred)
+            timeout_ms (int|None): Timeout in milliseconds. If None, defaults
+ to 60 seconds.
+
+ Returns:
+ Deferred
+ """
+
+ add_timeout_to_deferred(
+ deferred,
+ timeout_ms / 1000. if timeout_ms else 60,
+ self.hs.get_reactor(),
+ cancelled_to_request_timed_out_error,
+ )
+
+ return deferred
+
class _ReadBodyToFileProtocol(protocol.Protocol):
def __init__(self, stream, deferred, max_size):
diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py
index ad536ab570..41534b8c2a 100644
--- a/synapse/rest/client/v1/admin.py
+++ b/synapse/rest/client/v1/admin.py
@@ -101,7 +101,7 @@ class UserRegisterServlet(ClientV1RestServlet):
nonce = self.hs.get_secrets().token_hex(64)
self.nonces[nonce] = int(self.reactor.seconds())
- return (200, {"nonce": nonce.encode('ascii')})
+ return (200, {"nonce": nonce})
@defer.inlineCallbacks
def on_POST(self, request):
@@ -164,7 +164,7 @@ class UserRegisterServlet(ClientV1RestServlet):
key=self.hs.config.registration_shared_secret.encode(),
digestmod=hashlib.sha1,
)
- want_mac.update(nonce)
+ want_mac.update(nonce.encode('utf8'))
want_mac.update(b"\x00")
want_mac.update(username)
want_mac.update(b"\x00")
@@ -173,7 +173,10 @@ class UserRegisterServlet(ClientV1RestServlet):
want_mac.update(b"admin" if admin else b"notadmin")
want_mac = want_mac.hexdigest()
- if not hmac.compare_digest(want_mac, got_mac.encode('ascii')):
+ if not hmac.compare_digest(
+ want_mac.encode('ascii'),
+ got_mac.encode('ascii')
+ ):
raise SynapseError(403, "HMAC incorrect")
# Reuse the parts of RegisterRestServlet to reduce code duplication
diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py
index 0f3a2e8b51..cd9b3bdbd1 100644
--- a/synapse/rest/client/v1/events.py
+++ b/synapse/rest/client/v1/events.py
@@ -45,20 +45,20 @@ class EventStreamRestServlet(ClientV1RestServlet):
is_guest = requester.is_guest
room_id = None
if is_guest:
- if "room_id" not in request.args:
+ if b"room_id" not in request.args:
raise SynapseError(400, "Guest users must specify room_id param")
- if "room_id" in request.args:
- room_id = request.args["room_id"][0]
+ if b"room_id" in request.args:
+ room_id = request.args[b"room_id"][0].decode('ascii')
pagin_config = PaginationConfig.from_request(request)
timeout = EventStreamRestServlet.DEFAULT_LONGPOLL_TIME_MS
- if "timeout" in request.args:
+ if b"timeout" in request.args:
try:
- timeout = int(request.args["timeout"][0])
+ timeout = int(request.args[b"timeout"][0])
except ValueError:
raise SynapseError(400, "timeout must be in milliseconds.")
- as_client_event = "raw" not in request.args
+ as_client_event = b"raw" not in request.args
chunk = yield self.event_stream_handler.get_stream(
requester.user.to_string(),
diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py
index fd5f85b53e..3ead75cb77 100644
--- a/synapse/rest/client/v1/initial_sync.py
+++ b/synapse/rest/client/v1/initial_sync.py
@@ -32,7 +32,7 @@ class InitialSyncRestServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request):
requester = yield self.auth.get_user_by_req(request)
- as_client_event = "raw" not in request.args
+ as_client_event = b"raw" not in request.args
pagination_config = PaginationConfig.from_request(request)
include_archived = parse_boolean(request, "archived", default=False)
content = yield self.initial_sync_handler.snapshot_all_rooms(
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index cb85fa1436..0010699d31 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -14,10 +14,9 @@
# limitations under the License.
import logging
-import urllib
import xml.etree.ElementTree as ET
-from six.moves.urllib import parse as urlparse
+from six.moves import urllib
from canonicaljson import json
from saml2 import BINDING_HTTP_POST, config
@@ -134,7 +133,7 @@ class LoginRestServlet(ClientV1RestServlet):
LoginRestServlet.SAML2_TYPE):
relay_state = ""
if "relay_state" in login_submission:
- relay_state = "&RelayState=" + urllib.quote(
+ relay_state = "&RelayState=" + urllib.parse.quote(
login_submission["relay_state"])
result = {
"uri": "%s%s" % (self.idp_redirect_url, relay_state)
@@ -366,7 +365,7 @@ class SAML2RestServlet(ClientV1RestServlet):
(user_id, token) = yield handler.register_saml2(username)
# Forward to the RelayState callback along with ava
if 'RelayState' in request.args:
- request.redirect(urllib.unquote(
+ request.redirect(urllib.parse.unquote(
request.args['RelayState'][0]) +
'?status=authenticated&access_token=' +
token + '&user_id=' + user_id + '&ava=' +
@@ -377,7 +376,7 @@ class SAML2RestServlet(ClientV1RestServlet):
"user_id": user_id, "token": token,
"ava": saml2_auth.ava}))
elif 'RelayState' in request.args:
- request.redirect(urllib.unquote(
+ request.redirect(urllib.parse.unquote(
request.args['RelayState'][0]) +
'?status=not_authenticated')
finish_request(request)
@@ -390,21 +389,22 @@ class CasRedirectServlet(ClientV1RestServlet):
def __init__(self, hs):
super(CasRedirectServlet, self).__init__(hs)
- self.cas_server_url = hs.config.cas_server_url
- self.cas_service_url = hs.config.cas_service_url
+ self.cas_server_url = hs.config.cas_server_url.encode('ascii')
+ self.cas_service_url = hs.config.cas_service_url.encode('ascii')
def on_GET(self, request):
args = request.args
- if "redirectUrl" not in args:
+ if b"redirectUrl" not in args:
return (400, "Redirect URL not specified for CAS auth")
- client_redirect_url_param = urllib.urlencode({
- "redirectUrl": args["redirectUrl"][0]
- })
- hs_redirect_url = self.cas_service_url + "/_matrix/client/api/v1/login/cas/ticket"
- service_param = urllib.urlencode({
- "service": "%s?%s" % (hs_redirect_url, client_redirect_url_param)
- })
- request.redirect("%s/login?%s" % (self.cas_server_url, service_param))
+ client_redirect_url_param = urllib.parse.urlencode({
+ b"redirectUrl": args[b"redirectUrl"][0]
+ }).encode('ascii')
+ hs_redirect_url = (self.cas_service_url +
+ b"/_matrix/client/api/v1/login/cas/ticket")
+ service_param = urllib.parse.urlencode({
+ b"service": b"%s?%s" % (hs_redirect_url, client_redirect_url_param)
+ }).encode('ascii')
+ request.redirect(b"%s/login?%s" % (self.cas_server_url, service_param))
finish_request(request)
@@ -422,11 +422,11 @@ class CasTicketServlet(ClientV1RestServlet):
@defer.inlineCallbacks
def on_GET(self, request):
- client_redirect_url = request.args["redirectUrl"][0]
+ client_redirect_url = request.args[b"redirectUrl"][0]
http_client = self.hs.get_simple_http_client()
uri = self.cas_server_url + "/proxyValidate"
args = {
- "ticket": request.args["ticket"],
+ "ticket": request.args[b"ticket"][0].decode('ascii'),
"service": self.cas_service_url
}
try:
@@ -471,11 +471,11 @@ class CasTicketServlet(ClientV1RestServlet):
finish_request(request)
def add_login_token_to_redirect_url(self, url, token):
- url_parts = list(urlparse.urlparse(url))
- query = dict(urlparse.parse_qsl(url_parts[4]))
+ url_parts = list(urllib.parse.urlparse(url))
+ query = dict(urllib.parse.parse_qsl(url_parts[4]))
query.update({"loginToken": token})
- url_parts[4] = urllib.urlencode(query)
- return urlparse.urlunparse(url_parts)
+ url_parts[4] = urllib.parse.urlencode(query).encode('ascii')
+ return urllib.parse.urlunparse(url_parts)
def parse_cas_response(self, cas_response_body):
user = None
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index 6e95d9bec2..9382b1f124 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -46,7 +46,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
try:
priority_class = _priority_class_from_spec(spec)
except InvalidRuleException as e:
- raise SynapseError(400, e.message)
+ raise SynapseError(400, str(e))
requester = yield self.auth.get_user_by_req(request)
@@ -73,7 +73,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
content,
)
except InvalidRuleException as e:
- raise SynapseError(400, e.message)
+ raise SynapseError(400, str(e))
before = parse_string(request, "before")
if before:
@@ -95,9 +95,9 @@ class PushRuleRestServlet(ClientV1RestServlet):
)
self.notify_user(user_id)
except InconsistentRuleException as e:
- raise SynapseError(400, e.message)
+ raise SynapseError(400, str(e))
except RuleNotFoundException as e:
- raise SynapseError(400, e.message)
+ raise SynapseError(400, str(e))
defer.returnValue((200, {}))
@@ -142,10 +142,10 @@ class PushRuleRestServlet(ClientV1RestServlet):
PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
)
- if path[0] == '':
+ if path[0] == b'':
defer.returnValue((200, rules))
- elif path[0] == 'global':
- path = path[1:]
+ elif path[0] == b'global':
+ path = [x.decode('ascii') for x in path[1:]]
result = _filter_ruleset_with_path(rules['global'], path)
defer.returnValue((200, result))
else:
@@ -192,10 +192,10 @@ class PushRuleRestServlet(ClientV1RestServlet):
def _rule_spec_from_path(path):
if len(path) < 2:
raise UnrecognizedRequestError()
- if path[0] != 'pushrules':
+ if path[0] != b'pushrules':
raise UnrecognizedRequestError()
- scope = path[1]
+ scope = path[1].decode('ascii')
path = path[2:]
if scope != 'global':
raise UnrecognizedRequestError()
@@ -203,13 +203,13 @@ def _rule_spec_from_path(path):
if len(path) == 0:
raise UnrecognizedRequestError()
- template = path[0]
+ template = path[0].decode('ascii')
path = path[1:]
if len(path) == 0 or len(path[0]) == 0:
raise UnrecognizedRequestError()
- rule_id = path[0]
+ rule_id = path[0].decode('ascii')
spec = {
'scope': scope,
@@ -220,7 +220,7 @@ def _rule_spec_from_path(path):
path = path[1:]
if len(path) > 0 and len(path[0]) > 0:
- spec['attr'] = path[0]
+ spec['attr'] = path[0].decode('ascii')
return spec
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index 182a68b1e2..b84f0260f2 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -59,7 +59,7 @@ class PushersRestServlet(ClientV1RestServlet):
]
for p in pushers:
- for k, v in p.items():
+ for k, v in list(p.items()):
if k not in allowed_keys:
del p[k]
@@ -126,7 +126,7 @@ class PushersSetRestServlet(ClientV1RestServlet):
profile_tag=content.get('profile_tag', ""),
)
except PusherConfigException as pce:
- raise SynapseError(400, "Config Error: " + pce.message,
+ raise SynapseError(400, "Config Error: " + str(pce),
errcode=Codes.MISSING_PARAM)
self.notifier.on_new_replication_data()
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 976d98387d..663934efd0 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -207,7 +207,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
"sender": requester.user.to_string(),
}
- if 'ts' in request.args and requester.app_service:
+ if b'ts' in request.args and requester.app_service:
event_dict['origin_server_ts'] = parse_integer(request, "ts", 0)
event = yield self.event_creation_hander.create_and_send_nonmember_event(
@@ -255,7 +255,9 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
if RoomID.is_valid(room_identifier):
room_id = room_identifier
try:
- remote_room_hosts = request.args["server_name"]
+ remote_room_hosts = [
+ x.decode('ascii') for x in request.args[b"server_name"]
+ ]
except Exception:
remote_room_hosts = None
elif RoomAlias.is_valid(room_identifier):
@@ -461,10 +463,10 @@ class RoomMessageListRestServlet(ClientV1RestServlet):
pagination_config = PaginationConfig.from_request(
request, default_limit=10,
)
- as_client_event = "raw" not in request.args
- filter_bytes = parse_string(request, "filter")
+ as_client_event = b"raw" not in request.args
+ filter_bytes = parse_string(request, b"filter", encoding=None)
if filter_bytes:
- filter_json = urlparse.unquote(filter_bytes).decode("UTF-8")
+ filter_json = urlparse.unquote(filter_bytes.decode("UTF-8"))
event_filter = Filter(json.loads(filter_json))
else:
event_filter = None
@@ -560,7 +562,7 @@ class RoomEventContextServlet(ClientV1RestServlet):
# picking the API shape for symmetry with /messages
filter_bytes = parse_string(request, "filter")
if filter_bytes:
- filter_json = urlparse.unquote(filter_bytes).decode("UTF-8")
+ filter_json = urlparse.unquote(filter_bytes)
event_filter = Filter(json.loads(filter_json))
else:
event_filter = None
diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py
index 62f4c3d93e..53da905eea 100644
--- a/synapse/rest/client/v1/voip.py
+++ b/synapse/rest/client/v1/voip.py
@@ -42,7 +42,11 @@ class VoipRestServlet(ClientV1RestServlet):
expiry = (self.hs.get_clock().time_msec() + userLifetime) / 1000
username = "%d:%s" % (expiry, requester.user.to_string())
- mac = hmac.new(turnSecret, msg=username, digestmod=hashlib.sha1)
+ mac = hmac.new(
+ turnSecret.encode(),
+ msg=username.encode(),
+ digestmod=hashlib.sha1
+ )
# We need to use standard padded base64 encoding here
# encode_base64 because we need to add the standard padding to get the
# same result as the TURN server.
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 263d8eb73e..0251146722 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -89,7 +89,7 @@ class SyncRestServlet(RestServlet):
@defer.inlineCallbacks
def on_GET(self, request):
- if "from" in request.args:
+ if b"from" in request.args:
# /events used to use 'from', but /sync uses 'since'.
# Lets be helpful and whine if we see a 'from'.
raise SynapseError(
diff --git a/synapse/rest/client/v2_alpha/thirdparty.py b/synapse/rest/client/v2_alpha/thirdparty.py
index d9d379182e..b9b5d07677 100644
--- a/synapse/rest/client/v2_alpha/thirdparty.py
+++ b/synapse/rest/client/v2_alpha/thirdparty.py
@@ -79,7 +79,7 @@ class ThirdPartyUserServlet(RestServlet):
yield self.auth.get_user_by_req(request, allow_guest=True)
fields = request.args
- fields.pop("access_token", None)
+ fields.pop(b"access_token", None)
results = yield self.appservice_handler.query_3pe(
ThirdPartyEntityKind.USER, protocol, fields
@@ -102,7 +102,7 @@ class ThirdPartyLocationServlet(RestServlet):
yield self.auth.get_user_by_req(request, allow_guest=True)
fields = request.args
- fields.pop("access_token", None)
+ fields.pop(b"access_token", None)
results = yield self.appservice_handler.query_3pe(
ThirdPartyEntityKind.LOCATION, protocol, fields
diff --git a/synapse/rest/key/v1/server_key_resource.py b/synapse/rest/key/v1/server_key_resource.py
index b9ee6e1c13..38eb2ee23f 100644
--- a/synapse/rest/key/v1/server_key_resource.py
+++ b/synapse/rest/key/v1/server_key_resource.py
@@ -88,5 +88,5 @@ class LocalKey(Resource):
)
def getChild(self, name, request):
- if name == '':
+ if name == b'':
return self
diff --git a/synapse/rest/key/v2/__init__.py b/synapse/rest/key/v2/__init__.py
index 3491fd2118..cb5abcf826 100644
--- a/synapse/rest/key/v2/__init__.py
+++ b/synapse/rest/key/v2/__init__.py
@@ -22,5 +22,5 @@ from .remote_key_resource import RemoteKey
class KeyApiV2Resource(Resource):
def __init__(self, hs):
Resource.__init__(self)
- self.putChild("server", LocalKey(hs))
- self.putChild("query", RemoteKey(hs))
+ self.putChild(b"server", LocalKey(hs))
+ self.putChild(b"query", RemoteKey(hs))
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 7d67e4b064..eb8782aa6e 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -103,7 +103,7 @@ class RemoteKey(Resource):
def async_render_GET(self, request):
if len(request.postpath) == 1:
server, = request.postpath
- query = {server: {}}
+ query = {server.decode('ascii'): {}}
elif len(request.postpath) == 2:
server, key_id = request.postpath
minimum_valid_until_ts = parse_integer(
@@ -112,11 +112,12 @@ class RemoteKey(Resource):
arguments = {}
if minimum_valid_until_ts is not None:
arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
- query = {server: {key_id: arguments}}
+ query = {server.decode('ascii'): {key_id.decode('ascii'): arguments}}
else:
raise SynapseError(
404, "Not found %r" % request.postpath, Codes.NOT_FOUND
)
+
yield self.query_keys(request, query, query_remote_on_cache_miss=True)
def render_POST(self, request):
@@ -135,6 +136,7 @@ class RemoteKey(Resource):
@defer.inlineCallbacks
def query_keys(self, request, query, query_remote_on_cache_miss=False):
logger.info("Handling query for keys %r", query)
+
store_queries = []
for server_name, key_ids in query.items():
if (
diff --git a/synapse/rest/media/v0/content_repository.py b/synapse/rest/media/v0/content_repository.py
index f255f2883f..5a426ff2f6 100644
--- a/synapse/rest/media/v0/content_repository.py
+++ b/synapse/rest/media/v0/content_repository.py
@@ -56,7 +56,7 @@ class ContentRepoResource(resource.Resource):
# servers.
# TODO: A little crude here, we could do this better.
- filename = request.path.split('/')[-1]
+ filename = request.path.decode('ascii').split('/')[-1]
# be paranoid
filename = re.sub("[^0-9A-z.-_]", "", filename)
@@ -78,7 +78,7 @@ class ContentRepoResource(resource.Resource):
# select private. don't bother setting Expires as all our matrix
# clients are smart enough to be happy with Cache-Control (right?)
request.setHeader(
- "Cache-Control", "public,max-age=86400,s-maxage=86400"
+ b"Cache-Control", b"public,max-age=86400,s-maxage=86400"
)
d = FileSender().beginFileTransfer(f, request)
diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py
index 65f4bd2910..76e479afa3 100644
--- a/synapse/rest/media/v1/_base.py
+++ b/synapse/rest/media/v1/_base.py
@@ -15,9 +15,8 @@
import logging
import os
-import urllib
-from six.moves.urllib import parse as urlparse
+from six.moves import urllib
from twisted.internet import defer
from twisted.protocols.basic import FileSender
@@ -35,10 +34,15 @@ def parse_media_id(request):
# This allows users to append e.g. /test.png to the URL. Useful for
# clients that parse the URL to see content type.
server_name, media_id = request.postpath[:2]
+
+ if isinstance(server_name, bytes):
+ server_name = server_name.decode('utf-8')
+ media_id = media_id.decode('utf8')
+
file_name = None
if len(request.postpath) > 2:
try:
- file_name = urlparse.unquote(request.postpath[-1]).decode("utf-8")
+ file_name = urllib.parse.unquote(request.postpath[-1].decode("utf-8"))
except UnicodeDecodeError:
pass
return server_name, media_id, file_name
@@ -93,22 +97,18 @@ def add_file_headers(request, media_type, file_size, upload_name):
file_size (int): Size in bytes of the media, if known.
upload_name (str): The name of the requested file, if any.
"""
+ def _quote(x):
+ return urllib.parse.quote(x.encode("utf-8"))
+
request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
if upload_name:
if is_ascii(upload_name):
- request.setHeader(
- b"Content-Disposition",
- b"inline; filename=%s" % (
- urllib.quote(upload_name.encode("utf-8")),
- ),
- )
+ disposition = ("inline; filename=%s" % (_quote(upload_name),)).encode("ascii")
else:
- request.setHeader(
- b"Content-Disposition",
- b"inline; filename*=utf-8''%s" % (
- urllib.quote(upload_name.encode("utf-8")),
- ),
- )
+ disposition = (
+ "inline; filename*=utf-8''%s" % (_quote(upload_name),)).encode("ascii")
+
+ request.setHeader(b"Content-Disposition", disposition)
# cache for at least a day.
# XXX: we might want to turn this off for data we don't want to
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index fbfa85f74f..ca90964d1d 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -47,12 +47,12 @@ class DownloadResource(Resource):
def _async_render_GET(self, request):
set_cors_headers(request)
request.setHeader(
- "Content-Security-Policy",
- "default-src 'none';"
- " script-src 'none';"
- " plugin-types application/pdf;"
- " style-src 'unsafe-inline';"
- " object-src 'self';"
+ b"Content-Security-Policy",
+ b"default-src 'none';"
+ b" script-src 'none';"
+ b" plugin-types application/pdf;"
+ b" style-src 'unsafe-inline';"
+ b" object-src 'self';"
)
server_name, media_id, name = parse_media_id(request)
if server_name == self.server_name:
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 241c972070..a828ff4438 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -20,7 +20,7 @@ import logging
import os
import shutil
-from six import iteritems
+from six import PY3, iteritems
from six.moves.urllib import parse as urlparse
import twisted.internet.error
@@ -397,13 +397,13 @@ class MediaRepository(object):
yield finish()
- media_type = headers["Content-Type"][0]
+ media_type = headers[b"Content-Type"][0].decode('ascii')
time_now_ms = self.clock.time_msec()
- content_disposition = headers.get("Content-Disposition", None)
+ content_disposition = headers.get(b"Content-Disposition", None)
if content_disposition:
- _, params = cgi.parse_header(content_disposition[0],)
+ _, params = cgi.parse_header(content_disposition[0].decode('ascii'),)
upload_name = None
# First check if there is a valid UTF-8 filename
@@ -419,9 +419,13 @@ class MediaRepository(object):
upload_name = upload_name_ascii
if upload_name:
- upload_name = urlparse.unquote(upload_name)
+ if PY3:
+ upload_name = urlparse.unquote(upload_name)
+ else:
+ upload_name = urlparse.unquote(upload_name.encode('ascii'))
try:
- upload_name = upload_name.decode("utf-8")
+ if isinstance(upload_name, bytes):
+ upload_name = upload_name.decode("utf-8")
except UnicodeDecodeError:
upload_name = None
else:
@@ -755,14 +759,15 @@ class MediaRepositoryResource(Resource):
Resource.__init__(self)
media_repo = hs.get_media_repository()
- self.putChild("upload", UploadResource(hs, media_repo))
- self.putChild("download", DownloadResource(hs, media_repo))
- self.putChild("thumbnail", ThumbnailResource(
+
+ self.putChild(b"upload", UploadResource(hs, media_repo))
+ self.putChild(b"download", DownloadResource(hs, media_repo))
+ self.putChild(b"thumbnail", ThumbnailResource(
hs, media_repo, media_repo.media_storage,
))
- self.putChild("identicon", IdenticonResource())
+ self.putChild(b"identicon", IdenticonResource())
if hs.config.url_preview_enabled:
- self.putChild("preview_url", PreviewUrlResource(
+ self.putChild(b"preview_url", PreviewUrlResource(
hs, media_repo, media_repo.media_storage,
))
- self.putChild("config", MediaConfigResource(hs))
+ self.putChild(b"config", MediaConfigResource(hs))
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index 778ef97337..cad2dec33a 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -261,7 +261,7 @@ class PreviewUrlResource(Resource):
logger.debug("Calculated OG for %s as %s" % (url, og))
- jsonog = json.dumps(og)
+ jsonog = json.dumps(og).encode('utf8')
# store OG in history-aware DB cache
yield self.store.store_url_cache(
@@ -301,20 +301,20 @@ class PreviewUrlResource(Resource):
logger.warn("Error downloading %s: %r", url, e)
raise SynapseError(
500, "Failed to download content: %s" % (
- traceback.format_exception_only(sys.exc_type, e),
+ traceback.format_exception_only(sys.exc_info()[0], e),
),
Codes.UNKNOWN,
)
yield finish()
try:
- if "Content-Type" in headers:
- media_type = headers["Content-Type"][0]
+ if b"Content-Type" in headers:
+ media_type = headers[b"Content-Type"][0].decode('ascii')
else:
media_type = "application/octet-stream"
time_now_ms = self.clock.time_msec()
- content_disposition = headers.get("Content-Disposition", None)
+ content_disposition = headers.get(b"Content-Disposition", None)
if content_disposition:
_, params = cgi.parse_header(content_disposition[0],)
download_name = None
diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py
index f547977600..a1331c1a61 100644
--- a/synapse/storage/keys.py
+++ b/synapse/storage/keys.py
@@ -134,6 +134,7 @@ class KeyStore(SQLBaseStore):
"""
key_id = "%s:%s" % (verify_key.alg, verify_key.version)
+ # XXX fix this to not need a lock (#3819)
def _txn(txn):
self._simple_upsert_txn(
txn,
diff --git a/synapse/util/manhole.py b/synapse/util/manhole.py
index 14be3c7396..8d0f2a8918 100644
--- a/synapse/util/manhole.py
+++ b/synapse/util/manhole.py
@@ -19,22 +19,40 @@ from twisted.conch.ssh.keys import Key
from twisted.cred import checkers, portal
PUBLIC_KEY = (
- "ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEArzJx8OYOnJmzf4tfBEvLi8DVPrJ3/c9k2I/Az"
- "64fxjHf9imyRJbixtQhlH9lfNjUIx+4LmrJH5QNRsFporcHDKOTwTTYLh5KmRpslkYHRivcJS"
- "kbh/C+BR3utDS555mV"
+ "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDHhGATaW4KhE23+7nrH4jFx3yLq9OjaEs5"
+ "XALqeK+7385NlLja3DE/DO9mGhnd9+bAy39EKT3sTV6+WXQ4yD0TvEEyUEMtjWkSEm6U32+C"
+ "DaS3TW/vPBUMeJQwq+Ydcif1UlnpXrDDTamD0AU9VaEvHq+3HAkipqn0TGpKON6aqk4vauDx"
+ "oXSsV5TXBVrxP/y7HpMOpU4GUWsaaacBTKKNnUaQB4UflvydaPJUuwdaCUJGTMjbhWrjVfK+"
+ "jslseSPxU6XvrkZMyCr4znxvuDxjMk1RGIdO7v+rbBMLEgqtSMNqJbYeVCnj2CFgc3fcTcld"
+ "X2uOJDrJb/WRlHulthCh"
)
PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY-----
-MIIByAIBAAJhAK8ycfDmDpyZs3+LXwRLy4vA1T6yd/3PZNiPwM+uH8Yx3/YpskSW
-4sbUIZR/ZXzY1CMfuC5qyR+UDUbBaaK3Bwyjk8E02C4eSpkabJZGB0Yr3CUpG4fw
-vgUd7rQ0ueeZlQIBIwJgbh+1VZfr7WftK5lu7MHtqE1S1vPWZQYE3+VUn8yJADyb
-Z4fsZaCrzW9lkIqXkE3GIY+ojdhZhkO1gbG0118sIgphwSWKRxK0mvh6ERxKqIt1
-xJEJO74EykXZV4oNJ8sjAjEA3J9r2ZghVhGN6V8DnQrTk24Td0E8hU8AcP0FVP+8
-PQm/g/aXf2QQkQT+omdHVEJrAjEAy0pL0EBH6EVS98evDCBtQw22OZT52qXlAwZ2
-gyTriKFVoqjeEjt3SZKKqXHSApP/AjBLpF99zcJJZRq2abgYlf9lv1chkrWqDHUu
-DZttmYJeEfiFBBavVYIF1dOlZT0G8jMCMBc7sOSZodFnAiryP+Qg9otSBjJ3bQML
-pSTqy7c3a2AScC/YyOwkDaICHnnD3XyjMwIxALRzl0tQEKMXs6hH8ToUdlLROCrP
-EhQ0wahUTCk1gKA4uPD6TMTChavbh4K63OvbKg==
+MIIEpQIBAAKCAQEAx4RgE2luCoRNt/u56x+Ixcd8i6vTo2hLOVwC6nivu9/OTZS4
+2twxPwzvZhoZ3ffmwMt/RCk97E1evll0OMg9E7xBMlBDLY1pEhJulN9vgg2kt01v
+7zwVDHiUMKvmHXIn9VJZ6V6ww02pg9AFPVWhLx6vtxwJIqap9ExqSjjemqpOL2rg
+8aF0rFeU1wVa8T/8ux6TDqVOBlFrGmmnAUyijZ1GkAeFH5b8nWjyVLsHWglCRkzI
+24Vq41Xyvo7JbHkj8VOl765GTMgq+M58b7g8YzJNURiHTu7/q2wTCxIKrUjDaiW2
+HlQp49ghYHN33E3JXV9rjiQ6yW/1kZR7pbYQoQIDAQABAoIBAQC8KJ0q8Wzzwh5B
+esa1dQHZ8+4DEsL/Amae66VcVwD0X3cCN1W2IZ7X5W0Ij2kBqr8V51RYhcR+S+Ek
+BtzSiBUBvbKGrqcMGKaUgomDIMzai99hd0gvCCyZnEW1OQhFkNkaRNXCfqiZJ27M
+fqvSUiU2eOwh9fCvmxoA6Of8o3FbzcJ+1GMcobWRllDtLmj6lgVbDzuA+0jC5daB
+9Tj1pBzu3wn3ufxiS+gBnJ+7NcXH3E73lqCcPa2ufbZ1haxfiGCnRIhFXuQDgxFX
+vKdEfDgtvas6r1ahGbc+b/q8E8fZT7cABuIU4yfOORK+MhpyWbvoyyzuVGKj3PKt
+KSPJu5CZAoGBAOkoJfAVyYteqKcmGTanGqQnAY43CaYf6GdSPX/jg+JmKZg0zqMC
+jWZUtPb93i+jnOInbrnuHOiHAxI8wmhEPed28H2lC/LU8PzlqFkZXKFZ4vLOhhRB
+/HeHCFIDosPFlohWi3b+GAjD7sXgnIuGmnXWe2ea/TS3yersifDEoKKjAoGBANsQ
+gJX2cJv1c3jhdgcs8vAt5zIOKcCLTOr/QPmVf/kxjNgndswcKHwsxE/voTO9q+TF
+v/6yCSTxAdjuKz1oIYWgi/dZo82bBKWxNRpgrGviU3/zwxiHlyIXUhzQu78q3VS/
+7S1XVbc7qMV++XkYKHPVD+nVG/gGzFxumX7MLXfrAoGBAJit9cn2OnjNj9uFE1W6
+r7N254ndeLAUjPe73xH0RtTm2a4WRopwjW/JYIetTuYbWgyujc+robqTTuuOZjAp
+H/CG7o0Ym251CypQqaFO/l2aowclPp/dZhpPjp9GSjuxFBZLtiBB3DNBOwbRQzIK
+/vLTdRQvZkgzYkI4i0vjNt3JAoGBANP8HSKBLymMlShlrSx2b8TB9tc2Y2riohVJ
+2ttqs0M2kt/dGJWdrgOz4mikL+983Olt/0P9juHDoxEEMK2kpcPEv40lnmBpYU7h
+s8yJvnBLvJe2EJYdJ8AipyAhUX1FgpbvfxmASP8eaUxsegeXvBWTGWojAoS6N2o+
+0KSl+l3vAoGAFqm0gO9f/Q1Se60YQd4l2PZeMnJFv0slpgHHUwegmd6wJhOD7zJ1
+CkZcXwiv7Nog7AI9qKJEUXLjoqL+vJskBzSOqU3tcd670YQMi1aXSXJqYE202K7o
+EddTrx3TNpr1D5m/f+6mnXWrc8u9y1+GNx9yz889xMjIBTBI9KqaaOs=
-----END RSA PRIVATE KEY-----"""