-rw-r--r--  changelog.d/7519.misc                             |   1
-rw-r--r--  scripts-dev/convert_server_keys.py                |   9
-rw-r--r--  synapse/http/matrixfederationclient.py            |   8
-rw-r--r--  synapse/logging/utils.py                          |  10
-rw-r--r--  synapse/push/httppusher.py                        |  11
-rw-r--r--  synapse/replication/slave/storage/_base.py        |   9
-rw-r--r--  synapse/replication/slave/storage/presence.py     |   8
-rw-r--r--  synapse/rest/media/v1/_base.py                    |  27
-rw-r--r--  synapse/storage/_base.py                          |   8
-rw-r--r--  synapse/storage/data_stores/main/keys.py          |  10
-rw-r--r--  synapse/storage/data_stores/main/roommember.py    |  25
-rw-r--r--  synapse/storage/data_stores/main/state.py         |   5
-rw-r--r--  synapse/storage/data_stores/main/transactions.py  |   9
-rw-r--r--  synapse/storage/database.py                       |  15
-rw-r--r--  synapse/util/caches/__init__.py                   |   7
-rw-r--r--  synapse/util/stringutils.py                       |  82
16 files changed, 48 insertions, 196 deletions
diff --git a/changelog.d/7519.misc b/changelog.d/7519.misc
new file mode 100644
index 0000000000..c730b5e507
--- /dev/null
+++ b/changelog.d/7519.misc
@@ -0,0 +1 @@
+Remove some redundant Python 2 support code.
diff --git a/scripts-dev/convert_server_keys.py b/scripts-dev/convert_server_keys.py
index 06b4c1e2ff..961dc59f11 100644
--- a/scripts-dev/convert_server_keys.py
+++ b/scripts-dev/convert_server_keys.py
@@ -3,8 +3,6 @@ import json
 import sys
 import time
 
-import six
-
 import psycopg2
 import yaml
 from canonicaljson import encode_canonical_json
@@ -12,10 +10,7 @@ from signedjson.key import read_signing_keys
 from signedjson.sign import sign_json
 from unpaddedbase64 import encode_base64
 
-if six.PY2:
-    db_type = six.moves.builtins.buffer
-else:
-    db_type = memoryview
+db_binary_type = memoryview
 
 
 def select_v1_keys(connection):
@@ -72,7 +67,7 @@ def rows_v2(server, json):
     valid_until = json["valid_until_ts"]
     key_json = encode_canonical_json(json)
     for key_id in json["verify_keys"]:
-        yield (server, key_id, "-", valid_until, valid_until, db_type(key_json))
+        yield (server, key_id, "-", valid_until, valid_until, db_binary_type(key_json))
 
 
 def main():
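
For context (not part of the patch): memoryview gives a zero-copy, buffer-protocol
view over bytes, which is why it can stand in for the old Python 2 buffer type as
the binary value handed to the database driver. A minimal sketch, assuming a plain
bytes payload:

    key_json = b'{"verify_keys": {}}'
    db_binary_type = memoryview            # as in the patched script
    wrapped = db_binary_type(key_json)     # zero-copy view over the key JSON bytes
    assert wrapped.tobytes() == key_json
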
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 225a47e3c3..44077f5349 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -19,7 +19,7 @@ import random
 import sys
 from io import BytesIO
 
-from six import PY3, raise_from, string_types
+from six import raise_from, string_types
 from six.moves import urllib
 
 import attr
@@ -70,11 +70,7 @@ incoming_responses_counter = Counter(
 
 MAX_LONG_RETRIES = 10
 MAX_SHORT_RETRIES = 3
-
-if PY3:
-    MAXINT = sys.maxsize
-else:
-    MAXINT = sys.maxint
+MAXINT = sys.maxsize
 
 
 _next_id = 1
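
A side note on the collapsed branch: Python 3 has no sys.maxint, and sys.maxsize
(the largest size a container can hold, 2**63 - 1 on 64-bit CPython) is the usual
stand-in for a large integer bound. A minimal sketch of that kind of use, with a
hypothetical variable name:

    import random
    import sys

    MAXINT = sys.maxsize
    txn_id = random.randint(0, MAXINT)  # arbitrary large upper bound for an id
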
diff --git a/synapse/logging/utils.py b/synapse/logging/utils.py
index 0c2527bd86..99049bb5d8 100644
--- a/synapse/logging/utils.py
+++ b/synapse/logging/utils.py
@@ -20,8 +20,6 @@ import time
 from functools import wraps
 from inspect import getcallargs
 
-from six import PY3
-
 _TIME_FUNC_ID = 0
 
 
@@ -30,12 +28,8 @@ def _log_debug_as_f(f, msg, msg_args):
     logger = logging.getLogger(name)
 
     if logger.isEnabledFor(logging.DEBUG):
-        if PY3:
-            lineno = f.__code__.co_firstlineno
-            pathname = f.__code__.co_filename
-        else:
-            lineno = f.func_code.co_firstlineno
-            pathname = f.func_code.co_filename
+        lineno = f.__code__.co_firstlineno
+        pathname = f.__code__.co_filename
 
         record = logging.LogRecord(
             name=name,
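
A small sketch, independent of the patch, of the attribute the surviving branch
relies on: every Python 3 function exposes its code object as __code__ (func_code
was the Python 2 spelling only), which carries the filename and first line number
used to build the log record.

    def example():
        pass

    print(example.__code__.co_filename, example.__code__.co_firstlineno)
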
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index 5bb17d1228..eaaa7afc91 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -15,8 +15,6 @@
 # limitations under the License.
 import logging
 
-import six
-
 from prometheus_client import Counter
 
 from twisted.internet import defer
@@ -28,9 +26,6 @@ from synapse.push import PusherConfigException
 
 from . import push_rule_evaluator, push_tools
 
-if six.PY3:
-    long = int
-
 logger = logging.getLogger(__name__)
 
 http_push_processed_counter = Counter(
@@ -318,7 +313,7 @@ class HttpPusher(object):
                         {
                             "app_id": self.app_id,
                             "pushkey": self.pushkey,
-                            "pushkey_ts": long(self.pushkey_ts / 1000),
+                            "pushkey_ts": int(self.pushkey_ts / 1000),
                             "data": self.data_minus_url,
                         }
                     ],
@@ -347,7 +342,7 @@ class HttpPusher(object):
                     {
                         "app_id": self.app_id,
                         "pushkey": self.pushkey,
-                        "pushkey_ts": long(self.pushkey_ts / 1000),
+                        "pushkey_ts": int(self.pushkey_ts / 1000),
                         "data": self.data_minus_url,
                         "tweaks": tweaks,
                     }
@@ -409,7 +404,7 @@ class HttpPusher(object):
                     {
                         "app_id": self.app_id,
                         "pushkey": self.pushkey,
-                        "pushkey_ts": long(self.pushkey_ts / 1000),
+                        "pushkey_ts": int(self.pushkey_ts / 1000),
                         "data": self.data_minus_url,
                     }
                 ],
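
For reference, a minimal sketch of the substitution: Python 3's int has arbitrary
precision, so the old long() call adds nothing when truncating a millisecond
timestamp to seconds. The timestamp value below is made up for illustration.

    pushkey_ts = 1589892680123        # hypothetical value in milliseconds
    print(int(pushkey_ts / 1000))     # 1589892680
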
diff --git a/synapse/replication/slave/storage/_base.py b/synapse/replication/slave/storage/_base.py
index 2904bd0235..f9e2533e96 100644
--- a/synapse/replication/slave/storage/_base.py
+++ b/synapse/replication/slave/storage/_base.py
@@ -16,8 +16,6 @@
 import logging
 from typing import Optional
 
-import six
-
 from synapse.storage.data_stores.main.cache import CacheInvalidationWorkerStore
 from synapse.storage.database import Database
 from synapse.storage.engines import PostgresEngine
@@ -26,13 +24,6 @@ from synapse.storage.util.id_generators import MultiWriterIdGenerator
 logger = logging.getLogger(__name__)
 
 
-def __func__(inp):
-    if six.PY3:
-        return inp
-    else:
-        return inp.__func__
-
-
 class BaseSlavedStore(CacheInvalidationWorkerStore):
     def __init__(self, database: Database, db_conn, hs):
         super(BaseSlavedStore, self).__init__(database, db_conn, hs)
diff --git a/synapse/replication/slave/storage/presence.py b/synapse/replication/slave/storage/presence.py
index bd79ba99be..4e0124842d 100644
--- a/synapse/replication/slave/storage/presence.py
+++ b/synapse/replication/slave/storage/presence.py
@@ -18,7 +18,7 @@ from synapse.storage.data_stores.main.presence import PresenceStore
 from synapse.storage.database import Database
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 
-from ._base import BaseSlavedStore, __func__
+from ._base import BaseSlavedStore
 from ._slaved_id_tracker import SlavedIdTracker
 
 
@@ -27,14 +27,14 @@ class SlavedPresenceStore(BaseSlavedStore):
         super(SlavedPresenceStore, self).__init__(database, db_conn, hs)
         self._presence_id_gen = SlavedIdTracker(db_conn, "presence_stream", "stream_id")
 
-        self._presence_on_startup = self._get_active_presence(db_conn)
+        self._presence_on_startup = self._get_active_presence(db_conn)  # type: ignore
 
         self.presence_stream_cache = StreamChangeCache(
             "PresenceStreamChangeCache", self._presence_id_gen.get_current_token()
         )
 
-    _get_active_presence = __func__(DataStore._get_active_presence)
-    take_presence_startup_info = __func__(DataStore.take_presence_startup_info)
+    _get_active_presence = DataStore._get_active_presence
+    take_presence_startup_info = DataStore.take_presence_startup_info
     _get_presence_for_user = PresenceStore.__dict__["_get_presence_for_user"]
     get_presence_for_users = PresenceStore.__dict__["get_presence_for_users"]
 
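
Sketch of the Python 3 behaviour that makes the removed __func__ helper
unnecessary: looking a method up on a class returns the plain function itself,
so it can be re-bound onto another class directly. The class names here are
illustrative only.

    class Source:
        def greet(self):
            return "hello from %s" % type(self).__name__

    class Mirror:
        greet = Source.greet   # plain function on Python 3; no .__func__ unwrapping

    print(Mirror().greet())    # hello from Mirror
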
diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py
index 503f2bed98..3689777266 100644
--- a/synapse/rest/media/v1/_base.py
+++ b/synapse/rest/media/v1/_base.py
@@ -17,7 +17,6 @@
 import logging
 import os
 
-from six import PY3
 from six.moves import urllib
 
 from twisted.internet import defer
@@ -324,23 +323,15 @@ def get_filename_from_headers(headers):
             upload_name_utf8 = upload_name_utf8[7:]
             # We have a filename*= section. This MUST be ASCII, and any UTF-8
             # bytes are %-quoted.
-            if PY3:
-                try:
-                    # Once it is decoded, we can then unquote the %-encoded
-                    # parts strictly into a unicode string.
-                    upload_name = urllib.parse.unquote(
-                        upload_name_utf8.decode("ascii"), errors="strict"
-                    )
-                except UnicodeDecodeError:
-                    # Incorrect UTF-8.
-                    pass
-            else:
-                # On Python 2, we first unquote the %-encoded parts and then
-                # decode it strictly using UTF-8.
-                try:
-                    upload_name = urllib.parse.unquote(upload_name_utf8).decode("utf8")
-                except UnicodeDecodeError:
-                    pass
+            try:
+                # Once it is decoded, we can then unquote the %-encoded
+                # parts strictly into a unicode string.
+                upload_name = urllib.parse.unquote(
+                    upload_name_utf8.decode("ascii"), errors="strict"
+                )
+            except UnicodeDecodeError:
+                # Incorrect UTF-8.
+                pass
 
     # If there isn't check for an ascii name.
     if not upload_name:
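
An illustrative sketch (simplified, not the full header handling in the patch) of
the surviving decode path for a filename*= parameter: the value is ASCII with
%-escaped UTF-8 bytes, so it is decoded as ASCII first and then strictly unquoted.

    from urllib.parse import unquote

    upload_name_utf8 = b"utf-8''caf%C3%A9.txt"[7:]   # strip the leading "utf-8''"
    upload_name = unquote(upload_name_utf8.decode("ascii"), errors="strict")
    print(upload_name)  # café.txt
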
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 59073c0a42..bfce541ca7 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -19,9 +19,6 @@ import random
 from abc import ABCMeta
 from typing import Any, Optional
 
-from six import PY2
-from six.moves import builtins
-
 from canonicaljson import json
 
 from synapse.storage.database import LoggingTransaction  # noqa: F401
@@ -103,11 +100,6 @@ def db_to_json(db_content):
     if isinstance(db_content, memoryview):
         db_content = db_content.tobytes()
 
-    # psycopg2 on Python 2 returns buffer objects, which we need to cast to
-    # bytes to decode
-    if PY2 and isinstance(db_content, builtins.buffer):
-        db_content = bytes(db_content)
-
     # Decode it to a Unicode string before feeding it to json.loads, so we
     # consistenty get a Unicode-containing object out.
     if isinstance(db_content, (bytes, bytearray)):
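
A minimal sketch of the remaining normalisation in db_to_json, assuming a
psycopg2-style bytea value arrives as a memoryview: it is flattened to bytes and
decoded to text before being handed to json.loads.

    import json

    db_content = memoryview(b'{"ok": true}')
    raw = db_content.tobytes().decode("utf-8")
    print(json.loads(raw))  # {'ok': True}
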
diff --git a/synapse/storage/data_stores/main/keys.py b/synapse/storage/data_stores/main/keys.py
index ba89c68c9f..4e1642a27a 100644
--- a/synapse/storage/data_stores/main/keys.py
+++ b/synapse/storage/data_stores/main/keys.py
@@ -17,8 +17,6 @@
 import itertools
 import logging
 
-import six
-
 from signedjson.key import decode_verify_key_bytes
 
 from synapse.storage._base import SQLBaseStore
@@ -28,12 +26,8 @@ from synapse.util.iterutils import batch_iter
 
 logger = logging.getLogger(__name__)
 
-# py2 sqlite has buffer hardcoded as only binary type, so we must use it,
-# despite being deprecated and removed in favor of memoryview
-if six.PY2:
-    db_binary_type = six.moves.builtins.buffer
-else:
-    db_binary_type = memoryview
+
+db_binary_type = memoryview
 
 
 class KeyStore(SQLBaseStore):
diff --git a/synapse/storage/data_stores/main/roommember.py b/synapse/storage/data_stores/main/roommember.py
index 48810a3e91..1e9c850152 100644
--- a/synapse/storage/data_stores/main/roommember.py
+++ b/synapse/storage/data_stores/main/roommember.py
@@ -45,7 +45,6 @@ from synapse.util.async_helpers import Linearizer
 from synapse.util.caches import intern_string
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
 from synapse.util.metrics import Measure
-from synapse.util.stringutils import to_ascii
 
 logger = logging.getLogger(__name__)
 
@@ -179,7 +178,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             """
 
         txn.execute(sql, (room_id, Membership.JOIN))
-        return [to_ascii(r[0]) for r in txn]
+        return [r[0] for r in txn]
 
     @cached(max_entries=100000)
     def get_room_summary(self, room_id):
@@ -223,7 +222,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             txn.execute(sql, (room_id,))
             res = {}
             for count, membership in txn:
-                summary = res.setdefault(to_ascii(membership), MemberSummary([], count))
+                summary = res.setdefault(membership, MemberSummary([], count))
 
             # we order by membership and then fairly arbitrarily by event_id so
             # heroes are consistent
@@ -255,11 +254,11 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             # 6 is 5 (number of heroes) plus 1, in case one of them is the calling user.
             txn.execute(sql, (room_id, Membership.JOIN, Membership.INVITE, 6))
             for user_id, membership, event_id in txn:
-                summary = res[to_ascii(membership)]
+                summary = res[membership]
                 # we will always have a summary for this membership type at this
                 # point given the summary currently contains the counts.
                 members = summary.members
-                members.append((to_ascii(user_id), to_ascii(event_id)))
+                members.append((user_id, event_id))
 
             return res
 
@@ -584,13 +583,9 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             ev_entry = event_map.get(event_id)
             if ev_entry:
                 if ev_entry.event.membership == Membership.JOIN:
-                    users_in_room[to_ascii(ev_entry.event.state_key)] = ProfileInfo(
-                        display_name=to_ascii(
-                            ev_entry.event.content.get("displayname", None)
-                        ),
-                        avatar_url=to_ascii(
-                            ev_entry.event.content.get("avatar_url", None)
-                        ),
+                    users_in_room[ev_entry.event.state_key] = ProfileInfo(
+                        display_name=ev_entry.event.content.get("displayname", None),
+                        avatar_url=ev_entry.event.content.get("avatar_url", None),
                     )
             else:
                 missing_member_event_ids.append(event_id)
@@ -604,9 +599,9 @@ class RoomMemberWorkerStore(EventsWorkerStore):
         if event is not None and event.type == EventTypes.Member:
             if event.membership == Membership.JOIN:
                 if event.event_id in member_event_ids:
-                    users_in_room[to_ascii(event.state_key)] = ProfileInfo(
-                        display_name=to_ascii(event.content.get("displayname", None)),
-                        avatar_url=to_ascii(event.content.get("avatar_url", None)),
+                    users_in_room[event.state_key] = ProfileInfo(
+                        display_name=event.content.get("displayname", None),
+                        avatar_url=event.content.get("avatar_url", None),
                     )
 
         return users_in_room
diff --git a/synapse/storage/data_stores/main/state.py b/synapse/storage/data_stores/main/state.py
index 21052fcc7a..347cc50778 100644
--- a/synapse/storage/data_stores/main/state.py
+++ b/synapse/storage/data_stores/main/state.py
@@ -29,7 +29,6 @@ from synapse.storage.database import Database
 from synapse.storage.state import StateFilter
 from synapse.util.caches import intern_string
 from synapse.util.caches.descriptors import cached, cachedList
-from synapse.util.stringutils import to_ascii
 
 logger = logging.getLogger(__name__)
 
@@ -185,9 +184,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
                 (room_id,),
             )
 
-            return {
-                (intern_string(r[0]), intern_string(r[1])): to_ascii(r[2]) for r in txn
-            }
+            return {(intern_string(r[0]), intern_string(r[1])): r[2] for r in txn}
 
         return self.db.runInteraction(
             "get_current_state_ids", _get_current_state_ids_txn
diff --git a/synapse/storage/data_stores/main/transactions.py b/synapse/storage/data_stores/main/transactions.py
index 5b07c2fbc0..a9bf457939 100644
--- a/synapse/storage/data_stores/main/transactions.py
+++ b/synapse/storage/data_stores/main/transactions.py
@@ -16,8 +16,6 @@
 import logging
 from collections import namedtuple
 
-import six
-
 from canonicaljson import encode_canonical_json
 
 from twisted.internet import defer
@@ -27,12 +25,7 @@ from synapse.storage._base import SQLBaseStore, db_to_json
 from synapse.storage.database import Database
 from synapse.util.caches.expiringcache import ExpiringCache
 
-# py2 sqlite has buffer hardcoded as only binary type, so we must use it,
-# despite being deprecated and removed in favor of memoryview
-if six.PY2:
-    db_binary_type = six.moves.builtins.buffer
-else:
-    db_binary_type = memoryview
+db_binary_type = memoryview
 
 logger = logging.getLogger(__name__)
 
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index c3d0863429..9947dbce77 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -50,7 +50,6 @@ from synapse.storage.background_updates import BackgroundUpdater
 from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
 from synapse.storage.types import Connection, Cursor
 from synapse.types import Collection
-from synapse.util.stringutils import exception_to_unicode
 
 logger = logging.getLogger(__name__)
 
@@ -424,20 +423,14 @@ class Database(object):
                     # This can happen if the database disappears mid
                     # transaction.
                     logger.warning(
-                        "[TXN OPERROR] {%s} %s %d/%d",
-                        name,
-                        exception_to_unicode(e),
-                        i,
-                        N,
+                        "[TXN OPERROR] {%s} %s %d/%d", name, e, i, N,
                     )
                     if i < N:
                         i += 1
                         try:
                             conn.rollback()
                         except self.engine.module.Error as e1:
-                            logger.warning(
-                                "[TXN EROLL] {%s} %s", name, exception_to_unicode(e1)
-                            )
+                            logger.warning("[TXN EROLL] {%s} %s", name, e1)
                         continue
                     raise
                 except self.engine.module.DatabaseError as e:
@@ -449,9 +442,7 @@ class Database(object):
                                 conn.rollback()
                             except self.engine.module.Error as e1:
                                 logger.warning(
-                                    "[TXN EROLL] {%s} %s",
-                                    name,
-                                    exception_to_unicode(e1),
+                                    "[TXN EROLL] {%s} %s", name, e1,
                                 )
                             continue
                     raise
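
A quick sketch of why exception_to_unicode could be dropped: on Python 3, passing
an exception straight to the logger (or calling str() on it) always yields text,
even when the driver built the message from non-ASCII bytes.

    import logging

    logger = logging.getLogger(__name__)
    try:
        raise RuntimeError("connexion fermée")   # non-ASCII message, made up for illustration
    except RuntimeError as e:
        logger.warning("[TXN OPERROR] {%s} %s", "txn-name", e)  # no explicit decoding needed
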
diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py
index 4b8a0c7a8f..dd356bf156 100644
--- a/synapse/util/caches/__init__.py
+++ b/synapse/util/caches/__init__.py
@@ -15,11 +15,9 @@
 # limitations under the License.
 
 import logging
+from sys import intern
 from typing import Callable, Dict, Optional
 
-import six
-from six.moves import intern
-
 import attr
 from prometheus_client.core import Gauge
 
@@ -154,9 +152,6 @@ def intern_string(string):
         return None
 
     try:
-        if six.PY2:
-            string = string.encode("ascii")
-
         return intern(string)
     except UnicodeEncodeError:
         return string
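
Sketch of the relocated import: intern was a builtin on Python 2 but lives in the
sys module on Python 3, and it operates on str directly, so the ASCII re-encode
step is gone.

    from sys import intern

    a = intern("m.room.member")
    b = intern("m.room." + "member")
    assert a is b   # interned strings share a single object
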
diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py
index 6899bcb788..08c86e92b8 100644
--- a/synapse/util/stringutils.py
+++ b/synapse/util/stringutils.py
@@ -19,10 +19,6 @@ import re
 import string
 from collections import Iterable
 
-import six
-from six import PY2, PY3
-from six.moves import range
-
 from synapse.api.errors import Codes, SynapseError
 
 _string_with_symbols = string.digits + string.ascii_letters + ".,;:^&*-_+=#~@"
@@ -47,80 +43,16 @@ def random_string_with_symbols(length):
 
 
 def is_ascii(s):
-
-    if PY3:
-        if isinstance(s, bytes):
-            try:
-                s.decode("ascii").encode("ascii")
-            except UnicodeDecodeError:
-                return False
-            except UnicodeEncodeError:
-                return False
-            return True
-
-    try:
-        s.encode("ascii")
-    except UnicodeEncodeError:
-        return False
-    except UnicodeDecodeError:
-        return False
-    else:
+    if isinstance(s, bytes):
+        try:
+            s.decode("ascii").encode("ascii")
+        except UnicodeDecodeError:
+            return False
+        except UnicodeEncodeError:
+            return False
         return True
 
 
-def to_ascii(s):
-    """Converts a string to ascii if it is ascii, otherwise leave it alone.
-
-    If given None then will return None.
-    """
-    if PY3:
-        return s
-
-    if s is None:
-        return None
-
-    try:
-        return s.encode("ascii")
-    except UnicodeEncodeError:
-        return s
-
-
-def exception_to_unicode(e):
-    """Helper function to extract the text of an exception as a unicode string
-
-    Args:
-        e (Exception): exception to be stringified
-
-    Returns:
-        unicode
-    """
-    # urgh, this is a mess. The basic problem here is that psycopg2 constructs its
-    # exceptions with PyErr_SetString, with a (possibly non-ascii) argument. str() will
-    # then produce the raw byte sequence. Under Python 2, this will then cause another
-    # error if it gets mixed with a `unicode` object, as per
-    # https://github.com/matrix-org/synapse/issues/4252
-
-    # First of all, if we're under python3, everything is fine because it will sort this
-    # nonsense out for us.
-    if not PY2:
-        return str(e)
-
-    # otherwise let's have a stab at decoding the exception message. We'll circumvent
-    # Exception.__str__(), which would explode if someone raised Exception(u'non-ascii')
-    # and instead look at what is in the args member.
-
-    if len(e.args) == 0:
-        return ""
-    elif len(e.args) > 1:
-        return six.text_type(repr(e.args))
-
-    msg = e.args[0]
-    if isinstance(msg, bytes):
-        return msg.decode("utf-8", errors="replace")
-    else:
-        return msg
-
-
 def assert_valid_client_secret(client_secret):
     """Validate that a given string matches the client_secret regex defined by the spec"""
     if client_secret_regex.match(client_secret) is None: