Diffstat (limited to 'synapse')
-rw-r--r--  synapse/config/api.py                       |   1
-rw-r--r--  synapse/config/appservice.py                |   2
-rw-r--r--  synapse/config/captcha.py                   |   3
-rw-r--r--  synapse/config/cas.py                       |   1
-rw-r--r--  synapse/config/consent_config.py            |  28
-rw-r--r--  synapse/config/groups.py                    |   4
-rw-r--r--  synapse/config/jwt_config.py                |   8
-rw-r--r--  synapse/config/key.py                       |  11
-rw-r--r--  synapse/config/logger.py                    |   1
-rw-r--r--  synapse/config/metrics.py                   |   2
-rw-r--r--  synapse/config/password.py                  |   1
-rw-r--r--  synapse/config/password_auth_providers.py   |  28
-rw-r--r--  synapse/config/push.py                      |   4
-rw-r--r--  synapse/config/ratelimiting.py              |   7
-rw-r--r--  synapse/config/registration.py              |  36
-rw-r--r--  synapse/config/repository.py                |  83
-rw-r--r--  synapse/config/room_directory.py            |  20
-rw-r--r--  synapse/config/saml2_config.py              |  65
-rw-r--r--  synapse/config/server.py                    |  92
-rw-r--r--  synapse/config/server_notices_config.py     |  10
-rw-r--r--  synapse/config/spam_checker.py              |   8
-rw-r--r--  synapse/config/tls.py                       |  20
-rw-r--r--  synapse/config/user_directory.py            |   2
-rw-r--r--  synapse/config/voip.py                      |   7
-rw-r--r--  synapse/federation/transaction_queue.py     |  22
-rw-r--r--  synapse/http/__init__.py                    |  15
-rw-r--r--  synapse/http/client.py                      |  15
-rw-r--r--  synapse/http/matrixfederationclient.py      |  17
-rw-r--r--  synapse/metrics/__init__.py                 |   2
-rw-r--r--  synapse/push/httppusher.py                  |  16
-rw-r--r--  synapse/push/pusher.py                      |  11
-rw-r--r--  synapse/storage/_base.py                    | 146
32 files changed, 443 insertions(+), 245 deletions(-)
diff --git a/synapse/config/api.py b/synapse/config/api.py
index 9f25bbc5cb..e8a753f002 100644
--- a/synapse/config/api.py
+++ b/synapse/config/api.py
@@ -33,6 +33,7 @@ class ApiConfig(Config):
         ## API Configuration ##
 
         # A list of event types that will be included in the room_invite_state
+        #
         room_invite_state_types:
             - "{JoinRules}"
             - "{CanonicalAlias}"
diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py
index c21cb3dd87..c260d59464 100644
--- a/synapse/config/appservice.py
+++ b/synapse/config/appservice.py
@@ -38,10 +38,12 @@ class AppServiceConfig(Config):
     def default_config(cls, **kwargs):
         return """\
         # A list of application service config files to use
+        #
         app_service_config_files: []
 
         # Whether or not to track application service IP addresses. Implicitly
         # enables MAU tracking for application service users.
+        #
         track_appservice_user_ips: False
         """
 
diff --git a/synapse/config/captcha.py b/synapse/config/captcha.py
index 7ba0c2de6a..4064891ffb 100644
--- a/synapse/config/captcha.py
+++ b/synapse/config/captcha.py
@@ -30,14 +30,17 @@ class CaptchaConfig(Config):
         # See docs/CAPTCHA_SETUP for full details of configuring this.
 
         # This Home Server's ReCAPTCHA public key.
+        #
         recaptcha_public_key: "YOUR_PUBLIC_KEY"
 
         # This Home Server's ReCAPTCHA private key.
+        #
         recaptcha_private_key: "YOUR_PRIVATE_KEY"
 
         # Enables ReCaptcha checks when registering, preventing signup
         # unless a captcha is answered. Requires a valid ReCaptcha
         # public/private key.
+        #
         enable_registration_captcha: False
 
         # A secret key used to bypass the captcha test entirely.
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index 8109e5f95e..609c0815c8 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -38,6 +38,7 @@ class CasConfig(Config):
     def default_config(self, config_dir_path, server_name, **kwargs):
         return """
         # Enable CAS for registration and login.
+        #
         #cas_config:
         #   enabled: true
         #   server_url: "https://cas-server.com"
diff --git a/synapse/config/consent_config.py b/synapse/config/consent_config.py
index 9f2e85342f..abeb0180d3 100644
--- a/synapse/config/consent_config.py
+++ b/synapse/config/consent_config.py
@@ -54,20 +54,20 @@ DEFAULT_CONFIG = """\
 # for an account. Has no effect unless `require_at_registration` is enabled.
 # Defaults to "Privacy Policy".
 #
-# user_consent:
-#   template_dir: res/templates/privacy
-#   version: 1.0
-#   server_notice_content:
-#     msgtype: m.text
-#     body: >-
-#       To continue using this homeserver you must review and agree to the
-#       terms and conditions at %(consent_uri)s
-#   send_server_notice_to_guests: True
-#   block_events_error: >-
-#     To continue using this homeserver you must review and agree to the
-#     terms and conditions at %(consent_uri)s
-#   require_at_registration: False
-#   policy_name: Privacy Policy
+#user_consent:
+#  template_dir: res/templates/privacy
+#  version: 1.0
+#  server_notice_content:
+#    msgtype: m.text
+#    body: >-
+#      To continue using this homeserver you must review and agree to the
+#      terms and conditions at %(consent_uri)s
+#  send_server_notice_to_guests: True
+#  block_events_error: >-
+#    To continue using this homeserver you must review and agree to the
+#    terms and conditions at %(consent_uri)s
+#  require_at_registration: False
+#  policy_name: Privacy Policy
 #
 """
 
diff --git a/synapse/config/groups.py b/synapse/config/groups.py
index 997fa2881f..46933a904c 100644
--- a/synapse/config/groups.py
+++ b/synapse/config/groups.py
@@ -24,9 +24,11 @@ class GroupsConfig(Config):
     def default_config(self, **kwargs):
         return """\
         # Whether to allow non server admins to create groups on this server
+        #
         enable_group_creation: false
 
         # If enabled, non server admins can only create groups with local parts
         # starting with this prefix
-        # group_creation_prefix: "unofficial/"
+        #
+        #group_creation_prefix: "unofficial/"
         """
diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt_config.py
index 51e7f7e003..ecb4124096 100644
--- a/synapse/config/jwt_config.py
+++ b/synapse/config/jwt_config.py
@@ -46,8 +46,8 @@ class JWTConfig(Config):
         return """\
         # The JWT needs to contain a globally unique "sub" (subject) claim.
         #
-        # jwt_config:
-        #    enabled: true
-        #    secret: "a secret"
-        #    algorithm: "HS256"
+        #jwt_config:
+        #   enabled: true
+        #   secret: "a secret"
+        #   algorithm: "HS256"
         """
diff --git a/synapse/config/key.py b/synapse/config/key.py
index 499ffd4e06..35f05fa974 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -40,7 +40,7 @@ class KeyConfig(Config):
     def read_config(self, config):
         self.signing_key = self.read_signing_key(config["signing_key_path"])
         self.old_signing_keys = self.read_old_signing_keys(
-            config["old_signing_keys"]
+            config.get("old_signing_keys", {})
         )
         self.key_refresh_interval = self.parse_duration(
             config["key_refresh_interval"]
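
Note: the hunk above means read_config no longer requires an old_signing_keys entry, which matters because the generated config below now ships with that block commented out. A trivial standalone illustration of the difference (plain Python, not Synapse code):

    # Sketch only: a config dict loaded from a generated homeserver.yaml in
    # which the old_signing_keys block is still commented out.
    config = {"signing_key_path": "example.signing.key"}

    # config["old_signing_keys"] would raise KeyError here;
    # .get() falls back to an empty mapping instead.
    old_keys = config.get("old_signing_keys", {})
    print(old_keys)  # -> {}
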
@@ -83,24 +83,29 @@ class KeyConfig(Config):
         # a secret which is used to sign access tokens. If none is specified,
         # the registration_shared_secret is used, if one is given; otherwise,
         # a secret key is derived from the signing key.
+        #
         %(macaroon_secret_key)s
 
         # Used to enable access token expiration.
+        #
         expire_access_token: False
 
         # a secret which is used to calculate HMACs for form values, to stop
         # falsification of values. Must be specified for the User Consent
         # forms to work.
+        #
         %(form_secret)s
 
         ## Signing Keys ##
 
         # Path to the signing key to sign messages with
+        #
         signing_key_path: "%(base_key_name)s.signing.key"
 
         # The keys that the server used to sign messages with but won't use
         # to sign new messages. E.g. it has lost its private key
-        old_signing_keys: {}
+        #
+        #old_signing_keys:
         #  "ed25519:auto":
         #    # Base64 encoded public key
         #    key: "The public part of your old signing key."
@@ -111,9 +116,11 @@ class KeyConfig(Config):
         # Used to set the valid_until_ts in /key/v2 APIs.
         # Determines how quickly servers will query to check which keys
         # are still valid.
+        #
         key_refresh_interval: "1d" # 1 Day.
 
         # The trusted servers to download signing keys from.
+        #
         perspectives:
           servers:
             "matrix.org":
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 9b5994d55e..f6940b65fd 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -83,6 +83,7 @@ class LoggingConfig(Config):
         log_config = os.path.join(config_dir_path, server_name + ".log.config")
         return """
         # A yaml python logging config file
+        #
         log_config: "%(log_config)s"
         """ % locals()
 
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index f7833053ba..ed0498c634 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -47,6 +47,7 @@ class MetricsConfig(Config):
         ## Metrics ##
 
         # Enable collection and rendering of performance metrics
+        #
         enable_metrics: False
 
         # Enable sentry integration
@@ -55,6 +56,7 @@ class MetricsConfig(Config):
         # this option the sentry server may therefore receive sensitive
         # information, and it in turn may then disseminate sensitive information
         # through insecure notification channels if so configured.
+        #
         #sentry:
         #    dsn: "..."
 
diff --git a/synapse/config/password.py b/synapse/config/password.py
index a4bd171399..2a52b9db54 100644
--- a/synapse/config/password.py
+++ b/synapse/config/password.py
@@ -28,6 +28,7 @@ class PasswordConfig(Config):
     def default_config(self, config_dir_path, server_name, **kwargs):
         return """
         # Enable password for login.
+        #
         password_config:
            enabled: true
            # Uncomment and change to a secret random string for extra security.
diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py
index f4066abc28..f0a6be0679 100644
--- a/synapse/config/password_auth_providers.py
+++ b/synapse/config/password_auth_providers.py
@@ -52,18 +52,18 @@ class PasswordAuthProviderConfig(Config):
 
     def default_config(self, **kwargs):
         return """\
-        # password_providers:
-        #     - module: "ldap_auth_provider.LdapAuthProvider"
-        #       config:
-        #         enabled: true
-        #         uri: "ldap://ldap.example.com:389"
-        #         start_tls: true
-        #         base: "ou=users,dc=example,dc=com"
-        #         attributes:
-        #            uid: "cn"
-        #            mail: "email"
-        #            name: "givenName"
-        #         #bind_dn:
-        #         #bind_password:
-        #         #filter: "(objectClass=posixAccount)"
+        #password_providers:
+        #    - module: "ldap_auth_provider.LdapAuthProvider"
+        #      config:
+        #        enabled: true
+        #        uri: "ldap://ldap.example.com:389"
+        #        start_tls: true
+        #        base: "ou=users,dc=example,dc=com"
+        #        attributes:
+        #           uid: "cn"
+        #           mail: "email"
+        #           name: "givenName"
+        #        #bind_dn:
+        #        #bind_password:
+        #        #filter: "(objectClass=posixAccount)"
         """
diff --git a/synapse/config/push.py b/synapse/config/push.py
index b7e0d46afa..62c0060c9c 100644
--- a/synapse/config/push.py
+++ b/synapse/config/push.py
@@ -51,11 +51,11 @@ class PushConfig(Config):
         # notification request includes the content of the event (other details
         # like the sender are still included). For `event_id_only` push, it
         # has no effect.
-
+        #
         # For modern android devices the notification content will still appear
         # because it is loaded by the app. iPhone, however, will send a
         # notification saying only that a message arrived and who it came from.
         #
         #push:
-        #   include_content: true
+        #  include_content: true
         """
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 83b22dc199..54b71e6841 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -32,27 +32,34 @@ class RatelimitConfig(Config):
         ## Ratelimiting ##
 
         # Number of messages a client can send per second
+        #
         rc_messages_per_second: 0.2
 
         # Number of message a client can send before being throttled
+        #
         rc_message_burst_count: 10.0
 
         # The federation window size in milliseconds
+        #
         federation_rc_window_size: 1000
 
         # The number of federation requests from a single server in a window
         # before the server will delay processing the request.
+        #
         federation_rc_sleep_limit: 10
 
         # The duration in milliseconds to delay processing events from
         # remote servers by if they go over the sleep limit.
+        #
         federation_rc_sleep_delay: 500
 
         # The maximum number of concurrent federation requests allowed
         # from a single server
+        #
         federation_rc_reject_limit: 50
 
         # The number of federation requests to concurrently process from a
         # single server
+        #
         federation_rc_concurrent: 3
         """
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index d808a989f3..2881482f96 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -70,28 +70,29 @@ class RegistrationConfig(Config):
 
         # The user must provide all of the below types of 3PID when registering.
         #
-        # registrations_require_3pid:
-        #     - email
-        #     - msisdn
+        #registrations_require_3pid:
+        #  - email
+        #  - msisdn
 
         # Explicitly disable asking for MSISDNs from the registration
         # flow (overrides registrations_require_3pid if MSISDNs are set as required)
         #
-        # disable_msisdn_registration = True
+        #disable_msisdn_registration: True
 
         # Mandate that users are only allowed to associate certain formats of
         # 3PIDs with accounts on this server.
         #
-        # allowed_local_3pids:
-        #     - medium: email
-        #       pattern: '.*@matrix\\.org'
-        #     - medium: email
-        #       pattern: '.*@vector\\.im'
-        #     - medium: msisdn
-        #       pattern: '\\+44'
+        #allowed_local_3pids:
+        #  - medium: email
+        #    pattern: '.*@matrix\\.org'
+        #  - medium: email
+        #    pattern: '.*@vector\\.im'
+        #  - medium: msisdn
+        #    pattern: '\\+44'
 
         # If set, allows registration by anyone who also has the shared
         # secret, even if registration is otherwise disabled.
+        #
         %(registration_shared_secret)s
 
         # Set the number of bcrypt rounds used to generate password hash.
@@ -99,11 +100,13 @@ class RegistrationConfig(Config):
         # The default number is 12 (which equates to 2^12 rounds).
         # N.B. that increasing this will exponentially increase the time required
         # to register or login - e.g. 24 => 2^24 rounds which will take >20 mins.
+        #
         bcrypt_rounds: 12
 
         # Allows users to register as guests without a password/email/etc, and
         # participate in rooms hosted on this server which have been made
         # accessible to anonymous users.
+        #
         allow_guest_access: False
 
         # The identity server which we suggest that clients should use when users log
@@ -112,27 +115,30 @@ class RegistrationConfig(Config):
         # (By default, no suggestion is made, so it is left up to the client.
         # This setting is ignored unless public_baseurl is also set.)
         #
-        # default_identity_server: https://matrix.org
+        #default_identity_server: https://matrix.org
 
         # The list of identity servers trusted to verify third party
         # identifiers by this server.
         #
         # Also defines the ID server which will be called when an account is
         # deactivated (one will be picked arbitrarily).
+        #
         trusted_third_party_id_servers:
-            - matrix.org
-            - vector.im
+          - matrix.org
+          - vector.im
 
         # Users who register on this homeserver will automatically be joined
         # to these rooms
+        #
         #auto_join_rooms:
-        #    - "#example:example.com"
+        #  - "#example:example.com"
 
         # Where auto_join_rooms are specified, setting this flag ensures that the
         # rooms exist by creating them when the first user on the
         # homeserver registers.
         # Setting to false means that if the rooms are not manually created,
         # users cannot be auto-joined since they do not exist.
+        #
         autocreate_auto_join_rooms: true
         """ % locals()
 
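Note: the registration hunks above also correct the commented disable_msisdn_registration example (it previously used "=" instead of ":") and re-indent the allowed_local_3pids example. As a rough illustration of how medium/pattern pairs like those can be applied, not of Synapse's own enforcement, which lives outside this diff, here is a standalone sketch (assumes PyYAML is installed; the doubled backslashes in the diff are Python string escaping, the YAML itself contains single backslashes):

    import re
    import yaml

    # The allowed_local_3pids example above, uncommented.
    snippet = """
    allowed_local_3pids:
      - medium: email
        pattern: '.*@matrix\\.org'
      - medium: email
        pattern: '.*@vector\\.im'
      - medium: msisdn
        pattern: '\\+44'
    """
    rules = yaml.safe_load(snippet)["allowed_local_3pids"]

    def is_allowed(medium, address):
        # Accept the 3PID if any rule for its medium matches from the start.
        return any(
            rule["medium"] == medium and re.match(rule["pattern"], address)
            for rule in rules
        )

    print(is_allowed("email", "alice@matrix.org"))  # True
    print(is_allowed("msisdn", "+15551234567"))     # False (not a +44 number)
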
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 76e3340a91..97db2a5b7a 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -180,29 +180,34 @@ class ContentRepositoryConfig(Config):
         uploads_path = os.path.join(data_dir_path, "uploads")
         return r"""
         # Directory where uploaded images and attachments are stored.
+        #
         media_store_path: "%(media_store)s"
 
         # Media storage providers allow media to be stored in different
         # locations.
-        # media_storage_providers:
-        # - module: file_system
-        #   # Whether to write new local files.
-        #   store_local: false
-        #   # Whether to write new remote media
-        #   store_remote: false
-        #   # Whether to block upload requests waiting for write to this
-        #   # provider to complete
-        #   store_synchronous: false
-        #   config:
-        #     directory: /mnt/some/other/directory
+        #
+        #media_storage_providers:
+        #  - module: file_system
+        #    # Whether to write new local files.
+        #    store_local: false
+        #    # Whether to write new remote media
+        #    store_remote: false
+        #    # Whether to block upload requests waiting for write to this
+        #    # provider to complete
+        #    store_synchronous: false
+        #    config:
+        #       directory: /mnt/some/other/directory
 
         # Directory where in-progress uploads are stored.
+        #
         uploads_path: "%(uploads_path)s"
 
         # The largest allowed upload size in bytes
+        #
         max_upload_size: "10M"
 
         # Maximum number of pixels that will be thumbnailed
+        #
         max_image_pixels: "32M"
 
         # Whether to generate new thumbnails on the fly to precisely match
@@ -210,9 +215,11 @@ class ContentRepositoryConfig(Config):
         # a new resolution is requested by the client the server will
         # generate a new thumbnail. If false the server will pick a thumbnail
         # from a precalculated list.
+        #
         dynamic_thumbnails: false
 
-        # List of thumbnail to precalculate when an image is uploaded.
+        # List of thumbnails to precalculate when an image is uploaded.
+        #
         thumbnail_sizes:
         - width: 32
           height: 32
@@ -233,6 +240,7 @@ class ContentRepositoryConfig(Config):
         # Is the preview URL API enabled?  If enabled, you *must* specify
         # an explicit url_preview_ip_range_blacklist of IPs that the spider is
         # denied from accessing.
+        #
         url_preview_enabled: False
 
         # List of IP address CIDR ranges that the URL preview spider is denied
@@ -243,16 +251,16 @@ class ContentRepositoryConfig(Config):
         # synapse to issue arbitrary GET requests to your internal services,
         # causing serious security issues.
         #
-        # url_preview_ip_range_blacklist:
-        # - '127.0.0.0/8'
-        # - '10.0.0.0/8'
-        # - '172.16.0.0/12'
-        # - '192.168.0.0/16'
-        # - '100.64.0.0/10'
-        # - '169.254.0.0/16'
-        # - '::1/128'
-        # - 'fe80::/64'
-        # - 'fc00::/7'
+        #url_preview_ip_range_blacklist:
+        #  - '127.0.0.0/8'
+        #  - '10.0.0.0/8'
+        #  - '172.16.0.0/12'
+        #  - '192.168.0.0/16'
+        #  - '100.64.0.0/10'
+        #  - '169.254.0.0/16'
+        #  - '::1/128'
+        #  - 'fe80::/64'
+        #  - 'fc00::/7'
         #
         # List of IP address CIDR ranges that the URL preview spider is allowed
         # to access even if they are specified in url_preview_ip_range_blacklist.
@@ -260,8 +268,8 @@ class ContentRepositoryConfig(Config):
         # target IP ranges - e.g. for enabling URL previews for a specific private
         # website only visible in your network.
         #
-        # url_preview_ip_range_whitelist:
-        # - '192.168.1.1'
+        #url_preview_ip_range_whitelist:
+        #   - '192.168.1.1'
 
         # Optional list of URL matches that the URL preview spider is
         # denied from accessing.  You should use url_preview_ip_range_blacklist
@@ -279,26 +287,25 @@ class ContentRepositoryConfig(Config):
         # specified component matches for a given list item succeed, the URL is
         # blacklisted.
         #
-        # url_preview_url_blacklist:
-        # # blacklist any URL with a username in its URI
-        # - username: '*'
+        #url_preview_url_blacklist:
+        #  # blacklist any URL with a username in its URI
+        #  - username: '*'
         #
-        # # blacklist all *.google.com URLs
-        # - netloc: 'google.com'
-        # - netloc: '*.google.com'
+        #  # blacklist all *.google.com URLs
+        #  - netloc: 'google.com'
+        #  - netloc: '*.google.com'
         #
-        # # blacklist all plain HTTP URLs
-        # - scheme: 'http'
+        #  # blacklist all plain HTTP URLs
+        #  - scheme: 'http'
         #
-        # # blacklist http(s)://www.acme.com/foo
-        # - netloc: 'www.acme.com'
-        #   path: '/foo'
+        #  # blacklist http(s)://www.acme.com/foo
+        #  - netloc: 'www.acme.com'
+        #    path: '/foo'
         #
-        # # blacklist any URL with a literal IPv4 address
-        # - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$'
+        #  # blacklist any URL with a literal IPv4 address
+        #  - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$'
 
         # The largest allowed URL preview spidering size in bytes
         max_spider_size: "10M"
 
-
         """ % locals()
diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py
index c8e0abbae7..9b897abe3c 100644
--- a/synapse/config/room_directory.py
+++ b/synapse/config/room_directory.py
@@ -76,11 +76,11 @@ class RoomDirectoryConfig(Config):
         #
         # The default is:
         #
-        # alias_creation_rules:
-        #     - user_id: "*"
-        #       alias: "*"
-        #       room_id: "*"
-        #       action: allow
+        #alias_creation_rules:
+        #  - user_id: "*"
+        #    alias: "*"
+        #    room_id: "*"
+        #    action: allow
 
         # The `room_list_publication_rules` option controls who can publish and
         # which rooms can be published in the public room list.
@@ -105,11 +105,11 @@ class RoomDirectoryConfig(Config):
         #
         # The default is:
         #
-        # room_list_publication_rules:
-        #     - user_id: "*"
-        #       alias: "*"
-        #       room_id: "*"
-        #       action: allow
+        #room_list_publication_rules:
+        #  - user_id: "*"
+        #    alias: "*"
+        #    room_id: "*"
+        #    action: allow
         """
 
     def is_alias_creation_allowed(self, user_id, room_id, alias):
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 86ffe334f5..aff0a1f00c 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -67,44 +67,43 @@ class SAML2Config(Config):
         return """
         # Enable SAML2 for registration and login. Uses pysaml2.
         #
-        # saml2_config:
+        # `sp_config` is the configuration for the pysaml2 Service Provider.
+        # See pysaml2 docs for format of config.
         #
-        #   # The following is the configuration for the pysaml2 Service Provider.
-        #   # See pysaml2 docs for format of config.
-        #   #
-        #   # Default values will be used for the 'entityid' and 'service' settings,
-        #   # so it is not normally necessary to specify them unless you need to
-        #   # override them.
+        # Default values will be used for the 'entityid' and 'service' settings,
+        # so it is not normally necessary to specify them unless you need to
+        # override them.
         #
-        #   sp_config:
-        #     # point this to the IdP's metadata. You can use either a local file or
-        #     # (preferably) a URL.
-        #     metadata:
-        #       # local: ["saml2/idp.xml"]
-        #       remote:
-        #         - url: https://our_idp/metadata.xml
+        #saml2_config:
+        #  sp_config:
+        #    # point this to the IdP's metadata. You can use either a local file or
+        #    # (preferably) a URL.
+        #    metadata:
+        #      #local: ["saml2/idp.xml"]
+        #      remote:
+        #        - url: https://our_idp/metadata.xml
         #
-        #     # The following is just used to generate our metadata xml, and you
-        #     # may well not need it, depending on your setup. Alternatively you
-        #     # may need a whole lot more detail - see the pysaml2 docs!
+        #    # The rest of sp_config is just used to generate our metadata xml, and you
+        #    # may well not need it, depending on your setup. Alternatively you
+        #    # may need a whole lot more detail - see the pysaml2 docs!
         #
-        #     description: ["My awesome SP", "en"]
-        #     name: ["Test SP", "en"]
+        #    description: ["My awesome SP", "en"]
+        #    name: ["Test SP", "en"]
         #
-        #     organization:
-        #       name: Example com
-        #       display_name:
-        #         - ["Example co", "en"]
-        #       url: "http://example.com"
+        #    organization:
+        #      name: Example com
+        #      display_name:
+        #        - ["Example co", "en"]
+        #      url: "http://example.com"
         #
-        #     contact_person:
-        #       - given_name: Bob
-        #         sur_name: "the Sysadmin"
-        #         email_address": ["admin@example.com"]
-        #         contact_type": technical
+        #    contact_person:
+        #      - given_name: Bob
+        #        sur_name: "the Sysadmin"
+        #        email_address": ["admin@example.com"]
+        #        contact_type": technical
         #
-        #   # Instead of putting the config inline as above, you can specify a
-        #   # separate pysaml2 configuration file:
-        #   #
-        #   # config_path: "%(config_dir_path)s/sp_conf.py"
+        #  # Instead of putting the config inline as above, you can specify a
+        #  # separate pysaml2 configuration file:
+        #  #
+        #  config_path: "%(config_dir_path)s/sp_conf.py"
         """ % {"config_dir_path": config_dir_path}
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 93a30e4cfa..4200f10da3 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -286,19 +286,20 @@ class ServerConfig(Config):
         #
         # This setting requires the affinity package to be installed!
         #
-        # cpu_affinity: 0xFFFFFFFF
+        #cpu_affinity: 0xFFFFFFFF
 
         # The path to the web client which will be served at /_matrix/client/
         # if 'webclient' is configured under the 'listeners' configuration.
         #
-        # web_client_location: "/path/to/web/root"
+        #web_client_location: "/path/to/web/root"
 
         # The public-facing base URL that clients use to access this HS
         # (not including _matrix/...). This is the same URL a user would
         # enter into the 'custom HS URL' field on their client. If you
         # use synapse with a reverse proxy, this should be the URL to reach
         # synapse via the proxy.
-        # public_baseurl: https://example.com/
+        #
+        #public_baseurl: https://example.com/
 
         # Set the soft limit on the number of file descriptors synapse can use
         # Zero is used to indicate synapse should set the soft limit to the
@@ -309,15 +310,25 @@ class ServerConfig(Config):
         use_presence: true
 
         # The GC threshold parameters to pass to `gc.set_threshold`, if defined
-        # gc_thresholds: [700, 10, 10]
+        #
+        #gc_thresholds: [700, 10, 10]
 
         # Set the limit on the returned events in the timeline in the get
         # and sync operations. The default value is -1, meaning no upper limit.
-        # filter_timeline_limit: 5000
+        #
+        #filter_timeline_limit: 5000
 
         # Whether room invites to users on this server should be blocked
         # (except those sent by local server admins). The default is False.
-        # block_non_admin_invites: True
+        #
+        #block_non_admin_invites: True
+
+        # Room searching
+        #
+        # If disabled, new messages will not be indexed for searching and users
+        # will receive errors when searching for messages. Defaults to enabled.
+        #
+        #enable_search: false
 
         # Restrict federation to the following whitelist of domains.
         # N.B. we recommend also firewalling your federation listener to limit
@@ -325,7 +336,7 @@ class ServerConfig(Config):
         # purely on this application-layer restriction.  If not specified, the
         # default is to whitelist everything.
         #
-        # federation_domain_whitelist:
+        #federation_domain_whitelist:
         #  - lon.example.com
         #  - nyc.example.com
         #  - syd.example.com
@@ -397,11 +408,11 @@ class ServerConfig(Config):
           # will also need to give Synapse a TLS key and certificate: see the TLS section
           # below.)
           #
-          # - port: %(bind_port)s
-          #   type: http
-          #   tls: true
-          #   resources:
-          #     - names: [client, federation]
+          #- port: %(bind_port)s
+          #  type: http
+          #  tls: true
+          #  resources:
+          #    - names: [client, federation]
 
           # Unsecure HTTP listener: for when matrix traffic passes through a reverse proxy
           # that unwraps TLS.
@@ -421,52 +432,49 @@ class ServerConfig(Config):
 
             # example additional_resources:
             #
-            # additional_resources:
-            #   "/_matrix/my/custom/endpoint":
-            #     module: my_module.CustomRequestHandler
-            #     config: {}
+            #additional_resources:
+            #  "/_matrix/my/custom/endpoint":
+            #    module: my_module.CustomRequestHandler
+            #    config: {}
 
           # Turn on the twisted ssh manhole service on localhost on the given
           # port.
-          # - port: 9000
-          #   bind_addresses: ['::1', '127.0.0.1']
-          #   type: manhole
+          #
+          #- port: 9000
+          #  bind_addresses: ['::1', '127.0.0.1']
+          #  type: manhole
+
+
+        ## Homeserver blocking ##
 
-        # Homeserver blocking
-        #
         # How to reach the server admin, used in ResourceLimitError
-        # admin_contact: 'mailto:admin@server.com'
-        #
-        # Global block config
         #
-        # hs_disabled: False
-        # hs_disabled_message: 'Human readable reason for why the HS is blocked'
-        # hs_disabled_limit_type: 'error code(str), to help clients decode reason'
+        #admin_contact: 'mailto:admin@server.com'
+
+        # Global blocking
         #
+        #hs_disabled: False
+        #hs_disabled_message: 'Human readable reason for why the HS is blocked'
+        #hs_disabled_limit_type: 'error code(str), to help clients decode reason'
+
         # Monthly Active User Blocking
         #
-        # Enables monthly active user checking
-        # limit_usage_by_mau: False
-        # max_mau_value: 50
-        # mau_trial_days: 2
-        #
+        #limit_usage_by_mau: False
+        #max_mau_value: 50
+        #mau_trial_days: 2
+
         # If enabled, the metrics for the number of monthly active users will
         # be populated, however no one will be limited. If limit_usage_by_mau
         # is true, this is implied to be true.
-        # mau_stats_only: False
         #
+        #mau_stats_only: False
+
         # Sometimes the server admin will want to ensure certain accounts are
         # never blocked by mau checking. These accounts are specified here.
         #
-        # mau_limit_reserved_threepids:
-        # - medium: 'email'
-        #   address: 'reserved_user@example.com'
-        #
-        # Room searching
-        #
-        # If disabled, new messages will not be indexed for searching and users
-        # will receive errors when searching for messages. Defaults to enabled.
-        # enable_search: true
+        #mau_limit_reserved_threepids:
+        #  - medium: 'email'
+        #    address: 'reserved_user@example.com'
         """ % locals()
 
     def read_arguments(self, args):
diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices_config.py
index 3c39850ac6..529dc0a617 100644
--- a/synapse/config/server_notices_config.py
+++ b/synapse/config/server_notices_config.py
@@ -30,11 +30,11 @@ DEFAULT_CONFIG = """\
 # It's also possible to override the room name, the display name of the
 # "notices" user, and the avatar for the user.
 #
-# server_notices:
-#   system_mxid_localpart: notices
-#   system_mxid_display_name: "Server Notices"
-#   system_mxid_avatar_url: "mxc://server.com/oumMVlgDnLYFaPVkExemNVVZ"
-#   room_name: "Server Notices"
+#server_notices:
+#  system_mxid_localpart: notices
+#  system_mxid_display_name: "Server Notices"
+#  system_mxid_avatar_url: "mxc://server.com/oumMVlgDnLYFaPVkExemNVVZ"
+#  room_name: "Server Notices"
 """
 
 
diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py
index 3fec42bdb0..1502e9faba 100644
--- a/synapse/config/spam_checker.py
+++ b/synapse/config/spam_checker.py
@@ -28,8 +28,8 @@ class SpamCheckerConfig(Config):
 
     def default_config(self, **kwargs):
         return """\
-        # spam_checker:
-        #     module: "my_custom_project.SuperSpamChecker"
-        #     config:
-        #         example_option: 'things'
+        #spam_checker:
+        #  module: "my_custom_project.SuperSpamChecker"
+        #  config:
+        #    example_option: 'things'
         """
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index 38425bb056..8d5d287357 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -177,10 +177,11 @@ class TlsConfig(Config):
         # See 'ACME support' below to enable auto-provisioning this certificate via
         # Let's Encrypt.
         #
-        # tls_certificate_path: "%(tls_certificate_path)s"
+        #tls_certificate_path: "%(tls_certificate_path)s"
 
         # PEM-encoded private key for TLS
-        # tls_private_key_path: "%(tls_private_key_path)s"
+        #
+        #tls_private_key_path: "%(tls_private_key_path)s"
 
         # ACME support: This will configure Synapse to request a valid TLS certificate
         # for your configured `server_name` via Let's Encrypt.
@@ -207,28 +208,28 @@ class TlsConfig(Config):
             # ACME support is disabled by default. Uncomment the following line
             # (and tls_certificate_path and tls_private_key_path above) to enable it.
             #
-            # enabled: true
+            #enabled: true
 
             # Endpoint to use to request certificates. If you only want to test,
             # use Let's Encrypt's staging url:
             #     https://acme-staging.api.letsencrypt.org/directory
             #
-            # url: https://acme-v01.api.letsencrypt.org/directory
+            #url: https://acme-v01.api.letsencrypt.org/directory
 
             # Port number to listen on for the HTTP-01 challenge. Change this if
             # you are forwarding connections through Apache/Nginx/etc.
             #
-            # port: 80
+            #port: 80
 
             # Local addresses to listen on for incoming connections.
             # Again, you may want to change this if you are forwarding connections
             # through Apache/Nginx/etc.
             #
-            # bind_addresses: ['::', '0.0.0.0']
+            #bind_addresses: ['::', '0.0.0.0']
 
             # How many days remaining on a certificate before it is renewed.
             #
-            # reprovision_threshold: 30
+            #reprovision_threshold: 30
 
             # The domain that the certificate should be for. Normally this
             # should be the same as your Matrix domain (i.e., 'server_name'), but,
@@ -242,7 +243,7 @@ class TlsConfig(Config):
             #
             # If not set, defaults to your 'server_name'.
             #
-            # domain: matrix.example.com
+            #domain: matrix.example.com
 
         # List of allowed TLS fingerprints for this server to publish along
         # with the signing keys for this server. Other matrix servers that
@@ -269,8 +270,7 @@ class TlsConfig(Config):
         #   openssl x509 -outform DER | openssl sha256 -binary | base64 | tr -d '='
         # or by checking matrix.org/federationtester/api/report?server_name=$host
         #
-        tls_fingerprints: []
-        # tls_fingerprints: [{"sha256": "<base64_encoded_sha256_fingerprint>"}]
+        #tls_fingerprints: [{"sha256": "<base64_encoded_sha256_fingerprint>"}]
 
         """
             % locals()
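
Note: the comment above tls_fingerprints describes computing a fingerprint with "openssl x509 -outform DER | openssl sha256 -binary | base64 | tr -d '='". A minimal sketch of the same computation in Python, with a placeholder standing in for the DER-encoded certificate bytes:

    import base64
    import hashlib

    # Placeholder: in practice these are the DER bytes of your certificate,
    # e.g. the output of "openssl x509 -outform DER < cert.pem".
    der_bytes = b"<DER-encoded certificate>"

    digest = hashlib.sha256(der_bytes).digest()
    fingerprint = base64.b64encode(digest).decode("ascii").rstrip("=")

    # Shape matches the commented tls_fingerprints entry above.
    print({"sha256": fingerprint})
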
diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py
index 38e8947843..fab3a7d1c8 100644
--- a/synapse/config/user_directory.py
+++ b/synapse/config/user_directory.py
@@ -40,5 +40,5 @@ class UserDirectoryConfig(Config):
         # on your database to tell it to rebuild the user_directory search indexes.
         #
         #user_directory:
-        #   search_all_users: false
+        #  search_all_users: false
         """
diff --git a/synapse/config/voip.py b/synapse/config/voip.py
index d07bd24ffd..257f7c86e7 100644
--- a/synapse/config/voip.py
+++ b/synapse/config/voip.py
@@ -27,20 +27,24 @@ class VoipConfig(Config):
 
     def default_config(self, **kwargs):
         return """\
-        ## Turn ##
+        ## TURN ##
 
         # The public URIs of the TURN server to give to clients
+        #
         #turn_uris: []
 
         # The shared secret used to compute passwords for the TURN server
+        #
         #turn_shared_secret: "YOUR_SHARED_SECRET"
 
         # The Username and password if the TURN server needs them and
         # does not use a token
+        #
         #turn_username: "TURNSERVER_USERNAME"
         #turn_password: "TURNSERVER_PASSWORD"
 
         # How long generated TURN credentials last
+        #
         turn_user_lifetime: "1h"
 
         # Whether guests should be allowed to use the TURN server.
@@ -48,5 +52,6 @@ class VoipConfig(Config):
         # However, it does introduce a slight security risk as it allows users to
         # connect to arbitrary endpoints without having first signed up for a
         # valid account (e.g. by passing a CAPTCHA).
+        #
         turn_allow_guests: True
         """
diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py
index 1f0b67f5f8..30941f5ad6 100644
--- a/synapse/federation/transaction_queue.py
+++ b/synapse/federation/transaction_queue.py
@@ -33,7 +33,6 @@ from synapse.metrics import (
     event_processing_loop_counter,
     event_processing_loop_room_count,
     events_processed_counter,
-    sent_edus_counter,
     sent_transactions_counter,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
@@ -47,10 +46,24 @@ from .units import Edu, Transaction
 logger = logging.getLogger(__name__)
 
 sent_pdus_destination_dist_count = Counter(
-    "synapse_federation_client_sent_pdu_destinations:count", ""
+    "synapse_federation_client_sent_pdu_destinations:count",
+    "Number of PDUs queued for sending to one or more destinations",
 )
+
 sent_pdus_destination_dist_total = Counter(
-    "synapse_federation_client_sent_pdu_destinations:total", ""
+    "synapse_federation_client_sent_pdu_destinations:total",
+    "Total number of PDUs queued for sending across all destinations",
+)
+
+sent_edus_counter = Counter(
+    "synapse_federation_client_sent_edus",
+    "Total number of EDUs successfully sent",
+)
+
+sent_edus_by_type = Counter(
+    "synapse_federation_client_sent_edus_by_type",
+    "Number of sent EDUs successfully sent, by event type",
+    ["type"],
 )
 
 
@@ -360,8 +373,6 @@ class TransactionQueue(object):
             logger.info("Not sending EDU to ourselves")
             return
 
-        sent_edus_counter.inc()
-
         if key:
             self.pending_edus_keyed_by_dest.setdefault(
                 destination, {}
@@ -496,6 +507,9 @@ class TransactionQueue(object):
                 )
                 if success:
                     sent_transactions_counter.inc()
+                    sent_edus_counter.inc(len(pending_edus))
+                    for edu in pending_edus:
+                        sent_edus_by_type.labels(edu.edu_type).inc()
                     # Remove the acknowledged device messages from the database
                     # Only bother if we actually sent some device messages
                     if device_message_edus:
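
Note: sent_edus_by_type is a labelled prometheus_client Counter, so each EDU type gets its own time series, while the unlabelled sent_edus_counter keeps the overall total; that is why both are incremented together once a transaction succeeds. A minimal standalone sketch of the pattern (the metric names here are invented for the example):

    from prometheus_client import Counter

    edus_total = Counter("example_sent_edus", "Total number of EDUs sent")
    edus_by_type = Counter(
        "example_sent_edus_by_type", "EDUs sent, by type", ["type"]
    )

    pending_edus = ["m.typing", "m.receipt", "m.typing"]

    # Mirrors the bookkeeping done after a successful transaction above.
    edus_total.inc(len(pending_edus))
    for edu_type in pending_edus:
        edus_by_type.labels(edu_type).inc()
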
diff --git a/synapse/http/__init__.py b/synapse/http/__init__.py
index a3f9e4f67c..d36bcd6336 100644
--- a/synapse/http/__init__.py
+++ b/synapse/http/__init__.py
@@ -15,8 +15,10 @@
 # limitations under the License.
 import re
 
+from twisted.internet import task
 from twisted.internet.defer import CancelledError
 from twisted.python import failure
+from twisted.web.client import FileBodyProducer
 
 from synapse.api.errors import SynapseError
 
@@ -47,3 +49,16 @@ def redact_uri(uri):
         r'\1<redacted>\3',
         uri
     )
+
+
+class QuieterFileBodyProducer(FileBodyProducer):
+    """Wrapper for FileBodyProducer that avoids CRITICAL errors when the connection drops.
+
+    Workaround for https://github.com/matrix-org/synapse/issues/4003 /
+    https://twistedmatrix.com/trac/ticket/6528
+    """
+    def stopProducing(self):
+        try:
+            FileBodyProducer.stopProducing(self)
+        except task.TaskStopped:
+            pass
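
Note: QuieterFileBodyProducer is a drop-in IBodyProducer, so it slots in anywhere a stock FileBodyProducer would; http/client.py below wraps request bodies in it before handing them to treq. A minimal sketch of the same wrapping using Twisted's Agent directly, where the URL and payload are placeholders:

    from io import BytesIO

    from twisted.internet import reactor
    from twisted.web.client import Agent
    from twisted.web.http_headers import Headers

    from synapse.http import QuieterFileBodyProducer

    payload = b'{"hello": "world"}'  # example body only
    agent = Agent(reactor)

    d = agent.request(
        b"POST",
        b"http://localhost:8008/example",  # placeholder URL
        Headers({b"Content-Type": [b"application/json"]}),
        # Streams the in-memory payload; the subclass differs from a plain
        # FileBodyProducer only in swallowing TaskStopped when the
        # connection drops mid-send.
        QuieterFileBodyProducer(BytesIO(payload)),
    )
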
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 47a1f82ff0..ad454f4964 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import logging
+from io import BytesIO
 
 from six import text_type
 from six.moves import urllib
@@ -39,7 +40,11 @@ from twisted.web.http import PotentialDataLoss
 from twisted.web.http_headers import Headers
 
 from synapse.api.errors import Codes, HttpResponseException, SynapseError
-from synapse.http import cancelled_to_request_timed_out_error, redact_uri
+from synapse.http import (
+    QuieterFileBodyProducer,
+    cancelled_to_request_timed_out_error,
+    redact_uri,
+)
 from synapse.util.async_helpers import timeout_deferred
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.logcontext import make_deferred_yieldable
@@ -246,7 +251,7 @@ class SimpleHttpClient(object):
             )
 
     @defer.inlineCallbacks
-    def request(self, method, uri, data=b'', headers=None):
+    def request(self, method, uri, data=None, headers=None):
         """
         Args:
             method (str): HTTP method to use.
@@ -265,11 +270,15 @@ class SimpleHttpClient(object):
         logger.info("Sending request %s %s", method, redact_uri(uri))
 
         try:
+            body_producer = None
+            if data is not None:
+                body_producer = QuieterFileBodyProducer(BytesIO(data))
+
             request_deferred = treq.request(
                 method,
                 uri,
                 agent=self.agent,
-                data=data,
+                data=body_producer,
                 headers=headers,
                 **self._extra_treq_args
             )
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 3c24bf3805..1682c9af13 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -28,11 +28,10 @@ from canonicaljson import encode_canonical_json
 from prometheus_client import Counter
 from signedjson.sign import sign_json
 
-from twisted.internet import defer, protocol, task
+from twisted.internet import defer, protocol
 from twisted.internet.error import DNSLookupError
 from twisted.internet.task import _EPSILON, Cooperator
 from twisted.web._newclient import ResponseDone
-from twisted.web.client import FileBodyProducer
 from twisted.web.http_headers import Headers
 
 import synapse.metrics
@@ -44,6 +43,7 @@ from synapse.api.errors import (
     RequestSendFailed,
     SynapseError,
 )
+from synapse.http import QuieterFileBodyProducer
 from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
 from synapse.util.async_helpers import timeout_deferred
 from synapse.util.logcontext import make_deferred_yieldable
@@ -839,16 +839,3 @@ def encode_query_args(args):
     query_bytes = urllib.parse.urlencode(encoded_args, True)
 
     return query_bytes.encode('utf8')
-
-
-class QuieterFileBodyProducer(FileBodyProducer):
-    """Wrapper for FileBodyProducer that avoids CRITICAL errors when the connection drops.
-
-    Workaround for https://github.com/matrix-org/synapse/issues/4003 /
-    https://twistedmatrix.com/trac/ticket/6528
-    """
-    def stopProducing(self):
-        try:
-            FileBodyProducer.stopProducing(self)
-        except task.TaskStopped:
-            pass
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index 59900aa5d1..ef48984fdd 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -274,8 +274,6 @@ pending_calls_metric = Histogram(
 # Federation Metrics
 #
 
-sent_edus_counter = Counter("synapse_federation_client_sent_edus", "")
-
 sent_transactions_counter = Counter("synapse_federation_client_sent_transactions", "")
 
 events_processed_counter = Counter("synapse_federation_client_events_processed", "")
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index 87fa7f006a..98d8d9560b 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -333,10 +333,10 @@ class HttpPusher(object):
             defer.returnValue([])
         try:
             resp = yield self.http_client.post_json_get_json(self.url, notification_dict)
-        except Exception:
-            logger.warn(
-                "Failed to push event %s to %s",
-                event.event_id, self.name, exc_info=True,
+        except Exception as e:
+            logger.warning(
+                "Failed to push event %s to %s: %s %s",
+                event.event_id, self.name, type(e), e,
             )
             defer.returnValue(False)
         rejected = []
@@ -367,10 +367,10 @@ class HttpPusher(object):
         }
         try:
             resp = yield self.http_client.post_json_get_json(self.url, d)
-        except Exception:
-            logger.warn(
-                "Failed to send badge count to %s",
-                self.name, exc_info=True,
+        except Exception as e:
+            logger.warning(
+                "Failed to send badge count to %s: %s %s",
+                self.name, type(e), e,
             )
             defer.returnValue(False)
         rejected = []
diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py
index fcee6d9d7e..368d5094be 100644
--- a/synapse/push/pusher.py
+++ b/synapse/push/pusher.py
@@ -52,11 +52,12 @@ class PusherFactory(object):
             logger.info("defined email pusher type")
 
     def create_pusher(self, pusherdict):
-        logger.info("trying to create_pusher for %r", pusherdict)
-
-        if pusherdict['kind'] in self.pusher_types:
-            logger.info("found pusher")
-            return self.pusher_types[pusherdict['kind']](self.hs, pusherdict)
+        kind = pusherdict['kind']
+        f = self.pusher_types.get(kind, None)
+        if not f:
+            return None
+        logger.info("creating %s pusher for %r", kind, pusherdict)
+        return f(self.hs, pusherdict)
 
     def _create_email_pusher(self, _hs, pusherdict):
         app_name = self._app_name_from_pusherdict(pusherdict)
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index f1a5366b95..3d895da43c 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -106,6 +106,14 @@ class LoggingTransaction(object):
     def __iter__(self):
         return self.txn.__iter__()
 
+    def execute_batch(self, sql, args):
+        if isinstance(self.database_engine, PostgresEngine):
+            from psycopg2.extras import execute_batch
+            self._do_execute(lambda *x: execute_batch(self.txn, *x), sql, args)
+        else:
+            for val in args:
+                self.execute(sql, val)
+
     def execute(self, sql, *args):
         self._do_execute(self.txn.execute, sql, *args)
 
@@ -699,20 +707,34 @@ class SQLBaseStore(object):
             else:
                 return "%s = ?" % (key,)
 
-        # First try to update.
-        sql = "UPDATE %s SET %s WHERE %s" % (
-            table,
-            ", ".join("%s = ?" % (k,) for k in values),
-            " AND ".join(_getwhere(k) for k in keyvalues)
-        )
-        sqlargs = list(values.values()) + list(keyvalues.values())
+        if not values:
+            # If `values` is empty, then all of the values we care about are in
+            # the unique key, so there is nothing to UPDATE. We can just do a
+            # SELECT instead to see if it exists.
+            sql = "SELECT 1 FROM %s WHERE %s" % (
+                table,
+                " AND ".join(_getwhere(k) for k in keyvalues)
+            )
+            sqlargs = list(keyvalues.values())
+            txn.execute(sql, sqlargs)
+            if txn.fetchall():
+                # We have an existing record.
+                return False
+        else:
+            # First try to update.
+            sql = "UPDATE %s SET %s WHERE %s" % (
+                table,
+                ", ".join("%s = ?" % (k,) for k in values),
+                " AND ".join(_getwhere(k) for k in keyvalues)
+            )
+            sqlargs = list(values.values()) + list(keyvalues.values())
 
-        txn.execute(sql, sqlargs)
-        if txn.rowcount > 0:
-            # successfully updated at least one row.
-            return False
+            txn.execute(sql, sqlargs)
+            if txn.rowcount > 0:
+                # successfully updated at least one row.
+                return False
 
-        # We didn't update any rows so insert a new one
+        # We didn't find any existing rows, so insert a new one
         allvalues = {}
         allvalues.update(keyvalues)
         allvalues.update(values)
@@ -759,6 +781,106 @@ class SQLBaseStore(object):
         )
         txn.execute(sql, list(allvalues.values()))
 
+    def _simple_upsert_many_txn(
+        self, txn, table, key_names, key_values, value_names, value_values
+    ):
+        """
+        Upsert, many times.
+
+        Args:
+            table (str): The table to upsert into
+            key_names (list[str]): The key column names.
+            key_values (list[list]): A list of each row's key column values.
+            value_names (list[str]): The value column names. If empty, no
+                values will be used, even if value_values is provided.
+            value_values (list[list]): A list of each row's value column values.
+        Returns:
+            None
+        """
+        if (
+            self.database_engine.can_native_upsert
+            and table not in self._unsafe_to_upsert_tables
+        ):
+            return self._simple_upsert_many_txn_native_upsert(
+                txn, table, key_names, key_values, value_names, value_values
+            )
+        else:
+            return self._simple_upsert_many_txn_emulated(
+                txn, table, key_names, key_values, value_names, value_values
+            )
+
+    def _simple_upsert_many_txn_emulated(
+        self, txn, table, key_names, key_values, value_names, value_values
+    ):
+        """
+        Upsert, many times, but without native UPSERT support or batching.
+
+        Args:
+            table (str): The table to upsert into
+            key_names (list[str]): The key column names.
+            key_values (list[list]): A list of each row's key column values.
+            value_names (list[str]): The value column names. If empty, no
+                values will be used, even if value_values is provided.
+            value_values (list[list]): A list of each row's value column values.
+        Returns:
+            None
+        """
+        # No value columns, therefore make a blank list so that the following
+        # zip() works correctly.
+        if not value_names:
+            value_values = [() for x in range(len(key_values))]
+
+        for keyv, valv in zip(key_values, value_values):
+            _keys = {x: y for x, y in zip(key_names, keyv)}
+            _vals = {x: y for x, y in zip(value_names, valv)}
+
+            self._simple_upsert_txn_emulated(txn, table, _keys, _vals)
+
+    def _simple_upsert_many_txn_native_upsert(
+        self, txn, table, key_names, key_values, value_names, value_values
+    ):
+        """
+        Upsert, many times, using batching where possible.
+
+        Args:
+            table (str): The table to upsert into
+            key_names (list[str]): The key column names.
+            key_values (list[list]): A list of each row's key column values.
+            value_names (list[str]): The value column names. If empty, no
+                values will be used, even if value_values is provided.
+            value_values (list[list]): A list of each row's value column values.
+        Returns:
+            None
+        """
+        allnames = []
+        allnames.extend(key_names)
+        allnames.extend(value_names)
+
+        if not value_names:
+            # No value columns, therefore make a blank list so that the
+            # following zip() works correctly.
+            latter = "NOTHING"
+            value_values = [() for x in range(len(key_values))]
+        else:
+            latter = (
+                "UPDATE SET " + ", ".join(k + "=EXCLUDED." + k for k in value_names)
+            )
+
+        sql = "INSERT INTO %s (%s) VALUES (%s) ON CONFLICT (%s) DO %s" % (
+            table,
+            ", ".join(k for k in allnames),
+            ", ".join("?" for _ in allnames),
+            ", ".join(key_names),
+            latter,
+        )
+
+        args = []
+
+        for x, y in zip(key_values, value_values):
+            args.append(tuple(x) + tuple(y))
+
+        return txn.execute_batch(sql, args)
+
     def _simple_select_one(self, table, keyvalues, retcols,
                            allow_none=False, desc="_simple_select_one"):
         """Executes a SELECT query on the named table, which is expected to