author | Matthew Hodgson <matthew@arasphere.net> | 2016-05-16 13:13:26 +0100 |
---|---|---|
committer | Matthew Hodgson <matthew@arasphere.net> | 2016-05-16 13:13:26 +0100 |
commit | 2d98c960ecfe50faae2eaedff45eebe8ba54cf6e (patch) | |
tree | 7f7cff56157068180daef95e1994af8d22f107d7 /synapse/rest | |
parent | fix logo (diff) | |
parent | Clean up the blacklist/whitelist handling. (diff) | |
download | synapse-2d98c960ecfe50faae2eaedff45eebe8ba54cf6e.tar.xz |
Merge pull request #760 from matrix-org/matthew/preview_url_ip_whitelist
add a url_preview_ip_range_whitelist config param
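The new option complements the existing `url_preview_ip_range_blacklist`, which stops the URL preview spider from fetching URLs that resolve into internal IP ranges; the whitelist carves exceptions back out of that blacklist. Below is a minimal sketch of that allow/deny decision, assuming the netaddr library and made-up ranges — an illustration of the intended semantics, not Synapse's actual code path.

```python
from netaddr import IPAddress, IPSet

# Hypothetical ranges for illustration; in Synapse these come from the
# url_preview_ip_range_blacklist / url_preview_ip_range_whitelist config keys.
ip_range_blacklist = IPSet(["10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16", "127.0.0.0/8"])
ip_range_whitelist = IPSet(["192.168.1.1"])


def spider_may_fetch(ip):
    """Return True if the preview spider may fetch from this resolved IP:
    blacklisted ranges are refused unless the address is also whitelisted."""
    addr = IPAddress(ip)
    return addr not in ip_range_blacklist or addr in ip_range_whitelist


print(spider_may_fetch("192.168.1.1"))    # True  - whitelisted despite the RFC 1918 blacklist
print(spider_may_fetch("192.168.1.2"))    # False - blacklisted and not whitelisted
print(spider_may_fetch("93.184.216.34"))  # True  - not in any blacklisted range
```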
Diffstat (limited to 'synapse/rest')
-rw-r--r-- | synapse/rest/media/v1/preview_url_resource.py | 63 |
1 file changed, 30 insertions, 33 deletions
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index dc1e5fbdb3..37dd1de899 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -56,8 +56,7 @@ class PreviewUrlResource(Resource):
         self.client = SpiderHttpClient(hs)
         self.media_repo = media_repo

-        if hasattr(hs.config, "url_preview_url_blacklist"):
-            self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist
+        self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist

         # simple memory cache mapping urls to OG metadata
         self.cache = ExpiringCache(
@@ -86,39 +85,37 @@ class PreviewUrlResource(Resource):
         else:
             ts = self.clock.time_msec()

-        # impose the URL pattern blacklist
-        if hasattr(self, "url_preview_url_blacklist"):
-            url_tuple = urlparse.urlsplit(url)
-            for entry in self.url_preview_url_blacklist:
-                match = True
-                for attrib in entry:
-                    pattern = entry[attrib]
-                    value = getattr(url_tuple, attrib)
-                    logger.debug((
-                        "Matching attrib '%s' with value '%s' against"
-                        " pattern '%s'"
-                    ) % (attrib, value, pattern))
-
-                    if value is None:
+        url_tuple = urlparse.urlsplit(url)
+        for entry in self.url_preview_url_blacklist:
+            match = True
+            for attrib in entry:
+                pattern = entry[attrib]
+                value = getattr(url_tuple, attrib)
+                logger.debug((
+                    "Matching attrib '%s' with value '%s' against"
+                    " pattern '%s'"
+                ) % (attrib, value, pattern))
+
+                if value is None:
+                    match = False
+                    continue
+
+                if pattern.startswith('^'):
+                    if not re.match(pattern, getattr(url_tuple, attrib)):
                         match = False
                         continue
-
-                    if pattern.startswith('^'):
-                        if not re.match(pattern, getattr(url_tuple, attrib)):
-                            match = False
-                            continue
-                    else:
-                        if not fnmatch.fnmatch(getattr(url_tuple, attrib), pattern):
-                            match = False
-                            continue
-                if match:
-                    logger.warn(
-                        "URL %s blocked by url_blacklist entry %s", url, entry
-                    )
-                    raise SynapseError(
-                        403, "URL blocked by url pattern blacklist entry",
-                        Codes.UNKNOWN
-                    )
+                else:
+                    if not fnmatch.fnmatch(getattr(url_tuple, attrib), pattern):
+                        match = False
+                        continue
+            if match:
+                logger.warn(
+                    "URL %s blocked by url_blacklist entry %s", url, entry
+                )
+                raise SynapseError(
+                    403, "URL blocked by url pattern blacklist entry",
+                    Codes.UNKNOWN
+                )

         # first check the memory cache - good to handle all the clients on this
         # HS thundering away to preview the same URL at the same time.
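With the hasattr() guards removed, the resource now assumes the config object always exposes url_preview_url_blacklist (the companion config change presumably defaults it to an empty list), so the matching loop runs unconditionally at one less level of indentation. The matching rule itself is unchanged: each blacklist entry maps urlsplit attribute names ("scheme", "netloc", "path", ...) to patterns, a pattern starting with '^' is treated as a regular expression, anything else as a shell-style glob, and an entry blocks the URL only if every attribute it names matches. A minimal standalone sketch of that logic follows (Python 3 and a hypothetical helper name, not the handler's actual interface):

```python
import fnmatch
import re
from urllib.parse import urlsplit  # the diff uses the Python 2 urlparse module


def url_is_blacklisted(url, url_preview_url_blacklist):
    """Return True if `url` matches any blacklist entry.

    Each entry is a dict mapping urlsplit attribute names to patterns;
    every attribute named in an entry must match for the entry to block the URL.
    """
    url_tuple = urlsplit(url)
    for entry in url_preview_url_blacklist:
        match = True
        for attrib, pattern in entry.items():
            value = getattr(url_tuple, attrib)
            if value is None:
                # e.g. username or port are None when absent from the URL
                match = False
                continue
            if pattern.startswith('^'):
                if not re.match(pattern, value):
                    match = False
                    continue
            else:
                if not fnmatch.fnmatch(value, pattern):
                    match = False
                    continue
        if match:
            return True
    return False


# Block previews of anything served from *.example.com
blacklist = [{"netloc": "*.example.com"}]
print(url_is_blacklisted("http://media.example.com/cat.gif", blacklist))  # True
print(url_is_blacklisted("https://matrix.org/blog/", blacklist))          # False
```

As in the diff, a failed attribute sets match = False and keeps iterating over the entry's remaining attributes rather than breaking out early; the result is the same, it just does a little extra work.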