From 24d21887ed2311b67ccd9013387c450acd019628 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Tue, 21 Apr 2015 14:14:19 +0100 Subject: SYN-350: Don't ratelimit the individual events generated during room creation --- synapse/handlers/room.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 823affc380..f9fc4a9c98 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -124,7 +124,7 @@ class RoomCreationHandler(BaseHandler): msg_handler = self.hs.get_handlers().message_handler for event in creation_events: - yield msg_handler.create_and_send_event(event) + yield msg_handler.create_and_send_event(event, ratelimit=False) if "name" in config: name = config["name"] @@ -134,7 +134,7 @@ class RoomCreationHandler(BaseHandler): "sender": user_id, "state_key": "", "content": {"name": name}, - }) + }, ratelimit=False) if "topic" in config: topic = config["topic"] @@ -144,7 +144,7 @@ class RoomCreationHandler(BaseHandler): "sender": user_id, "state_key": "", "content": {"topic": topic}, - }) + }, ratelimit=False) for invitee in invite_list: yield msg_handler.create_and_send_event({ @@ -153,7 +153,7 @@ class RoomCreationHandler(BaseHandler): "room_id": room_id, "sender": user_id, "content": {"membership": Membership.INVITE}, - }) + }, ratelimit=False) result = {"room_id": room_id} -- cgit 1.4.1 From 1ebff9736b3a74db3976a69db056438907b45e7b Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Tue, 21 Apr 2015 16:07:20 +0100 Subject: Split out the JsonResource request logging and error handling into a separate wrapper function --- synapse/http/server.py | 207 +++++++++++++++++++++++++++---------------------- 1 file changed, 113 insertions(+), 94 deletions(-) (limited to 'synapse') diff --git a/synapse/http/server.py b/synapse/http/server.py index dee49b9e18..f1193f309d 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -51,6 +51,71 @@ response_timer = metrics.register_distribution( labels=["method", "servlet"] ) +_next_request_id = 0 + + +def request_handler(request_handler): + """Wraps a method that acts as a request handler with the necessary logging + and exception handling. + The method must have a signature of "handle_foo(self, request)". + The argument "self" must have "version_string" and "clock" attributes. + The argument "request" must be a twisted HTTP request. 
+ """ + + @defer.inlineCallbacks + def wrapped_request_handler(self, request): + global _next_request_id + request_id = "%s-%s" % (request.method, _next_request_id) + _next_request_id += 1 + with LoggingContext(request_id) as request_context: + request_context.request = request_id + code = None + start = self.clock.time_msec() + try: + logger.info( + "Received request: %s %s", + request.method, request.path + ) + yield request_handler(self, request) + code = request.code + except CodeMessageException as e: + code = e.code + if isinstance(e, SynapseError): + logger.info( + "%s SynapseError: %s - %s", request, code, e.msg + ) + else: + logger.exception(e) + outgoing_responses_counter.inc(request.method, str(code)) + respond_with_json( + request, code, cs_exception(e), send_cors=True, + pretty_print=_request_user_agent_is_curl(request), + version_string=self.version_string, + ) + except: + code = 500 + logger.exception( + "Failed handle request %s.%s on %r: %r", + request_handler.__module__, + request_handler.__name__, + self, + request + ) + respond_with_json( + request, + 500, + {"error": "Internal server error"}, + send_cors=True + ) + finally: + code = str(code) if code else "-" + end = self.clock.time_msec() + logger.info( + "Processed request: %dms %s %s %s", + end-start, code, request.method, request.path + ) + return wrapped_request_handler + class HttpServer(object): """ Interface for registering callbacks on a HTTP server @@ -115,101 +180,55 @@ class JsonResource(HttpServer, resource.Resource): def render(self, request): """ This get's called by twisted every time someone sends us a request. """ - self._async_render_with_logging_context(request) + self._async_render(request) return server.NOT_DONE_YET - _request_id = 0 - - @defer.inlineCallbacks - def _async_render_with_logging_context(self, request): - request_id = "%s-%s" % (request.method, JsonResource._request_id) - JsonResource._request_id += 1 - with LoggingContext(request_id) as request_context: - request_context.request = request_id - yield self._async_render(request) - + @request_handler @defer.inlineCallbacks def _async_render(self, request): """ This get's called by twisted every time someone sends us a request. This checks if anyone has registered a callback for that method and path. """ - code = None start = self.clock.time_msec() - try: - # Just say yes to OPTIONS. - if request.method == "OPTIONS": - self._send_response(request, 200, {}) - return - - # Loop through all the registered callbacks to check if the method - # and path regex match - for path_entry in self.path_regexs.get(request.method, []): - m = path_entry.pattern.match(request.path) - if not m: - continue - - # We found a match! Trigger callback and then return the - # returned response. We pass both the request and any - # matched groups from the regex to the callback. 
- - callback = path_entry.callback - - servlet_instance = getattr(callback, "__self__", None) - if servlet_instance is not None: - servlet_classname = servlet_instance.__class__.__name__ - else: - servlet_classname = "%r" % callback - incoming_requests_counter.inc(request.method, servlet_classname) - - args = [ - urllib.unquote(u).decode("UTF-8") for u in m.groups() - ] - - logger.info( - "Received request: %s %s", - request.method, request.path - ) - - code, response = yield callback(request, *args) + if request.method == "OPTIONS": + self._send_response(request, 200, {}) + return + # Loop through all the registered callbacks to check if the method + # and path regex match + for path_entry in self.path_regexs.get(request.method, []): + m = path_entry.pattern.match(request.path) + if not m: + continue + + # We found a match! Trigger callback and then return the + # returned response. We pass both the request and any + # matched groups from the regex to the callback. + + callback = path_entry.callback + + servlet_instance = getattr(callback, "__self__", None) + if servlet_instance is not None: + servlet_classname = servlet_instance.__class__.__name__ + else: + servlet_classname = "%r" % callback + incoming_requests_counter.inc(request.method, servlet_classname) - self._send_response(request, code, response) - response_timer.inc_by( - self.clock.time_msec() - start, request.method, servlet_classname - ) + args = [ + urllib.unquote(u).decode("UTF-8") for u in m.groups() + ] - return + code, response = yield callback(request, *args) - # Huh. No one wanted to handle that? Fiiiiiine. Send 400. - raise UnrecognizedRequestError() - except CodeMessageException as e: - if isinstance(e, SynapseError): - logger.info("%s SynapseError: %s - %s", request, e.code, e.msg) - else: - logger.exception(e) - - code = e.code - self._send_response( - request, - code, - cs_exception(e), - response_code_message=e.response_code_message - ) - except Exception as e: - logger.exception(e) - self._send_response( - request, - 500, - {"error": "Internal server error"} + self._send_response(request, code, response) + response_timer.inc_by( + self.clock.time_msec() - start, request.method, servlet_classname ) - finally: - code = str(code) if code else "-" - end = self.clock.time_msec() - logger.info( - "Processed request: %dms %s %s %s", - end-start, code, request.method, request.path - ) + return + + # Huh. No one wanted to handle that? Fiiiiiine. Send 400. 
+ raise UnrecognizedRequestError() def _send_response(self, request, code, response_json_object, response_code_message=None): @@ -229,20 +248,10 @@ class JsonResource(HttpServer, resource.Resource): request, code, response_json_object, send_cors=True, response_code_message=response_code_message, - pretty_print=self._request_user_agent_is_curl, + pretty_print=_request_user_agent_is_curl(request), version_string=self.version_string, ) - @staticmethod - def _request_user_agent_is_curl(request): - user_agents = request.requestHeaders.getRawHeaders( - "User-Agent", default=[] - ) - for user_agent in user_agents: - if "curl" in user_agent: - return True - return False - class RootRedirect(resource.Resource): """Redirects the root '/' path to another path.""" @@ -263,8 +272,8 @@ class RootRedirect(resource.Resource): def respond_with_json(request, code, json_object, send_cors=False, response_code_message=None, pretty_print=False, version_string=""): - if not pretty_print: - json_bytes = encode_pretty_printed_json(json_object) + if pretty_print: + json_bytes = encode_pretty_printed_json(json_object) + "\n" else: json_bytes = encode_canonical_json(json_object) @@ -304,3 +313,13 @@ def respond_with_json_bytes(request, code, json_bytes, send_cors=False, request.write(json_bytes) request.finish() return NOT_DONE_YET + + +def _request_user_agent_is_curl(request): + user_agents = request.requestHeaders.getRawHeaders( + "User-Agent", default=[] + ) + for user_agent in user_agents: + if "curl" in user_agent: + return True + return False -- cgit 1.4.1 From 1967650bc4cbeea3deacaf540036f9cae4cbc330 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Tue, 21 Apr 2015 16:35:53 +0100 Subject: Combine the request wrappers in rest/media/v1 and http/server into a single wrapper decorator --- synapse/http/server.py | 15 +++- synapse/http/servlet.py | 110 ++++++++++++++-------------- synapse/rest/client/v2_alpha/sync.py | 20 ++--- synapse/rest/media/v1/base_resource.py | 80 ++++---------------- synapse/rest/media/v1/download_resource.py | 12 +-- synapse/rest/media/v1/thumbnail_resource.py | 16 ++-- synapse/rest/media/v1/upload_resource.py | 85 +++++++++------------ 7 files changed, 140 insertions(+), 198 deletions(-) (limited to 'synapse') diff --git a/synapse/http/server.py b/synapse/http/server.py index f1193f309d..b3706889ab 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -57,9 +57,18 @@ _next_request_id = 0 def request_handler(request_handler): """Wraps a method that acts as a request handler with the necessary logging and exception handling. - The method must have a signature of "handle_foo(self, request)". - The argument "self" must have "version_string" and "clock" attributes. - The argument "request" must be a twisted HTTP request. + + The method must have a signature of "handle_foo(self, request)". The + argument "self" must have "version_string" and "clock" attributes. The + argument "request" must be a twisted HTTP request. + + The method must return a deferred. If the deferred succeeds we assume that + a response has been sent. If the deferred fails with a SynapseError we use + it to send a JSON response with the appropriate HTTP reponse code. If the + deferred fails with any other type of error we send a 500 reponse. + + We insert a unique request-id into the logging context for this request and + log the response and duration for this request. 
""" @defer.inlineCallbacks diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 265559a3ea..9cda17fcf8 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -23,6 +23,61 @@ import logging logger = logging.getLogger(__name__) +def parse_integer(request, name, default=None, required=False): + if name in request.args: + try: + return int(request.args[name][0]) + except: + message = "Query parameter %r must be an integer" % (name,) + raise SynapseError(400, message) + else: + if required: + message = "Missing integer query parameter %r" % (name,) + raise SynapseError(400, message) + else: + return default + + +def parse_boolean(request, name, default=None, required=False): + if name in request.args: + try: + return { + "true": True, + "false": False, + }[request.args[name][0]] + except: + message = ( + "Boolean query parameter %r must be one of" + " ['true', 'false']" + ) % (name,) + raise SynapseError(400, message) + else: + if required: + message = "Missing boolean query parameter %r" % (name,) + raise SynapseError(400, message) + else: + return default + + +def parse_string(request, name, default=None, required=False, + allowed_values=None, param_type="string"): + if name in request.args: + value = request.args[name][0] + if allowed_values is not None and value not in allowed_values: + message = "Query parameter %r must be one of [%s]" % ( + name, ", ".join(repr(v) for v in allowed_values) + ) + raise SynapseError(message) + else: + return value + else: + if required: + message = "Missing %s query parameter %r" % (param_type, name) + raise SynapseError(400, message) + else: + return default + + class RestServlet(object): """ A Synapse REST Servlet. @@ -56,58 +111,3 @@ class RestServlet(object): http_server.register_path(method, pattern, method_handler) else: raise NotImplementedError("RestServlet must register something.") - - @staticmethod - def parse_integer(request, name, default=None, required=False): - if name in request.args: - try: - return int(request.args[name][0]) - except: - message = "Query parameter %r must be an integer" % (name,) - raise SynapseError(400, message) - else: - if required: - message = "Missing integer query parameter %r" % (name,) - raise SynapseError(400, message) - else: - return default - - @staticmethod - def parse_boolean(request, name, default=None, required=False): - if name in request.args: - try: - return { - "true": True, - "false": False, - }[request.args[name][0]] - except: - message = ( - "Boolean query parameter %r must be one of" - " ['true', 'false']" - ) % (name,) - raise SynapseError(400, message) - else: - if required: - message = "Missing boolean query parameter %r" % (name,) - raise SynapseError(400, message) - else: - return default - - @staticmethod - def parse_string(request, name, default=None, required=False, - allowed_values=None, param_type="string"): - if name in request.args: - value = request.args[name][0] - if allowed_values is not None and value not in allowed_values: - message = "Query parameter %r must be one of [%s]" % ( - name, ", ".join(repr(v) for v in allowed_values) - ) - raise SynapseError(message) - else: - return value - else: - if required: - message = "Missing %s query parameter %r" % (param_type, name) - raise SynapseError(400, message) - else: - return default diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 3056ec45cf..f2fd0b9f32 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -15,7 +15,9 
@@ from twisted.internet import defer -from synapse.http.servlet import RestServlet +from synapse.http.servlet import ( + RestServlet, parse_string, parse_integer, parse_boolean +) from synapse.handlers.sync import SyncConfig from synapse.types import StreamToken from synapse.events.utils import ( @@ -87,20 +89,20 @@ class SyncRestServlet(RestServlet): def on_GET(self, request): user, client = yield self.auth.get_user_by_req(request) - timeout = self.parse_integer(request, "timeout", default=0) - limit = self.parse_integer(request, "limit", required=True) - gap = self.parse_boolean(request, "gap", default=True) - sort = self.parse_string( + timeout = parse_integer(request, "timeout", default=0) + limit = parse_integer(request, "limit", required=True) + gap = parse_boolean(request, "gap", default=True) + sort = parse_string( request, "sort", default="timeline,asc", allowed_values=self.ALLOWED_SORT ) - since = self.parse_string(request, "since") - set_presence = self.parse_string( + since = parse_string(request, "since") + set_presence = parse_string( request, "set_presence", default="online", allowed_values=self.ALLOWED_PRESENCE ) - backfill = self.parse_boolean(request, "backfill", default=False) - filter_id = self.parse_string(request, "filter", default=None) + backfill = parse_boolean(request, "backfill", default=False) + filter_id = parse_string(request, "filter", default=None) logger.info( "/sync: user=%r, timeout=%r, limit=%r, gap=%r, sort=%r, since=%r," diff --git a/synapse/rest/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py index b10cbddb81..fc078fca82 100644 --- a/synapse/rest/media/v1/base_resource.py +++ b/synapse/rest/media/v1/base_resource.py @@ -18,7 +18,7 @@ from .thumbnailer import Thumbnailer from synapse.http.server import respond_with_json from synapse.util.stringutils import random_string from synapse.api.errors import ( - cs_exception, CodeMessageException, cs_error, Codes, SynapseError + cs_error, Codes, SynapseError ) from twisted.internet import defer @@ -32,6 +32,18 @@ import logging logger = logging.getLogger(__name__) +def parse_media_id(request): + try: + server_name, media_id = request.postpath + return (server_name, media_id) + except: + raise SynapseError( + 404, + "Invalid media id token %r" % (request.postpath,), + Codes.UNKNOWN, + ) + + class BaseMediaResource(Resource): isLeaf = True @@ -47,72 +59,6 @@ class BaseMediaResource(Resource): self.filepaths = filepaths self.downloads = {} - @staticmethod - def catch_errors(request_handler): - @defer.inlineCallbacks - def wrapped_request_handler(self, request): - try: - yield request_handler(self, request) - except CodeMessageException as e: - logger.info("Responding with error: %r", e) - respond_with_json( - request, e.code, cs_exception(e), send_cors=True - ) - except: - logger.exception( - "Failed handle request %s.%s on %r", - request_handler.__module__, - request_handler.__name__, - self, - ) - respond_with_json( - request, - 500, - {"error": "Internal server error"}, - send_cors=True - ) - return wrapped_request_handler - - @staticmethod - def _parse_media_id(request): - try: - server_name, media_id = request.postpath - return (server_name, media_id) - except: - raise SynapseError( - 404, - "Invalid media id token %r" % (request.postpath,), - Codes.UNKNOWN, - ) - - @staticmethod - def _parse_integer(request, arg_name, default=None): - try: - if default is None: - return int(request.args[arg_name][0]) - else: - return int(request.args.get(arg_name, [default])[0]) - except: - raise 
SynapseError( - 400, - "Missing integer argument %r" % (arg_name,), - Codes.UNKNOWN, - ) - - @staticmethod - def _parse_string(request, arg_name, default=None): - try: - if default is None: - return request.args[arg_name][0] - else: - return request.args.get(arg_name, [default])[0] - except: - raise SynapseError( - 400, - "Missing string argument %r" % (arg_name,), - Codes.UNKNOWN, - ) - def _respond_404(self, request): respond_with_json( request, 404, diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py index c585bb11f7..0fe6abf647 100644 --- a/synapse/rest/media/v1/download_resource.py +++ b/synapse/rest/media/v1/download_resource.py @@ -13,7 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .base_resource import BaseMediaResource +from .base_resource import BaseMediaResource, parse_media_id +from synapse.http.server import request_handler from twisted.web.server import NOT_DONE_YET from twisted.internet import defer @@ -28,15 +29,10 @@ class DownloadResource(BaseMediaResource): self._async_render_GET(request) return NOT_DONE_YET - @BaseMediaResource.catch_errors + @request_handler @defer.inlineCallbacks def _async_render_GET(self, request): - try: - server_name, media_id = request.postpath - except: - self._respond_404(request) - return - + server_name, media_id = parse_media_id(request) if server_name == self.server_name: yield self._respond_local_file(request, media_id) else: diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py index 84f5e3463c..1dadd880b2 100644 --- a/synapse/rest/media/v1/thumbnail_resource.py +++ b/synapse/rest/media/v1/thumbnail_resource.py @@ -14,7 +14,9 @@ # limitations under the License. -from .base_resource import BaseMediaResource +from .base_resource import BaseMediaResource, parse_media_id +from synapse.http.servlet import parse_string, parse_integer +from synapse.http.server import request_handler from twisted.web.server import NOT_DONE_YET from twisted.internet import defer @@ -31,14 +33,14 @@ class ThumbnailResource(BaseMediaResource): self._async_render_GET(request) return NOT_DONE_YET - @BaseMediaResource.catch_errors + @request_handler @defer.inlineCallbacks def _async_render_GET(self, request): - server_name, media_id = self._parse_media_id(request) - width = self._parse_integer(request, "width") - height = self._parse_integer(request, "height") - method = self._parse_string(request, "method", "scale") - m_type = self._parse_string(request, "type", "image/png") + server_name, media_id = parse_media_id(request) + width = parse_integer(request, "width") + height = parse_integer(request, "height") + method = parse_string(request, "method", "scale") + m_type = parse_string(request, "type", "image/png") if server_name == self.server_name: yield self._respond_local_thumbnail( diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py index e5aba3af4c..cc571976a5 100644 --- a/synapse/rest/media/v1/upload_resource.py +++ b/synapse/rest/media/v1/upload_resource.py @@ -13,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.http.server import respond_with_json +from synapse.http.server import respond_with_json, request_handler from synapse.util.stringutils import random_string -from synapse.api.errors import ( - cs_exception, SynapseError, CodeMessageException -) +from synapse.api.errors import SynapseError from twisted.web.server import NOT_DONE_YET from twisted.internet import defer @@ -69,53 +67,42 @@ class UploadResource(BaseMediaResource): defer.returnValue("mxc://%s/%s" % (self.server_name, media_id)) + @request_handler @defer.inlineCallbacks def _async_render_POST(self, request): - try: - auth_user, client = yield self.auth.get_user_by_req(request) - # TODO: The checks here are a bit late. The content will have - # already been uploaded to a tmp file at this point - content_length = request.getHeader("Content-Length") - if content_length is None: - raise SynapseError( - msg="Request must specify a Content-Length", code=400 - ) - if int(content_length) > self.max_upload_size: - raise SynapseError( - msg="Upload request body is too large", - code=413, - ) - - headers = request.requestHeaders - - if headers.hasHeader("Content-Type"): - media_type = headers.getRawHeaders("Content-Type")[0] - else: - raise SynapseError( - msg="Upload request missing 'Content-Type'", - code=400, - ) - - # if headers.hasHeader("Content-Disposition"): - # disposition = headers.getRawHeaders("Content-Disposition")[0] - # TODO(markjh): parse content-dispostion - - content_uri = yield self.create_content( - media_type, None, request.content.read(), - content_length, auth_user + auth_user, client = yield self.auth.get_user_by_req(request) + # TODO: The checks here are a bit late. The content will have + # already been uploaded to a tmp file at this point + content_length = request.getHeader("Content-Length") + if content_length is None: + raise SynapseError( + msg="Request must specify a Content-Length", code=400 ) - - respond_with_json( - request, 200, {"content_uri": content_uri}, send_cors=True + if int(content_length) > self.max_upload_size: + raise SynapseError( + msg="Upload request body is too large", + code=413, ) - except CodeMessageException as e: - logger.exception(e) - respond_with_json(request, e.code, cs_exception(e), send_cors=True) - except: - logger.exception("Failed to store file") - respond_with_json( - request, - 500, - {"error": "Internal server error"}, - send_cors=True + + headers = request.requestHeaders + + if headers.hasHeader("Content-Type"): + media_type = headers.getRawHeaders("Content-Type")[0] + else: + raise SynapseError( + msg="Upload request missing 'Content-Type'", + code=400, ) + + # if headers.hasHeader("Content-Disposition"): + # disposition = headers.getRawHeaders("Content-Disposition")[0] + # TODO(markjh): parse content-dispostion + + content_uri = yield self.create_content( + media_type, None, request.content.read(), + content_length, auth_user + ) + + respond_with_json( + request, 200, {"content_uri": content_uri}, send_cors=True + ) -- cgit 1.4.1 From 812a99100bb86e3a09b3e5739c849e55c74cf448 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Tue, 21 Apr 2015 16:43:58 +0100 Subject: Set a version_string in BaseMediaResource so that the request_handler wrapper works --- synapse/rest/media/v1/base_resource.py | 1 + 1 file changed, 1 insertion(+) (limited to 'synapse') diff --git a/synapse/rest/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py index fc078fca82..edd4f78024 100644 --- a/synapse/rest/media/v1/base_resource.py +++ 
b/synapse/rest/media/v1/base_resource.py @@ -57,6 +57,7 @@ class BaseMediaResource(Resource): self.max_upload_size = hs.config.max_upload_size self.max_image_pixels = hs.config.max_image_pixels self.filepaths = filepaths + self.version_string = hs.version_string self.downloads = {} def _respond_404(self, request): -- cgit 1.4.1 From 6080830beffd38a08b08d73ee1588cf2289c0080 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Tue, 21 Apr 2015 17:03:57 +0100 Subject: Bump syutil version to 0.0.5 --- synapse/python_dependencies.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'synapse') diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index dac927d0a7..ee72f774b3 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -4,7 +4,7 @@ from distutils.version import LooseVersion logger = logging.getLogger(__name__) REQUIREMENTS = { - "syutil>=0.0.4": ["syutil"], + "syutil>=0.0.5": ["syutil"], "Twisted==14.0.2": ["twisted==14.0.2"], "service_identity>=1.0.0": ["service_identity>=1.0.0"], "pyopenssl>=0.14": ["OpenSSL>=0.14"], @@ -43,8 +43,8 @@ DEPENDENCY_LINKS = [ ), github_link( project="matrix-org/syutil", - version="v0.0.4", - egg="syutil-0.0.4", + version="v0.0.5", + egg="syutil-0.0.5", ), github_link( project="matrix-org/matrix-angular-sdk", -- cgit 1.4.1 From 3a7d7a3f22fe7358b23250e1e8b8d5a9e4559f23 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 21 Apr 2015 20:18:29 +0100 Subject: Sanitise a user's powerlevel to an int() before numerical comparison, because otherwise Python is "helpful" with it (SYN-351) --- synapse/api/auth.py | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'synapse') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index e159e4503f..c1b3ae1734 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -189,6 +189,12 @@ class Auth(object): auth_events, ) + # TODO(paul): There's an awful lot of int()-casting in this code; + # surely we should be squashing strings to integers at a higher + # level, maybe when we insert? + if user_level is not None: + user_level = int(user_level) + ban_level, kick_level, redact_level = ( self._get_ops_level_from_event_state( event, @@ -269,6 +275,7 @@ class Auth(object): 403, "You cannot kick user %s." % target_user_id ) elif Membership.BAN == membership: + print "I wonder how user's level of %r compares to ban level of %r" % (user_level, ban_level) if user_level < ban_level: raise AuthError(403, "You don't have permission to ban") else: -- cgit 1.4.1 From b568c0231c708431532aa385ebcc121e0a8ef986 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 21 Apr 2015 20:21:14 +0100 Subject: Remove debugging print statement accidentally committed --- synapse/api/auth.py | 1 - 1 file changed, 1 deletion(-) (limited to 'synapse') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index c1b3ae1734..43b21897b9 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -275,7 +275,6 @@ class Auth(object): 403, "You cannot kick user %s." 
% target_user_id ) elif Membership.BAN == membership: - print "I wonder how user's level of %r compares to ban level of %r" % (user_level, ban_level) if user_level < ban_level: raise AuthError(403, "You don't have permission to ban") else: -- cgit 1.4.1 From d3309933f52f4382470b72ec1079f403ca412904 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 21 Apr 2015 20:53:23 +0100 Subject: Much neater fetching of defined powerlevels from m.room.power_levels state event --- synapse/api/auth.py | 52 +++++++++++++++++++++------------------------------- 1 file changed, 21 insertions(+), 31 deletions(-) (limited to 'synapse') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 43b21897b9..9a5058a364 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -195,12 +195,8 @@ class Auth(object): if user_level is not None: user_level = int(user_level) - ban_level, kick_level, redact_level = ( - self._get_ops_level_from_event_state( - event, - auth_events, - ) - ) + # FIXME (erikj): What should we do here as the default? + ban_level = self._get_named_level(auth_events, "ban", 50) logger.debug( "is_membership_change_allowed: %s", @@ -216,11 +212,6 @@ class Auth(object): } ) - if ban_level: - ban_level = int(ban_level) - else: - ban_level = 50 # FIXME (erikj): What should we do here? - if Membership.JOIN != membership: # JOIN is the only action you can perform if you're not in the room if not caller_in_room: # caller isn't joined @@ -265,10 +256,7 @@ class Auth(object): 403, "You cannot unban user &s." % (target_user_id,) ) elif target_user_id != event.user_id: - if kick_level: - kick_level = int(kick_level) - else: - kick_level = 50 # FIXME (erikj): What should we do here? + kick_level = self._get_named_level(auth_events, "kick", 50) if user_level < kick_level: raise AuthError( @@ -282,10 +270,14 @@ class Auth(object): return True - def _get_power_level_from_event_state(self, event, user_id, auth_events): + def _get_power_level_event(self, auth_events): key = (EventTypes.PowerLevels, "", ) - power_level_event = auth_events.get(key) + return auth_events.get(key) + + def _get_power_level_from_event_state(self, event, user_id, auth_events): + power_level_event = self._get_power_level_event(auth_events) level = None + if power_level_event: level = power_level_event.content.get("users", {}).get(user_id) if not level: @@ -299,17 +291,18 @@ class Auth(object): return level - def _get_ops_level_from_event_state(self, event, auth_events): - key = (EventTypes.PowerLevels, "", ) - power_level_event = auth_events.get(key) - if power_level_event: - return ( - power_level_event.content.get("ban", 50), - power_level_event.content.get("kick", 50), - power_level_event.content.get("redact", 50), - ) - return None, None, None, + def _get_named_level(self, auth_events, name, default): + power_level_event = self._get_power_level_event(auth_events) + + if not power_level_event: + return default + + level = power_level_event.content.get(name, None) + if level is not None: + return int(level) + else: + return default @defer.inlineCallbacks def get_user_by_req(self, request): @@ -551,10 +544,7 @@ class Auth(object): auth_events, ) - _, _, redact_level = self._get_ops_level_from_event_state( - event, - auth_events, - ) + redact_level = self._get_named_level(auth_events, "redact", 50) if user_level < redact_level: raise AuthError( -- cgit 1.4.1 From f43063158afb33bc1601632583b9e6377ff76aca Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 22 Apr 2015 13:12:11 +0100 Subject: Appease pep8 
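The auth.py changes in the commits above converge on two small helpers, _get_named_level and _get_user_power_level (plus _get_power_level_event), which read values out of the m.room.power_levels state event and do the int() squashing in one place. A minimal standalone sketch of that pattern follows; it is illustrative rather than the committed code, and it assumes auth_events is a dict keyed by (event_type, state_key) tuples of event objects exposing a .content dict, with literal event type strings standing in for the EventTypes constants.

# Illustrative sketch only; names mirror the helpers in the diffs above but
# this is not the exact committed implementation.
def _get_power_level_event(auth_events):
    return auth_events.get(("m.room.power_levels", ""))


def _get_named_level(auth_events, name, default):
    # e.g. name in ("ban", "kick", "redact"), falling back to the given default
    power_level_event = _get_power_level_event(auth_events)
    if not power_level_event:
        return default
    level = power_level_event.content.get(name)
    return int(level) if level is not None else default


def _get_user_power_level(user_id, auth_events):
    # Per-user entry, then "users_default"; with no power_levels event at all,
    # the room creator gets 100 and everyone else 0.
    power_level_event = _get_power_level_event(auth_events)
    if power_level_event:
        level = power_level_event.content.get("users", {}).get(user_id)
        if not level:
            level = power_level_event.content.get("users_default", 0)
        return int(level) if level is not None else 0
    create_event = auth_events.get(("m.room.create", ""))
    if create_event is not None and create_event.content["creator"] == user_id:
        return 100
    return 0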
--- synapse/api/auth.py | 1 - 1 file changed, 1 deletion(-) (limited to 'synapse') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 9a5058a364..bae210c579 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -291,7 +291,6 @@ class Auth(object): return level - def _get_named_level(self, auth_events, name, default): power_level_event = self._get_power_level_event(auth_events) -- cgit 1.4.1 From a16eaa0c337c29a932b5effddfddff78849836c9 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 22 Apr 2015 14:20:04 +0100 Subject: Neater fetching of user's auth level in a room - squash to int() at access time (SYN-353) --- synapse/api/auth.py | 47 ++++++++++++----------------------------------- 1 file changed, 12 insertions(+), 35 deletions(-) (limited to 'synapse') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index bae210c579..a21120b313 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -183,17 +183,7 @@ class Auth(object): else: join_rule = JoinRules.INVITE - user_level = self._get_power_level_from_event_state( - event, - event.user_id, - auth_events, - ) - - # TODO(paul): There's an awful lot of int()-casting in this code; - # surely we should be squashing strings to integers at a higher - # level, maybe when we insert? - if user_level is not None: - user_level = int(user_level) + user_level = self._get_user_power_level(event.user_id, auth_events) # FIXME (erikj): What should we do here as the default? ban_level = self._get_named_level(auth_events, "ban", 50) @@ -274,22 +264,26 @@ class Auth(object): key = (EventTypes.PowerLevels, "", ) return auth_events.get(key) - def _get_power_level_from_event_state(self, event, user_id, auth_events): + def _get_user_power_level(self, user_id, auth_events): power_level_event = self._get_power_level_event(auth_events) - level = None if power_level_event: level = power_level_event.content.get("users", {}).get(user_id) if not level: level = power_level_event.content.get("users_default", 0) + + if level is None: + return 0 + else: + return int(level) else: key = (EventTypes.Create, "", ) create_event = auth_events.get(key) if (create_event is not None and create_event.content["creator"] == user_id): return 100 - - return level + else: + return 0 def _get_named_level(self, auth_events, name, default): power_level_event = self._get_power_level_event(auth_events) @@ -496,16 +490,7 @@ class Auth(object): else: send_level = 0 - user_level = self._get_power_level_from_event_state( - event, - event.user_id, - auth_events, - ) - - if user_level: - user_level = int(user_level) - else: - user_level = 0 + user_level = self._get_user_power_level(event.user_id, auth_events) if user_level < send_level: raise AuthError( @@ -537,11 +522,7 @@ class Auth(object): return True def _check_redaction(self, event, auth_events): - user_level = self._get_power_level_from_event_state( - event, - event.user_id, - auth_events, - ) + user_level = self._get_user_power_level(event.user_id, auth_events) redact_level = self._get_named_level(auth_events, "redact", 50) @@ -571,11 +552,7 @@ class Auth(object): if not current_state: return - user_level = self._get_power_level_from_event_state( - event, - event.user_id, - auth_events, - ) + user_level = self._get_user_power_level(event.user_id, auth_events) # Check other levels: levels_to_check = [ -- cgit 1.4.1 From 191f7f09cee4e148949af9e33c5c8f60184acf90 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 18:27:25 +0100 Subject: Generate presence event-stream JSON 
structures directly --- synapse/handlers/presence.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) (limited to 'synapse') diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index bbc7a0f200..6332f50974 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -836,6 +836,8 @@ class PresenceEventSource(object): presence = self.hs.get_handlers().presence_handler cachemap = presence._user_cachemap + clock = self.clock + latest_serial = None updates = [] # TODO(paul): use a DeferredList ? How to limit concurrency. @@ -845,18 +847,17 @@ class PresenceEventSource(object): if cached.serial <= from_key: continue - if (yield self.is_visible(observer_user, observed_user)): - updates.append((observed_user, cached)) + if not (yield self.is_visible(observer_user, observed_user)): + continue + + if latest_serial is None or cached.serial > latest_serial: + latest_serial = cached.serial + updates.append(cached.make_event(user=observed_user, clock=clock)) # TODO(paul): limit if updates: - clock = self.clock - - latest_serial = max([x[1].serial for x in updates]) - data = [x[1].make_event(user=x[0], clock=clock) for x in updates] - - defer.returnValue((data, latest_serial)) + defer.returnValue((updates, latest_serial)) else: defer.returnValue(([], presence._user_cachemap_latest_serial)) -- cgit 1.4.1 From 8a785c3006327076245428d26e5ca1634e9caeb2 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 18:40:19 +0100 Subject: Store a list of the presence serial number at which remote users went offline, so that when we delete them from the cachemap, we can still synthesize OFFLINE events for them (SYN-261) --- synapse/handlers/presence.py | 21 +++++++++++++++++++++ tests/handlers/test_presence.py | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+) (limited to 'synapse') diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 6332f50974..42fb622c48 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -135,6 +135,9 @@ class PresenceHandler(BaseHandler): self._remote_sendmap = {} # map remote users to sets of local users who're interested in them self._remote_recvmap = {} + # list of (serial, set of(userids)) tuples, ordered by serial, latest + # first + self._remote_offline_serials = [] # map any user to a UserPresenceCache self._user_cachemap = {} @@ -715,6 +718,10 @@ class PresenceHandler(BaseHandler): ) if state["presence"] == PresenceState.OFFLINE: + self._remote_offline_serials.insert( + 0, + (self._user_cachemap_latest_serial, set([user.to_string()])) + ) del self._user_cachemap[user] for poll in content.get("poll", []): @@ -856,6 +863,20 @@ class PresenceEventSource(object): # TODO(paul): limit + for serial, user_ids in presence._remote_offline_serials: + if serial < from_key: + break + + for u in user_ids: + updates.append({ + "type": "m.presence", + "content": {"user_id": u, "presence": PresenceState.OFFLINE}, + }) + # TODO(paul): For the v2 API we want to tell the client their from_key + # is too old if we fell off the end of the _remote_offline_serials + # list, and get them to invalidate+resync. In v1 we have no such + # concept so this is a best-effort result. 
+ if updates: defer.returnValue((updates, latest_serial)) else: diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 04eba4289e..bb497b6f09 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -878,6 +878,44 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase): state ) + @defer.inlineCallbacks + def test_recv_remote_offline(self): + """ Various tests relating to SYN-261 """ + potato_set = self.handler._remote_recvmap.setdefault(self.u_potato, + set()) + potato_set.add(self.u_apple) + + self.room_members = [self.u_banana, self.u_potato] + + self.assertEquals(self.event_source.get_current_key(), 0) + + yield self.mock_federation_resource.trigger("PUT", + "/_matrix/federation/v1/send/1000000/", + _make_edu_json("elsewhere", "m.presence", + content={ + "push": [ + {"user_id": "@potato:remote", + "presence": "offline"}, + ], + } + ) + ) + + self.assertEquals(self.event_source.get_current_key(), 1) + + (events, _) = yield self.event_source.get_new_events_for_user( + self.u_apple, 0, None + ) + self.assertEquals(events, + [ + {"type": "m.presence", + "content": { + "user_id": "@potato:remote", + "presence": OFFLINE, + }} + ] + ) + @defer.inlineCallbacks def test_join_room_local(self): self.room_members = [self.u_apple, self.u_banana] -- cgit 1.4.1 From b3bda8a75f9745fd351d2c2093ffc68774e8a2e2 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 18:40:47 +0100 Subject: Don't let the remote offline serial list grow arbitrarily large --- synapse/handlers/presence.py | 5 +++++ 1 file changed, 5 insertions(+) (limited to 'synapse') diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 42fb622c48..f929bcf853 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -36,6 +36,9 @@ metrics = synapse.metrics.get_metrics_for(__name__) # Don't bother bumping "last active" time if it differs by less than 60 seconds LAST_ACTIVE_GRANULARITY = 60*1000 +# Keep no more than this number of offline serial revisions +MAX_OFFLINE_SERIALS = 1000 + # TODO(paul): Maybe there's one of these I can steal from somewhere def partition(l, func): @@ -722,6 +725,8 @@ class PresenceHandler(BaseHandler): 0, (self._user_cachemap_latest_serial, set([user.to_string()])) ) + while len(self._remote_offline_serials) > MAX_OFFLINE_SERIALS: + self._remote_offline_serials.pop() # remove the oldest del self._user_cachemap[user] for poll in content.get("poll", []): -- cgit 1.4.1 From e1e5e53127540fbaa4e23fbc628113983efd767b Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 19:01:37 +0100 Subject: Remove users from the remote_offline_serials list (and clean up empty elements) when they go online again --- synapse/handlers/presence.py | 12 +++++++++++- tests/handlers/test_presence.py | 27 +++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index f929bcf853..571eacd343 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -720,14 +720,24 @@ class PresenceHandler(BaseHandler): statuscache=statuscache, ) + user_id = user.to_string() + if state["presence"] == PresenceState.OFFLINE: self._remote_offline_serials.insert( 0, - (self._user_cachemap_latest_serial, set([user.to_string()])) + (self._user_cachemap_latest_serial, set([user_id])) ) while len(self._remote_offline_serials) > MAX_OFFLINE_SERIALS: 
self._remote_offline_serials.pop() # remove the oldest del self._user_cachemap[user] + else: + # Remove the user from remote_offline_serials now that they're + # no longer offline + for idx, elem in enumerate(self._remote_offline_serials): + (_, user_ids) = elem + user_ids.discard(user_id) + if not user_ids: + self._remote_offline_serials.pop(idx) for poll in content.get("poll", []): user = UserID.from_string(poll) diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index bb497b6f09..9f5580c096 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -916,6 +916,33 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase): ] ) + yield self.mock_federation_resource.trigger("PUT", + "/_matrix/federation/v1/send/1000001/", + _make_edu_json("elsewhere", "m.presence", + content={ + "push": [ + {"user_id": "@potato:remote", + "presence": "online"}, + ], + } + ) + ) + + self.assertEquals(self.event_source.get_current_key(), 2) + + (events, _) = yield self.event_source.get_new_events_for_user( + self.u_apple, 0, None + ) + self.assertEquals(events, + [ + {"type": "m.presence", + "content": { + "user_id": "@potato:remote", + "presence": ONLINE, + }} + ] + ) + @defer.inlineCallbacks def test_join_room_local(self): self.room_members = [self.u_apple, self.u_banana] -- cgit 1.4.1 From 74270defdaf4070ba001713ae9f1f668790fc9a3 Mon Sep 17 00:00:00 2001 From: David Baker Date: Fri, 24 Apr 2015 09:27:42 +0100 Subject: No commas here, otherwise our error string constants become tuples. --- synapse/api/errors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/api/errors.py b/synapse/api/errors.py index eddd889778..72d2bd5b4c 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -35,8 +35,8 @@ class Codes(object): LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED" CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED" CAPTCHA_INVALID = "M_CAPTCHA_INVALID" - MISSING_PARAM = "M_MISSING_PARAM", - TOO_LARGE = "M_TOO_LARGE", + MISSING_PARAM = "M_MISSING_PARAM" + TOO_LARGE = "M_TOO_LARGE" EXCLUSIVE = "M_EXCLUSIVE" -- cgit 1.4.1 From 4e2f8b87221a7c2391a399b729b191ce40b91ab6 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 24 Apr 2015 10:35:29 +0100 Subject: Copyright notices --- synapse/push/baserules.py | 14 ++++++++++++++ synapse/push/rulekinds.py | 14 ++++++++++++++ synapse/python_dependencies.py | 14 ++++++++++++++ synapse/rest/media/v1/identicon_resource.py | 14 ++++++++++++++ synapse/storage/schema/delta/14/upgrade_appservice_db.py | 14 ++++++++++++++ synapse/storage/schema/delta/14/v14.sql | 14 ++++++++++++++ 6 files changed, 84 insertions(+) (limited to 'synapse') diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 60fd35fbfb..f8408d6596 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP diff --git a/synapse/push/rulekinds.py b/synapse/push/rulekinds.py index 660aa4e10e..4c591aa638 100644 --- a/synapse/push/rulekinds.py +++ b/synapse/push/rulekinds.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + PRIORITY_CLASS_MAP = { 'underride': 1, 'sender': 2, diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index ee72f774b3..8b457419cf 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import logging from distutils.version import LooseVersion diff --git a/synapse/rest/media/v1/identicon_resource.py b/synapse/rest/media/v1/identicon_resource.py index 912856386a..603859d5d4 100644 --- a/synapse/rest/media/v1/identicon_resource.py +++ b/synapse/rest/media/v1/identicon_resource.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from pydenticon import Generator from twisted.web.resource import Resource diff --git a/synapse/storage/schema/delta/14/upgrade_appservice_db.py b/synapse/storage/schema/delta/14/upgrade_appservice_db.py index 847b1c5b89..9f3a4dd4c5 100644 --- a/synapse/storage/schema/delta/14/upgrade_appservice_db.py +++ b/synapse/storage/schema/delta/14/upgrade_appservice_db.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + import json import logging diff --git a/synapse/storage/schema/delta/14/v14.sql b/synapse/storage/schema/delta/14/v14.sql index 0212726448..1d09ad7a15 100644 --- a/synapse/storage/schema/delta/14/v14.sql +++ b/synapse/storage/schema/delta/14/v14.sql @@ -1,3 +1,17 @@ +/* Copyright 2015 OpenMarket Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ CREATE TABLE IF NOT EXISTS push_rules_enable ( id INTEGER PRIMARY KEY AUTOINCREMENT, user_name TEXT NOT NULL, -- cgit 1.4.1 From 869dc94cbb5810e50efb7b6dd8320817aca01554 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 24 Apr 2015 11:27:56 +0100 Subject: Call the super classes when generating config --- synapse/config/registration.py | 1 + 1 file changed, 1 insertion(+) (limited to 'synapse') diff --git a/synapse/config/registration.py b/synapse/config/registration.py index d5c8f4bf7b..ad81cc4f45 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -53,6 +53,7 @@ class RegistrationConfig(Config): @classmethod def generate_config(cls, args, config_dir_path): + super(RegistrationConfig, cls).genenerate_config(args, config_dir_path) if args.enable_registration is None: args.enable_registration = False -- cgit 1.4.1 From bdcb23ca25bc2ea72ab3bc28d76c6b72d68206b3 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 24 Apr 2015 11:29:19 +0100 Subject: Fix spelling --- synapse/config/registration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'synapse') diff --git a/synapse/config/registration.py b/synapse/config/registration.py index ad81cc4f45..f412a72f59 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -53,7 +53,7 @@ class RegistrationConfig(Config): @classmethod def generate_config(cls, args, config_dir_path): - super(RegistrationConfig, cls).genenerate_config(args, config_dir_path) + super(RegistrationConfig, cls).generate_config(args, config_dir_path) if args.enable_registration is None: args.enable_registration = False -- cgit 1.4.1 From ed836386680acfeebe0ad2eb26985e5a88ccc3ab Mon Sep 17 00:00:00 2001 From: David Baker Date: Fri, 24 Apr 2015 14:26:33 +0100 Subject: Make one-to-one rule an underride otherwise bings don't work in one-to-one wrooms. Likewise a couple of other rules. 
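For context on why the priority class matters here: push rules are grouped into classes that are evaluated in order, with 'override' rules consulted first and 'underride' rules last, so moving a rule from override to underride lets rules in the intermediate content/room/sender classes match ahead of it. A toy sketch of that ordering follows; only the 'underride': 1 and 'sender': 2 entries of PRIORITY_CLASS_MAP are visible in rulekinds.py above, so the full ranking used here is an assumption, as are the rules_by_kind structure and its "matches" callable.

# Toy evaluator, illustrative only: kinds are checked from 'override' down to
# 'underride', so an underride rule only fires if nothing more specific matched.
KIND_ORDER = ["override", "content", "room", "sender", "underride"]  # assumed ranking


def first_matching_rule(rules_by_kind, event):
    # rules_by_kind: hypothetical dict of kind -> list of rules, each carrying
    # a "matches" callable that takes the event being processed.
    for kind in KIND_ORDER:
        for rule in rules_by_kind.get(kind, []):
            if rule["matches"](event):
                return rule
    return None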
--- synapse/push/baserules.py | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) (limited to 'synapse') diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index f8408d6596..f3d1cf5c5f 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -126,7 +126,25 @@ def make_base_prepend_override_rules(): def make_base_append_override_rules(): return [ { - 'rule_id': 'global/override/.m.rule.call', + 'rule_id': 'global/override/.m.rule.suppress_notices', + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'content.msgtype', + 'pattern': 'm.notice', + } + ], + 'actions': [ + 'dont_notify', + ] + } + ] + + +def make_base_append_underride_rules(user): + return [ + { + 'rule_id': 'global/underride/.m.rule.call', 'conditions': [ { 'kind': 'event_match', @@ -145,19 +163,6 @@ def make_base_append_override_rules(): } ] }, - { - 'rule_id': 'global/override/.m.rule.suppress_notices', - 'conditions': [ - { - 'kind': 'event_match', - 'key': 'content.msgtype', - 'pattern': 'm.notice', - } - ], - 'actions': [ - 'dont_notify', - ] - }, { 'rule_id': 'global/override/.m.rule.contains_display_name', 'conditions': [ @@ -176,7 +181,7 @@ def make_base_append_override_rules(): ] }, { - 'rule_id': 'global/override/.m.rule.room_one_to_one', + 'rule_id': 'global/underride/.m.rule.room_one_to_one', 'conditions': [ { 'kind': 'room_member_count', @@ -193,12 +198,7 @@ def make_base_append_override_rules(): 'value': False } ] - } - ] - - -def make_base_append_underride_rules(user): - return [ + }, { 'rule_id': 'global/underride/.m.rule.invite_for_me', 'conditions': [ -- cgit 1.4.1 From 1c82fbd2eb99d689d8fe835eca9f394518e25316 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 13:59:37 +0100 Subject: Implement create_observer. `create_observer` takes a deferred and create a new deferred that *observers* the original deferred. Any callbacks added to the observing deferred will *not* affect the origin deferred. --- synapse/util/async.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) (limited to 'synapse') diff --git a/synapse/util/async.py b/synapse/util/async.py index c4fe5d522f..d8febdb90c 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -32,3 +32,22 @@ def run_on_reactor(): iteration of the main loop """ return sleep(0) + + +def create_observer(deferred): + """Creates a deferred that observes the result or failure of the given + deferred *without* affecting the given deferred. 
+ """ + d = defer.Deferred() + + def callback(r): + d.callback(r) + return r + + def errback(f): + d.errback(f) + return f + + deferred.addCallbacks(callback, errback) + + return d -- cgit 1.4.1 From e701aec2d1e9a565d29bc27d2bde61032cba5fd1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 14:20:26 +0100 Subject: Implement locks using create_observer for fetching media and server keys --- synapse/crypto/keyring.py | 138 +++++++++++++++++++-------------- synapse/rest/media/v1/base_resource.py | 4 +- 2 files changed, 82 insertions(+), 60 deletions(-) (limited to 'synapse') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index f4db7b8a05..d98341f5c2 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -24,6 +24,8 @@ from synapse.api.errors import SynapseError, Codes from synapse.util.retryutils import get_retry_limiter +from synapse.util.async import create_observer + from OpenSSL import crypto import logging @@ -38,6 +40,8 @@ class Keyring(object): self.clock = hs.get_clock() self.hs = hs + self.key_downloads = {} + @defer.inlineCallbacks def verify_json_for_server(self, server_name, json_object): logger.debug("Verifying for %s", server_name) @@ -97,76 +101,92 @@ class Keyring(object): defer.returnValue(cached[0]) return - # Try to fetch the key from the remote server. - - limiter = yield get_retry_limiter( - server_name, - self.clock, - self.store, - ) + @defer.inlineCallbacks + def fetch_keys(): + # Try to fetch the key from the remote server. - with limiter: - (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_context_factory + limiter = yield get_retry_limiter( + server_name, + self.clock, + self.store, ) - # Check the response. - - x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, tls_certificate - ) - - if ("signatures" not in response - or server_name not in response["signatures"]): - raise ValueError("Key response not signed by remote server") - - if "tls_certificate" not in response: - raise ValueError("Key response missing TLS certificate") + with limiter: + (response, tls_certificate) = yield fetch_server_key( + server_name, self.hs.tls_context_factory + ) - tls_certificate_b64 = response["tls_certificate"] + # Check the response. 
- if encode_base64(x509_certificate_bytes) != tls_certificate_b64: - raise ValueError("TLS certificate doesn't match") + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, tls_certificate + ) - verify_keys = {} - for key_id, key_base64 in response["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key + if ("signatures" not in response + or server_name not in response["signatures"]): + raise ValueError("Key response not signed by remote server") + + if "tls_certificate" not in response: + raise ValueError("Key response missing TLS certificate") + + tls_certificate_b64 = response["tls_certificate"] + + if encode_base64(x509_certificate_bytes) != tls_certificate_b64: + raise ValueError("TLS certificate doesn't match") + + verify_keys = {} + for key_id, key_base64 in response["verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = verify_key + + for key_id in response["signatures"][server_name]: + if key_id not in response["verify_keys"]: + raise ValueError( + "Key response must include verification keys for all" + " signatures" + ) + if key_id in verify_keys: + verify_signed_json( + response, + server_name, + verify_keys[key_id] + ) + + # Cache the result in the datastore. + + time_now_ms = self.clock.time_msec() + + yield self.store.store_server_certificate( + server_name, + server_name, + time_now_ms, + tls_certificate, + ) - for key_id in response["signatures"][server_name]: - if key_id not in response["verify_keys"]: - raise ValueError( - "Key response must include verification keys for all" - " signatures" - ) - if key_id in verify_keys: - verify_signed_json( - response, - server_name, - verify_keys[key_id] + for key_id, key in verify_keys.items(): + yield self.store.store_server_verify_key( + server_name, server_name, time_now_ms, key ) - # Cache the result in the datastore. 
+ for key_id in key_ids: + if key_id in verify_keys: + defer.returnValue(verify_keys[key_id]) + return - time_now_ms = self.clock.time_msec() + raise ValueError("No verification key found for given key ids") - yield self.store.store_server_certificate( - server_name, - server_name, - time_now_ms, - tls_certificate, - ) + download = self.key_downloads.get(server_name) - for key_id, key in verify_keys.items(): - yield self.store.store_server_verify_key( - server_name, server_name, time_now_ms, key - ) + if download is None: + download = fetch_keys() + self.key_downloads[server_name] = download - for key_id in key_ids: - if key_id in verify_keys: - defer.returnValue(verify_keys[key_id]) - return + @download.addBoth + def callback(ret): + del self.key_downloads[server_name] + return ret - raise ValueError("No verification key found for given key ids") + r = yield create_observer(download) + defer.returnValue(r) diff --git a/synapse/rest/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py index edd4f78024..08c8d75af4 100644 --- a/synapse/rest/media/v1/base_resource.py +++ b/synapse/rest/media/v1/base_resource.py @@ -25,6 +25,8 @@ from twisted.internet import defer from twisted.web.resource import Resource from twisted.protocols.basic import FileSender +from synapse.util.async import create_observer + import os import logging @@ -87,7 +89,7 @@ class BaseMediaResource(Resource): def callback(media_info): del self.downloads[key] return media_info - return download + return create_observer(download) @defer.inlineCallbacks def _get_remote_media_impl(self, server_name, media_id): -- cgit 1.4.1 From 0a016b0525c918927c8134d5cb11d9be520a9efc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 14:37:24 +0100 Subject: Pull inner function out. --- synapse/crypto/keyring.py | 153 +++++++++++++++++++++++----------------------- 1 file changed, 77 insertions(+), 76 deletions(-) (limited to 'synapse') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index d98341f5c2..14f8f536e4 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -101,92 +101,93 @@ class Keyring(object): defer.returnValue(cached[0]) return - @defer.inlineCallbacks - def fetch_keys(): - # Try to fetch the key from the remote server. - - limiter = yield get_retry_limiter( - server_name, - self.clock, - self.store, - ) + download = self.key_downloads.get(server_name) - with limiter: - (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_context_factory - ) + if download is None: + download = self._get_server_verify_key_impl(server_name, key_ids) + self.key_downloads[server_name] = download + + @download.addBoth + def callback(ret): + del self.key_downloads[server_name] + return ret + + r = yield create_observer(download) + defer.returnValue(r) - # Check the response. - x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, tls_certificate + @defer.inlineCallbacks + def _get_server_verify_key_impl(self, server_name, key_ids): + # Try to fetch the key from the remote server. 
+ + limiter = yield get_retry_limiter( + server_name, + self.clock, + self.store, + ) + + with limiter: + (response, tls_certificate) = yield fetch_server_key( + server_name, self.hs.tls_context_factory ) - if ("signatures" not in response - or server_name not in response["signatures"]): - raise ValueError("Key response not signed by remote server") - - if "tls_certificate" not in response: - raise ValueError("Key response missing TLS certificate") - - tls_certificate_b64 = response["tls_certificate"] - - if encode_base64(x509_certificate_bytes) != tls_certificate_b64: - raise ValueError("TLS certificate doesn't match") - - verify_keys = {} - for key_id, key_base64 in response["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key - - for key_id in response["signatures"][server_name]: - if key_id not in response["verify_keys"]: - raise ValueError( - "Key response must include verification keys for all" - " signatures" - ) - if key_id in verify_keys: - verify_signed_json( - response, - server_name, - verify_keys[key_id] - ) - - # Cache the result in the datastore. - - time_now_ms = self.clock.time_msec() - - yield self.store.store_server_certificate( - server_name, - server_name, - time_now_ms, - tls_certificate, - ) + # Check the response. + + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, tls_certificate + ) - for key_id, key in verify_keys.items(): - yield self.store.store_server_verify_key( - server_name, server_name, time_now_ms, key + if ("signatures" not in response + or server_name not in response["signatures"]): + raise ValueError("Key response not signed by remote server") + + if "tls_certificate" not in response: + raise ValueError("Key response missing TLS certificate") + + tls_certificate_b64 = response["tls_certificate"] + + if encode_base64(x509_certificate_bytes) != tls_certificate_b64: + raise ValueError("TLS certificate doesn't match") + + verify_keys = {} + for key_id, key_base64 in response["verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = verify_key + + for key_id in response["signatures"][server_name]: + if key_id not in response["verify_keys"]: + raise ValueError( + "Key response must include verification keys for all" + " signatures" + ) + if key_id in verify_keys: + verify_signed_json( + response, + server_name, + verify_keys[key_id] ) - for key_id in key_ids: - if key_id in verify_keys: - defer.returnValue(verify_keys[key_id]) - return + # Cache the result in the datastore. 
- raise ValueError("No verification key found for given key ids") + time_now_ms = self.clock.time_msec() - download = self.key_downloads.get(server_name) + yield self.store.store_server_certificate( + server_name, + server_name, + time_now_ms, + tls_certificate, + ) - if download is None: - download = fetch_keys() - self.key_downloads[server_name] = download + for key_id, key in verify_keys.items(): + yield self.store.store_server_verify_key( + server_name, server_name, time_now_ms, key + ) - @download.addBoth - def callback(ret): - del self.key_downloads[server_name] - return ret + for key_id in key_ids: + if key_id in verify_keys: + defer.returnValue(verify_keys[key_id]) + return - r = yield create_observer(download) - defer.returnValue(r) + raise ValueError("No verification key found for given key ids") \ No newline at end of file -- cgit 1.4.1 From 2c70849dc32a52157217d75298c99c4cfccce639 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 14:38:29 +0100 Subject: Fix newlines --- synapse/crypto/keyring.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) (limited to 'synapse') diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 14f8f536e4..2b4faee4c1 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -115,7 +115,6 @@ class Keyring(object): r = yield create_observer(download) defer.returnValue(r) - @defer.inlineCallbacks def _get_server_verify_key_impl(self, server_name, key_ids): # Try to fetch the key from the remote server. @@ -190,4 +189,4 @@ class Keyring(object): defer.returnValue(verify_keys[key_id]) return - raise ValueError("No verification key found for given key ids") \ No newline at end of file + raise ValueError("No verification key found for given key ids") -- cgit 1.4.1
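
The three keyring commits above, together with the base_resource.py change, all build on the same pattern: keep at most one in-flight fetch per server_name in a dict, give every concurrent caller its own observer deferred so no caller steals the result from the others, and use addBoth to drop the cache entry once the fetch finishes. What follows is a minimal, self-contained sketch of that pattern, not Synapse code: KeyDownloader, get_keys and slow_fetch are illustrative names invented here, while create_observer mirrors the helper these diffs import from synapse.util.async.

from twisted.internet import defer, reactor, task


def create_observer(deferred):
    """Return a new deferred that fires with the same result as `deferred`
    without consuming it, so several callers can wait on one fetch."""
    d = defer.Deferred()

    def callback(r):
        d.callback(r)
        return r

    def errback(f):
        d.errback(f)
        return f

    deferred.addCallbacks(callback, errback)
    return d


class KeyDownloader(object):
    """Illustrative stand-in (not Synapse code): deduplicates concurrent
    fetches for the same server name."""

    def __init__(self, fetch_func):
        self._fetch_func = fetch_func
        self._in_flight = {}  # server_name -> deferred for the live fetch

    @defer.inlineCallbacks
    def get_keys(self, server_name):
        download = self._in_flight.get(server_name)

        if download is None:
            # First caller: start the real fetch and remember its deferred.
            download = self._fetch_func(server_name)
            self._in_flight[server_name] = download

            @download.addBoth
            def remove(ret):
                # Drop the cache entry whether the fetch succeeded or failed,
                # so a failure is retried on the next request.
                del self._in_flight[server_name]
                return ret

        # Every caller, including the first, waits on its own observer.
        result = yield create_observer(download)
        defer.returnValue(result)


def slow_fetch(server_name):
    # Stand-in for the real network round trip: resolves after 100ms.
    return task.deferLater(reactor, 0.1, lambda: {"server": server_name})


if __name__ == "__main__":
    downloader = KeyDownloader(slow_fetch)

    d1 = downloader.get_keys("example.org")
    d2 = downloader.get_keys("example.org")  # reuses the in-flight fetch

    def report(results):
        print(results)  # both callers receive the same fetched result
        reactor.stop()

    defer.gatherResults([d1, d2]).addCallback(report)
    reactor.run()

Keying the cache on server_name means a burst of requests for one remote server triggers a single network fetch, and because the addBoth cleanup runs on error as well as success, a failed fetch is not cached: the next caller simply starts a fresh download. The media change in base_resource.py applies the same idea to self.downloads, returning create_observer(download) rather than handing every caller the shared deferred itself.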