path: root/tests/rest/media/test_media_retention.py
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
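
"""Tests for Synapse's `media_retention` configuration options, which purge local
media and cached remote media that have not been accessed recently."""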

import io
from typing import Iterable, Optional, Tuple

from twisted.test.proto_helpers import MemoryReactor

from synapse.rest import admin
from synapse.rest.client import login, register, room
from synapse.server import HomeServer
from synapse.types import UserID
from synapse.util import Clock

from tests import unittest
from tests.unittest import override_config
from tests.utils import MockClock


class MediaRetentionTestCase(unittest.HomeserverTestCase):

    ONE_DAY_IN_MS = 24 * 60 * 60 * 1000
    THIRTY_DAYS_IN_MS = 30 * ONE_DAY_IN_MS

    servlets = [
        room.register_servlets,
        login.register_servlets,
        register.register_servlets,
        admin.register_servlets_for_client_rest_resource,
    ]

    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
        # We need to be able to test advancing time in the homeserver, so we
        # replace the test homeserver's default clock with a MockClock, which
        # supports advancing time.
        return self.setup_test_homeserver(clock=MockClock())

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.remote_server_name = "remote.homeserver"
        self.store = hs.get_datastores().main

        # Create a user to upload media with
        test_user_id = self.register_user("alice", "password")

        # Inject media (recently accessed, not recently accessed, never accessed)
        # into both the local media store and the remote media cache
        media_repository = hs.get_media_repository()
        test_media_content = b"example string"

        def _create_media_and_set_last_accessed(
            last_accessed_ms: Optional[int],
        ) -> str:
            # "Upload" some media to the local media store
            mxc_uri = self.get_success(
                media_repository.create_content(
                    media_type="text/plain",
                    upload_name=None,
                    content=io.BytesIO(test_media_content),
                    content_length=len(test_media_content),
                    auth_user=UserID.from_string(test_user_id),
                )
            )

            media_id = mxc_uri.split("/")[-1]

            # Set the last accessed time for this media
            if last_accessed_ms is not None:
                self.get_success(
                    self.store.update_cached_last_access_time(
                        local_media=(media_id,),
                        remote_media=(),
                        time_ms=last_accessed_ms,
                    )
                )

            return media_id

        def _cache_remote_media_and_set_last_accessed(
            media_id: str, last_accessed_ms: Optional[int]
        ) -> str:
            # Pretend to cache some remote media
            self.get_success(
                self.store.store_cached_remote_media(
                    origin=self.remote_server_name,
                    media_id=media_id,
                    media_type="text/plain",
                    media_length=1,
                    time_now_ms=clock.time_msec(),
                    upload_name="testfile.txt",
                    filesystem_id="abcdefg12345",
                )
            )

            # Set the last accessed time for this media
            if last_accessed_ms is not None:
                self.get_success(
                    hs.get_datastores().main.update_cached_last_access_time(
                        local_media=(),
                        remote_media=((self.remote_server_name, media_id),),
                        time_ms=last_accessed_ms,
                    )
                )

            return media_id

        # Start with the local media store
        self.local_recently_accessed_media = _create_media_and_set_last_accessed(
            self.THIRTY_DAYS_IN_MS
        )
        self.local_not_recently_accessed_media = _create_media_and_set_last_accessed(
            self.ONE_DAY_IN_MS
        )
        self.local_never_accessed_media = _create_media_and_set_last_accessed(None)

        # And now the remote media store
        self.remote_recently_accessed_media = _cache_remote_media_and_set_last_accessed(
            "a", self.THIRTY_DAYS_IN_MS
        )
        self.remote_not_recently_accessed_media = (
            _cache_remote_media_and_set_last_accessed("b", self.ONE_DAY_IN_MS)
        )
        # Remote media will always have a "last accessed" time, as it would not have
        # been fetched from the remote homeserver unless a user had requested it.
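
    # The dictionaries passed to `override_config` in the tests below mirror the
    # `media_retention` section an admin would place in homeserver.yaml, e.g.
    # (the 30d lifetimes are illustrative):
    #
    #   media_retention:
    #     local_media_lifetime: 30d
    #     remote_media_lifetime: 30d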

    @override_config(
        {
            "media_retention": {
                # Enable retention for local media
                "local_media_lifetime": "30d"
                # Cached remote media should not be purged
            }
        }
    )
    def test_local_media_retention(self) -> None:
        """
        Tests that local media that has not been accessed recently is purged, while
        cached remote media is unaffected.
        """
        # Advance 31 days (in seconds)
        self.reactor.advance(31 * 24 * 60 * 60)

        # Check that media has been correctly purged.
        # Local media accessed <30 days ago should still exist.
        # Remote media should be unaffected.
        self._assert_if_mxc_uris_purged(
            purged=[
                (
                    self.hs.config.server.server_name,
                    self.local_not_recently_accessed_media,
                ),
                (self.hs.config.server.server_name, self.local_never_accessed_media),
            ],
            not_purged=[
                (self.hs.config.server.server_name, self.local_recently_accessed_media),
                (self.remote_server_name, self.remote_recently_accessed_media),
                (self.remote_server_name, self.remote_not_recently_accessed_media),
            ],
        )

    @override_config(
        {
            "media_retention": {
                # Enable retention for cached remote media
                "remote_media_lifetime": "30d"
                # Local media should not be purged
            }
        }
    )
    def test_remote_media_cache_retention(self) -> None:
        """
        Tests that entries in the remote media cache that have not been accessed
        recently are purged, while local media is unaffected.
        """
        # Advance 31 days (in seconds)
        self.reactor.advance(31 * 24 * 60 * 60)

        # Check that media has been correctly purged.
        # Local media should be unaffected.
        # Remote media accessed <30 days ago should still exist.
        self._assert_if_mxc_uris_purged(
            purged=[
                (self.remote_server_name, self.remote_not_recently_accessed_media),
            ],
            not_purged=[
                (self.remote_server_name, self.remote_recently_accessed_media),
                (self.hs.config.server.server_name, self.local_recently_accessed_media),
                (
                    self.hs.config.server.server_name,
                    self.local_not_recently_accessed_media,
                ),
                (self.hs.config.server.server_name, self.local_never_accessed_media),
            ],
        )

    def _assert_if_mxc_uris_purged(
        self, purged: Iterable[Tuple[str, str]], not_purged: Iterable[Tuple[str, str]]
    ) -> None:
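        """Assert that the given media have, or have not, been purged as expected.

        Args:
            purged: (server_name, media_id) pairs that are expected to have been
                purged from the media store.
            not_purged: (server_name, media_id) pairs that are expected to still
                exist in the media store.
        """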
        def _assert_mxc_uri_purge_state(
            server_name: str, media_id: str, expect_purged: bool
        ) -> None:
            """Given an MXC URI, assert whether it has been purged or not."""
            if server_name == self.hs.config.server.server_name:
                found_media_dict = self.get_success(
                    self.store.get_local_media(media_id)
                )
            else:
                found_media_dict = self.get_success(
                    self.store.get_cached_remote_media(server_name, media_id)
                )

            mxc_uri = f"mxc://{server_name}/{media_id}"

            if expect_purged:
                self.assertIsNone(
                    found_media_dict, msg=f"{mxc_uri} unexpectedly not purged"
                )
            else:
                self.assertIsNotNone(
                    found_media_dict,
                    msg=f"{mxc_uri} unexpectedly purged",
                )

        # Assert that the given MXC URIs have either been correctly purged or not.
        for server_name, media_id in purged:
            _assert_mxc_uri_purge_state(server_name, media_id, expect_purged=True)
        for server_name, media_id in not_purged:
            _assert_mxc_uri_purge_state(server_name, media_id, expect_purged=False)