Coverage for /builds/BuildGrid/buildgrid/buildgrid/server/referencestorage/storage.py: 91.43%

35 statements  

coverage.py v7.2.7, created at 2023-06-05 15:37 +0000

# Copyright (C) 2018 Bloomberg LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# <http://www.apache.org/licenses/LICENSE-2.0>
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


"""
Reference Cache
==================

Implements an in-memory reference cache.

For a given key, it stores the digest of the associated result message; the
message itself is kept in the configured CAS storage backend.
"""

import collections

from buildgrid._exceptions import NotFoundError
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from buildgrid._protos.buildstream.v2 import buildstream_pb2
from buildgrid.server.cas.storage.storage_abc import StorageABC
from buildgrid.server.servicer import Instance


class ReferenceCache(Instance):
    SERVICE_NAME = buildstream_pb2.DESCRIPTOR.services_by_name["ReferenceStorage"].full_name

    def __init__(self, storage: StorageABC, max_cached_refs: int, allow_updates: bool = True) -> None:
        """Initialises a new ReferenceCache instance.

        Args:
            storage (StorageABC): storage backend instance to be used.
            max_cached_refs (int): maximum number of entries to be stored.
            allow_updates (bool): allow the client to write to storage.
        """
        self.__storage = storage

        self._allow_updates = allow_updates
        self._max_cached_refs = max_cached_refs
        # Insertion-ordered so that update_reference() can evict the oldest keys first.
        self._digest_map: "collections.OrderedDict[str, remote_execution_pb2.Digest]" = collections.OrderedDict()

    # --- Public API ---

    def setup_grpc(self) -> None:
        self.__storage.setup_grpc()

    @property
    def allow_updates(self) -> bool:
        return self._allow_updates

    def get_digest_reference(self, key: str) -> remote_execution_pb2.Digest:
        """Retrieves the cached Digest for the given key.

        Args:
            key: key for Digest to query.

        Returns:
            The cached Digest matching the given key.

        Raises:
            NotFoundError: if no Digest is cached for the given key.
        """
        if key in self._digest_map:
            reference_result = self.__storage.get_message(self._digest_map[key], remote_execution_pb2.Digest)

            if reference_result is not None:
                return reference_result

            # The underlying message is gone from storage: drop the stale entry.
            del self._digest_map[key]

        raise NotFoundError(f"Key not found: {key}")

    def update_reference(self, key: str, result: remote_execution_pb2.Digest) -> None:
        """Stores the result in cache for the given key.

        If the cache size limit has been reached, the oldest cache entries will
        be dropped before insertion so that the cache size never exceeds the
        maximum number of entries allowed.

        Args:
            key: key to store result.
            result (Digest): result digest to store.
        """
        if not self._allow_updates:
            raise NotImplementedError("Updating cache not allowed")

        # A limit of zero disables caching entirely.
        if self._max_cached_refs == 0:
            return

        # Evict the oldest entries (insertion order) until there is room for the new one.
        while len(self._digest_map) >= self._max_cached_refs:
            self._digest_map.popitem(last=False)

        result_digest = self.__storage.put_message(result)
        self._digest_map[key] = result_digest
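

# Minimal usage sketch (illustrative only): stores a Digest under a key, reads
# it back, and shows the oldest key being evicted once max_cached_refs is
# exceeded. It assumes BuildGrid's in-memory LRUMemoryCache backend is
# importable from buildgrid.server.cas.storage.lru_memory_cache; any other
# StorageABC implementation could be substituted.
if __name__ == "__main__":
    from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache

    cache = ReferenceCache(LRUMemoryCache(1024 * 1024), max_cached_refs=2)
    cache.setup_grpc()

    digest = remote_execution_pb2.Digest(hash="a" * 64, size_bytes=42)
    cache.update_reference("refs/heads/main", digest)
    assert cache.get_digest_reference("refs/heads/main") == digest

    # Exceeding max_cached_refs drops the oldest key first.
    cache.update_reference("second", remote_execution_pb2.Digest(hash="b" * 64, size_bytes=1))
    cache.update_reference("third", remote_execution_pb2.Digest(hash="c" * 64, size_bytes=2))
    try:
        cache.get_digest_reference("refs/heads/main")
    except NotFoundError:
        print("oldest reference evicted, as expected")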