Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

91

92

93

94

95

96

97

98

99

100

101

102

103

104

105

106

107

108

109

110

111

112

113

114

115

116

117

118

119

120

121

122

123

124

125

126

127

128

129

130

131

132

133

134

135

136

137

138

139

140

141

142

143

144

145

146

147

148

149

150

151

152

153

154

155

156

157

158

159

160

161

162

163

164

165

166

167

168

169

170

171

172

173

174

175

176

177

# Copyright (C) 2018 Bloomberg LP 

# 

# Licensed under the Apache License, Version 2.0 (the "License"); 

# you may not use this file except in compliance with the License. 

# You may obtain a copy of the License at 

# 

# <http://www.apache.org/licenses/LICENSE-2.0> 

# 

# Unless required by applicable law or agreed to in writing, software 

# distributed under the License is distributed on an "AS IS" BASIS, 

# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 

# See the License for the specific language governing permissions and 

# limitations under the License. 

 

 

""" 

Reference Cache 

================== 

 

Implements an in-memory reference cache. 

 

For a given key, it keeps the digest of the associated reference message in an
in-memory LRU map, while the message itself lives in the configured storage
backend; least recently used entries are evicted once the maximum size is reached.

""" 

 

import collections 

import logging 

 

from buildgrid._exceptions import NotFoundError 

from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 

 

 

class ReferenceCache:
    """In-memory LRU cache for digests of messages held in a storage backend.

    Maps arbitrary keys to message digests. The digests are kept in an
    ordered in-memory map whose order doubles as LRU order, while the
    referenced messages themselves live in the configured storage backend.
    Once ``max_cached_refs`` entries are reached, the least recently used
    entries are evicted to make room.
    """

    def __init__(self, storage, max_cached_refs, allow_updates=True):
        """Initialises a new ReferenceCache instance.

        Args:
            storage (StorageABC): storage backend instance to be used.
            max_cached_refs (int): maximum number of entries to be stored.
            allow_updates (bool): allow the client to write to storage.
        """
        self.__logger = logging.getLogger(__name__)

        self._instance_name = None

        self.__storage = storage

        self._allow_updates = allow_updates
        self._max_cached_refs = max_cached_refs
        # Ordered key -> digest map; insertion/access order is the LRU
        # order, with the oldest (least recently used) entries in front.
        self._digest_map = collections.OrderedDict()

    # --- Public API ---

    @property
    def instance_name(self):
        return self._instance_name

    def register_instance_with_server(self, instance_name, server):
        """Names and registers the refs instance with a given server.

        Args:
            instance_name (str): name to register the instance under.
            server: server object exposing ``add_reference_storage_instance``.

        Raises:
            AssertionError: if the instance has already been registered.
        """
        if self._instance_name is None:
            server.add_reference_storage_instance(self, instance_name)

            self._instance_name = instance_name

        else:
            raise AssertionError("Instance already registered")

    @property
    def allow_updates(self):
        return self._allow_updates

    def get_digest_reference(self, key):
        """Retrieves the cached Digest for the given key.

        Args:
            key: key for Digest to query.

        Returns:
            The cached Digest matching the given key or raises
            NotFoundError.
        """
        if key in self._digest_map:
            reference_result = self.__storage.get_message(self._digest_map[key],
                                                          remote_execution_pb2.Digest)

            if reference_result is not None:
                # Fix: refresh this entry's LRU position on a cache hit,
                # consistent with get_action_reference(), so frequently
                # used digests are not the first ones evicted.
                self._digest_map.move_to_end(key)
                return reference_result

            # The backend no longer holds the message: drop the stale entry.
            del self._digest_map[key]

        raise NotFoundError("Key not found: {}".format(key))

    def get_action_reference(self, key):
        """Retrieves the cached ActionResult for the given Action digest.

        Args:
            key: key for ActionResult to query.

        Returns:
            The cached ActionResult matching the given key or raises
            NotFoundError.
        """
        if key in self._digest_map:
            reference_result = self.__storage.get_message(self._digest_map[key],
                                                          remote_execution_pb2.ActionResult)

            if reference_result is not None:
                # Only serve the result if every blob it references is
                # still present in CAS; otherwise treat it as stale.
                if self._action_result_blobs_still_exist(reference_result):
                    self._digest_map.move_to_end(key)
                    return reference_result

            # Stale or missing: drop the entry before reporting not found.
            del self._digest_map[key]

        raise NotFoundError("Key not found: {}".format(key))

    def update_reference(self, key, result):
        """Stores the result in cache for the given key.

        If the cache size limit has been reached, the oldest cache entries will
        be dropped before insertion so that the cache size never exceeds the
        maximum numbers of entries allowed.

        Args:
            key: key to store result.
            result (Digest): result digest to store.

        Raises:
            NotImplementedError: if the cache was created with
                ``allow_updates=False``.
        """
        if not self._allow_updates:
            raise NotImplementedError("Updating cache not allowed")

        # A zero-sized cache silently ignores writes.
        if self._max_cached_refs == 0:
            return

        # Evict least recently used entries to make room for the new one.
        while len(self._digest_map) >= self._max_cached_refs:
            self._digest_map.popitem(last=False)

        result_digest = self.__storage.put_message(result)
        self._digest_map[key] = result_digest

    # --- Private API ---

    def _action_result_blobs_still_exist(self, action_result):
        """Checks CAS for ActionResult output blobs existence.

        Args:
            action_result (ActionResult): ActionResult to search referenced
                output blobs for.

        Returns:
            True if all referenced blobs are present in CAS, False otherwise.
        """
        blobs_needed = []

        for output_file in action_result.output_files:
            blobs_needed.append(output_file.digest)

        for output_directory in action_result.output_directories:
            blobs_needed.append(output_directory.tree_digest)
            tree = self.__storage.get_message(output_directory.tree_digest,
                                              remote_execution_pb2.Tree)
            if tree is None:
                return False

            for file_node in tree.root.files:
                blobs_needed.append(file_node.digest)

            for child in tree.children:
                for file_node in child.files:
                    blobs_needed.append(file_node.digest)

        # stdout/stderr may be inlined (raw); only digest-referenced
        # forms need to exist in CAS.
        if action_result.stdout_digest.hash and not action_result.stdout_raw:
            blobs_needed.append(action_result.stdout_digest)

        if action_result.stderr_digest.hash and not action_result.stderr_raw:
            blobs_needed.append(action_result.stderr_digest)

        missing = self.__storage.missing_blobs(blobs_needed)
        return len(missing) == 0