# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import hashlib
import math
from dataclasses import dataclass
from typing import Dict, List, Optional


@dataclass
class BufferEntry:
    """A class to hold the buffer entries for serialization.

    Attributes:
        buffer: The buffer bytes.
        alignment: The alignment of the buffer.
    """

    buffer: bytes
    alignment: int


@dataclass
class NamedDataStoreOutput:
    """
    Holds named data for serialization.

    Attributes:
        buffers: A list of unique buffer entries.
        pte_data: Contains data that is stored inside the PTE file. A mapping from
            {key: buffer_index}.
        external_data: Contains data that is stored external to the PTE. A mapping
            from {filename: {key: buffer_index}}.
    """

    buffers: List[BufferEntry]
    pte_data: Dict[str, int]
    external_data: Dict[str, Dict[str, int]]


class NamedDataStore:
    """
    NamedDataStore manages the data that delegates want to share. Backends add
    bytes to the store under a unique key. These bytes can be retrieved at
    runtime using the same key with the NamedDataMap.

    Note:
    - Keys are unique in the data store, regardless of whether they are stored
      in the PTE or externally.
    - Multiple keys can point to the same buffer entry.
    - The same data can be added multiple times and all keys will point to one
      buffer. If a duplicate blob is added with a different alignment, the
      lcm of the current and new alignment is taken for that blob.
    """

    # List of unique blobs.
    buffers: List[BufferEntry]
    # Named data stored inside the PTE file. Map of {key: buffer_index}.
    pte_data: Dict[str, int]
    # Named data stored outside of the PTE file.
    # Map of {filename: {key: buffer_index}}.
    external_data: Dict[str, Dict[str, int]]
    # Cache of the data hash for deduplication.
    # Use a hash instead of the data as a key because a sha256 collision is
    # unlikely, and the data may be large.
    data_hash_to_buffer_idx: Dict[bytes, int]
    # Cache of the key to buffer idx to ensure uniqueness.
    # If a key is added multiple times, check the buffer idx to ensure that the
    # data is identical too.
    key_to_buffer_idx: Dict[str, int]

    def __init__(self) -> None:
        """
        Initializes a new NamedDataStore.
        """
        self.buffers = []
        self.pte_data = {}
        self.external_data = {}
        self.data_hash_to_buffer_idx = {}
        self.key_to_buffer_idx = {}

    def _add_named_data_to_map(
        self,
        key: str,
        data: bytes,
        alignment: int,
        local_key_to_buffer_idx: Dict[str, int],
    ) -> None:
        """
        Add data to a map and update the alignment. Ensure that the key-data
        pair is unique.
        - If the key exists, the data must be identical.
        - If multiple unique keys exist for the same data, those keys should
          point to the same buffer.

        Args:
            key (str): key associated with the data.
            data (bytes): Bytes being requested to be serialized.
            alignment (int): alignment for bytes to be serialized with.
            local_key_to_buffer_idx (Dict[str, int]): map to add the data to.

        Raises:
            ValueError: when the key exists in the store, and corresponding data
                is different.
        """
        # Get data hash.
        hashed = hashlib.sha256(data).digest()

        # Check if the key exists.
        buffer_idx = self.key_to_buffer_idx.get(key, -1)
        if buffer_idx != -1:
            # If the key exists, the corresponding data must be identical.
            if self.data_hash_to_buffer_idx.get(hashed, -1) != buffer_idx:
                raise ValueError(
                    f"Duplicate key {key} with different data. "
                    f"Existing data: {self.buffers[buffer_idx].buffer}. "
                    f"New data: {data}."
                )
            self.buffers[buffer_idx].alignment = math.lcm(
                self.buffers[buffer_idx].alignment, alignment
            )
        else:
            # Key doesn't exist; check if the data exists.
            buffer_idx = self.data_hash_to_buffer_idx.get(hashed, -1)
            if buffer_idx != -1:
                # The data exists; update the alignment.
                self.buffers[buffer_idx].alignment = math.lcm(
                    self.buffers[buffer_idx].alignment, alignment
                )
            else:
                # The data doesn't exist; add it to the data store.
                buffer_idx = len(self.buffers)
                self.buffers.append(BufferEntry(data, alignment))
                self.data_hash_to_buffer_idx[hashed] = buffer_idx

        # Add key to the map and the key cache.
        local_key_to_buffer_idx[key] = buffer_idx
        self.key_to_buffer_idx[key] = buffer_idx

    def add_named_data(
        self,
        key: str,
        data: bytes,
        alignment: Optional[int] = 1,
        external_tag: Optional[str] = None,
    ) -> None:
        """
        Adds a named blob to the NamedDataStore.

        Args:
            key (str): key associated with the data.
            data (bytes): Bytes being requested to be serialized.
            alignment (Optional[int]): alignment for bytes to be serialized with.
                Defaults to 1 if not specified.
            external_tag (Optional[str]): the external filename that this data is
                saved to.

        Raises:
            ValueError: when the key exists in the store, and corresponding data
                is different.
        """
        # Set default alignment.
        if alignment is None:
            alignment = 1
        if alignment <= 0:
            raise ValueError(f"Alignment must be greater than 0, received {alignment}.")

        if external_tag is None:
            self._add_named_data_to_map(key, data, alignment, self.pte_data)
        else:
            self._add_named_data_to_map(
                key, data, alignment, self.external_data.setdefault(external_tag, {})
            )

    def get_named_data_store_output(self) -> NamedDataStoreOutput:
        # Clean up empty maps inside self.external_data.
        self.external_data = {k: v for k, v in self.external_data.items() if len(v) > 0}
        return NamedDataStoreOutput(self.buffers, self.pte_data, self.external_data)

    def merge_named_data_store(self, other: NamedDataStoreOutput) -> None:
        """
        Merge another NamedDataStore into this one.

        Args:
            other (NamedDataStoreOutput): the serialized output of the other
                NamedDataStore to merge.

        Raises:
            ValueError: when a key exists in both stores and the corresponding
                data differs between them.
        """
        # Merge the pte_data.
        for key, buffer_idx in other.pte_data.items():
            self.add_named_data(
                key,
                other.buffers[buffer_idx].buffer,
                other.buffers[buffer_idx].alignment,
            )

        # Merge the external_data.
        for filename, key_to_buffer_idx in other.external_data.items():
            for key, buffer_idx in key_to_buffer_idx.items():
                self.add_named_data(
                    key,
                    other.buffers[buffer_idx].buffer,
                    other.buffers[buffer_idx].alignment,
                    external_tag=filename,
                )
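

# A minimal usage sketch of the behavior documented above: identical bytes added
# under different keys are deduplicated into a single BufferEntry whose alignment
# is promoted to the lcm of the requested alignments, external_tag routes entries
# into external_data, and merge_named_data_store folds another store's output in
# key by key. The key names, byte values, and the "model.ptd" tag are arbitrary
# illustrative values. Runs only when this file is executed directly.
if __name__ == "__main__":
    store = NamedDataStore()

    # Two keys, same bytes: one buffer entry, alignment promoted to lcm(4, 16) = 16.
    store.add_named_data("linear.weight", b"\x01\x02\x03\x04", alignment=4)
    store.add_named_data("linear.weight_copy", b"\x01\x02\x03\x04", alignment=16)

    # Tagged data is routed to external_data under its filename.
    store.add_named_data("linear.bias", b"\x05\x06", alignment=1, external_tag="model.ptd")

    out = store.get_named_data_store_output()
    assert len(out.buffers) == 2
    assert out.buffers[0].alignment == 16
    assert out.pte_data == {"linear.weight": 0, "linear.weight_copy": 0}
    assert out.external_data == {"model.ptd": {"linear.bias": 1}}

    # Merging another store's output re-adds its blobs through add_named_data,
    # so deduplication and key-uniqueness checks still apply.
    other = NamedDataStore()
    other.add_named_data("conv.weight", b"\x07\x08", alignment=8)
    store.merge_named_data_store(other.get_named_data_store_output())
    assert "conv.weight" in store.pte_data

    # Re-adding an existing key with different bytes raises ValueError.
    try:
        store.add_named_data("linear.weight", b"\xff")
    except ValueError:
        pass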