Skip to content

Commit b40d59b

Browse files
authored
clarify store in cookbook (#154)
1 parent 9666866 commit b40d59b

File tree

1 file changed

+14
-17
lines changed

1 file changed

+14
-17
lines changed

docs/cookbook.md

Lines changed: 14 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,7 @@ Use the [`obstore.list`][] method.
 ```py
 import obstore as obs

-# Create a Store
-store = get_object_store()
+store = ... # store of your choice

 # Recursively list all files below the 'data' path.
 # 1. On AWS S3 this would be the 'data/' prefix
@@ -35,8 +34,7 @@ This Arrow integration requires the [`arro3-core` dependency](https://kylebarron...)
 ```py
 import obstore as obs

-# Create a Store
-store = get_object_store()
+store = ... # store of your choice

 # Get a stream of Arrow RecordBatches of metadata
 list_stream = obs.list(store, prefix="data", return_arrow=True)
@@ -80,8 +78,7 @@ Use the [`obstore.get`][] function to fetch data bytes from remote storage or files...
 ```py
 import obstore as obs

-# Create a Store
-store = get_object_store()
+store = ... # store of your choice

 # Retrieve a specific file
 path = "data/file01.parquet"
@@ -131,7 +128,7 @@ Use the [`obstore.put`][] function to atomically write data. `obstore.put` will...
 ```py
 import obstore as obs

-store = get_object_store()
+store = ... # store of your choice
 path = "data/file1"
 content = b"hello"
 obs.put(store, path, content)
@@ -143,7 +140,7 @@ You can also upload local files:
 from pathlib import Path
 import obstore as obs

-store = get_object_store()
+store = ... # store of your choice
 path = "data/file1"
 content = Path("path/to/local/file")
 obs.put(store, path, content)
@@ -154,7 +151,7 @@ Or file-like objects:
 ```py
 import obstore as obs

-store = get_object_store()
+store = ... # store of your choice
 path = "data/file1"
 with open("path/to/local/file", "rb") as content:
     obs.put(store, path, content)
@@ -169,7 +166,7 @@ def bytes_iter():
     for i in range(5):
         yield b"foo"

-store = get_object_store()
+store = ... # store of your choice
 path = "data/file1"
 content = bytes_iter()
 obs.put(store, path, content)
@@ -184,7 +181,7 @@ async def bytes_stream():
     for i in range(5):
         yield b"foo"

-store = get_object_store()
+store = ... # store of your choice
 path = "data/file1"
 content = bytes_stream()
 obs.put(store, path, content)
@@ -201,8 +198,8 @@ Download the file, collect its bytes in memory, then upload it. Note that this w...
 ```py
 import obstore as obs

-store1 = get_object_store()
-store2 = get_object_store()
+store1 = ... # store of your choice
+store2 = ... # store of your choice

 path1 = "data/file1"
 path2 = "data/file2"
@@ -219,8 +216,8 @@ First download the file to disk, then upload it.
 from pathlib import Path
 import obstore as obs

-store1 = get_object_store()
-store2 = get_object_store()
+store1 = ... # store of your choice
+store2 = ... # store of your choice

 path1 = "data/file1"
 path2 = "data/file2"
@@ -245,8 +242,8 @@ Using the async API is currently required to use streaming copies.
 ```py
 import obstore as obs

-store1 = get_object_store()
-store2 = get_object_store()
+store1 = ... # store of your choice
+store2 = ... # store of your choice

 path1 = "data/file1"
 path2 = "data/file2"

0 commit comments

Comments
 (0)