@@ -7,8 +7,7 @@ Use the [`obstore.list`][] method.
``` py
import obstore as obs

- # Create a Store
- store = get_object_store()
+ store = ... # store of your choice

# Recursively list all files below the 'data' path.
# 1. On AWS S3 this would be the 'data/' prefix
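
For reference, a minimal runnable sketch of the pattern this hunk documents. Everything beyond the diff is an assumption: `MemoryStore` from `obstore.store` stands in for the "store of your choice" placeholder, and iterating the list stream is assumed to yield chunks of metadata dicts.

```py
import obstore as obs
from obstore.store import MemoryStore

store = MemoryStore()  # assumption: any concrete store could stand in here
obs.put(store, "data/file1.parquet", b"...")

# obs.list returns a stream; each iteration yields a chunk of metadata dicts
for chunk in obs.list(store, prefix="data"):
    for meta in chunk:
        print(meta["path"], meta["size"])
```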
@@ -35,8 +34,7 @@ This Arrow integration requires the [`arro3-core` dependency](https://kylebarron
``` py
import obstore as obs

- # Create a Store
- store = get_object_store()
+ store = ... # store of your choice

# Get a stream of Arrow RecordBatches of metadata
list_stream = obs.list(store, prefix="data", return_arrow=True)
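
A hedged sketch of consuming that Arrow stream, again assuming `MemoryStore` as the placeholder store and assuming the arro3 `RecordBatch` exposes a `num_rows` property:

```py
import obstore as obs
from obstore.store import MemoryStore

store = MemoryStore()
obs.put(store, "data/file1", b"hello")

# With return_arrow=True, each chunk arrives as an arro3 RecordBatch
for record_batch in obs.list(store, prefix="data", return_arrow=True):
    print(record_batch.num_rows)  # assumption: pyarrow-style num_rows attribute
```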
@@ -80,8 +78,7 @@ Use the [`obstore.get`][] function to fetch data bytes from remote storage or fi
``` py
import obstore as obs

- # Create a Store
- store = get_object_store()
+ store = ... # store of your choice

# Retrieve a specific file
path = "data/file01.parquet"
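
To show where this snippet leads, a minimal sketch of completing the fetch with `obs.get` and collecting the body via `GetResult.bytes()` (the store is assumed to be a `MemoryStore` seeded with a dummy object):

```py
import obstore as obs
from obstore.store import MemoryStore

store = MemoryStore()
obs.put(store, "data/file01.parquet", b"...")

result = obs.get(store, "data/file01.parquet")
data = result.bytes()  # collect the full object body into memory
print(bytes(data))
```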
@@ -131,7 +128,7 @@ Use the [`obstore.put`][] function to atomically write data. `obstore.put` will
``` py
import obstore as obs

- store = get_object_store()
+ store = ... # store of your choice
path = "data/file1"
content = b"hello"
obs.put(store, path, content)
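
A quick round-trip check of the same call, assuming `MemoryStore` stands in for the placeholder and that the object returned by `.bytes()` converts cleanly with `bytes()`:

```py
import obstore as obs
from obstore.store import MemoryStore

store = MemoryStore()
obs.put(store, "data/file1", b"hello")

# Read the object back to confirm the write landed
assert bytes(obs.get(store, "data/file1").bytes()) == b"hello"
```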
@@ -143,7 +140,7 @@ You can also upload local files:
from pathlib import Path
import obstore as obs

- store = get_object_store()
+ store = ... # store of your choice
path = "data/file1"
content = Path("path/to/local/file")
obs.put(store, path, content)
@@ -154,7 +151,7 @@ Or file-like objects:
``` py
import obstore as obs

- store = get_object_store()
+ store = ... # store of your choice
path = "data/file1"
with open("path/to/local/file", "rb") as content:
    obs.put(store, path, content)
@@ -169,7 +166,7 @@ def bytes_iter():
    for i in range(5):
        yield b"foo"

- store = get_object_store()
+ store = ... # store of your choice
path = "data/file1"
content = bytes_iter()
obs.put(store, path, content)
@@ -184,7 +181,7 @@ async def bytes_stream():
    for i in range(5):
        yield b"foo"

- store = get_object_store()
+ store = ... # store of your choice
path = "data/file1"
content = bytes_stream()
obs.put(store, path, content)
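
For the async-iterable case, a sketch driving the upload from an event loop with the async variant `obs.put_async` (a `MemoryStore` is assumed, and it is assumed `put_async` accepts an async iterable of bytes, mirroring the sync example above):

```py
import asyncio

import obstore as obs
from obstore.store import MemoryStore

async def bytes_stream():
    for i in range(5):
        yield b"foo"

async def main():
    store = MemoryStore()
    # assumption: put_async accepts an async iterable as the body
    await obs.put_async(store, "data/file1", bytes_stream())

asyncio.run(main())
```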
@@ -201,8 +198,8 @@ Download the file, collect its bytes in memory, then upload it. Note that this w
``` py
import obstore as obs

- store1 = get_object_store()
- store2 = get_object_store()
+ store1 = ... # store of your choice
+ store2 = ... # store of your choice

path1 = "data/file1"
path2 = "data/file2"
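
Completing this hunk's pattern, a sketch of the buffered copy using two `MemoryStore` instances (an assumption, as is passing the bytes-like object from `.bytes()` straight back to `obs.put`):

```py
import obstore as obs
from obstore.store import MemoryStore

store1 = MemoryStore()
store2 = MemoryStore()
obs.put(store1, "data/file1", b"hello")

# Buffer the whole object in memory, then re-upload it to the second store
data = obs.get(store1, "data/file1").bytes()
obs.put(store2, "data/file2", data)
```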
@@ -219,8 +216,8 @@ First download the file to disk, then upload it.
from pathlib import Path
import obstore as obs

- store1 = get_object_store()
- store2 = get_object_store()
+ store1 = ... # store of your choice
+ store2 = ... # store of your choice

path1 = "data/file1"
path2 = "data/file2"
@@ -245,8 +242,8 @@ Using the async API is currently required to use streaming copies.
``` py
import obstore as obs

- store1 = get_object_store()
- store2 = get_object_store()
+ store1 = ... # store of your choice
+ store2 = ... # store of your choice

path1 = "data/file1"
path2 = "data/file2"
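
And a sketch of the streaming copy this hunk refers to. It assumes `MemoryStore` placeholders and that the result of `obs.get_async` can be passed directly to `obs.put_async`, so chunks flow between stores without buffering the whole file:

```py
import asyncio

import obstore as obs
from obstore.store import MemoryStore

async def main():
    store1 = MemoryStore()
    store2 = MemoryStore()
    await obs.put_async(store1, "data/file1", b"hello")

    # assumption: the async GetResult streams chunks straight into put_async
    resp = await obs.get_async(store1, "data/file1")
    await obs.put_async(store2, "data/file2", resp)

asyncio.run(main())
```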