Skip to content

Commit edfdd0b

Browse files
Ingestion of 3D point clouds (#23)
* wip: File3d class for point clouds upload * wip: sensor fusion JSON upload * wip: improve performance by decoupling animate from render * wip: support adding intensity and device ID values * fix: original filename generation * wip * fix: add checks for empty point cloud list * cleanup Co-authored-by: Pablo <[email protected]>
1 parent 8a7ae8c commit edfdd0b

File tree

1 file changed

+112
-0
lines changed

1 file changed

+112
-0
lines changed

sdk/diffgram/file/file_3d.py

Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
from diffgram.core.core import Project
2+
import json
3+
import os
4+
import requests
5+
import io
6+
import math
7+
import uuid
8+
9+
10+
class File3D:
    """Builder for a 3D point-cloud (sensor fusion) file uploaded to Diffgram.

    Accumulate points with add_point(), then serialize them to a
    sensor-fusion JSON payload and send it in chunks with upload().
    """

    # Points accumulated so far; each entry is a dict with keys
    # x, y, z, intensity, device_id, timestamp, is_ground.
    point_list: list
    # Diffgram project client; upload() reads host, session,
    # project_string_id, default_directory, directory, handle_errors.
    client: "Project"

    def __init__(self, client, name, point_list = None):
        """
        :param client: Diffgram Project client used for the upload.
        :param name: Original filename reported to the server.
        :param point_list: Optional initial list of point dicts.
        """
        # BUGFIX: the previous default was a mutable [] shared across all
        # instances (and aliased into self.point_list). Use a None sentinel
        # so each default-constructed File3D gets its own fresh list.
        self.point_list = [] if point_list is None else point_list
        self.client = client
        self.original_filename = name

    def add_point(self,
                  x: float,
                  y: float,
                  z: float,
                  intensity = None,
                  device_id = None,
                  timestamp = None,
                  is_ground = False):
        """Append one 3D point to the point list.

        :param x: X coordinate.
        :param y: Y coordinate.
        :param z: Z coordinate.
        :param intensity: Optional normalized intensity in [0, 1].
        :param device_id: Optional identifier of the capturing device.
        :param timestamp: Optional capture timestamp.
        :param is_ground: Whether the point belongs to the ground plane.
        :return: The updated point list.
        :raises ValueError: If intensity is outside [0, 1].
        """
        if intensity is not None and (intensity > 1.0 or intensity < 0):
            # ValueError subclasses Exception, so existing callers that
            # catch Exception keep working.
            raise ValueError('Intensity point must be between 0 and 1. Value is: {}'.format(intensity))

        self.point_list.append({
            'x': x,
            'y': y,
            'z': z,
            'intensity': intensity,
            'device_id': device_id,
            'timestamp': timestamp,
            'is_ground': is_ground,
        })
        return self.point_list

    def upload(self, dataset_name = None, chunk_size = 5000000):
        """
        Builds a JSON payload from the current point list and uploads it to
        Diffgram in chunks.

        :param dataset_name: Target dataset name; when None, the project's
            default directory is used.
        :param chunk_size: Size of each chunk of the JSON file. Default is 5MB.
        :return: True on success, False when the point list is empty.
        """
        if not self.point_list:
            print('At least 1 point should be provided in the point list. Please add point using add_point()')
            return False

        json_data = json.dumps({'point_list': self.point_list})
        # BUGFIX: removed leftover debug code that wrote the payload to a
        # local 'data.json' file (double-encoding the JSON string) as a
        # hidden side effect of every upload.

        endpoint = "/api/walrus/project/{}/upload/large".format(
            self.client.project_string_id
        )
        # BUGFIX: the chunk_size parameter was previously clobbered by an
        # unconditional local `chunk_size = 5000000`; it is now honored.
        dataset_id = self.client.default_directory['id']
        if dataset_name is not None:
            dataset_id = self.client.directory.get(dataset_name).id

        with io.StringIO(json_data) as s:
            # Measure total payload size, then rewind for chunked reads.
            s.seek(0, os.SEEK_END)
            file_size = s.tell()
            s.seek(0)
            num_chunks = int(math.ceil(file_size / chunk_size))
            # Size of the final (short) chunk; -1 means the payload divides
            # evenly and no adjustment is needed.
            last_chunk_size = -1
            if file_size % chunk_size != 0:
                last_chunk_size = file_size % chunk_size

            uid_upload = str(uuid.uuid4())
            for i in range(num_chunks):
                payload = {
                    'dzuuid': uid_upload,
                    'original_filename': self.original_filename,
                    'dzchunkindex': i,
                    'dztotalfilesize': file_size,
                    'dzchunksize': chunk_size,
                    'dztotalchunkcount': num_chunks,
                    'dzchunkbyteoffset': i * chunk_size,
                    'directory_id': dataset_id,
                    'source': 'from_sensor_fusion_json',
                }
                # Adjust final chunk size
                if i == (num_chunks - 1) and last_chunk_size != -1:
                    payload['dzchunksize'] = last_chunk_size

                # Read file chunk
                s.seek(payload['dzchunkbyteoffset'])
                file_chunk = s.read(payload['dzchunksize'])
                files = {'file': ('{}_sf.json'.format(self.original_filename), file_chunk)}
                # Make request to server
                url = self.client.host + endpoint
                response = self.client.session.post(url,
                                                    data = payload,
                                                    files = files)

                self.client.handle_errors(response)

        return True

0 commit comments

Comments
 (0)