Automated Protos Update #896

Closed · wants to merge 1 commit
210 changes: 107 additions & 103 deletions src/viam/gen/app/data/v1/data_pb2.py

Large diffs are not rendered by default.

67 changes: 60 additions & 7 deletions src/viam/gen/app/data/v1/data_pb2.pyi
@@ -61,6 +61,31 @@ TAGS_FILTER_TYPE_UNTAGGED: TagsFilterType.ValueType
'TAGS_FILTER_TYPE_UNTAGGED specifies that all untagged documents should be returned.'
global___TagsFilterType = TagsFilterType

class _TabularDataSourceType:
ValueType = typing.NewType('ValueType', builtins.int)
V: typing_extensions.TypeAlias = ValueType

class _TabularDataSourceTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_TabularDataSourceType.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
TABULAR_DATA_SOURCE_TYPE_UNSPECIFIED: _TabularDataSourceType.ValueType
TABULAR_DATA_SOURCE_TYPE_STANDARD: _TabularDataSourceType.ValueType
'TABULAR_DATA_SOURCE_TYPE_STANDARD indicates reading from standard storage. This is\n the default option and available for all data synced to Viam.\n '
TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE: _TabularDataSourceType.ValueType
'TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE indicates reading from hot storage. This is a\n premium feature requiring specific data sources to be opted in.\n See docs at https://docs.viam.com/data-ai/capture-data/advanced/advanced-data-capture-sync/#capture-to-the-hot-data-store\n '
TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK: _TabularDataSourceType.ValueType
'TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK indicates reading the output data of\n a data pipeline. When using this, a pipeline ID needs to be specified.\n '

class TabularDataSourceType(_TabularDataSourceType, metaclass=_TabularDataSourceTypeEnumTypeWrapper):
"""TabularDataSourceType specifies the data source type for TabularDataByMQL queries."""
TABULAR_DATA_SOURCE_TYPE_UNSPECIFIED: TabularDataSourceType.ValueType
TABULAR_DATA_SOURCE_TYPE_STANDARD: TabularDataSourceType.ValueType
'TABULAR_DATA_SOURCE_TYPE_STANDARD indicates reading from standard storage. This is\nthe default option and available for all data synced to Viam.\n'
TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE: TabularDataSourceType.ValueType
'TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE indicates reading from hot storage. This is a\npremium feature requiring specific data sources to be opted in.\nSee docs at https://docs.viam.com/data-ai/capture-data/advanced/advanced-data-capture-sync/#capture-to-the-hot-data-store\n'
TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK: TabularDataSourceType.ValueType
'TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK indicates reading the output data of\na data pipeline. When using this, a pipeline ID needs to be specified.\n'
global___TabularDataSourceType = TabularDataSourceType
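
As a quick orientation, here is a minimal sketch of how the new enum surfaces to callers. The import path follows this package's generated-module layout shown in the diff; the value names come straight from the stub above.

from viam.gen.app.data.v1 import data_pb2

# The default is standard storage; hot storage and pipeline sinks are opt-in.
source_type = data_pb2.TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE

# Protobuf enum values behave as plain ints, and the wrapper can map a
# numeric value back to its symbolic name.
assert source_type != data_pb2.TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_UNSPECIFIED
print(data_pb2.TabularDataSourceType.Name(source_type))
# -> TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE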

@typing.final
class DataRequest(google.protobuf.message.Message):
"""DataRequest encapsulates the filter for the data, a limit on the maximum results returned,
@@ -383,18 +408,40 @@ class TabularDataBySQLResponse(google.protobuf.message.Message):
...
global___TabularDataBySQLResponse = TabularDataBySQLResponse

@typing.final
class TabularDataSource(google.protobuf.message.Message):
"""TabularDataSource specifies the data source for user queries to execute on."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TYPE_FIELD_NUMBER: builtins.int
PIPELINE_ID_FIELD_NUMBER: builtins.int
type: global___TabularDataSourceType.ValueType
pipeline_id: builtins.str
'pipeline_id is the ID of the pipeline to query. Required when using\n TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK.\n '

def __init__(self, *, type: global___TabularDataSourceType.ValueType=..., pipeline_id: builtins.str | None=...) -> None:
...

def HasField(self, field_name: typing.Literal['_pipeline_id', b'_pipeline_id', 'pipeline_id', b'pipeline_id']) -> builtins.bool:
...

def ClearField(self, field_name: typing.Literal['_pipeline_id', b'_pipeline_id', 'pipeline_id', b'pipeline_id', 'type', b'type']) -> None:
...

def WhichOneof(self, oneof_group: typing.Literal['_pipeline_id', b'_pipeline_id']) -> typing.Literal['pipeline_id'] | None:
...
global___TabularDataSource = TabularDataSource
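
A short sketch of constructing the new message. pipeline_id is generated as a proto3 optional field, so presence is tracked explicitly; the ID below is a made-up placeholder.

from viam.gen.app.data.v1 import data_pb2

# Pipeline-sink reads must carry the pipeline ID (placeholder value here).
source = data_pb2.TabularDataSource(
    type=data_pb2.TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK,
    pipeline_id="example-pipeline-id",
)

# Because the field is optional, presence can be checked and cleared.
assert source.HasField("pipeline_id")
source.ClearField("pipeline_id")
assert not source.HasField("pipeline_id")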

@typing.final
class TabularDataByMQLRequest(google.protobuf.message.Message):
"""TabularDataByMQLRequest requests tabular data using an MQL query."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ORGANIZATION_ID_FIELD_NUMBER: builtins.int
MQL_BINARY_FIELD_NUMBER: builtins.int
USE_RECENT_DATA_FIELD_NUMBER: builtins.int
USE_DATA_PIPELINE_FIELD_NUMBER: builtins.int
DATA_SOURCE_FIELD_NUMBER: builtins.int
organization_id: builtins.str
use_recent_data: builtins.bool
'Deprecated, please use TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE instead.'
use_data_pipeline: builtins.str
'if set, the MQL query will target the sink collection for the data pipeline name\n referenced by this value under the given organization.\n '

@property
def mql_binary(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]:
@@ -403,17 +450,23 @@ class TabularDataByMQLRequest(google.protobuf.message.Message):
namespace, which holds the Viam organization's tabular data.
"""

def __init__(self, *, organization_id: builtins.str=..., mql_binary: collections.abc.Iterable[builtins.bytes] | None=..., use_recent_data: builtins.bool | None=..., use_data_pipeline: builtins.str | None=...) -> None:
@property
def data_source(self) -> global___TabularDataSource:
"""data_source is an optional field that can be used to specify the data source for the query.
If not specified, the query will run on "standard" storage.
"""

def __init__(self, *, organization_id: builtins.str=..., mql_binary: collections.abc.Iterable[builtins.bytes] | None=..., use_recent_data: builtins.bool | None=..., data_source: global___TabularDataSource | None=...) -> None:
...

def HasField(self, field_name: typing.Literal['_use_data_pipeline', b'_use_data_pipeline', '_use_recent_data', b'_use_recent_data', 'use_data_pipeline', b'use_data_pipeline', 'use_recent_data', b'use_recent_data']) -> builtins.bool:
def HasField(self, field_name: typing.Literal['_data_source', b'_data_source', '_use_recent_data', b'_use_recent_data', 'data_source', b'data_source', 'use_recent_data', b'use_recent_data']) -> builtins.bool:
...

def ClearField(self, field_name: typing.Literal['_use_data_pipeline', b'_use_data_pipeline', '_use_recent_data', b'_use_recent_data', 'mql_binary', b'mql_binary', 'organization_id', b'organization_id', 'use_data_pipeline', b'use_data_pipeline', 'use_recent_data', b'use_recent_data']) -> None:
def ClearField(self, field_name: typing.Literal['_data_source', b'_data_source', '_use_recent_data', b'_use_recent_data', 'data_source', b'data_source', 'mql_binary', b'mql_binary', 'organization_id', b'organization_id', 'use_recent_data', b'use_recent_data']) -> None:
...

@typing.overload
def WhichOneof(self, oneof_group: typing.Literal['_use_data_pipeline', b'_use_data_pipeline']) -> typing.Literal['use_data_pipeline'] | None:
def WhichOneof(self, oneof_group: typing.Literal['_data_source', b'_data_source']) -> typing.Literal['data_source'] | None:
...

@typing.overload
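Putting the pieces together, a hedged sketch of the updated request shape: the removed use_data_pipeline string is replaced by a TabularDataSource message. The organization ID is a placeholder, and encoding MQL stages with PyMongo's bson module is an assumption about how mql_binary is produced, not something this diff specifies.

import bson  # PyMongo's bson module; assumed here for encoding MQL stages

from viam.gen.app.data.v1 import data_pb2

# Each MQL aggregation stage is one BSON document in mql_binary.
stages = [{"$match": {"component_name": "my-sensor"}}, {"$limit": 5}]

request = data_pb2.TabularDataByMQLRequest(
    organization_id="<ORG-ID>",  # placeholder
    mql_binary=[bson.encode(stage) for stage in stages],
    # data_source replaces the removed use_data_pipeline string field.
    data_source=data_pb2.TabularDataSource(
        type=data_pb2.TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
    ),
)
assert request.HasField("data_source")
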
20 changes: 18 additions & 2 deletions src/viam/gen/app/datapipelines/v1/data_pipelines_grpc.py
@@ -30,8 +30,16 @@ async def UpdateDataPipeline(self, stream: 'grpclib.server.Stream[app.datapipeli
async def DeleteDataPipeline(self, stream: 'grpclib.server.Stream[app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineResponse]') -> None:
pass

@abc.abstractmethod
async def EnableDataPipeline(self, stream: 'grpclib.server.Stream[app.datapipelines.v1.data_pipelines_pb2.EnableDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.EnableDataPipelineResponse]') -> None:
pass

@abc.abstractmethod
async def DisableDataPipeline(self, stream: 'grpclib.server.Stream[app.datapipelines.v1.data_pipelines_pb2.DisableDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.DisableDataPipelineResponse]') -> None:
pass

def __mapping__(self) -> typing.Dict[str, grpclib.const.Handler]:
return {'/viam.app.datapipelines.v1.DataPipelinesService/GetDataPipeline': grpclib.const.Handler(self.GetDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.GetDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.GetDataPipelineResponse), '/viam.app.datapipelines.v1.DataPipelinesService/ListDataPipelines': grpclib.const.Handler(self.ListDataPipelines, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.ListDataPipelinesRequest, app.datapipelines.v1.data_pipelines_pb2.ListDataPipelinesResponse), '/viam.app.datapipelines.v1.DataPipelinesService/CreateDataPipeline': grpclib.const.Handler(self.CreateDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.CreateDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.CreateDataPipelineResponse), '/viam.app.datapipelines.v1.DataPipelinesService/UpdateDataPipeline': grpclib.const.Handler(self.UpdateDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.UpdateDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.UpdateDataPipelineResponse), '/viam.app.datapipelines.v1.DataPipelinesService/DeleteDataPipeline': grpclib.const.Handler(self.DeleteDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineResponse)}
return {'/viam.app.datapipelines.v1.DataPipelinesService/GetDataPipeline': grpclib.const.Handler(self.GetDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.GetDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.GetDataPipelineResponse), '/viam.app.datapipelines.v1.DataPipelinesService/ListDataPipelines': grpclib.const.Handler(self.ListDataPipelines, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.ListDataPipelinesRequest, app.datapipelines.v1.data_pipelines_pb2.ListDataPipelinesResponse), '/viam.app.datapipelines.v1.DataPipelinesService/CreateDataPipeline': grpclib.const.Handler(self.CreateDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.CreateDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.CreateDataPipelineResponse), '/viam.app.datapipelines.v1.DataPipelinesService/UpdateDataPipeline': grpclib.const.Handler(self.UpdateDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.UpdateDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.UpdateDataPipelineResponse), '/viam.app.datapipelines.v1.DataPipelinesService/DeleteDataPipeline': grpclib.const.Handler(self.DeleteDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineResponse), '/viam.app.datapipelines.v1.DataPipelinesService/EnableDataPipeline': grpclib.const.Handler(self.EnableDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.EnableDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.EnableDataPipelineResponse), '/viam.app.datapipelines.v1.DataPipelinesService/DisableDataPipeline': grpclib.const.Handler(self.DisableDataPipeline, grpclib.const.Cardinality.UNARY_UNARY, app.datapipelines.v1.data_pipelines_pb2.DisableDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.DisableDataPipelineResponse)}

class UnimplementedDataPipelinesServiceBase(DataPipelinesServiceBase):

@@ -50,11 +58,19 @@ async def UpdateDataPipeline(self, stream: 'grpclib.server.Stream[app.datapipeli
async def DeleteDataPipeline(self, stream: 'grpclib.server.Stream[app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineResponse]') -> None:
raise grpclib.exceptions.GRPCError(grpclib.const.Status.UNIMPLEMENTED)

async def EnableDataPipeline(self, stream: 'grpclib.server.Stream[app.datapipelines.v1.data_pipelines_pb2.EnableDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.EnableDataPipelineResponse]') -> None:
raise grpclib.exceptions.GRPCError(grpclib.const.Status.UNIMPLEMENTED)

async def DisableDataPipeline(self, stream: 'grpclib.server.Stream[app.datapipelines.v1.data_pipelines_pb2.DisableDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.DisableDataPipelineResponse]') -> None:
raise grpclib.exceptions.GRPCError(grpclib.const.Status.UNIMPLEMENTED)

class DataPipelinesServiceStub:

def __init__(self, channel: grpclib.client.Channel) -> None:
self.GetDataPipeline = grpclib.client.UnaryUnaryMethod(channel, '/viam.app.datapipelines.v1.DataPipelinesService/GetDataPipeline', app.datapipelines.v1.data_pipelines_pb2.GetDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.GetDataPipelineResponse)
self.ListDataPipelines = grpclib.client.UnaryUnaryMethod(channel, '/viam.app.datapipelines.v1.DataPipelinesService/ListDataPipelines', app.datapipelines.v1.data_pipelines_pb2.ListDataPipelinesRequest, app.datapipelines.v1.data_pipelines_pb2.ListDataPipelinesResponse)
self.CreateDataPipeline = grpclib.client.UnaryUnaryMethod(channel, '/viam.app.datapipelines.v1.DataPipelinesService/CreateDataPipeline', app.datapipelines.v1.data_pipelines_pb2.CreateDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.CreateDataPipelineResponse)
self.UpdateDataPipeline = grpclib.client.UnaryUnaryMethod(channel, '/viam.app.datapipelines.v1.DataPipelinesService/UpdateDataPipeline', app.datapipelines.v1.data_pipelines_pb2.UpdateDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.UpdateDataPipelineResponse)
self.DeleteDataPipeline = grpclib.client.UnaryUnaryMethod(channel, '/viam.app.datapipelines.v1.DataPipelinesService/DeleteDataPipeline', app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.DeleteDataPipelineResponse)
self.EnableDataPipeline = grpclib.client.UnaryUnaryMethod(channel, '/viam.app.datapipelines.v1.DataPipelinesService/EnableDataPipeline', app.datapipelines.v1.data_pipelines_pb2.EnableDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.EnableDataPipelineResponse)
self.DisableDataPipeline = grpclib.client.UnaryUnaryMethod(channel, '/viam.app.datapipelines.v1.DataPipelinesService/DisableDataPipeline', app.datapipelines.v1.data_pipelines_pb2.DisableDataPipelineRequest, app.datapipelines.v1.data_pipelines_pb2.DisableDataPipelineResponse)
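
For completeness, a sketch of calling the two new RPCs through the regenerated stub. The request messages' fields are not part of this diff, so the `id` field below is an assumption, and real calls to the Viam app API would need authentication metadata that is omitted here.

import asyncio

from grpclib.client import Channel

from viam.gen.app.datapipelines.v1 import data_pipelines_pb2
from viam.gen.app.datapipelines.v1.data_pipelines_grpc import DataPipelinesServiceStub


async def toggle_pipeline(pipeline_id: str) -> None:
    # Host is a placeholder: real calls need an authenticated channel,
    # which is not shown here.
    channel = Channel("app.viam.com", 443, ssl=True)
    try:
        stub = DataPipelinesServiceStub(channel)
        # The `id` field name is an assumption; the request definitions
        # are not included in this diff.
        await stub.DisableDataPipeline(
            data_pipelines_pb2.DisableDataPipelineRequest(id=pipeline_id))
        await stub.EnableDataPipeline(
            data_pipelines_pb2.EnableDataPipelineRequest(id=pipeline_id))
    finally:
        channel.close()


# asyncio.run(toggle_pipeline("<PIPELINE-ID>"))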