Skip to content

Commit

Permalink
[SPARK-51142][ML][CONNECT] ML protobufs clean up
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?
ML protobufs clean up

### Why are the changes needed?
to follow the guide https://github.com/apache/spark/blob/ece14704cc083f17689d2e0b9ab8e31cf71a7a2d/sql/connect/docs/adding-proto-messages.md

### Does this PR introduce _any_ user-facing change?
no

### How was this patch tested?
existing tests

### Was this patch authored or co-authored using generative AI tooling?
no

Closes #49862 from zhengruifeng/ml_connect_protos.

Authored-by: Ruifeng Zheng <ruifengz@apache.org>
Signed-off-by: Ruifeng Zheng <ruifengz@apache.org>
  • Loading branch information
zhengruifeng committed Feb 11, 2025
1 parent bdb7704 commit 6a71f76
Show file tree
Hide file tree
Showing 14 changed files with 240 additions and 124 deletions.
4 changes: 3 additions & 1 deletion python/pyspark/ml/connect/proto.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,9 @@ def plan(self, session: "SparkConnectClient") -> pb2.Relation:
plan.ml_relation.transform.obj_ref.CopyFrom(pb2.ObjectRef(id=self._name))
else:
plan.ml_relation.transform.transformer.CopyFrom(
pb2.MlOperator(name=self._name, uid=self._uid, type=pb2.MlOperator.TRANSFORMER)
pb2.MlOperator(
name=self._name, uid=self._uid, type=pb2.MlOperator.OPERATOR_TYPE_TRANSFORMER
)
)

if self._ml_params is not None:
Expand Down
19 changes: 10 additions & 9 deletions python/pyspark/ml/connect/readwrite.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,13 +118,13 @@ def saveInstance(
elif isinstance(instance, (JavaEstimator, JavaTransformer, JavaEvaluator)):
operator: Union[JavaEstimator, JavaTransformer, JavaEvaluator]
if isinstance(instance, JavaEstimator):
ml_type = pb2.MlOperator.ESTIMATOR
ml_type = pb2.MlOperator.OPERATOR_TYPE_ESTIMATOR
operator = cast("JavaEstimator", instance)
elif isinstance(instance, JavaEvaluator):
ml_type = pb2.MlOperator.EVALUATOR
ml_type = pb2.MlOperator.OPERATOR_TYPE_EVALUATOR
operator = cast("JavaEvaluator", instance)
else:
ml_type = pb2.MlOperator.TRANSFORMER
ml_type = pb2.MlOperator.OPERATOR_TYPE_TRANSFORMER
operator = cast("JavaTransformer", instance)

params = serialize_ml_params(operator, session.client)
Expand Down Expand Up @@ -249,13 +249,13 @@ def loadInstance(
or issubclass(clazz, JavaTransformer)
):
if issubclass(clazz, JavaModel):
ml_type = pb2.MlOperator.MODEL
ml_type = pb2.MlOperator.OPERATOR_TYPE_MODEL
elif issubclass(clazz, JavaEstimator):
ml_type = pb2.MlOperator.ESTIMATOR
ml_type = pb2.MlOperator.OPERATOR_TYPE_ESTIMATOR
elif issubclass(clazz, JavaEvaluator):
ml_type = pb2.MlOperator.EVALUATOR
ml_type = pb2.MlOperator.OPERATOR_TYPE_EVALUATOR
else:
ml_type = pb2.MlOperator.TRANSFORMER
ml_type = pb2.MlOperator.OPERATOR_TYPE_TRANSFORMER

# to get the java corresponding qualified class name
java_qualified_class_name = (
Expand All @@ -281,7 +281,7 @@ def _get_class() -> Type[RL]:
py_type = _get_class()
# It must be JavaWrapper, since we're passing the string to the _java_obj
if issubclass(py_type, JavaWrapper):
if ml_type == pb2.MlOperator.MODEL:
if ml_type == pb2.MlOperator.OPERATOR_TYPE_MODEL:
session.client.add_ml_cache(result.obj_ref.id)
instance = py_type(result.obj_ref.id)
else:
Expand Down Expand Up @@ -358,7 +358,8 @@ def _get_class() -> Type[RL]:
command.ml_command.read.CopyFrom(
pb2.MlCommand.Read(
operator=pb2.MlOperator(
name=java_qualified_class_name, type=pb2.MlOperator.TRANSFORMER
name=java_qualified_class_name,
type=pb2.MlOperator.OPERATOR_TYPE_TRANSFORMER,
),
path=path,
)
Expand Down
4 changes: 2 additions & 2 deletions python/pyspark/ml/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def wrapped(self: "JavaEstimator", dataset: "ConnectDataFrame") -> Any:
input = dataset._plan.plan(client)
assert isinstance(self._java_obj, str)
estimator = pb2.MlOperator(
name=self._java_obj, uid=self.uid, type=pb2.MlOperator.ESTIMATOR
name=self._java_obj, uid=self.uid, type=pb2.MlOperator.OPERATOR_TYPE_ESTIMATOR
)
command = pb2.Command()
command.ml_command.fit.CopyFrom(
Expand Down Expand Up @@ -361,7 +361,7 @@ def wrapped(self: "JavaEvaluator", dataset: "ConnectDataFrame") -> Any:
input = dataset._plan.plan(client)
assert isinstance(self._java_obj, str)
evaluator = pb2.MlOperator(
name=self._java_obj, uid=self.uid, type=pb2.MlOperator.EVALUATOR
name=self._java_obj, uid=self.uid, type=pb2.MlOperator.OPERATOR_TYPE_EVALUATOR
)
command = pb2.Command()
command.ml_command.evaluate.CopyFrom(
Expand Down
12 changes: 6 additions & 6 deletions python/pyspark/sql/connect/proto/ml_common_pb2.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x1dspark/connect/ml_common.proto\x12\rspark.connect\x1a\x1fspark/connect/expressions.proto"\xa5\x01\n\x08MlParams\x12;\n\x06params\x18\x01 \x03(\x0b\x32#.spark.connect.MlParams.ParamsEntryR\x06params\x1a\\\n\x0bParamsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32!.spark.connect.Expression.LiteralR\x05value:\x02\x38\x01"\xc9\x01\n\nMlOperator\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x10\n\x03uid\x18\x02 \x01(\tR\x03uid\x12:\n\x04type\x18\x03 \x01(\x0e\x32&.spark.connect.MlOperator.OperatorTypeR\x04type"Y\n\x0cOperatorType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tESTIMATOR\x10\x01\x12\x0f\n\x0bTRANSFORMER\x10\x02\x12\r\n\tEVALUATOR\x10\x03\x12\t\n\x05MODEL\x10\x04"\x1b\n\tObjectRef\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02idB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
b'\n\x1dspark/connect/ml_common.proto\x12\rspark.connect\x1a\x1fspark/connect/expressions.proto"\xa5\x01\n\x08MlParams\x12;\n\x06params\x18\x01 \x03(\x0b\x32#.spark.connect.MlParams.ParamsEntryR\x06params\x1a\\\n\x0bParamsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32!.spark.connect.Expression.LiteralR\x05value:\x02\x38\x01"\x90\x02\n\nMlOperator\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x10\n\x03uid\x18\x02 \x01(\tR\x03uid\x12:\n\x04type\x18\x03 \x01(\x0e\x32&.spark.connect.MlOperator.OperatorTypeR\x04type"\x9f\x01\n\x0cOperatorType\x12\x1d\n\x19OPERATOR_TYPE_UNSPECIFIED\x10\x00\x12\x1b\n\x17OPERATOR_TYPE_ESTIMATOR\x10\x01\x12\x1d\n\x19OPERATOR_TYPE_TRANSFORMER\x10\x02\x12\x1b\n\x17OPERATOR_TYPE_EVALUATOR\x10\x03\x12\x17\n\x13OPERATOR_TYPE_MODEL\x10\x04"\x1b\n\tObjectRef\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02idB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
)

_globals = globals()
Expand All @@ -58,9 +58,9 @@
_globals["_MLPARAMS_PARAMSENTRY"]._serialized_start = 155
_globals["_MLPARAMS_PARAMSENTRY"]._serialized_end = 247
_globals["_MLOPERATOR"]._serialized_start = 250
_globals["_MLOPERATOR"]._serialized_end = 451
_globals["_MLOPERATOR_OPERATORTYPE"]._serialized_start = 362
_globals["_MLOPERATOR_OPERATORTYPE"]._serialized_end = 451
_globals["_OBJECTREF"]._serialized_start = 453
_globals["_OBJECTREF"]._serialized_end = 480
_globals["_MLOPERATOR"]._serialized_end = 522
_globals["_MLOPERATOR_OPERATORTYPE"]._serialized_start = 363
_globals["_MLOPERATOR_OPERATORTYPE"]._serialized_end = 522
_globals["_OBJECTREF"]._serialized_start = 524
_globals["_OBJECTREF"]._serialized_end = 551
# @@protoc_insertion_point(module_scope)
38 changes: 24 additions & 14 deletions python/pyspark/sql/connect/proto/ml_common_pb2.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -112,28 +112,36 @@ class MlOperator(google.protobuf.message.Message):
builtins.type,
): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNSPECIFIED: MlOperator._OperatorType.ValueType # 0
ESTIMATOR: MlOperator._OperatorType.ValueType # 1
TRANSFORMER: MlOperator._OperatorType.ValueType # 2
EVALUATOR: MlOperator._OperatorType.ValueType # 3
MODEL: MlOperator._OperatorType.ValueType # 4
OPERATOR_TYPE_UNSPECIFIED: MlOperator._OperatorType.ValueType # 0
OPERATOR_TYPE_ESTIMATOR: MlOperator._OperatorType.ValueType # 1
"""ML estimator"""
OPERATOR_TYPE_TRANSFORMER: MlOperator._OperatorType.ValueType # 2
"""ML transformer (non-model)"""
OPERATOR_TYPE_EVALUATOR: MlOperator._OperatorType.ValueType # 3
"""ML evaluator"""
OPERATOR_TYPE_MODEL: MlOperator._OperatorType.ValueType # 4
"""ML model"""

class OperatorType(_OperatorType, metaclass=_OperatorTypeEnumTypeWrapper): ...
UNSPECIFIED: MlOperator.OperatorType.ValueType # 0
ESTIMATOR: MlOperator.OperatorType.ValueType # 1
TRANSFORMER: MlOperator.OperatorType.ValueType # 2
EVALUATOR: MlOperator.OperatorType.ValueType # 3
MODEL: MlOperator.OperatorType.ValueType # 4
OPERATOR_TYPE_UNSPECIFIED: MlOperator.OperatorType.ValueType # 0
OPERATOR_TYPE_ESTIMATOR: MlOperator.OperatorType.ValueType # 1
"""ML estimator"""
OPERATOR_TYPE_TRANSFORMER: MlOperator.OperatorType.ValueType # 2
"""ML transformer (non-model)"""
OPERATOR_TYPE_EVALUATOR: MlOperator.OperatorType.ValueType # 3
"""ML evaluator"""
OPERATOR_TYPE_MODEL: MlOperator.OperatorType.ValueType # 4
"""ML model"""

NAME_FIELD_NUMBER: builtins.int
UID_FIELD_NUMBER: builtins.int
TYPE_FIELD_NUMBER: builtins.int
name: builtins.str
"""The qualified name of the ML operator."""
"""(Required) The qualified name of the ML operator."""
uid: builtins.str
"""Unique id of the ML operator"""
"""(Required) Unique id of the ML operator"""
type: global___MlOperator.OperatorType.ValueType
"""Represents what the ML operator is"""
"""(Required) Represents what the ML operator is"""
def __init__(
self,
*,
Expand All @@ -156,7 +164,9 @@ class ObjectRef(google.protobuf.message.Message):

ID_FIELD_NUMBER: builtins.int
id: builtins.str
"""The ID is used to lookup the object on the server side."""
"""(Required) The ID is used to lookup the object on the server side.
Note it is different from the 'uid' of a ML object.
"""
def __init__(
self,
*,
Expand Down
34 changes: 17 additions & 17 deletions python/pyspark/sql/connect/proto/ml_pb2.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x16spark/connect/ml.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/ml_common.proto"\xb1\t\n\tMlCommand\x12\x30\n\x03\x66it\x18\x01 \x01(\x0b\x32\x1c.spark.connect.MlCommand.FitH\x00R\x03\x66it\x12,\n\x05\x66\x65tch\x18\x02 \x01(\x0b\x32\x14.spark.connect.FetchH\x00R\x05\x66\x65tch\x12\x39\n\x06\x64\x65lete\x18\x03 \x01(\x0b\x32\x1f.spark.connect.MlCommand.DeleteH\x00R\x06\x64\x65lete\x12\x36\n\x05write\x18\x04 \x01(\x0b\x32\x1e.spark.connect.MlCommand.WriteH\x00R\x05write\x12\x33\n\x04read\x18\x05 \x01(\x0b\x32\x1d.spark.connect.MlCommand.ReadH\x00R\x04read\x12?\n\x08\x65valuate\x18\x06 \x01(\x0b\x32!.spark.connect.MlCommand.EvaluateH\x00R\x08\x65valuate\x1a\xa2\x01\n\x03\x46it\x12\x37\n\testimator\x18\x01 \x01(\x0b\x32\x19.spark.connect.MlOperatorR\testimator\x12/\n\x06params\x18\x02 \x01(\x0b\x32\x17.spark.connect.MlParamsR\x06params\x12\x31\n\x07\x64\x61taset\x18\x03 \x01(\x0b\x32\x17.spark.connect.RelationR\x07\x64\x61taset\x1a;\n\x06\x44\x65lete\x12\x31\n\x07obj_ref\x18\x01 \x01(\x0b\x32\x18.spark.connect.ObjectRefR\x06objRef\x1a\xf0\x02\n\x05Write\x12\x37\n\x08operator\x18\x01 \x01(\x0b\x32\x19.spark.connect.MlOperatorH\x00R\x08operator\x12\x33\n\x07obj_ref\x18\x02 \x01(\x0b\x32\x18.spark.connect.ObjectRefH\x00R\x06objRef\x12/\n\x06params\x18\x03 \x01(\x0b\x32\x17.spark.connect.MlParamsR\x06params\x12\x12\n\x04path\x18\x04 \x01(\tR\x04path\x12)\n\x10should_overwrite\x18\x05 \x01(\x08R\x0fshouldOverwrite\x12\x45\n\x07options\x18\x06 \x03(\x0b\x32+.spark.connect.MlCommand.Write.OptionsEntryR\x07options\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x06\n\x04type\x1aQ\n\x04Read\x12\x35\n\x08operator\x18\x01 \x01(\x0b\x32\x19.spark.connect.MlOperatorR\x08operator\x12\x12\n\x04path\x18\x02 \x01(\tR\x04path\x1a\xa7\x01\n\x08\x45valuate\x12\x37\n\tevaluator\x18\x01 
\x01(\x0b\x32\x19.spark.connect.MlOperatorR\tevaluator\x12/\n\x06params\x18\x02 \x01(\x0b\x32\x17.spark.connect.MlParamsR\x06params\x12\x31\n\x07\x64\x61taset\x18\x03 \x01(\x0b\x32\x17.spark.connect.RelationR\x07\x64\x61tasetB\t\n\x07\x63ommand"\xf6\x02\n\x0fMlCommandResult\x12\x39\n\x05param\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralH\x00R\x05param\x12\x1a\n\x07summary\x18\x02 \x01(\tH\x00R\x07summary\x12T\n\roperator_info\x18\x03 \x01(\x0b\x32-.spark.connect.MlCommandResult.MlOperatorInfoH\x00R\x0coperatorInfo\x1a\xa6\x01\n\x0eMlOperatorInfo\x12\x33\n\x07obj_ref\x18\x01 \x01(\x0b\x32\x18.spark.connect.ObjectRefH\x00R\x06objRef\x12\x14\n\x04name\x18\x02 \x01(\tH\x00R\x04name\x12\x10\n\x03uid\x18\x03 \x01(\tR\x03uid\x12/\n\x06params\x18\x04 \x01(\x0b\x32\x17.spark.connect.MlParamsR\x06paramsB\x06\n\x04typeB\r\n\x0bresult_typeB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
b'\n\x16spark/connect/ml.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/ml_common.proto"\xfb\t\n\tMlCommand\x12\x30\n\x03\x66it\x18\x01 \x01(\x0b\x32\x1c.spark.connect.MlCommand.FitH\x00R\x03\x66it\x12,\n\x05\x66\x65tch\x18\x02 \x01(\x0b\x32\x14.spark.connect.FetchH\x00R\x05\x66\x65tch\x12\x39\n\x06\x64\x65lete\x18\x03 \x01(\x0b\x32\x1f.spark.connect.MlCommand.DeleteH\x00R\x06\x64\x65lete\x12\x36\n\x05write\x18\x04 \x01(\x0b\x32\x1e.spark.connect.MlCommand.WriteH\x00R\x05write\x12\x33\n\x04read\x18\x05 \x01(\x0b\x32\x1d.spark.connect.MlCommand.ReadH\x00R\x04read\x12?\n\x08\x65valuate\x18\x06 \x01(\x0b\x32!.spark.connect.MlCommand.EvaluateH\x00R\x08\x65valuate\x1a\xb2\x01\n\x03\x46it\x12\x37\n\testimator\x18\x01 \x01(\x0b\x32\x19.spark.connect.MlOperatorR\testimator\x12\x34\n\x06params\x18\x02 \x01(\x0b\x32\x17.spark.connect.MlParamsH\x00R\x06params\x88\x01\x01\x12\x31\n\x07\x64\x61taset\x18\x03 \x01(\x0b\x32\x17.spark.connect.RelationR\x07\x64\x61tasetB\t\n\x07_params\x1a;\n\x06\x44\x65lete\x12\x31\n\x07obj_ref\x18\x01 \x01(\x0b\x32\x18.spark.connect.ObjectRefR\x06objRef\x1a\x9a\x03\n\x05Write\x12\x37\n\x08operator\x18\x01 \x01(\x0b\x32\x19.spark.connect.MlOperatorH\x00R\x08operator\x12\x33\n\x07obj_ref\x18\x02 \x01(\x0b\x32\x18.spark.connect.ObjectRefH\x00R\x06objRef\x12\x34\n\x06params\x18\x03 \x01(\x0b\x32\x17.spark.connect.MlParamsH\x01R\x06params\x88\x01\x01\x12\x12\n\x04path\x18\x04 \x01(\tR\x04path\x12.\n\x10should_overwrite\x18\x05 \x01(\x08H\x02R\x0fshouldOverwrite\x88\x01\x01\x12\x45\n\x07options\x18\x06 \x03(\x0b\x32+.spark.connect.MlCommand.Write.OptionsEntryR\x07options\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x06\n\x04typeB\t\n\x07_paramsB\x13\n\x11_should_overwrite\x1aQ\n\x04Read\x12\x35\n\x08operator\x18\x01 \x01(\x0b\x32\x19.spark.connect.MlOperatorR\x08operator\x12\x12\n\x04path\x18\x02 
\x01(\tR\x04path\x1a\xb7\x01\n\x08\x45valuate\x12\x37\n\tevaluator\x18\x01 \x01(\x0b\x32\x19.spark.connect.MlOperatorR\tevaluator\x12\x34\n\x06params\x18\x02 \x01(\x0b\x32\x17.spark.connect.MlParamsH\x00R\x06params\x88\x01\x01\x12\x31\n\x07\x64\x61taset\x18\x03 \x01(\x0b\x32\x17.spark.connect.RelationR\x07\x64\x61tasetB\t\n\x07_paramsB\t\n\x07\x63ommand"\x93\x03\n\x0fMlCommandResult\x12\x39\n\x05param\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralH\x00R\x05param\x12\x1a\n\x07summary\x18\x02 \x01(\tH\x00R\x07summary\x12T\n\roperator_info\x18\x03 \x01(\x0b\x32-.spark.connect.MlCommandResult.MlOperatorInfoH\x00R\x0coperatorInfo\x1a\xc3\x01\n\x0eMlOperatorInfo\x12\x33\n\x07obj_ref\x18\x01 \x01(\x0b\x32\x18.spark.connect.ObjectRefH\x00R\x06objRef\x12\x14\n\x04name\x18\x02 \x01(\tH\x00R\x04name\x12\x15\n\x03uid\x18\x03 \x01(\tH\x01R\x03uid\x88\x01\x01\x12\x34\n\x06params\x18\x04 \x01(\x0b\x32\x17.spark.connect.MlParamsH\x02R\x06params\x88\x01\x01\x42\x06\n\x04typeB\x06\n\x04_uidB\t\n\x07_paramsB\r\n\x0bresult_typeB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
)

_globals = globals()
Expand All @@ -54,21 +54,21 @@
_globals["_MLCOMMAND_WRITE_OPTIONSENTRY"]._loaded_options = None
_globals["_MLCOMMAND_WRITE_OPTIONSENTRY"]._serialized_options = b"8\001"
_globals["_MLCOMMAND"]._serialized_start = 137
_globals["_MLCOMMAND"]._serialized_end = 1338
_globals["_MLCOMMAND"]._serialized_end = 1412
_globals["_MLCOMMAND_FIT"]._serialized_start = 480
_globals["_MLCOMMAND_FIT"]._serialized_end = 642
_globals["_MLCOMMAND_DELETE"]._serialized_start = 644
_globals["_MLCOMMAND_DELETE"]._serialized_end = 703
_globals["_MLCOMMAND_WRITE"]._serialized_start = 706
_globals["_MLCOMMAND_WRITE"]._serialized_end = 1074
_globals["_MLCOMMAND_WRITE_OPTIONSENTRY"]._serialized_start = 1008
_globals["_MLCOMMAND_WRITE_OPTIONSENTRY"]._serialized_end = 1066
_globals["_MLCOMMAND_READ"]._serialized_start = 1076
_globals["_MLCOMMAND_READ"]._serialized_end = 1157
_globals["_MLCOMMAND_EVALUATE"]._serialized_start = 1160
_globals["_MLCOMMAND_EVALUATE"]._serialized_end = 1327
_globals["_MLCOMMANDRESULT"]._serialized_start = 1341
_globals["_MLCOMMANDRESULT"]._serialized_end = 1715
_globals["_MLCOMMANDRESULT_MLOPERATORINFO"]._serialized_start = 1534
_globals["_MLCOMMANDRESULT_MLOPERATORINFO"]._serialized_end = 1700
_globals["_MLCOMMAND_FIT"]._serialized_end = 658
_globals["_MLCOMMAND_DELETE"]._serialized_start = 660
_globals["_MLCOMMAND_DELETE"]._serialized_end = 719
_globals["_MLCOMMAND_WRITE"]._serialized_start = 722
_globals["_MLCOMMAND_WRITE"]._serialized_end = 1132
_globals["_MLCOMMAND_WRITE_OPTIONSENTRY"]._serialized_start = 1034
_globals["_MLCOMMAND_WRITE_OPTIONSENTRY"]._serialized_end = 1092
_globals["_MLCOMMAND_READ"]._serialized_start = 1134
_globals["_MLCOMMAND_READ"]._serialized_end = 1215
_globals["_MLCOMMAND_EVALUATE"]._serialized_start = 1218
_globals["_MLCOMMAND_EVALUATE"]._serialized_end = 1401
_globals["_MLCOMMANDRESULT"]._serialized_start = 1415
_globals["_MLCOMMANDRESULT"]._serialized_end = 1818
_globals["_MLCOMMANDRESULT_MLOPERATORINFO"]._serialized_start = 1608
_globals["_MLCOMMANDRESULT_MLOPERATORINFO"]._serialized_end = 1803
# @@protoc_insertion_point(module_scope)
Loading

0 comments on commit 6a71f76

Please sign in to comment.