spark-milvus connector connection issue in databricks #39023
Unanswered
vihariazure
asked this question in
Q&A and General discussion
Replies: 1 comment 1 reply
-
Unfortunately, currently we have no human power to maintain this tool. It would be good if some contributors can help on this. |
Beta Was this translation helpful? Give feedback.
1 reply
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
-
Hi team,
I am trying to use the spark-milvus connector in Databricks.
I followed the link below:
https://milvus.io/docs/integrate_with_spark.md
I installed spark-milvus-1.0.0-SNAPSHOT.jar on the cluster.
I ran the code below:
from pyspark.sql import SparkSession
columns = ["id", "text", "vec"]
data = [(1, "a", [1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0]),
(2, "b", [1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0]),
(3, "c", [1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0]),
(4, "d", [1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0])]
sample_df = spark.sparkContext.parallelize(data).toDF(columns)
sample_df.write
.mode("append")
.option("milvus.host", "ip address")
.option("milvus.port", "19530")
.option("milvus.collection.name", "hello_spark_milvus")
.option("milvus.collection.vectorField", "vec")
.option("milvus.collection.vectorDim", "8")
.option("milvus.collection.primaryKeyField", "id")
.format("milvus")
.save()
I am getting the error below:
Py4JJavaError: An error occurred while calling o9659.save.
: java.lang.Exception: Fail to list databases: R{exception=StatusRuntimeException: INTERNAL: Panic! This is a bug!, status=-3, data=null}
at zilliztech.spark.milvus.Milvus.getTable(Milvus.scala:29)
at org.apache.spark.sql.DataFrameWriter.getTable$1(DataFrameWriter.scala:331)
at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:349)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:281)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:569)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:397)
at py4j.Gateway.invoke(Gateway.java:306)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:199)
at py4j.ClientServerConnection.run(ClientServerConnection.java:119)
at java.base/java.lang.Thread.run(Thread.java:840)
File , line 10
1 sample_df.write
2 .mode("append")
3 .option("milvus.host", "ip address")
4 .option("milvus.port", "19530")
5 .option("milvus.collection.name", "hello_spark_milvus")
6 .option("milvus.collection.vectorField", "vec")
7 .option("milvus.collection.vectorDim", "8")
8 .option("milvus.collection.primaryKeyField", "id")
9 .format("milvus")
---> 10 .save()
File /databricks/spark/python/pyspark/instrumentation_utils.py:47, in _wrap_function..wrapper(*args, **kwargs)
45 start = time.perf_counter()
46 try:
---> 47 res = func(*args, **kwargs)
48 logger.log_success(
49 module_name, class_name, function_name, time.perf_counter() - start, signature
50 )
51 return res
File /databricks/spark/python/pyspark/sql/readwriter.py:1730, in DataFrameWriter.save(self, path, format, mode, partitionBy, **options)
1728 self.format(format)
1729 if path is None:
-> 1730 self._jwrite.save()
1731 else:
1732 self._jwrite.save(path)
File /databricks/spark/python/lib/py4j-0.10.9.7-src.zip/py4j/java_gateway.py:1355, in JavaMember.call(self, *args)
1349 command = proto.CALL_COMMAND_NAME +
1350 self.command_header +
1351 args_command +
1352 proto.END_COMMAND_PART
1354 answer = self.gateway_client.send_command(command)
-> 1355 return_value = get_return_value(
1356 answer, self.gateway_client, self.target_id, self.name)
1358 for temp_arg in temp_args:
1359 if hasattr(temp_arg, "_detach"):
File /databricks/spark/python/pyspark/errors/exceptions/captured.py:263, in capture_sql_exception..deco(*a, **kw)
260 from py4j.protocol import Py4JJavaError
262 try:
--> 263 return f(*a, **kw)
264 except Py4JJavaError as e:
265 converted = convert_exception(e.java_exception)
File /databricks/spark/python/lib/py4j-0.10.9.7-src.zip/py4j/protocol.py:326, in get_return_value(answer, gateway_client, target_id, name)
324 value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
325 if answer[1] == REFERENCE_TYPE:
--> 326 raise Py4JJavaError(
327 "An error occurred while calling {0}{1}{2}.\n".
328 format(target_id, ".", name), value)
329 else:
330 raise Py4JError(
331 "An error occurred while calling {0}{1}{2}. Trace:\n{3}\n".
332 format(target_id, ".", name, value))
Beta Was this translation helpful? Give feedback.
All reactions