Describe the bug
When following the documentation on how to use st_transform, Mosaic throws an error:
```
Py4JJavaError: An error occurred while calling o33373.showString.
: java.lang.Exception:
Input type encoding WKT is not supported!
Supported encodings are: COORDS, GEOJSON
```

To Reproduce
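The query below is reconstructed from the stack trace under Additional context; the imports and the `enable_mosaic` setup call are assumptions taken from the Mosaic documentation, since they are not visible in the trace.

```python
from pyspark.sql.functions import lit
import mosaic as mos  # databricks-labs-mosaic

# Assumed setup per the Mosaic docs; spark and dbutils are the
# Databricks notebook globals.
mos.enable_mosaic(spark, dbutils)

df = (
    spark.createDataFrame([{'wkt': 'MULTIPOINT ((10 40), (40 30), (20 20), (30 10))'}])
    .withColumn('geom', mos.st_setsrid(mos.st_asgeojson('wkt'), lit(4326)))
)

# Fails with: "Input type encoding WKT is not supported!"
df.select(mos.st_astext(mos.st_transform('geom', lit(3857)))).show(1, False)
```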
Expected behavior
The geometry is reprojected from EPSG:4326 to EPSG:3857, and the query prints the transformed geometry as WKT instead of raising.

Additional context
Full Python and JVM stack trace follows; the exception originates in RequiresCRS.checkEncoding, invoked from ST_Transform's code generation:
```
---------------------------------------------------------------------------
Py4JJavaError Traceback (most recent call last)
File <command-230049760647307>, line 5
1 df = (
2 spark.createDataFrame([{'wkt': 'MULTIPOINT ((10 40), (40 30), (20 20), (30 10))'}])
3 .withColumn('geom', mos.st_setsrid(mos.st_asgeojson('wkt'), lit(4326)))
4 )
----> 5 df.select(mos.st_astext(mos.st_transform('geom', lit(3857)))).show(1, False)
File /databricks/spark/python/pyspark/instrumentation_utils.py:48, in _wrap_function.<locals>.wrapper(*args, **kwargs)
46 start = time.perf_counter()
47 try:
---> 48 res = func(*args, **kwargs)
49 logger.log_success(
50 module_name, class_name, function_name, time.perf_counter() - start, signature
51 )
52 return res
File /databricks/spark/python/pyspark/sql/dataframe.py:947, in DataFrame.show(self, n, truncate, vertical)
938 except ValueError:
939 raise PySparkTypeError(
940 error_class="NOT_BOOL",
941 message_parameters={
(...)
944 },
945 )
--> 947 print(self._jdf.showString(n, int_truncate, vertical))
File /databricks/spark/python/lib/py4j-0.10.9.7-src.zip/py4j/java_gateway.py:1322, in JavaMember.__call__(self, *args)
1316 command = proto.CALL_COMMAND_NAME +\
1317 self.command_header +\
1318 args_command +\
1319 proto.END_COMMAND_PART
1321 answer = self.gateway_client.send_command(command)
-> 1322 return_value = get_return_value(
1323 answer, self.gateway_client, self.target_id, self.name)
1325 for temp_arg in temp_args:
1326 if hasattr(temp_arg, "_detach"):
File /databricks/spark/python/pyspark/errors/exceptions/captured.py:188, in capture_sql_exception.<locals>.deco(*a, **kw)
186 def deco(*a: Any, **kw: Any) -> Any:
187 try:
--> 188 return f(*a, **kw)
189 except Py4JJavaError as e:
190 converted = convert_exception(e.java_exception)
File /databricks/spark/python/lib/py4j-0.10.9.7-src.zip/py4j/protocol.py:326, in get_return_value(answer, gateway_client, target_id, name)
324 value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
325 if answer[1] == REFERENCE_TYPE:
--> 326 raise Py4JJavaError(
327 "An error occurred while calling {0}{1}{2}.\n".
328 format(target_id, ".", name), value)
329 else:
330 raise Py4JError(
331 "An error occurred while calling {0}{1}{2}. Trace:\n{3}\n".
332 format(target_id, ".", name, value))
Py4JJavaError: An error occurred while calling o33373.showString.
: java.lang.Exception:
Input type encoding WKT is not supported!
Supported encodings are: COORDS, GEOJSON
at com.databricks.labs.mosaic.expressions.geometry.base.RequiresCRS.checkEncoding(RequiresCRS.scala:28)
at com.databricks.labs.mosaic.expressions.geometry.base.RequiresCRS.checkEncoding$(RequiresCRS.scala:22)
at com.databricks.labs.mosaic.expressions.geometry.ST_Transform.checkEncoding(ST_Transform.scala:22)
at com.databricks.labs.mosaic.expressions.geometry.ST_Transform.geometryCodeGen(ST_Transform.scala:43)
at com.databricks.labs.mosaic.expressions.geometry.base.UnaryVector1ArgExpression.$anonfun$doGenCode$1(UnaryVector1ArgExpression.scala:116)
at org.apache.spark.sql.catalyst.expressions.BinaryExpression.nullSafeCodeGen(Expression.scala:853)
at com.databricks.labs.mosaic.expressions.geometry.base.UnaryVector1ArgExpression.doGenCode(UnaryVector1ArgExpression.scala:113)
at org.apache.spark.sql.catalyst.expressions.Expression.genCodeInternal(Expression.scala:256)
at org.apache.spark.sql.catalyst.expressions.Expression.$anonfun$genCode$2(Expression.scala:232)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:232)
at org.apache.spark.sql.catalyst.expressions.UnaryExpression.nullSafeCodeGen(Expression.scala:720)
at com.databricks.labs.mosaic.expressions.format.ConvertTo.$anonfun$doGenCode$1(ConvertTo.scala:105)
at com.databricks.labs.mosaic.codegen.format.ConvertToCodeGen$.doCodeGen(ConvertToCodeGen.scala:23)
at com.databricks.labs.mosaic.expressions.format.ConvertTo.doGenCode(ConvertTo.scala:108)
at org.apache.spark.sql.catalyst.expressions.Expression.genCodeInternal(Expression.scala:256)
at org.apache.spark.sql.catalyst.expressions.Expression.$anonfun$genCode$2(Expression.scala:232)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:232)
at org.apache.spark.sql.catalyst.expressions.ToPrettyString.doGenCode(ToPrettyString.scala:62)
at org.apache.spark.sql.catalyst.expressions.Expression.genCodeInternal(Expression.scala:256)
at org.apache.spark.sql.catalyst.expressions.Expression.$anonfun$genCode$2(Expression.scala:232)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:232)
at org.apache.spark.sql.catalyst.expressions.Alias.genCodeInternal(namedExpressions.scala:170)
at com.databricks.sql.expressions.codegen.EdgeExpressionCodegen$.$anonfun$genCodeWithFallback$2(EdgeExpressionCodegen.scala:270)
at scala.Option.getOrElse(Option.scala:189)
at com.databricks.sql.expressions.codegen.EdgeExpressionCodegen$.$anonfun$genCodeWithFallback$1(EdgeExpressionCodegen.scala:270)
at scala.Option.getOrElse(Option.scala:189)
at com.databricks.sql.expressions.codegen.EdgeExpressionCodegen$.genCodeWithFallback(EdgeExpressionCodegen.scala:268)
at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext.generateExpression(CodeGenerator.scala:1450)
at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext.$anonfun$generateExpressionsForWholeStageWithCSE$2(CodeGenerator.scala:1531)
at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
at scala.collection.TraversableLike.map(TraversableLike.scala:286)
at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
at scala.collection.AbstractTraversable.map(Traversable.scala:108)
at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext.$anonfun$generateExpressionsForWholeStageWithCSE$1(CodeGenerator.scala:1529)
at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext.withSubExprEliminationExprs(CodeGenerator.scala:1183)
at org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext.generateExpressionsForWholeStageWithCSE(CodeGenerator.scala:1529)
at org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:76)
at org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:199)
at org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:154)
at org.apache.spark.sql.execution.RDDScanExec.consume(ExistingRDD.scala:304)
at org.apache.spark.sql.execution.InputRDDCodegen.doProduce(WholeStageCodegenExec.scala:490)
at org.apache.spark.sql.execution.InputRDDCodegen.doProduce$(WholeStageCodegenExec.scala:463)
at org.apache.spark.sql.execution.RDDScanExec.doProduce(ExistingRDD.scala:304)
at org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:99)
at org.apache.spark.sql.execution.SparkPlan$.org$apache$spark$sql$execution$SparkPlan$$withExecuteQueryLogging(SparkPlan.scala:122)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:347)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:165)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:343)
at org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:94)
at org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:93)
at org.apache.spark.sql.execution.RDDScanExec.produce(ExistingRDD.scala:304)
at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:59)
at org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:99)
at org.apache.spark.sql.execution.SparkPlan$.org$apache$spark$sql$execution$SparkPlan$$withExecuteQueryLogging(SparkPlan.scala:122)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:347)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:165)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:343)
at org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:94)
at org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:93)
at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:46)
at org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:663)
at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:726)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$2(SparkPlan.scala:289)
at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:289)
at org.apache.spark.sql.execution.SparkPlan$.org$apache$spark$sql$execution$SparkPlan$$withExecuteQueryLogging(SparkPlan.scala:122)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:347)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:165)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:343)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:284)
at org.apache.spark.sql.execution.collect.Collector$.collect(Collector.scala:112)
at org.apache.spark.sql.execution.collect.Collector$.collect(Collector.scala:124)
at org.apache.spark.sql.execution.qrc.InternalRowFormat$.collect(cachedSparkResults.scala:126)
at org.apache.spark.sql.execution.qrc.InternalRowFormat$.collect(cachedSparkResults.scala:114)
at org.apache.spark.sql.execution.qrc.InternalRowFormat$.collect(cachedSparkResults.scala:94)
at org.apache.spark.sql.execution.qrc.ResultCacheManager.$anonfun$computeResult$1(ResultCacheManager.scala:553)
at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)
at org.apache.spark.sql.execution.qrc.ResultCacheManager.collectResult$1(ResultCacheManager.scala:545)
at org.apache.spark.sql.execution.qrc.ResultCacheManager.computeResult(ResultCacheManager.scala:565)
at org.apache.spark.sql.execution.qrc.ResultCacheManager.$anonfun$getOrComputeResultInternal$1(ResultCacheManager.scala:426)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.execution.qrc.ResultCacheManager.getOrComputeResultInternal(ResultCacheManager.scala:419)
at org.apache.spark.sql.execution.qrc.ResultCacheManager.getOrComputeResult(ResultCacheManager.scala:313)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeCollectResult$1(SparkPlan.scala:519)
at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)
at org.apache.spark.sql.execution.SparkPlan.executeCollectResult(SparkPlan.scala:516)
at org.apache.spark.sql.Dataset.collectResult(Dataset.scala:3628)
at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:4553)
at org.apache.spark.sql.Dataset.$anonfun$head$1(Dataset.scala:3336)
at org.apache.spark.sql.Dataset.$anonfun$withAction$3(Dataset.scala:4544)
at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:945)
at org.apache.spark.sql.Dataset.$anonfun$withAction$2(Dataset.scala:4542)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$8(SQLExecution.scala:282)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:510)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$1(SQLExecution.scala:209)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1138)
at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:152)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:459)
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:4542)
at org.apache.spark.sql.Dataset.head(Dataset.scala:3336)
at org.apache.spark.sql.Dataset.take(Dataset.scala:3559)
at org.apache.spark.sql.Dataset.getRows(Dataset.scala:322)
at org.apache.spark.sql.Dataset.showString(Dataset.scala:357)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:397)
at py4j.Gateway.invoke(Gateway.java:306)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:195)
at py4j.ClientServerConnection.run(ClientServerConnection.java:115)
at java.lang.Thread.run(Thread.java:750)
```
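For completeness, an untested sketch of a possible workaround, based only on the error message's statement that COORDS and GEOJSON are the supported encodings. The `st_geomfromwkt` constructor is an assumption about the available Mosaic conversion functions and is not confirmed by this trace; the idea is to hand st_transform a geometry built from Mosaic's own constructor rather than one derived from the raw WKT string:

```python
# Hypothetical workaround sketch (untested): construct the geometry with
# st_geomfromwkt (assumed to exist in the Mosaic API) so that st_transform
# receives a geometry in a supported internal encoding.
df.select(
    mos.st_astext(
        mos.st_transform(
            mos.st_setsrid(mos.st_geomfromwkt('wkt'), lit(4326)),
            lit(3857),
        )
    )
).show(1, False)
```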