Dear Palantir Team,
While building a spatial analysis pipeline in Palantir Foundry, I tried to calculate the shortest route distance between two geographic points using the built-in geometry functions:
route_distance = Geometry shortest distance(
    Geometry a: Convert GeoPoint to geometry(departure_geo_point),
    Geometry b: Convert GeoPoint to geometry(arrival_geo_point)
)
This formula computes the distance between two coordinates by first converting the GeoPoint values into Geometry objects.
During preview and job execution, the pipeline failed with this stack trace:
Query failed to complete successfully: {jobId=e045854c-e6b1-478e-b4ff-db2a7f2e393e, errorInstanceId=, errorCode=500, errorName=Default:Internal, causeMessage='int com.google.common.geometry.S2ShapeIndex$Cell.numEdges()'
Stacktrace:
java.lang.NoSuchMethodError: 'int com.google.common.geometry.S2ShapeIndex$Cell.numEdges()'
at com.palantir.eddie.geotime.S2Utils.countS2CellsAndEdges(S2Utils.java:513)
at com.palantir.eddie.geotime.S2Utils.isFirstBestIndex(S2Utils.java:497)
at com.palantir.eddie.geotime.S2Utils.distanceBetweenGeometries(S2Utils.java:474)
at com.palantir.eddie.geotime.GeoUtils.shortestDistanceBetweenGeometries(GeoUtils.java:1142)
at org.apache.spark.sql.catalyst.expressions.EddieNativeGeometryShortestDistance.nullSafeEval(EddieNativeGeometryShortestDistance.scala:29)
at org.apache.spark.sql.catalyst.expressions.BinaryExpression.eval(Expression.scala:672)
at org.apache.spark.sql.catalyst.expressions.Alias.eval(namedExpressions.scala:158)
at org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.apply(InterpretedMutableProjection.scala:89)
at org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$$anonfun$apply$47.$anonfun$applyOrElse$82(Optimizer.scala:2162)
at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
at scala.collection.TraversableLike.map(TraversableLike.scala:286)
at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
at scala.collection.AbstractTraversable.map(Traversable.scala:108)
at org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$$anonfun$apply$47.applyOrElse(Optimizer.scala:2162)
at org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$$anonfun$apply$47.applyOrElse(Optimizer.scala:2157)
at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:461)
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:76)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:461)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$3(TreeNode.scala:466)
at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1216)
at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1215)
at org.apache.spark.sql.catalyst.plans.logical.Project.mapChildren(basicLogicalOperators.scala:71)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:466)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$3(TreeNode.scala:466)
at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1216)
at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1215)
at org.apache.spark.sql.catalyst.plans.logical.LocalLimit.mapChildren(basicLogicalOperators.scala:1608)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:466)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$3(TreeNode.scala:466)
at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1216)
at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1215)
at org.apache.spark.sql.catalyst.plans.logical.GlobalLimit.mapChildren(basicLogicalOperators.scala:1587)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:466)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformWithPruning(TreeNode.scala:427)
at org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$.apply(Optimizer.scala:2157)
at org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$.apply(Optimizer.scala:2155)
at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:222)
at scala.collection.IndexedSeqOptimized.foldLeft(IndexedSeqOptimized.scala:60)
at scala.collection.IndexedSeqOptimized.foldLeft$(IndexedSeqOptimized.scala:68)
at scala.collection.mutable.WrappedArray.foldLeft(WrappedArray.scala:38)
at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:219)
at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:211)
at scala.collection.immutable.List.foreach(List.scala:431)
at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:211)
at org.apache.spark.sql.FoundrySessionStateBuilder$$anon$2.super$execute(FoundrySessionStateBuilder.scala:96)
at org.apache.spark.sql.FoundrySessionStateBuilder$$anon$2.$anonfun$execute$2(FoundrySessionStateBuilder.scala:96)
at com.palantir.foundry.spark.Tracing$.trace(Tracing.scala:13)
at org.apache.spark.sql.FoundrySessionStateBuilder$$anon$2.execute(FoundrySessionStateBuilder.scala:96)
at org.apache.spark.sql.FoundrySessionStateBuilder$$anon$2.execute(FoundrySessionStateBuilder.scala:93)
at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:182)
at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:89)
at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:182)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:152)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:138)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:219)
at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:546)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:219)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:218)
at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:148)
at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:144)
at org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:162)
at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:182)
at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:179)
at org.apache.spark.sql.execution.QueryExecution.assertExecutedPlanPrepared(QueryExecution.scala:194)
at com.palantir.eddie.spark.serialization.SparkResultSerializerV1.lambda$collectAndSerialize$0(SparkResultSerializerV1.java:96)
at com.codahale.metrics.Timer.timeSupplier(Timer.java:136)
at com.palantir.eddie.functions.compute.spark.module.serialization.SparkResultSerializerFactory.lambda$constructTimer$2(SparkResultSerializerFactory.java:41)
at com.palantir.eddie.spark.serialization.SparkResultSerializerV1.collectAndSerialize(SparkResultSerializerV1.java:95)
at com.palantir.eddie.functions.compute.spark.module.serialization.PreviewTableCollector.collect(PreviewTableCollector.java:43)
at com.palantir.eddie.functions.compute.spark.module.ResultHelperV2$1.visitTable(ResultHelperV2.java:62)
at com.palantir.eddie.functions.compute.spark.module.ResultHelperV2$1.visitTable(ResultHelperV2.java:58)
at com.palantir.eddie.functions.implementations.spark.SparkData$SparkTable.accept(SparkData.java:97)
at com.palantir.eddie.functions.compute.spark.module.ResultHelperV2.collect(ResultHelperV2.java:58)
at com.palantir.eddie.functions.compute.spark.module.postprocessors.PreviewPostProcessorFactory.lambda$createPreviewPostProcessor$0(PreviewPostProcessorFactory.java:71)
at com.palantir.eddie.functions.compute.spark.module.preview.DefaultPreviewComputer.lambda$run$11(DefaultPreviewComputer.java:381)
at java.base/java.util.stream.Collectors.lambda$uniqKeysMapAccumulator$1(Collectors.java:180)
at java.base/java.util.stream.ReduceOps$3ReducingSink.accept(ReduceOps.java:169)
at java.base/java.util.Iterator.forEachRemaining(Iterator.java:133)
at java.base/java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1939)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499)
at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921)
at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682)
at com.palantir.eddie.functions.compute.spark.module.preview.DefaultPreviewComputer.run(DefaultPreviewComputer.java:379)
at com.palantir.eddie.functions.compute.spark.module.preview.FoundryPreviewManager.lambda$run$1(FoundryPreviewManager.java:209)
at com.palantir.sparkreporter.tagging.SparkReporterTaggingUtils.runWithSparkReporterProperties(SparkReporterTaggingUtils.java:50)
at com.palantir.eddie.functions.compute.spark.module.preview.FoundryPreviewManager.lambda$wrapWithSparkReporterProperties$6(FoundryPreviewManager.java:308)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
at com.palantir.eddie.functions.compute.spark.module.preview.FoundryPreviewManager.lambda$run$2(FoundryPreviewManager.java:224)
at com.palantir.foundry.spark.api.SparkAuthorization.runAsUser(SparkAuthorization.java:65)
at com.palantir.eddie.functions.compute.spark.module.preview.FoundryPreviewManager.run(FoundryPreviewManager.java:235)
at com.palantir.eddie.functions.compute.spark.module.preview.PreviewQueryEvaluator.execute(PreviewQueryEvaluator.java:144)
at com.palantir.eddie.functions.compute.spark.module.preview.PreviewQueryEvaluator.lambda$apply$0(PreviewQueryEvaluator.java:108)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
at com.palantir.eddie.functions.compute.spark.module.checkpoint.DefaultFoundryCheckpointManagerWrapper.run(DefaultFoundryCheckpointManagerWrapper.java:60)
at com.palantir.eddie.functions.compute.spark.module.preview.PreviewQueryEvaluator.apply(PreviewQueryEvaluator.java:102)
at com.palantir.eddie.functions.compute.spark.module.preview.PreviewQueryEvaluator.apply(PreviewQueryEvaluator.java:78)
at com.palantir.eddie.functions.compute.spark.module.preview.PreviewQueryEvaluator.apply(PreviewQueryEvaluator.java:48)
at com.palantir.interactive.module.tasks.queries.QueryRunner.runBlockingUnmapped(QueryRunner.java:132)
at com.palantir.interactive.module.tasks.queries.QueryRunner.runBlocking(QueryRunner.java:128)
at com.palantir.interactive.module.tasks.InteractiveModuleResource.lambda$submitInternal$11(InteractiveModuleResource.java:398)
at com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:128)
at com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:74)
at com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:80)
at com.palantir.tracing.Tracers$TracingAwareRunnable.run(Tracers.java:588)
at com.palantir.tritium.metrics.TaggedMetricsExecutorService$TaggedMetricsRunnable.run(TaggedMetricsExecutorService.java:134)
at org.jboss.threads.ContextHandler$1.runWith(ContextHandler.java:18)
at org.jboss.threads.EnhancedQueueExecutor$Task.doRunWith(EnhancedQueueExecutor.java:2651)
at org.jboss.threads.EnhancedQueueExecutor$Task.run(EnhancedQueueExecutor.java:2630)
at org.jboss.threads.EnhancedQueueExecutor$ThreadBody.run(EnhancedQueueExecutor.java:1586)
at com.palantir.tritium.metrics.TaggedMetricsThreadFactory$InstrumentedTask.run(TaggedMetricsThreadFactory.java:94)
at java.base/java.lang.Thread.run(Thread.java:1583)
, jobId=e045854c-e6b1-478e-b4ff-db2a7f2e393e}