
Error using Redis Lettuce: Task not serializable


I am using Redis Lettuce in my Scala project as a simple key-value store. I just want to use the set and get methods to update the values of a column. My code is as follows.

I have created a Java helper class to set and get key values:

import io.lettuce.core.api.sync.RedisCommands;

// Thin wrapper around the Lettuce synchronous API for setting and getting string keys.
public class Helper {

    public static void update(RedisCommands<String, String> sync, String key, String value) {
        sync.set(key, value);
    }

    public static String get_id(RedisCommands<String, String> sync, String key) {
        return sync.get(key);
    }
}

In my Scala code I use:

import io.lettuce.core.RedisClient

val client = RedisClient.create("redis://localhost:6378")
val connection = client.connect()
val sync = connection.sync()

Now, to call get_id and update, I pass this sync instance as follows:

import org.apache.spark.sql.functions.{col, udf}

// getid2 calls the helper with the `sync` handle created above
def getid2(s: String): String = {
  Helper.get_id(sync, s)
}

val getIdUDF2 = udf[String, String](getid2)
getIdUDF2(col("id1"))

Here, id1 is a DataFrame column.
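For context, I apply the UDF to the DataFrame roughly like this (the DataFrame name df and the new column name are placeholders, not my exact code):

import org.apache.spark.sql.functions.{col, udf}

// Hypothetical DataFrame `df` with a string column "id1":
// for each row, the UDF looks up the id1 value in Redis and
// stores the result in a new column.
val getIdUDF2 = udf[String, String](getid2)
val enriched = df.withColumn("redis_value", getIdUDF2(col("id1")))
enriched.show()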

Now I get this error:

Exception in thread "main" org.apache.spark.SparkException: Task not serializable
    at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:416)
    at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:406)
    at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:162)
    at org.apache.spark.SparkContext.clean(SparkContext.scala:2477)
    at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndex$1(RDD.scala:912)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:414)
    at org.apache.spark.rdd.RDD.mapPartitionsWithIndex(RDD.scala:911)
    at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:753)
    at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:184)
    at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:222)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:219)
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:180)
    at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:325)
    at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:443)
    at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:429)
    at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:48)
    at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$executeCollect$1(AdaptiveSparkPlanExec.scala:338)
    at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.withFinalPlanUpdate(AdaptiveSparkPlanExec.scala:366)
    at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.executeCollect(AdaptiveSparkPlanExec.scala:338)
    at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3715)
    at org.apache.spark.sql.Dataset.$anonfun$head$1(Dataset.scala:2728)
    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3706)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3704)
    at org.apache.spark.sql.Dataset.head(Dataset.scala:2728)
    at org.apache.spark.sql.Dataset.take(Dataset.scala:2935)
    at org.apache.spark.sql.Dataset.getRows(Dataset.scala:287)
    at org.apache.spark.sql.Dataset.showString(Dataset.scala:326)
    at org.apache.spark.sql.Dataset.show(Dataset.scala:808)
    at org.apache.spot.netflow.FlowSuspiciousConnectsAnalysis$.run(FlowSuspiciousConnectsAnalysis.scala:140)
    at org.apache.spot.SuspiciousConnects$.delayedEndpoint$org$apache$spot$SuspiciousConnects$1(SuspiciousConnects.scala:183)
    at org.apache.spot.SuspiciousConnects$delayedInit$body.apply(SuspiciousConnects.scala:21)
    at scala.Function0.apply$mcV$sp(Function0.scala:39)
    at scala.Function0.apply$mcV$sp$(Function0.scala:39)
    at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:17)
    at scala.App.$anonfun$main$1$adapted(App.scala:80)
    at scala.collection.immutable.List.foreach(List.scala:431)
    at scala.App.main(App.scala:80)
    at scala.App.main$(App.scala:78)
    at org.apache.spot.SuspiciousConnects$.main(SuspiciousConnects.scala:21)
    at org.apache.spot.SuspiciousConnects.main(SuspiciousConnects.scala)
Caused by: java.io.NotSerializableException: io.lettuce.core.FutureSyncInvocationHandler
Serialization stack:
    - object not serializable (class: io.lettuce.core.FutureSyncInvocationHandler, value: io.lettuce.core.FutureSyncInvocationHandler@78fee50c)
    - field (class: java.lang.reflect.Proxy, name: h, type: interface java.lang.reflect.InvocationHandler)
    - object (class com.sun.proxy.$Proxy22, io.lettuce.core.FutureSyncInvocationHandler@78fee50c)
    - element of array (index: 0)
    - array (class [Ljava.lang.Object;, size 1)
    - field (class: java.lang.invoke.SerializedLambda, name: capturedArgs, type: class [Ljava.lang.Object;)
    - object (class java.lang.invoke.SerializedLambda, SerializedLambda[capturingClass=class org.apache.spot.netflow.FlowSuspiciousConnectsAnalysis$, functionalInterfaceMethod=scala/Function1.apply:(Ljava/lang/Object;)Ljava/lang/Object;, implementation=invokeStatic org/apache/spot/netflow/FlowSuspiciousConnectsAnalysis$.$anonfun$run$3:(Lio/lettuce/core/api/sync/RedisCommands;Ljava/lang/String;)Ljava/lang/String;, instantiatedMethodType=(Ljava/lang/String;)Ljava/lang/String;, numCaptured=1])
    - writeReplace data (class: java.lang.invoke.SerializedLambda)
    - object (class org.apache.spot.netflow.FlowSuspiciousConnectsAnalysis$$$Lambda$4817/0x0000000801047840, org.apache.spot.netflow.FlowSuspiciousConnectsAnalysis$$$Lambda$4817/0x0000000801047840@415542ba)
    - element of array (index: 1)
    - array (class [Ljava.lang.Object;, size 4)
    - field (class: java.lang.invoke.SerializedLambda, name: capturedArgs, type: class [Ljava.lang.Object;)
    - object (class java.lang.invoke.SerializedLambda, SerializedLambda[capturingClass=class org.apache.spark.sql.catalyst.expressions.ScalaUDF, functionalInterfaceMethod=scala/Function1.apply:(Ljava/lang/Object;)Ljava/lang/Object;, implementation=invokeStatic org/apache/spark/sql/catalyst/expressions/ScalaUDF.$anonfun$f$2:(Lorg/apache/spark/sql/catalyst/expressions/ScalaUDF;Lscala/Function1;Lorg/apache/spark/sql/catalyst/expressions/Expression;Lscala/runtime/LazyRef;Lorg/apache/spark/sql/catalyst/InternalRow;)Ljava/lang/Object;, instantiatedMethodType=(Lorg/apache/spark/sql/catalyst/InternalRow;)Ljava/lang/Object;, numCaptured=4])
    - writeReplace data (class: java.lang.invoke.SerializedLambda)
    - object (class org.apache.spark.sql.catalyst.expressions.ScalaUDF$$Lambda$3676/0x0000000801552040, org.apache.spark.sql.catalyst.expressions.ScalaUDF$$Lambda$3676/0x0000000801552040@3734e41b)
    - field (class: org.apache.spark.sql.catalyst.expressions.ScalaUDF, name: f, type: interface scala.Function1)
    - object (class org.apache.spark.sql.catalyst.expressions.ScalaUDF, UDF(concat(input[36, string, true], :, input[38, string, true])))
    - field (class: org.apache.spark.sql.catalyst.expressions.ScalaUDF, name: canonicalized, type: class org.apache.spark.sql.catalyst.expressions.Expression)
    - object (class org.apache.spark.sql.catalyst.expressions.ScalaUDF, UDF(concat(input[36, string, true], :, input[38, string, true])))
    - element of array (index: 0)
    - array (class [Ljava.lang.Object;, size 2)
    - field (class: java.lang.invoke.SerializedLambda, name: capturedArgs, type: class [Ljava.lang.Object;)
    - object (class java.lang.invoke.SerializedLambda, SerializedLambda[capturingClass=class org.apache.spark.sql.catalyst.expressions.ScalaUDF, functionalInterfaceMethod=scala/Function1.apply:(Ljava/lang/Object;)Ljava/lang/Object;, implementation=invokeStatic org/apache/spark/sql/catalyst/expressions/ScalaUDF.$anonfun$catalystConverter$3:(Lorg/apache/spark/sql/catalyst/expressions/ScalaUDF;Lscala/Function1;Ljava/lang/Object;)Ljava/lang/Object;, instantiatedMethodType=(Ljava/lang/Object;)Ljava/lang/Object;, numCaptured=2])
    - writeReplace data (class: java.lang.invoke.SerializedLambda)
    - object (class org.apache.spark.sql.catalyst.expressions.ScalaUDF$$Lambda$3678/0x0000000801553840, org.apache.spark.sql.catalyst.expressions.ScalaUDF$$Lambda$3678/0x0000000801553840@533ad9b)
    - element of array (index: 1)
    - array (class [Lscala.Function1;, size 2)
    - element of array (index: 4)
    - array (class [Ljava.lang.Object;, size 8)
    - element of array (index: 1)
    - array (class [Ljava.lang.Object;, size 3)
    - field (class: java.lang.invoke.SerializedLambda, name: capturedArgs, type: class [Ljava.lang.Object;)
    - object (class java.lang.invoke.SerializedLambda, SerializedLambda[capturingClass=class org.apache.spark.sql.execution.WholeStageCodegenExec, functionalInterfaceMethod=scala/Function2.apply:(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;, implementation=invokeStatic org/apache/spark/sql/execution/WholeStageCodegenExec.$anonfun$doExecute$4$adapted:(Lorg/apache/spark/sql/catalyst/expressions/codegen/CodeAndComment;[Ljava/lang/Object;Lorg/apache/spark/sql/execution/metric/SQLMetric;Ljava/lang/Object;Lscala/collection/Iterator;)Lscala/collection/Iterator;, instantiatedMethodType=(Ljava/lang/Object;Lscala/collection/Iterator;)Lscala/collection/Iterator;, numCaptured=3])
    - writeReplace data (class: java.lang.invoke.SerializedLambda)
    - object (class org.apache.spark.sql.execution.WholeStageCodegenExec$$Lambda$3152/0x0000000801335040, org.apache.spark.sql.execution.WholeStageCodegenExec$$Lambda$3152/0x0000000801335040@51d5a186)
    at org.apache.spark.serializer.SerializationDebugger$.improveException(SerializationDebugger.scala:41)
    at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:47)
    at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:101)
    at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:413)
    ... 47 more

I don't understand this error.

NOTE: I am open to using any other key-value database if it is more compatible with Scala.
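Reading the Caused by line again, it seems the part Spark cannot serialize is the Lettuce sync handle (a java.lang.reflect.Proxy backed by io.lettuce.core.FutureSyncInvocationHandler) that my getid2 function captures from the enclosing scope. Below is a minimal, untested sketch of the workaround I am considering: keep only the Redis URL in the closure and create the connection lazily, once per executor JVM. The object name RedisLookup is made up for illustration; the URL is the same one I use above.

import io.lettuce.core.RedisClient
import io.lettuce.core.api.sync.RedisCommands
import org.apache.spark.sql.functions.udf

// Hypothetical top-level holder (not in my current code): the UDF closure only
// references this object, so the non-serializable connection is never shipped.
// Each executor JVM initializes the lazy val the first time the UDF runs there.
object RedisLookup {
  @transient lazy val sync: RedisCommands[String, String] =
    RedisClient.create("redis://localhost:6378").connect().sync()

  def getId(key: String): String = sync.get(key)
}

val getIdUDF2 = udf[String, String](RedisLookup.getId _)

Is this the right pattern for per-executor connections, or would a different key-value store make this simpler?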

