Skip to content

Instantly share code, notes, and snippets.

Show Gist options

  • Select an option

  • Save marcellustavares/9e319d2c8ebd4a940d7705748b278c68 to your computer and use it in GitHub Desktop.
error.log
21/09/03 03:38:03 ERROR org.apache.spark.network.server.TransportRequestHandler: Error sending result RpcResponse[requestId=9072343615543885884,body=NioManagedBuffer[buf=java.nio.HeapByteBuffer[pos=0 lim=47 cap=64]]] to /10.158.0.12:32894; closing connection
java.nio.channels.ClosedChannelException
at io.netty.channel.AbstractChannel$AbstractUnsafe.newClosedChannelException(AbstractChannel.java:957)
at io.netty.channel.AbstractChannel$AbstractUnsafe.write(AbstractChannel.java:865)
at io.netty.channel.DefaultChannelPipeline$HeadContext.write(DefaultChannelPipeline.java:1367)
at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:717)
at io.netty.channel.AbstractChannelHandlerContext.invokeWriteAndFlush(AbstractChannelHandlerContext.java:764)
at io.netty.channel.AbstractChannelHandlerContext$WriteTask.run(AbstractChannelHandlerContext.java:1071)
at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:164)
at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:472)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:500)
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
21/09/03 03:38:03 ERROR org.apache.spark.network.server.TransportRequestHandler: Error sending result RpcResponse[requestId=7047502491092075107,body=NioManagedBuffer[buf=java.nio.HeapByteBuffer[pos=0 lim=47 cap=64]]] to /10.158.0.13:50590; closing connection
java.nio.channels.ClosedChannelException
at io.netty.channel.AbstractChannel$AbstractUnsafe.newClosedChannelException(AbstractChannel.java:957)
at io.netty.channel.AbstractChannel$AbstractUnsafe.write(AbstractChannel.java:865)
at io.netty.channel.DefaultChannelPipeline$HeadContext.write(DefaultChannelPipeline.java:1367)
at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:717)
at io.netty.channel.AbstractChannelHandlerContext.invokeWriteAndFlush(AbstractChannelHandlerContext.java:764)
at io.netty.channel.AbstractChannelHandlerContext$WriteTask.run(AbstractChannelHandlerContext.java:1071)
at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:164)
at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:472)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:500)
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
21/09/03 03:38:03 ERROR org.apache.spark.network.server.TransportRequestHandler: Error sending result RpcResponse[requestId=6074883810899000227,body=NioManagedBuffer[buf=java.nio.HeapByteBuffer[pos=0 lim=47 cap=64]]] to /10.158.0.13:50592; closing connection
java.nio.channels.ClosedChannelException
at io.netty.channel.AbstractChannel$AbstractUnsafe.newClosedChannelException(AbstractChannel.java:957)
at io.netty.channel.AbstractChannel$AbstractUnsafe.write(AbstractChannel.java:865)
at io.netty.channel.DefaultChannelPipeline$HeadContext.write(DefaultChannelPipeline.java:1367)
at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:717)
at io.netty.channel.AbstractChannelHandlerContext.invokeWriteAndFlush(AbstractChannelHandlerContext.java:764)
at io.netty.channel.AbstractChannelHandlerContext$WriteTask.run(AbstractChannelHandlerContext.java:1071)
at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:164)
at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:472)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:500)
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
21/09/03 03:41:47 WARN org.apache.spark.sql.execution.streaming.FileStreamSource: Listed 2742 file(s) in 268391 ms
21/09/03 03:43:07 ERROR org.apache.spark.network.server.TransportRequestHandler: Error sending result RpcResponse[requestId=4616588079814214943,body=NioManagedBuffer[buf=java.nio.HeapByteBuffer[pos=0 lim=81 cap=156]]] to /10.158.0.13:56876; closing connection
java.nio.channels.ClosedChannelException
at io.netty.channel.AbstractChannel$AbstractUnsafe.newClosedChannelException(AbstractChannel.java:957)
at io.netty.channel.AbstractChannel$AbstractUnsafe.write(AbstractChannel.java:865)
at io.netty.channel.DefaultChannelPipeline$HeadContext.write(DefaultChannelPipeline.java:1367)
at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:717)
at io.netty.channel.AbstractChannelHandlerContext.invokeWriteAndFlush(AbstractChannelHandlerContext.java:764)
at io.netty.channel.AbstractChannelHandlerContext$WriteTask.run(AbstractChannelHandlerContext.java:1071)
at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:164)
at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:472)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:500)
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
21/09/03 03:43:25 INFO com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction: Saving finished session events
21/09/03 03:44:25 ERROR org.apache.spark.sql.execution.datasources.FileFormatWriter: Aborting job e46d571a-72a4-47ca-abed-1e4c68e946ef.
java.lang.OutOfMemoryError: Java heap space
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.buildContentChunk(MediaHttpUploader.java:579)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.resumableUpload(MediaHttpUploader.java:380)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.upload(MediaHttpUploader.java:308)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:528)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:455)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:565)
at com.google.cloud.hadoop.repackaged.gcs.com.google.cloud.hadoop.util.AbstractGoogleAsyncWriteChannel$UploadOperation.call(AbstractGoogleAsyncWriteChannel.java:85)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
21/09/03 03:44:25 ERROR org.apache.spark.sql.execution.streaming.MicroBatchExecution: Query [id = 43f96638-2285-4fc0-96fb-b93014daa8f5, runId = 4d1ce4ac-dd6d-47eb-880b-c7e8dfdbd13d] terminated with error
org.apache.spark.SparkException: Job aborted.
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:231)
at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:188)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:108)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:106)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:131)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:132)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:131)
at org.apache.spark.sql.DataFrameWriter.$anonfun$runCommand$1(DataFrameWriter.scala:989)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:989)
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:438)
at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:415)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:293)
at com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction.saveFinishedSessionEvents(SessionBatchSinkFunction.java:91)
at com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction.call(SessionBatchSinkFunction.java:53)
at com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction.call(SessionBatchSinkFunction.java:32)
at org.apache.spark.sql.streaming.DataStreamWriter.$anonfun$foreachBatch$1(DataStreamWriter.scala:539)
at org.apache.spark.sql.streaming.DataStreamWriter.$anonfun$foreachBatch$1$adapted(DataStreamWriter.scala:539)
at org.apache.spark.sql.execution.streaming.sources.ForeachBatchSink.addBatch(ForeachBatchSink.scala:35)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$16(MicroBatchExecution.scala:586)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$15(MicroBatchExecution.scala:584)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:357)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:355)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:584)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:226)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:357)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:355)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:194)
at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:188)
at org.apache.spark.sql.execution.streaming.StreamExecution.$anonfun$runStream$1(StreamExecution.scala:334)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:317)
at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
Caused by: java.lang.OutOfMemoryError: Java heap space
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.buildContentChunk(MediaHttpUploader.java:579)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.resumableUpload(MediaHttpUploader.java:380)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.upload(MediaHttpUploader.java:308)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:528)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:455)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:565)
at com.google.cloud.hadoop.repackaged.gcs.com.google.cloud.hadoop.util.AbstractGoogleAsyncWriteChannel$UploadOperation.call(AbstractGoogleAsyncWriteChannel.java:85)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
21/09/03 03:44:25 ERROR com.liferay.osb.asah.spark.session.SessionSparkJob: org.apache.spark.sql.streaming.StreamingQueryException: Job aborted.
=== Streaming Query ===
Identifier: [id = 43f96638-2285-4fc0-96fb-b93014daa8f5, runId = 4d1ce4ac-dd6d-47eb-880b-c7e8dfdbd13d]
Current Committed Offsets: {FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*]: {"logOffset":220}}
Current Available Offsets: {FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*]: {"logOffset":221}}
Current State: ACTIVE
Thread State: RUNNABLE
Logical Plan:
SerializeFromObject [externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 6), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 7), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getAcquisition) AS acquisition#194, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getBrowserName, true, false) AS browserName#195, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 8), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCanonicalUrls, None) AS canonicalUrls#196, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getChannelId, true, false) AS channelId#197, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCity, true, false) AS city#198, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getClientIp, true, false) 
AS clientIp#199, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCountry, true, false) AS country#200, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDataSourceId, true, false) AS dataSourceId#201, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDate, true, false) AS date#202, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDeviceType, true, false) AS deviceType#203, mapobjects(lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9), if (isnull(lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9))) null else named_struct(applicationId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getApplicationId, true, false), channelId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getChannelId, true, false), clientIp, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getClientIp, true, false), context, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 10), staticinvoke(class 
org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 10), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 11), true, false), lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getContext), createDate, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getCreateDate, true, false), dataSourceId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getDataSourceId, true, false), date, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getDate, true, false), eventDate, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventDate, true, false), eventId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventId, true, false), eventProperties, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, 
ObjectType(class java.lang.String), true, 12), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 13), true, false), lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventProperties), id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getId, true, false), individualId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getIndividualId, true, false), ... 12 more fields), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getEvents, None) AS events#204, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getFinished.booleanValue AS finished#205, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getFirstEventDate, true, false) AS firstEventDate#206, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getInteractionNumber AS interactionNumber#207, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getInteractionsCount AS interactionsCount#208, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getIterationNumber AS iterationNumber#209, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, 
assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getLastEventDate, true, false) AS lastEventDate#210, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getPageViewsCount AS pageViewsCount#211, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getPlatformName, true, false) AS platformName#212, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getProjectId, true, false) AS projectId#213, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 15), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getReferrers, None) AS referrers#214, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getRegion, true, false) AS region#215, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getSessionId, true, false) AS sessionId#216, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 16), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getUrls, None) AS urls#217, staticinvoke(class 
org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getUserId, true, false) AS userId#218]
+- FlatMapGroupsWithState org.apache.spark.sql.KeyValueGroupedDataset$$Lambda$2733/1335697630@7fe96ba0, newInstance(class scala.Tuple2), initializejavabean(newInstance(class com.liferay.osb.asah.spark.session.model.Event), (setDataSourceId,cast(dataSourceId#54 as string).toString), (setClientIp,cast(clientIp#51 as string).toString), (setKnownIndividual,staticinvoke(class java.lang.Boolean, ObjectType(class java.lang.Boolean), valueOf, cast(knownIndividual#58 as boolean), true, false)), (setId,cast(id#59 as string).toString), (setIndividualId,cast(individualId#60 as string).toString), (setEventProperties,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 77), lambdavariable(MapObject, StringType, true, 77).toString, map_keys(eventProperties#57), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 78), lambdavariable(MapObject, StringType, true, 78).toString, map_values(eventProperties#57), None).array, true, false)), (setEventDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(eventDate#55-T1800000ms as timestamp), true, false)), (setSegmentNames,mapobjects(lambdavariable(MapObject, StringType, true, 79), cast(lambdavariable(MapObject, StringType, true, 79) as string).toString, segmentNames#64, Some(interface java.util.List))), (setEventId,cast(eventId#56 as string).toString), (setApplicationId,cast(applicationId#49 as string).toString), (setIterationNumber,assertnotnull(cast(iterationNumber#108 as int))), (setProjectId,cast(projectId#62 as string).toString), (setDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Date), toJavaDate, cast(date#87 as date), true, false)), (setChannelId,cast(channelId#50 as string).toString), (setUserId,cast(userId#66 as string).toString), 
(setCreateDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(createDate#53 as timestamp), true, false)), (setContext,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 80), lambdavariable(MapObject, StringType, true, 80).toString, map_keys(context#52), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 81), lambdavariable(MapObject, StringType, true, 81).toString, map_values(context#52), None).array, true, false)), (setNormalizedEventDate,staticinvoke(class java.lang.Long, ObjectType(class java.lang.Long), valueOf, cast(normalizedEventDate#61L as bigint), true, false))), [_1#191, _2#192], [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55-T1800000ms, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108], obj#193: com.liferay.osb.asah.spark.session.model.Session, class[acquisition[0]: map<string,string>, browserName[0]: string, canonicalUrls[0]: array<string>, channelId[0]: string, city[0]: string, clientIp[0]: string, country[0]: string, dataSourceId[0]: string, date[0]: date, deviceType[0]: string, events[0]: array<struct<applicationId:string,channelId:string,clientIp:string,context:map<string,string>,createDate:timestamp,dataSourceId:string,date:date,eventDate:timestamp,eventId:string,eventProperties:map<string,string>,id:string,individualId:string,iterationNumber:int,knownIndividual:boolean,normalizedEventDate:bigint,projectId:string,segmentNames:array<string>,userId:string>>, finished[0]: boolean, firstEventDate[0]: timestamp, interactionNumber[0]: int, interactionsCount[0]: int, iterationNumber[0]: int, lastEventDate[0]: 
timestamp, pageViewsCount[0]: int, platformName[0]: string, projectId[0]: string, referrers[0]: array<string>, region[0]: string, sessionId[0]: string, urls[0]: array<string>, userId[0]: string], Append, false, EventTimeTimeout
+- AppendColumns org.apache.spark.sql.Dataset$$Lambda$2728/1995151823@1ad1c38d, class com.liferay.osb.asah.spark.session.model.Event, [StructField(applicationId,StringType,true), StructField(channelId,StringType,true), StructField(clientIp,StringType,true), StructField(context,MapType(StringType,StringType,true),true), StructField(createDate,TimestampType,true), StructField(dataSourceId,StringType,true), StructField(date,DateType,true), StructField(eventDate,TimestampType,true), StructField(eventId,StringType,true), StructField(eventProperties,MapType(StringType,StringType,true),true), StructField(id,StringType,true), StructField(individualId,StringType,true), StructField(iterationNumber,IntegerType,true), StructField(knownIndividual,BooleanType,true), StructField(normalizedEventDate,LongType,true), StructField(projectId,StringType,true), StructField(segmentNames,ArrayType(StringType,true),true), StructField(userId,StringType,true)], initializejavabean(newInstance(class com.liferay.osb.asah.spark.session.model.Event), (setDataSourceId,cast(dataSourceId#54 as string).toString), (setClientIp,cast(clientIp#51 as string).toString), (setKnownIndividual,staticinvoke(class java.lang.Boolean, ObjectType(class java.lang.Boolean), valueOf, cast(knownIndividual#58 as boolean), true, false)), (setId,cast(id#59 as string).toString), (setIndividualId,cast(individualId#60 as string).toString), (setEventProperties,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 72), lambdavariable(MapObject, StringType, true, 72).toString, map_keys(eventProperties#57), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 73), lambdavariable(MapObject, StringType, true, 73).toString, map_values(eventProperties#57), None).array, true, false)), (setEventDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class 
java.sql.Timestamp), toJavaTimestamp, cast(eventDate#55-T1800000ms as timestamp), true, false)), (setSegmentNames,mapobjects(lambdavariable(MapObject, StringType, true, 74), cast(lambdavariable(MapObject, StringType, true, 74) as string).toString, segmentNames#64, Some(interface java.util.List))), (setEventId,cast(eventId#56 as string).toString), (setApplicationId,cast(applicationId#49 as string).toString), (setIterationNumber,assertnotnull(cast(iterationNumber#108 as int))), (setProjectId,cast(projectId#62 as string).toString), (setDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Date), toJavaDate, cast(date#87 as date), true, false)), (setChannelId,cast(channelId#50 as string).toString), (setUserId,cast(userId#66 as string).toString), (setCreateDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(createDate#53 as timestamp), true, false)), (setContext,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 75), lambdavariable(MapObject, StringType, true, 75).toString, map_keys(context#52), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 76), lambdavariable(MapObject, StringType, true, 76).toString, map_values(context#52), None).array, true, false)), (setNormalizedEventDate,staticinvoke(class java.lang.Long, ObjectType(class java.lang.Long), valueOf, cast(normalizedEventDate#61L as bigint), true, false))), [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(input[0, scala.Tuple2, true])._1, true, false) AS _1#20 AS _1#191, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, assertnotnull(input[0, scala.Tuple2, true])._2, true, false) AS _2#21 AS _2#192]
+- EventTimeWatermark eventDate#55: timestamp, 30 minutes
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, 0 AS iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, to_date(from_utc_timestamp('eventDate, 'projectTimeZoneId), None) AS date#87]
+- StreamingExecutionRelation FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*], [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67]
org.apache.spark.sql.streaming.StreamingQueryException: Job aborted.
=== Streaming Query ===
Identifier: [id = 43f96638-2285-4fc0-96fb-b93014daa8f5, runId = 4d1ce4ac-dd6d-47eb-880b-c7e8dfdbd13d]
Current Committed Offsets: {FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*]: {"logOffset":220}}
Current Available Offsets: {FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*]: {"logOffset":221}}
Current State: ACTIVE
Thread State: RUNNABLE
Logical Plan:
SerializeFromObject [externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 6), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 7), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getAcquisition) AS acquisition#194, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getBrowserName, true, false) AS browserName#195, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 8), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCanonicalUrls, None) AS canonicalUrls#196, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getChannelId, true, false) AS channelId#197, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCity, true, false) AS city#198, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getClientIp, true, false) 
AS clientIp#199, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCountry, true, false) AS country#200, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDataSourceId, true, false) AS dataSourceId#201, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDate, true, false) AS date#202, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDeviceType, true, false) AS deviceType#203, mapobjects(lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9), if (isnull(lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9))) null else named_struct(applicationId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getApplicationId, true, false), channelId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getChannelId, true, false), clientIp, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getClientIp, true, false), context, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 10), staticinvoke(class 
org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 10), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 11), true, false), lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getContext), createDate, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getCreateDate, true, false), dataSourceId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getDataSourceId, true, false), date, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getDate, true, false), eventDate, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventDate, true, false), eventId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventId, true, false), eventProperties, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, 
ObjectType(class java.lang.String), true, 12), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 13), true, false), lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventProperties), id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getId, true, false), individualId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getIndividualId, true, false), ... 12 more fields), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getEvents, None) AS events#204, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getFinished.booleanValue AS finished#205, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getFirstEventDate, true, false) AS firstEventDate#206, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getInteractionNumber AS interactionNumber#207, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getInteractionsCount AS interactionsCount#208, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getIterationNumber AS iterationNumber#209, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, 
assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getLastEventDate, true, false) AS lastEventDate#210, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getPageViewsCount AS pageViewsCount#211, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getPlatformName, true, false) AS platformName#212, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getProjectId, true, false) AS projectId#213, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 15), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getReferrers, None) AS referrers#214, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getRegion, true, false) AS region#215, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getSessionId, true, false) AS sessionId#216, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 16), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getUrls, None) AS urls#217, staticinvoke(class 
org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getUserId, true, false) AS userId#218]
+- FlatMapGroupsWithState org.apache.spark.sql.KeyValueGroupedDataset$$Lambda$2733/1335697630@7fe96ba0, newInstance(class scala.Tuple2), initializejavabean(newInstance(class com.liferay.osb.asah.spark.session.model.Event), (setDataSourceId,cast(dataSourceId#54 as string).toString), (setClientIp,cast(clientIp#51 as string).toString), (setKnownIndividual,staticinvoke(class java.lang.Boolean, ObjectType(class java.lang.Boolean), valueOf, cast(knownIndividual#58 as boolean), true, false)), (setId,cast(id#59 as string).toString), (setIndividualId,cast(individualId#60 as string).toString), (setEventProperties,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 77), lambdavariable(MapObject, StringType, true, 77).toString, map_keys(eventProperties#57), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 78), lambdavariable(MapObject, StringType, true, 78).toString, map_values(eventProperties#57), None).array, true, false)), (setEventDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(eventDate#55-T1800000ms as timestamp), true, false)), (setSegmentNames,mapobjects(lambdavariable(MapObject, StringType, true, 79), cast(lambdavariable(MapObject, StringType, true, 79) as string).toString, segmentNames#64, Some(interface java.util.List))), (setEventId,cast(eventId#56 as string).toString), (setApplicationId,cast(applicationId#49 as string).toString), (setIterationNumber,assertnotnull(cast(iterationNumber#108 as int))), (setProjectId,cast(projectId#62 as string).toString), (setDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Date), toJavaDate, cast(date#87 as date), true, false)), (setChannelId,cast(channelId#50 as string).toString), (setUserId,cast(userId#66 as string).toString), 
(setCreateDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(createDate#53 as timestamp), true, false)), (setContext,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 80), lambdavariable(MapObject, StringType, true, 80).toString, map_keys(context#52), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 81), lambdavariable(MapObject, StringType, true, 81).toString, map_values(context#52), None).array, true, false)), (setNormalizedEventDate,staticinvoke(class java.lang.Long, ObjectType(class java.lang.Long), valueOf, cast(normalizedEventDate#61L as bigint), true, false))), [_1#191, _2#192], [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55-T1800000ms, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108], obj#193: com.liferay.osb.asah.spark.session.model.Session, class[acquisition[0]: map<string,string>, browserName[0]: string, canonicalUrls[0]: array<string>, channelId[0]: string, city[0]: string, clientIp[0]: string, country[0]: string, dataSourceId[0]: string, date[0]: date, deviceType[0]: string, events[0]: array<struct<applicationId:string,channelId:string,clientIp:string,context:map<string,string>,createDate:timestamp,dataSourceId:string,date:date,eventDate:timestamp,eventId:string,eventProperties:map<string,string>,id:string,individualId:string,iterationNumber:int,knownIndividual:boolean,normalizedEventDate:bigint,projectId:string,segmentNames:array<string>,userId:string>>, finished[0]: boolean, firstEventDate[0]: timestamp, interactionNumber[0]: int, interactionsCount[0]: int, iterationNumber[0]: int, lastEventDate[0]: 
timestamp, pageViewsCount[0]: int, platformName[0]: string, projectId[0]: string, referrers[0]: array<string>, region[0]: string, sessionId[0]: string, urls[0]: array<string>, userId[0]: string], Append, false, EventTimeTimeout
+- AppendColumns org.apache.spark.sql.Dataset$$Lambda$2728/1995151823@1ad1c38d, class com.liferay.osb.asah.spark.session.model.Event, [StructField(applicationId,StringType,true), StructField(channelId,StringType,true), StructField(clientIp,StringType,true), StructField(context,MapType(StringType,StringType,true),true), StructField(createDate,TimestampType,true), StructField(dataSourceId,StringType,true), StructField(date,DateType,true), StructField(eventDate,TimestampType,true), StructField(eventId,StringType,true), StructField(eventProperties,MapType(StringType,StringType,true),true), StructField(id,StringType,true), StructField(individualId,StringType,true), StructField(iterationNumber,IntegerType,true), StructField(knownIndividual,BooleanType,true), StructField(normalizedEventDate,LongType,true), StructField(projectId,StringType,true), StructField(segmentNames,ArrayType(StringType,true),true), StructField(userId,StringType,true)], initializejavabean(newInstance(class com.liferay.osb.asah.spark.session.model.Event), (setDataSourceId,cast(dataSourceId#54 as string).toString), (setClientIp,cast(clientIp#51 as string).toString), (setKnownIndividual,staticinvoke(class java.lang.Boolean, ObjectType(class java.lang.Boolean), valueOf, cast(knownIndividual#58 as boolean), true, false)), (setId,cast(id#59 as string).toString), (setIndividualId,cast(individualId#60 as string).toString), (setEventProperties,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 72), lambdavariable(MapObject, StringType, true, 72).toString, map_keys(eventProperties#57), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 73), lambdavariable(MapObject, StringType, true, 73).toString, map_values(eventProperties#57), None).array, true, false)), (setEventDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class 
java.sql.Timestamp), toJavaTimestamp, cast(eventDate#55-T1800000ms as timestamp), true, false)), (setSegmentNames,mapobjects(lambdavariable(MapObject, StringType, true, 74), cast(lambdavariable(MapObject, StringType, true, 74) as string).toString, segmentNames#64, Some(interface java.util.List))), (setEventId,cast(eventId#56 as string).toString), (setApplicationId,cast(applicationId#49 as string).toString), (setIterationNumber,assertnotnull(cast(iterationNumber#108 as int))), (setProjectId,cast(projectId#62 as string).toString), (setDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Date), toJavaDate, cast(date#87 as date), true, false)), (setChannelId,cast(channelId#50 as string).toString), (setUserId,cast(userId#66 as string).toString), (setCreateDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(createDate#53 as timestamp), true, false)), (setContext,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 75), lambdavariable(MapObject, StringType, true, 75).toString, map_keys(context#52), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 76), lambdavariable(MapObject, StringType, true, 76).toString, map_values(context#52), None).array, true, false)), (setNormalizedEventDate,staticinvoke(class java.lang.Long, ObjectType(class java.lang.Long), valueOf, cast(normalizedEventDate#61L as bigint), true, false))), [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(input[0, scala.Tuple2, true])._1, true, false) AS _1#20 AS _1#191, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, assertnotnull(input[0, scala.Tuple2, true])._2, true, false) AS _2#21 AS _2#192]
+- EventTimeWatermark eventDate#55: timestamp, 30 minutes
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, 0 AS iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, to_date(from_utc_timestamp('eventDate, 'projectTimeZoneId), None) AS date#87]
+- StreamingExecutionRelation FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*], [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67]
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:356)
at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
Caused by: org.apache.spark.SparkException: Job aborted.
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:231)
at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:188)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:108)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:106)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:131)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:132)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:131)
at org.apache.spark.sql.DataFrameWriter.$anonfun$runCommand$1(DataFrameWriter.scala:989)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:989)
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:438)
at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:415)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:293)
at com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction.saveFinishedSessionEvents(SessionBatchSinkFunction.java:91)
at com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction.call(SessionBatchSinkFunction.java:53)
at com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction.call(SessionBatchSinkFunction.java:32)
at org.apache.spark.sql.streaming.DataStreamWriter.$anonfun$foreachBatch$1(DataStreamWriter.scala:539)
at org.apache.spark.sql.streaming.DataStreamWriter.$anonfun$foreachBatch$1$adapted(DataStreamWriter.scala:539)
at org.apache.spark.sql.execution.streaming.sources.ForeachBatchSink.addBatch(ForeachBatchSink.scala:35)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$16(MicroBatchExecution.scala:586)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$15(MicroBatchExecution.scala:584)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:357)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:355)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:584)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:226)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:357)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:355)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:194)
at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:188)
at org.apache.spark.sql.execution.streaming.StreamExecution.$anonfun$runStream$1(StreamExecution.scala:334)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:317)
... 1 more
Caused by: java.lang.OutOfMemoryError: Java heap space
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.buildContentChunk(MediaHttpUploader.java:579)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.resumableUpload(MediaHttpUploader.java:380)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.upload(MediaHttpUploader.java:308)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:528)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:455)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:565)
at com.google.cloud.hadoop.repackaged.gcs.com.google.cloud.hadoop.util.AbstractGoogleAsyncWriteChannel$UploadOperation.call(AbstractGoogleAsyncWriteChannel.java:85)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
21/09/03 03:44:26 INFO org.sparkproject.jetty.server.AbstractConnector: Stopped Spark@2d6aca33{HTTP/1.1, (http/1.1)}{0.0.0.0:0}
Exception in thread "main" java.lang.RuntimeException: org.apache.spark.sql.streaming.StreamingQueryException: Job aborted.
=== Streaming Query ===
Identifier: [id = 43f96638-2285-4fc0-96fb-b93014daa8f5, runId = 4d1ce4ac-dd6d-47eb-880b-c7e8dfdbd13d]
Current Committed Offsets: {FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*]: {"logOffset":220}}
Current Available Offsets: {FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*]: {"logOffset":221}}
Current State: ACTIVE
Thread State: RUNNABLE
Logical Plan:
SerializeFromObject [externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 6), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 7), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getAcquisition) AS acquisition#194, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getBrowserName, true, false) AS browserName#195, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 8), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCanonicalUrls, None) AS canonicalUrls#196, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getChannelId, true, false) AS channelId#197, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCity, true, false) AS city#198, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getClientIp, true, false) 
AS clientIp#199, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCountry, true, false) AS country#200, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDataSourceId, true, false) AS dataSourceId#201, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDate, true, false) AS date#202, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDeviceType, true, false) AS deviceType#203, mapobjects(lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9), if (isnull(lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9))) null else named_struct(applicationId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getApplicationId, true, false), channelId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getChannelId, true, false), clientIp, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getClientIp, true, false), context, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 10), staticinvoke(class 
org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 10), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 11), true, false), lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getContext), createDate, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getCreateDate, true, false), dataSourceId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getDataSourceId, true, false), date, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getDate, true, false), eventDate, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventDate, true, false), eventId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventId, true, false), eventProperties, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, 
ObjectType(class java.lang.String), true, 12), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 13), true, false), lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventProperties), id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getId, true, false), individualId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getIndividualId, true, false), ... 12 more fields), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getEvents, None) AS events#204, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getFinished.booleanValue AS finished#205, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getFirstEventDate, true, false) AS firstEventDate#206, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getInteractionNumber AS interactionNumber#207, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getInteractionsCount AS interactionsCount#208, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getIterationNumber AS iterationNumber#209, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, 
assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getLastEventDate, true, false) AS lastEventDate#210, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getPageViewsCount AS pageViewsCount#211, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getPlatformName, true, false) AS platformName#212, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getProjectId, true, false) AS projectId#213, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 15), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getReferrers, None) AS referrers#214, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getRegion, true, false) AS region#215, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getSessionId, true, false) AS sessionId#216, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 16), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getUrls, None) AS urls#217, staticinvoke(class 
org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getUserId, true, false) AS userId#218]
+- FlatMapGroupsWithState org.apache.spark.sql.KeyValueGroupedDataset$$Lambda$2733/1335697630@7fe96ba0, newInstance(class scala.Tuple2), initializejavabean(newInstance(class com.liferay.osb.asah.spark.session.model.Event), (setDataSourceId,cast(dataSourceId#54 as string).toString), (setClientIp,cast(clientIp#51 as string).toString), (setKnownIndividual,staticinvoke(class java.lang.Boolean, ObjectType(class java.lang.Boolean), valueOf, cast(knownIndividual#58 as boolean), true, false)), (setId,cast(id#59 as string).toString), (setIndividualId,cast(individualId#60 as string).toString), (setEventProperties,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 77), lambdavariable(MapObject, StringType, true, 77).toString, map_keys(eventProperties#57), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 78), lambdavariable(MapObject, StringType, true, 78).toString, map_values(eventProperties#57), None).array, true, false)), (setEventDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(eventDate#55-T1800000ms as timestamp), true, false)), (setSegmentNames,mapobjects(lambdavariable(MapObject, StringType, true, 79), cast(lambdavariable(MapObject, StringType, true, 79) as string).toString, segmentNames#64, Some(interface java.util.List))), (setEventId,cast(eventId#56 as string).toString), (setApplicationId,cast(applicationId#49 as string).toString), (setIterationNumber,assertnotnull(cast(iterationNumber#108 as int))), (setProjectId,cast(projectId#62 as string).toString), (setDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Date), toJavaDate, cast(date#87 as date), true, false)), (setChannelId,cast(channelId#50 as string).toString), (setUserId,cast(userId#66 as string).toString), 
(setCreateDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(createDate#53 as timestamp), true, false)), (setContext,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 80), lambdavariable(MapObject, StringType, true, 80).toString, map_keys(context#52), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 81), lambdavariable(MapObject, StringType, true, 81).toString, map_values(context#52), None).array, true, false)), (setNormalizedEventDate,staticinvoke(class java.lang.Long, ObjectType(class java.lang.Long), valueOf, cast(normalizedEventDate#61L as bigint), true, false))), [_1#191, _2#192], [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55-T1800000ms, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108], obj#193: com.liferay.osb.asah.spark.session.model.Session, class[acquisition[0]: map<string,string>, browserName[0]: string, canonicalUrls[0]: array<string>, channelId[0]: string, city[0]: string, clientIp[0]: string, country[0]: string, dataSourceId[0]: string, date[0]: date, deviceType[0]: string, events[0]: array<struct<applicationId:string,channelId:string,clientIp:string,context:map<string,string>,createDate:timestamp,dataSourceId:string,date:date,eventDate:timestamp,eventId:string,eventProperties:map<string,string>,id:string,individualId:string,iterationNumber:int,knownIndividual:boolean,normalizedEventDate:bigint,projectId:string,segmentNames:array<string>,userId:string>>, finished[0]: boolean, firstEventDate[0]: timestamp, interactionNumber[0]: int, interactionsCount[0]: int, iterationNumber[0]: int, lastEventDate[0]: 
timestamp, pageViewsCount[0]: int, platformName[0]: string, projectId[0]: string, referrers[0]: array<string>, region[0]: string, sessionId[0]: string, urls[0]: array<string>, userId[0]: string], Append, false, EventTimeTimeout
+- AppendColumns org.apache.spark.sql.Dataset$$Lambda$2728/1995151823@1ad1c38d, class com.liferay.osb.asah.spark.session.model.Event, [StructField(applicationId,StringType,true), StructField(channelId,StringType,true), StructField(clientIp,StringType,true), StructField(context,MapType(StringType,StringType,true),true), StructField(createDate,TimestampType,true), StructField(dataSourceId,StringType,true), StructField(date,DateType,true), StructField(eventDate,TimestampType,true), StructField(eventId,StringType,true), StructField(eventProperties,MapType(StringType,StringType,true),true), StructField(id,StringType,true), StructField(individualId,StringType,true), StructField(iterationNumber,IntegerType,true), StructField(knownIndividual,BooleanType,true), StructField(normalizedEventDate,LongType,true), StructField(projectId,StringType,true), StructField(segmentNames,ArrayType(StringType,true),true), StructField(userId,StringType,true)], initializejavabean(newInstance(class com.liferay.osb.asah.spark.session.model.Event), (setDataSourceId,cast(dataSourceId#54 as string).toString), (setClientIp,cast(clientIp#51 as string).toString), (setKnownIndividual,staticinvoke(class java.lang.Boolean, ObjectType(class java.lang.Boolean), valueOf, cast(knownIndividual#58 as boolean), true, false)), (setId,cast(id#59 as string).toString), (setIndividualId,cast(individualId#60 as string).toString), (setEventProperties,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 72), lambdavariable(MapObject, StringType, true, 72).toString, map_keys(eventProperties#57), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 73), lambdavariable(MapObject, StringType, true, 73).toString, map_values(eventProperties#57), None).array, true, false)), (setEventDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class 
java.sql.Timestamp), toJavaTimestamp, cast(eventDate#55-T1800000ms as timestamp), true, false)), (setSegmentNames,mapobjects(lambdavariable(MapObject, StringType, true, 74), cast(lambdavariable(MapObject, StringType, true, 74) as string).toString, segmentNames#64, Some(interface java.util.List))), (setEventId,cast(eventId#56 as string).toString), (setApplicationId,cast(applicationId#49 as string).toString), (setIterationNumber,assertnotnull(cast(iterationNumber#108 as int))), (setProjectId,cast(projectId#62 as string).toString), (setDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Date), toJavaDate, cast(date#87 as date), true, false)), (setChannelId,cast(channelId#50 as string).toString), (setUserId,cast(userId#66 as string).toString), (setCreateDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(createDate#53 as timestamp), true, false)), (setContext,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 75), lambdavariable(MapObject, StringType, true, 75).toString, map_keys(context#52), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 76), lambdavariable(MapObject, StringType, true, 76).toString, map_values(context#52), None).array, true, false)), (setNormalizedEventDate,staticinvoke(class java.lang.Long, ObjectType(class java.lang.Long), valueOf, cast(normalizedEventDate#61L as bigint), true, false))), [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(input[0, scala.Tuple2, true])._1, true, false) AS _1#20 AS _1#191, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, assertnotnull(input[0, scala.Tuple2, true])._2, true, false) AS _2#21 AS _2#192]
+- EventTimeWatermark eventDate#55: timestamp, 30 minutes
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, 0 AS iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, to_date(from_utc_timestamp('eventDate, 'projectTimeZoneId), None) AS date#87]
+- StreamingExecutionRelation FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*], [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67]
at com.liferay.osb.asah.spark.session.SessionSparkJob.run(SessionSparkJob.java:89)
at java.util.Arrays$ArrayList.forEach(Arrays.java:3880)
at com.liferay.osb.asah.spark.common.SparkJobPipeline.run(SparkJobPipeline.java:25)
at com.liferay.osb.asah.spark.session.SessionSparkApplication.start(SessionSparkApplication.java:49)
at com.liferay.osb.asah.spark.session.SessionSparkApplication.main(SessionSparkApplication.java:36)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:951)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1039)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1048)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: org.apache.spark.sql.streaming.StreamingQueryException: Job aborted.
=== Streaming Query ===
Identifier: [id = 43f96638-2285-4fc0-96fb-b93014daa8f5, runId = 4d1ce4ac-dd6d-47eb-880b-c7e8dfdbd13d]
Current Committed Offsets: {FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*]: {"logOffset":220}}
Current Available Offsets: {FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*]: {"logOffset":221}}
Current State: ACTIVE
Thread State: RUNNABLE
Logical Plan:
SerializeFromObject [externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 6), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 7), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getAcquisition) AS acquisition#194, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getBrowserName, true, false) AS browserName#195, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 8), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCanonicalUrls, None) AS canonicalUrls#196, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getChannelId, true, false) AS channelId#197, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCity, true, false) AS city#198, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getClientIp, true, false) 
AS clientIp#199, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getCountry, true, false) AS country#200, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDataSourceId, true, false) AS dataSourceId#201, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDate, true, false) AS date#202, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getDeviceType, true, false) AS deviceType#203, mapobjects(lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9), if (isnull(lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9))) null else named_struct(applicationId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getApplicationId, true, false), channelId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getChannelId, true, false), clientIp, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getClientIp, true, false), context, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 10), staticinvoke(class 
org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 10), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 11), true, false), lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getContext), createDate, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getCreateDate, true, false), dataSourceId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getDataSourceId, true, false), date, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getDate, true, false), eventDate, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventDate, true, false), eventId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventId, true, false), eventProperties, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.String), true, 12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key, 
ObjectType(class java.lang.String), true, 12), true, false), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.String), true, 13), true, false), lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getEventProperties), id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getId, true, false), individualId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class com.liferay.osb.asah.spark.session.model.Event), true, 9).getIndividualId, true, false), ... 12 more fields), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getEvents, None) AS events#204, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getFinished.booleanValue AS finished#205, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getFirstEventDate, true, false) AS firstEventDate#206, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getInteractionNumber AS interactionNumber#207, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getInteractionsCount AS interactionsCount#208, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getIterationNumber AS iterationNumber#209, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, 
assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getLastEventDate, true, false) AS lastEventDate#210, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getPageViewsCount AS pageViewsCount#211, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getPlatformName, true, false) AS platformName#212, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getProjectId, true, false) AS projectId#213, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 15), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getReferrers, None) AS referrers#214, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getRegion, true, false) AS region#215, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getSessionId, true, false) AS sessionId#216, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.String), true, 16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(MapObject, ObjectType(class java.lang.String), true, 16), true, false), assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getUrls, None) AS urls#217, staticinvoke(class 
org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(assertnotnull(input[0, com.liferay.osb.asah.spark.session.model.Session, true])).getUserId, true, false) AS userId#218]
+- FlatMapGroupsWithState org.apache.spark.sql.KeyValueGroupedDataset$$Lambda$2733/1335697630@7fe96ba0, newInstance(class scala.Tuple2), initializejavabean(newInstance(class com.liferay.osb.asah.spark.session.model.Event), (setDataSourceId,cast(dataSourceId#54 as string).toString), (setClientIp,cast(clientIp#51 as string).toString), (setKnownIndividual,staticinvoke(class java.lang.Boolean, ObjectType(class java.lang.Boolean), valueOf, cast(knownIndividual#58 as boolean), true, false)), (setId,cast(id#59 as string).toString), (setIndividualId,cast(individualId#60 as string).toString), (setEventProperties,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 77), lambdavariable(MapObject, StringType, true, 77).toString, map_keys(eventProperties#57), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 78), lambdavariable(MapObject, StringType, true, 78).toString, map_values(eventProperties#57), None).array, true, false)), (setEventDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(eventDate#55-T1800000ms as timestamp), true, false)), (setSegmentNames,mapobjects(lambdavariable(MapObject, StringType, true, 79), cast(lambdavariable(MapObject, StringType, true, 79) as string).toString, segmentNames#64, Some(interface java.util.List))), (setEventId,cast(eventId#56 as string).toString), (setApplicationId,cast(applicationId#49 as string).toString), (setIterationNumber,assertnotnull(cast(iterationNumber#108 as int))), (setProjectId,cast(projectId#62 as string).toString), (setDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Date), toJavaDate, cast(date#87 as date), true, false)), (setChannelId,cast(channelId#50 as string).toString), (setUserId,cast(userId#66 as string).toString), 
(setCreateDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(createDate#53 as timestamp), true, false)), (setContext,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 80), lambdavariable(MapObject, StringType, true, 80).toString, map_keys(context#52), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 81), lambdavariable(MapObject, StringType, true, 81).toString, map_values(context#52), None).array, true, false)), (setNormalizedEventDate,staticinvoke(class java.lang.Long, ObjectType(class java.lang.Long), valueOf, cast(normalizedEventDate#61L as bigint), true, false))), [_1#191, _2#192], [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55-T1800000ms, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108], obj#193: com.liferay.osb.asah.spark.session.model.Session, class[acquisition[0]: map<string,string>, browserName[0]: string, canonicalUrls[0]: array<string>, channelId[0]: string, city[0]: string, clientIp[0]: string, country[0]: string, dataSourceId[0]: string, date[0]: date, deviceType[0]: string, events[0]: array<struct<applicationId:string,channelId:string,clientIp:string,context:map<string,string>,createDate:timestamp,dataSourceId:string,date:date,eventDate:timestamp,eventId:string,eventProperties:map<string,string>,id:string,individualId:string,iterationNumber:int,knownIndividual:boolean,normalizedEventDate:bigint,projectId:string,segmentNames:array<string>,userId:string>>, finished[0]: boolean, firstEventDate[0]: timestamp, interactionNumber[0]: int, interactionsCount[0]: int, iterationNumber[0]: int, lastEventDate[0]: 
timestamp, pageViewsCount[0]: int, platformName[0]: string, projectId[0]: string, referrers[0]: array<string>, region[0]: string, sessionId[0]: string, urls[0]: array<string>, userId[0]: string], Append, false, EventTimeTimeout
+- AppendColumns org.apache.spark.sql.Dataset$$Lambda$2728/1995151823@1ad1c38d, class com.liferay.osb.asah.spark.session.model.Event, [StructField(applicationId,StringType,true), StructField(channelId,StringType,true), StructField(clientIp,StringType,true), StructField(context,MapType(StringType,StringType,true),true), StructField(createDate,TimestampType,true), StructField(dataSourceId,StringType,true), StructField(date,DateType,true), StructField(eventDate,TimestampType,true), StructField(eventId,StringType,true), StructField(eventProperties,MapType(StringType,StringType,true),true), StructField(id,StringType,true), StructField(individualId,StringType,true), StructField(iterationNumber,IntegerType,true), StructField(knownIndividual,BooleanType,true), StructField(normalizedEventDate,LongType,true), StructField(projectId,StringType,true), StructField(segmentNames,ArrayType(StringType,true),true), StructField(userId,StringType,true)], initializejavabean(newInstance(class com.liferay.osb.asah.spark.session.model.Event), (setDataSourceId,cast(dataSourceId#54 as string).toString), (setClientIp,cast(clientIp#51 as string).toString), (setKnownIndividual,staticinvoke(class java.lang.Boolean, ObjectType(class java.lang.Boolean), valueOf, cast(knownIndividual#58 as boolean), true, false)), (setId,cast(id#59 as string).toString), (setIndividualId,cast(individualId#60 as string).toString), (setEventProperties,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 72), lambdavariable(MapObject, StringType, true, 72).toString, map_keys(eventProperties#57), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 73), lambdavariable(MapObject, StringType, true, 73).toString, map_values(eventProperties#57), None).array, true, false)), (setEventDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class 
java.sql.Timestamp), toJavaTimestamp, cast(eventDate#55-T1800000ms as timestamp), true, false)), (setSegmentNames,mapobjects(lambdavariable(MapObject, StringType, true, 74), cast(lambdavariable(MapObject, StringType, true, 74) as string).toString, segmentNames#64, Some(interface java.util.List))), (setEventId,cast(eventId#56 as string).toString), (setApplicationId,cast(applicationId#49 as string).toString), (setIterationNumber,assertnotnull(cast(iterationNumber#108 as int))), (setProjectId,cast(projectId#62 as string).toString), (setDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Date), toJavaDate, cast(date#87 as date), true, false)), (setChannelId,cast(channelId#50 as string).toString), (setUserId,cast(userId#66 as string).toString), (setCreateDate,staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, cast(createDate#53 as timestamp), true, false)), (setContext,staticinvoke(class org.apache.spark.sql.catalyst.util.ArrayBasedMapData$, ObjectType(interface java.util.Map), toJavaMap, mapobjects(lambdavariable(MapObject, StringType, true, 75), lambdavariable(MapObject, StringType, true, 75).toString, map_keys(context#52), None).array, mapobjects(lambdavariable(MapObject, StringType, true, 76), lambdavariable(MapObject, StringType, true, 76).toString, map_values(context#52), None).array, true, false)), (setNormalizedEventDate,staticinvoke(class java.lang.Long, ObjectType(class java.lang.Long), valueOf, cast(normalizedEventDate#61L as bigint), true, false))), [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, assertnotnull(input[0, scala.Tuple2, true])._1, true, false) AS _1#20 AS _1#191, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, DateType, fromJavaDate, assertnotnull(input[0, scala.Tuple2, true])._2, true, false) AS _2#21 AS _2#192]
+- EventTimeWatermark eventDate#55: timestamp, 30 minutes
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, date#87, 0 AS iterationNumber#108]
+- Project [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67, to_date(from_utc_timestamp('eventDate, 'projectTimeZoneId), None) AS date#87]
+- StreamingExecutionRelation FileStreamSource[gs://analytics-cloud-staging-osbasahspark-southamerica-east1/analytics-events/*/*], [applicationId#49, channelId#50, clientIP#51, context#52, createDate#53, dataSourceId#54, eventDate#55, eventId#56, eventProperties#57, knownIndividual#58, id#59, individualId#60, normalizedEventDate#61L, projectId#62, projectTimeZoneId#63, segmentNames#64, sessionId#65, userId#66, variantId#67]
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:356)
at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
Caused by: org.apache.spark.SparkException: Job aborted.
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:231)
at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:188)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:108)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:106)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:131)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:132)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:131)
at org.apache.spark.sql.DataFrameWriter.$anonfun$runCommand$1(DataFrameWriter.scala:989)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:989)
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:438)
at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:415)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:293)
at com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction.saveFinishedSessionEvents(SessionBatchSinkFunction.java:91)
at com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction.call(SessionBatchSinkFunction.java:53)
at com.liferay.osb.asah.spark.session.function.SessionBatchSinkFunction.call(SessionBatchSinkFunction.java:32)
at org.apache.spark.sql.streaming.DataStreamWriter.$anonfun$foreachBatch$1(DataStreamWriter.scala:539)
at org.apache.spark.sql.streaming.DataStreamWriter.$anonfun$foreachBatch$1$adapted(DataStreamWriter.scala:539)
at org.apache.spark.sql.execution.streaming.sources.ForeachBatchSink.addBatch(ForeachBatchSink.scala:35)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$16(MicroBatchExecution.scala:586)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$15(MicroBatchExecution.scala:584)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:357)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:355)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:584)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:226)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:357)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:355)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:194)
at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:188)
at org.apache.spark.sql.execution.streaming.StreamExecution.$anonfun$runStream$1(StreamExecution.scala:334)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:317)
... 1 more
Caused by: java.lang.OutOfMemoryError: Java heap space
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.buildContentChunk(MediaHttpUploader.java:579)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.resumableUpload(MediaHttpUploader.java:380)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.media.MediaHttpUploader.upload(MediaHttpUploader.java:308)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:528)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:455)
at com.google.cloud.hadoop.repackaged.gcs.com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:565)
at com.google.cloud.hadoop.repackaged.gcs.com.google.cloud.hadoop.util.AbstractGoogleAsyncWriteChannel$UploadOperation.call(AbstractGoogleAsyncWriteChannel.java:85)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment