Search code examples
apache-spark · apache-spark-sql · databricks · socket-timeout-exception

Databricks notebook time out error when calling other notebooks: com.databricks.WorkflowException: java.net.SocketTimeoutException: Read timed out


I have a main notebook that calls a series of other notebooks. Each notebook performs a MERGE on a delta table to update or insert new records on it.

When I ran the main notebook with a job cluster, one notebook, Medications, failed with a timeout error. When I ran the Medications notebook with an interactive cluster, it passed.

The job cluster and the interactive cluster have the same setup, as shown below (screenshot of the cluster configuration omitted here).

What could be the problem? The standard error from the spark driver logs is shown below:

---------------------------------------------------------------------------
Py4JJavaError                             Traceback (most recent call last)
<command-3958057957970596> in <module>()
      1 #Run CDMMedications
----> 2 dbutils.notebook.run("CDMMedications", 0, {"TheScope":TheScope, "TheKey":TheKey, "StorageAccount":StorageAccount, "FileSystem":FileSystem, "Database":Database})

/local_disk0/tmp/1565905071244-0/dbutils.py in run(self, path, timeout_seconds, arguments, _NotebookHandler__databricks_internal_cluster_spec)
    134                 arguments,
    135                 __databricks_internal_cluster_spec,
--> 136                 self.shell.currentJobGroup)
    137 
    138         def __repr__(self):

/databricks/spark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1255         answer = self.gateway_client.send_command(command)
   1256         return_value = get_return_value(
-> 1257             answer, self.gateway_client, self.target_id, self.name)
   1258 
   1259         for temp_arg in temp_args:

/databricks/spark/python/pyspark/sql/utils.py in deco(*a, **kw)
     61     def deco(*a, **kw):
     62         try:
---> 63             return f(*a, **kw)
     64         except py4j.protocol.Py4JJavaError as e:
     65             s = e.java_exception.toString()

/databricks/spark/python/lib/py4j-0.10.7-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    326                 raise Py4JJavaError(
    327                     "An error occurred while calling {0}{1}{2}.\n".
--> 328                     format(target_id, ".", name), value)
    329             else:
    330                 raise Py4JError(

Py4JJavaError: An error occurred while calling o779._run.
: com.databricks.WorkflowException: java.net.SocketTimeoutException: Read timed out
    at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:75)
    at com.databricks.dbutils_v1.impl.NotebookUtilsImpl.run(NotebookUtilsImpl.scala:90)
    at com.databricks.dbutils_v1.impl.NotebookUtilsImpl._run(NotebookUtilsImpl.scala:69)
    at sun.reflect.GeneratedMethodAccessor605.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)
    at py4j.Gateway.invoke(Gateway.java:295)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:251)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.SocketTimeoutException: Read timed out
    at java.net.SocketInputStream.socketRead0(Native Method)
    at java.net.SocketInputStream.socketRead(SocketInputStream.java:116)
    at java.net.SocketInputStream.read(SocketInputStream.java:171)
    at java.net.SocketInputStream.read(SocketInputStream.java:141)
    at sun.security.ssl.InputRecord.readFully(InputRecord.java:465)
    at sun.security.ssl.InputRecord.read(InputRecord.java:503)
    at sun.security.ssl.SSLSocketImpl.readRecord(SSLSocketImpl.java:975)
    at sun.security.ssl.SSLSocketImpl.readDataRecord(SSLSocketImpl.java:933)
    at sun.security.ssl.AppInputStream.read(AppInputStream.java:105)
    at org.apache.http.impl.io.SessionInputBufferImpl.streamRead(SessionInputBufferImpl.java:137)
    at org.apache.http.impl.io.SessionInputBufferImpl.fillBuffer(SessionInputBufferImpl.java:153)
    at org.apache.http.impl.io.SessionInputBufferImpl.readLine(SessionInputBufferImpl.java:282)
    at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:138)
    at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:56)
    at org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:259)
    at org.apache.http.impl.DefaultBHttpClientConnection.receiveResponseHeader(DefaultBHttpClientConnection.java:163)
    at org.apache.http.impl.conn.CPoolProxy.receiveResponseHeader(CPoolProxy.java:165)
    at org.apache.http.protocol.HttpRequestExecutor.doReceiveResponse(HttpRequestExecutor.java:273)
    at org.apache.http.protocol.HttpRequestExecutor.execute(HttpRequestExecutor.java:125)
    at org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:272)
    at org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185)
    at org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89)
    at org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:111)
    at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
    at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:72)
    at com.databricks.common.client.RawDBHttpClient.httpRequestInternal(DBHttpClient.scala:498)
    at com.databricks.common.client.RawDBHttpClient.entityEnclosingRequestInternal(DBHttpClient.scala:489)
    at com.databricks.common.client.RawDBHttpClient.postInternal(DBHttpClient.scala:420)
    at com.databricks.common.client.RawDBHttpClient.postJson(DBHttpClient.scala:283)
    at com.databricks.common.client.DBHttpClient.postJson(DBHttpClient.scala:200)
    at com.databricks.workflow.SimpleJobsSessionClient.createNotebookJob(JobsSessionClient.scala:160)
    at com.databricks.workflow.ReliableJobsSessionClient$$anonfun$createNotebookJob$1.apply$mcJ$sp(JobsSessionClient.scala:249)
    at com.databricks.workflow.ReliableJobsSessionClient$$anonfun$createNotebookJob$1.apply(JobsSessionClient.scala:249)
    at com.databricks.workflow.ReliableJobsSessionClient$$anonfun$createNotebookJob$1.apply(JobsSessionClient.scala:249)
    at com.databricks.common.client.DBHttpClient$.retryWithDeadline(DBHttpClient.scala:133)
    at com.databricks.workflow.ReliableJobsSessionClient.withRetry(JobsSessionClient.scala:313)
    at com.databricks.workflow.ReliableJobsSessionClient.createNotebookJob(JobsSessionClient.scala:248)
    at com.databricks.workflow.WorkflowDriver.run0(WorkflowDriver.scala:93)
    at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:61)
    ... 12 more
---------------------------------------------------------------------------
Py4JJavaError                             Traceback (most recent call last)
<command-3615515772639167> in <module>()
      1 #Run CDMLoad
----> 2 dbutils.notebook.run("CDMLoads/CDMLoad",0,{"TheScope":TheScope,"TheKey":TheKey,"StorageAccount":StorageAccount, "FileSystem":FileSystem, "Database":Database})

/local_disk0/tmp/1565905071244-0/dbutils.py in run(self, path, timeout_seconds, arguments, _NotebookHandler__databricks_internal_cluster_spec)
    134                 arguments,
    135                 __databricks_internal_cluster_spec,
--> 136                 self.shell.currentJobGroup)
    137 
    138         def __repr__(self):

/databricks/spark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1255         answer = self.gateway_client.send_command(command)
   1256         return_value = get_return_value(
-> 1257             answer, self.gateway_client, self.target_id, self.name)
   1258 
   1259         for temp_arg in temp_args:

/databricks/spark/python/pyspark/sql/utils.py in deco(*a, **kw)
     61     def deco(*a, **kw):
     62         try:
---> 63             return f(*a, **kw)
     64         except py4j.protocol.Py4JJavaError as e:
     65             s = e.java_exception.toString()

/databricks/spark/python/lib/py4j-0.10.7-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    326                 raise Py4JJavaError(
    327                     "An error occurred while calling {0}{1}{2}.\n".
--> 328                     format(target_id, ".", name), value)
    329             else:
    330                 raise Py4JError(

Py4JJavaError: An error occurred while calling o866._run.
: com.databricks.WorkflowException: com.databricks.NotebookExecutionException: FAILED
    at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:75)
    at com.databricks.dbutils_v1.impl.NotebookUtilsImpl.run(NotebookUtilsImpl.scala:90)
    at com.databricks.dbutils_v1.impl.NotebookUtilsImpl._run(NotebookUtilsImpl.scala:69)
    at sun.reflect.GeneratedMethodAccessor605.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)
    at py4j.Gateway.invoke(Gateway.java:295)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:251)
    at java.lang.Thread.run(Thread.java:748)
Caused by: com.databricks.NotebookExecutionException: FAILED
    at com.databricks.workflow.WorkflowDriver.run0(WorkflowDriver.scala:118)
    at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:61)
    ... 12 more

Solution

  • I fixed the problem by tuning the default Spark configuration: I increased the executor heartbeat interval and the network timeout by setting `spark.executor.heartbeatInterval 60s` and `spark.network.timeout 720s`.