Sorry — I thought you would be able to access the logs yourself if I provided the job ID.
The error is the following:
openeo::log_job("vito-8a342504-52be-4064-be0e-4e359abdfe23")
# [ERROR] error processing batch job
# Traceback (most recent call last):
# File "batch_job.py", line 319, in main
# run_driver()
# File "batch_job.py", line 292, in run_driver
# run_job(
# File "/data2/hadoop/yarn/local/usercache/hendrik.wagenseil/appcache/application_1654997540016_13264/container_e5040_1654997540016_13264_01_000001/venv/lib/python3.8/site-packages/openeogeotrellis/utils.py", line 43, in memory_logging_wrapper
# return function(*args, **kwargs)
# File "batch_job.py", line 388, in run_job
# assets_metadata = result.write_assets(str(output_file))
# File "/data2/hadoop/yarn/local/usercache/hendrik.wagenseil/appcache/application_1654997540016_13264/container_e5040_1654997540016_13264_01_000001/venv/lib/python3.8/site-packages/openeo_driver/save_result.py", line 110, in write_assets
# return self.cube.write_assets(filename=directory, format=self.format, format_options=self.options)
# File "/data2/hadoop/yarn/local/usercache/hendrik.wagenseil/appcache/application_1654997540016_13264/container_e5040_1654997540016_13264_01_000001/venv/lib/python3.8/site-packages/openeogeotrellis/geopysparkdatacube.py", line 1547, in write_assets
# timestamped_paths = self._get_jvm().org.openeo.geotrellis.geotiff.package.saveRDDTemporal(
# File "/opt/spark3_2_0/python/lib/py4j-0.10.9.2-src.zip/py4j/java_gateway.py", line 1309, in __call__
# return_value = get_return_value(
# File "/opt/spark3_2_0/python/lib/py4j-0.10.9.2-src.zip/py4j/protocol.py", line 326, in get_return_value
# raise Py4JJavaError(
# py4j.protocol.Py4JJavaError: An error occurred while calling z:org.openeo.geotrellis.geotiff.package.saveRDDTemporal.
# : org.apache.spark.SparkException: Job aborted due to stage failure: Task 236 in stage 9.0 failed 4 times, most recent failure: Lost task 236.3 in stage 9.0 (TID 447) (epod071.vgt.vito.be executor 70): org.openeo.geotrellissentinelhub.SentinelHubException: Sentinel Hub returned an error
# response: HTTP/1.1 500 Internal Server Error with body: {"error":{"status":500,"reason":"Internal Server Error","message":"Illegal request to creo://EODATA/Sentinel-3/SLSTR/SL_1_RBT/2020/12/02/S3A_SL_1_RBT____20201202T182300_20201202T182600_20201202T202032_0180_065_355_2160_LN2_O_NR_004.SEN3/S9_BT_in.nc. HTTP Status: '404' On CreoDIAS, you get 'Illegal request 403 error' also when a file is missing. So first make sure that the file is present.","code":"RENDERER_EXCEPTION"}}
# request: POST https://creodias.sentinel-hub.com/api/v1/process with body: {
# "input": {
# "bounds": {
# "bbox": [-119.471475, 44.375460460394756, -116.93179246039475, 46.915143],
# "properties": {
# "crs": "http://www.opengis.net/def/crs/EPSG/0/4326"
# }
# },
# "data": [
# {
# "type": "sentinel-3-slstr",
# "dataFilter": {"timeRange":{"from":"2020-12-02T00:00:00Z","to":"2020-12-03T00:00:00Z"},"maxCloudCoverage":50,"orbitDirection":"DESCENDING"},
# "processing": {}
# }
# ]
# },
# "output": {
# "width": 256,
# "height": 256,
# "responses": [
# {
# "identifier": "default",
# "format": {
# "type": "image/tiff"
# }
# }
# ]
# },
# "evalscript": "//VERSION=3\nfunction setup() {\n return {\n input: [{\n \"bands\": [\"S9\"]\n }],\n output: {\n bands: 1,\n sampleType: \"FLOAT32\",\n }\n };\n}\n\nfunction evaluatePixel(sample) {\n return [sample.S9];\n}"
# }
# at org.openeo.geotrellissentinelhub.SentinelHubException$.apply(SentinelHubException.scala:19)
# at org.openeo.geotrellissentinelhub.DefaultProcessApi.$anonfun$getTile$8(ProcessApi.scala:130)
# at org.openeo.geotrellissentinelhub.DefaultProcessApi.$anonfun$getTile$8$adapted(ProcessApi.scala:120)
# at scalaj.http.HttpRequest.$anonfun$toResponse$17(Http.scala:422)
# at scala.Option.getOrElse(Option.scala:189)
# at scalaj.http.HttpRequest.$anonfun$toResponse$14(Http.scala:414)
# at scala.Option.getOrElse(Option.scala:189)
# at scalaj.http.HttpRequest.toResponse(Http.scala:414)
# at scalaj.http.HttpRequest.doConnection(Http.scala:368)
# at scalaj.http.HttpRequest.exec(Http.scala:343)
# at org.openeo.geotrellissentinelhub.DefaultProcessApi.$anonfun$getTile$7(ProcessApi.scala:120)
# at org.openeo.geotrellissentinelhub.package$$anon$1.get(package.scala:60)
# at net.jodah.failsafe.Functions.lambda$get$0(Functions.java:46)
# at net.jodah.failsafe.RetryPolicyExecutor.lambda$supply$0(RetryPolicyExecutor.java:65)
# at net.jodah.failsafe.Execution.executeSync(Execution.java:128)
# at net.jodah.failsafe.FailsafeExecutor.call(FailsafeExecutor.java:378)
# at net.jodah.failsafe.FailsafeExecutor.get(FailsafeExecutor.java:68)
# at org.openeo.geotrellissentinelhub.package$.withRetries(package.scala:59)
# at org.openeo.geotrellissentinelhub.DefaultProcessApi.getTile(ProcessApi.scala:119)
# at org.openeo.geotrellissentinelhub.PyramidFactory.$anonfun$datacube_seq$1(PyramidFactory.scala:193)
# at org.openeo.geotrellissentinelhub.MemoizedRlGuardAdapterCachedAccessTokenWithAuthApiFallbackAuthorizer.authorized(Authorizer.scala:46)
# at org.openeo.geotrellissentinelhub.PyramidFactory.authorized(PyramidFactory.scala:56)
# at org.openeo.geotrellissentinelhub.PyramidFactory.org$openeo$geotrellissentinelhub$PyramidFactory$$getTile$1(PyramidFactory.scala:191)
# at org.openeo.geotrellissentinelhub.PyramidFactory.org$openeo$geotrellissentinelhub$PyramidFactory$$dataTile$1(PyramidFactory.scala:201)
# at org.openeo.geotrellissentinelhub.PyramidFactory.loadMasked$1(PyramidFactory.scala:226)
# at org.openeo.geotrellissentinelhub.PyramidFactory.$anonfun$datacube_seq$17(PyramidFactory.scala:286)
# at scala.collection.Iterator$$anon$10.next(Iterator.scala:459)
# at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:512)
# at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
# at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:511)
# at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:489)
# at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
# at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
# at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
# at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.write(UnsafeShuffleWriter.java:179)
# at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
# at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
# at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)
# at org.apache.spark.scheduler.Task.run(Task.scala:131)
# at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:506)
# at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1462)
# at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:509)
# at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
# at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
# at java.base/java.lang.Thread.run(Thread.java:829)
#
# Driver stacktrace:
# at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2403)
# at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2352)
# at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2351)
# at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
# at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
# at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
# at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2351)
# at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1109)
# at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1109)
# at scala.Option.foreach(Option.scala:407)
# at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1109)
# at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2591)
# at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2533)
# at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2522)
# at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
# at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:898)
# at org.apache.spark.SparkContext.runJob(SparkContext.scala:2214)
# at org.apache.spark.SparkContext.runJob(SparkContext.scala:2235)
# at org.apache.spark.SparkContext.runJob(SparkContext.scala:2254)
# at org.apache.spark.SparkContext.runJob(SparkContext.scala:2279)
# at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1030)
# at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
# at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
# at org.apache.spark.rdd.RDD.withScope(RDD.scala:414)
# at org.apache.spark.rdd.RDD.collect(RDD.scala:1029)
# at org.openeo.geotrellis.geotiff.package$.saveRDDTemporal(package.scala:136)
# at org.openeo.geotrellis.geotiff.package.saveRDDTemporal(package.scala)
# at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
# at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
# at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
# at java.base/java.lang.reflect.Method.invoke(Method.java:566)
# at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
# at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
# at py4j.Gateway.invoke(Gateway.java:282)
# at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
# at py4j.commands.CallCommand.execute(CallCommand.java:79)
# at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
# at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
# at java.base/java.lang.Thread.run(Thread.java:829)
# Caused by: org.openeo.geotrellissentinelhub.SentinelHubException: Sentinel Hub returned an error
# response: HTTP/1.1 500 Internal Server Error with body: {"error":{"status":500,"reason":"Internal Server Error","message":"Illegal request to creo://EODATA/Sentinel-3/SLSTR/SL_1_RBT/2020/12/02/S3A_SL_1_RBT____20201202T182300_20201202T182600_20201202T202032_0180_065_355_2160_LN2_O_NR_004.SEN3/S9_BT_in.nc. HTTP Status: '404' On CreoDIAS, you get 'Illegal request 403 error' also when a file is missing. So first make sure that the file is present.","code":"RENDERER_EXCEPTION"}}
# request: POST https://creodias.sentinel-hub.com/api/v1/process with body: {
# "input": {
# "bounds": {
# "bbox": [-119.471475, 44.375460460394756, -116.93179246039475, 46.915143],
# "properties": {
# "crs": "http://www.opengis.net/def/crs/EPSG/0/4326"
# }
# },
# "data": [
# {
# "type": "sentinel-3-slstr",
# "dataFilter": {"timeRange":{"from":"2020-12-02T00:00:00Z","to":"2020-12-03T00:00:00Z"},"maxCloudCoverage":50,"orbitDirection":"DESCENDING"},
# "processing": {}
# }
# ]
# },
# "output": {
# "width": 256,
# "height": 256,
# "responses": [
# {
# "identifier": "default",
# "format": {
# "type": "image/tiff"
# }
# }
# ]
# },
# "evalscript": "//VERSION=3\nfunction setup() {\n return {\n input: [{\n \"bands\": [\"S9\"]\n }],\n output: {\n bands: 1,\n sampleType: \"FLOAT32\",\n }\n };\n}\n\nfunction evaluatePixel(sample) {\n return [sample.S9];\n}"
# }
# at org.openeo.geotrellissentinelhub.SentinelHubException$.apply(SentinelHubException.scala:19)
# at org.openeo.geotrellissentinelhub.DefaultProcessApi.$anonfun$getTile$8(ProcessApi.scala:130)
# at org.openeo.geotrellissentinelhub.DefaultProcessApi.$anonfun$getTile$8$adapted(ProcessApi.scala:120)
# at scalaj.http.HttpRequest.$anonfun$toResponse$17(Http.scala:422)
# at scala.Option.getOrElse(Option.scala:189)
# at scalaj.http.HttpRequest.$anonfun$toResponse$14(Http.scala:414)
# at scala.Option.getOrElse(Option.scala:189)
# at scalaj.http.HttpRequest.toResponse(Http.scala:414)
# at scalaj.http.HttpRequest.doConnection(Http.scala:368)
# at scalaj.http.HttpRequest.exec(Http.scala:343)
# at org.openeo.geotrellissentinelhub.DefaultProcessApi.$anonfun$getTile$7(ProcessApi.scala:120)
# at org.openeo.geotrellissentinelhub.package$$anon$1.get(package.scala:60)
# at net.jodah.failsafe.Functions.lambda$get$0(Functions.java:46)
# at net.jodah.failsafe.RetryPolicyExecutor.lambda$supply$0(RetryPolicyExecutor.java:65)
# at net.jodah.failsafe.Execution.executeSync(Execution.java:128)
# at net.jodah.failsafe.FailsafeExecutor.call(FailsafeExecutor.java:378)
# at net.jodah.failsafe.FailsafeExecutor.get(FailsafeExecutor.java:68)
# at org.openeo.geotrellissentinelhub.package$.withRetries(package.scala:59)
# at org.openeo.geotrellissentinelhub.DefaultProcessApi.getTile(ProcessApi.scala:119)
# at org.openeo.geotrellissentinelhub.PyramidFactory.$anonfun$datacube_seq$1(PyramidFactory.scala:193)
# at org.openeo.geotrellissentinelhub.MemoizedRlGuardAdapterCachedAccessTokenWithAuthApiFallbackAuthorizer.authorized(Authorizer.scala:46)
# at org.openeo.geotrellissentinelhub.PyramidFactory.authorized(PyramidFactory.scala:56)
# at org.openeo.geotrellissentinelhub.PyramidFactory.org$openeo$geotrellissentinelhub$PyramidFactory$$getTile$1(PyramidFactory.scala:191)
# at org.openeo.geotrellissentinelhub.PyramidFactory.org$openeo$geotrellissentinelhub$PyramidFactory$$dataTile$1(PyramidFactory.scala:201)
# at org.openeo.geotrellissentinelhub.PyramidFactory.loadMasked$1(PyramidFactory.scala:226)
# at org.openeo.geotrellissentinelhub.PyramidFactory.$anonfun$datacube_seq$17(PyramidFactory.scala:286)
# at scala.collection.Iterator$$anon$10.next(Iterator.scala:459)
# at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:512)
# at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
# at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:511)
# at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:489)
# at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
# at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
# at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
# at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.write(UnsafeShuffleWriter.java:179)
# at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
# at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
# at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)
# at org.apache.spark.scheduler.Task.run(Task.scala:131)
# at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:506)
# at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1462)
# at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:509)
# at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
# at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
# ... 1 more