Error

org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.token.SecretManager$InvalidToken): Token for real user: , can't be found in cache
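This failure comes from the Hadoop security layer, not from acc-bpt itself: the HDFS delegation token presented by the Spark session cannot be found in the NameNode's token cache, which typically means it has expired (past its maximum lifetime) or is no longer recognized by the cluster, for example when Kerberos credentials lapse under a long-running extraction. A minimal pre-flight check is sketched below, assuming an MIT Kerberos client (klist/kinit) on the host; the principal and keytab path are illustrative placeholders, not acc-bpt configuration:

    import subprocess

    def kerberos_ticket_valid() -> bool:
        # `klist -s` exits 0 when the credential cache holds a valid,
        # non-expired ticket, and non-zero otherwise.
        return subprocess.run(["klist", "-s"]).returncode == 0

    def ensure_kerberos_ticket(principal: str, keytab: str) -> None:
        # Re-authenticate from a keytab if the cache is empty or stale.
        # Both arguments are illustrative placeholders.
        if not kerberos_ticket_valid():
            subprocess.run(["kinit", "-kt", keytab, principal], check=True)

    # Hypothetical usage before starting the extraction:
    # ensure_kerberos_ticket("acc-bpt@CERN.CH", "/path/to/acc-bpt.keytab")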

Details

Traceback (most recent call last):
  File "/opt/acc-bpt/release_2026_01/acc_bpt/shared/BptV2.py", line 284, in build
    rawdata = self.bptStatic.fetch(vars, times)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/acc_bpt/psb/shared/script_psb_h0hm.py", line 83, in fetch
    return super().fetch(var_defs, time_ranges, cache)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/acc_bpt/psb/shared/PsbBptStatic.py", line 30, in fetch
    sdf = self.nxcals.fetch_vars_list(time_ranges[0], time_ranges[1], vars_list=var_defs, acc_name='PSB',
          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/acc_bpt/shared/utils_nxcals.py", line 385, in fetch_vars_list
    return self._fetch_vars_of_same_type(ts1, ts2, vars_like=None, vars_list=vars_list, acc_name=acc_name,
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/acc_bpt/shared/utils_nxcals.py", line 436, in _fetch_vars_of_same_type
    sdfs = self._get_vars_raw(nxcals_query, vars_like, vars_list, system)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/acc_bpt/shared/utils_nxcals.py", line 452, in _get_vars_raw
    return nxcals_query._get_varslist_raw(vars_list, system)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/acc_bpt/shared/utils_nxcals_queries.py", line 185, in _get_varslist_raw
    .buildDataset() \
     ^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/venv/lib/python3.11/site-packages/nxcals/api/extraction/data/common.py", line 111, in buildDataset
    return self.build()
           ^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/venv/lib/python3.11/site-packages/nxcals/api/extraction/data/common.py", line 114, in build
    return self._builder._build()
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/venv/lib/python3.11/site-packages/nxcals/api/extraction/data/builders.py", line 33, in _build
    df = self._get_java_builder().build()
         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/venv/lib/python3.11/site-packages/py4j/java_gateway.py", line 1322, in __call__
    return_value = get_return_value(
                   ^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/venv/lib/python3.11/site-packages/pyspark/errors/exceptions/captured.py", line 179, in deco
    return f(*a, **kw)
           ^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/venv/lib/python3.11/site-packages/py4j/protocol.py", line 326, in get_return_value
    raise Py4JJavaError(
py4j.protocol.Py4JJavaError: An error occurred while calling o1336.build.
: java.io.UncheckedIOException: Cannot access hdfs file system to check for path /project/nxcals/nxcals_pro/data/2/39223/335508/2026/4/23/__sys_nxcals_time_partition__=0-__sys_nxcals_entity_bucket__=0-*.parquet
	at cern.nxcals.api.extraction.data.spark.HdfsDatasetCreator.exists(HdfsDatasetCreator.java:80)
	at java.base/java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:176)
	at java.base/java.util.HashMap$KeySpliterator.forEachRemaining(HashMap.java:1621)
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484)
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:550)
	at java.base/java.util.stream.AbstractPipeline.evaluateToArrayNode(AbstractPipeline.java:260)
	at java.base/java.util.stream.ReferencePipeline.toArray(ReferencePipeline.java:517)
	at cern.nxcals.api.extraction.data.spark.HdfsDatasetCreator.getValidPaths(HdfsDatasetCreator.java:65)
	at cern.nxcals.api.extraction.data.spark.HdfsDatasetCreator.apply(HdfsDatasetCreator.java:46)
	at cern.nxcals.api.extraction.data.spark.HdfsDatasetCreator.apply(HdfsDatasetCreator.java:31)
	at cern.nxcals.api.extraction.data.spark.SparkExtractionTaskProcessor.execute(SparkExtractionTaskProcessor.java:27)
	at cern.nxcals.api.extraction.data.spark.SparkExtractionTaskProcessor.execute(SparkExtractionTaskProcessor.java:13)
	at cern.nxcals.common.domain.HdfsExtractionTask.processWith(HdfsExtractionTask.java:20)
	at cern.nxcals.api.extraction.data.spark.SparkExtractionTaskExecutor.toOptionalDataset(SparkExtractionTaskExecutor.java:85)
	at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195)
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1655)
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484)
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:913)
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
	at java.base/java.util.stream.ReferencePipeline.reduce(ReferencePipeline.java:558)
	at cern.nxcals.api.extraction.data.spark.SparkExtractionTaskExecutor.execute(SparkExtractionTaskExecutor.java:56)
	at cern.nxcals.api.extraction.data.builders.SparkDatasetProducer.apply(SparkDatasetProducer.java:34)
	at cern.nxcals.api.extraction.data.builders.SparkDatasetProducer.apply(SparkDatasetProducer.java:17)
	at cern.nxcals.api.extraction.data.builders.fluent.QueryData.build(QueryData.java:167)
	at cern.nxcals.api.extraction.data.builders.fluent.VariableStageLoop.build(VariableStageLoop.java:18)
	at jdk.internal.reflect.GeneratedMethodAccessor97.invoke(Unknown Source)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374)
	at py4j.Gateway.invoke(Gateway.java:282)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
	at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
	at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.token.SecretManager$InvalidToken): Token for real user: , can't be found in cache
	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1567)
	at org.apache.hadoop.ipc.Client.call(Client.java:1513)
	at org.apache.hadoop.ipc.Client.call(Client.java:1410)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139)
	at com.sun.proxy.$Proxy32.getListing(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getListing(ClientNamenodeProtocolTranslatorPB.java:689)
	at jdk.internal.reflect.GeneratedMethodAccessor19.invoke(Unknown Source)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
	at com.sun.proxy.$Proxy33.getListing(Unknown Source)
	at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:1702)
	at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:1686)
	at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:1113)
	at org.apache.hadoop.hdfs.DistributedFileSystem.access$600(DistributedFileSystem.java:149)
	at org.apache.hadoop.hdfs.DistributedFileSystem$24.doCall(DistributedFileSystem.java:1188)
	at org.apache.hadoop.hdfs.DistributedFileSystem$24.doCall(DistributedFileSystem.java:1185)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:1195)
	at org.apache.hadoop.fs.Globber.listStatus(Globber.java:128)
	at org.apache.hadoop.fs.Globber.doGlob(Globber.java:291)
	at org.apache.hadoop.fs.Globber.glob(Globber.java:202)
	at org.apache.hadoop.fs.FileSystem.globStatus(FileSystem.java:2225)
	at cern.nxcals.api.extraction.data.spark.HdfsDatasetCreator.exists(HdfsDatasetCreator.java:78)
	... 37 more


During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/opt/acc-bpt/release_2026_01/jinja/build_static_plots.py", line 85, in _create_static_html_content
    output_html_file_path = driver.build(abs_plot_dest_file_html)
                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/opt/acc-bpt/release_2026_01/acc_bpt/shared/BptV2.py", line 304, in build
    raise RuntimeError(cause)
RuntimeError: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.token.SecretManager$InvalidToken): Token for real user: , can't be found in cache
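Because BptV2.py line 304 re-raises only the flattened cause text as a RuntimeError, callers such as build_static_plots.py cannot distinguish a token expiry from any other extraction failure except by inspecting the message. A defensive wrapper along these lines is one option (a sketch only: the retry policy and the reuse of the hypothetical ensure_kerberos_ticket helper above are assumptions, and a delegation token past its maximum lifetime usually also requires recreating the Spark session, since tokens are obtained when the session starts):

    def build_with_token_retry(driver, dest_html: str, retries: int = 1):
        # Retry driver.build() after refreshing Kerberos credentials when
        # the failure looks like an expired HDFS delegation token. Matching
        # on message text is fragile but unavoidable here, because the
        # Java-side cause arrives flattened into a RuntimeError string.
        for attempt in range(retries + 1):
            try:
                return driver.build(dest_html)
            except RuntimeError as exc:
                if "InvalidToken" not in str(exc) or attempt == retries:
                    raise
                ensure_kerberos_ticket("acc-bpt@CERN.CH",
                                       "/path/to/acc-bpt.keytab")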

Generated 2026-04-30 23:52:03.373113+02:00