ID: 2845
Description: Delta: Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query: Compute snapshot for version: 1
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 01:06:07
Duration: 0.7 s
Job IDs: [5115]

ID: 2846
Description: Delta: Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query: Compute snapshot for version: 1
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 01:06:07
Duration: 2 s
Job IDs: [5116][5117]

ID: 3202
Description: Delta: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query: Compute snapshot for version: 1
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:49
Duration: 0.8 s
Job IDs: [5817]

ID: 3203
Description: Delta: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query: Compute snapshot for version: 1
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:49
Duration: 2 s
Job IDs: [5818][5819]

ID: 3467
Description: Delta: Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query: Compute snapshot for version: 1
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/21 01:07:45
Duration: 0.8 s
Job IDs: [6235]

ID: 3468
Description: Delta: Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query: Compute snapshot for version: 1
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/21 01:07:46
Duration: 2 s
Job IDs: [6236][6237]

ID: 2983
Description: Delta: Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query: Compute snapshot for version: 11
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 05:59:02
Duration: 0.6 s
Job IDs: [5346]

ID: 2984
Description: Delta: Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query: Compute snapshot for version: 11
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 05:59:03
Duration: 2 s
Job IDs: [5347][5348]

ID: 3006
Description: Delta: Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query: Compute snapshot for version: 8
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 05:59:18
Duration: 0.3 s
Job IDs: [5379]

ID: 3007
Description: Delta: Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query: Compute snapshot for version: 8
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 05:59:18
Duration: 1 s
Job IDs: [5380][5381]

ID: 2847
Description: Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 01:06:09
Duration: 0.2 s
Job IDs: [5118]

ID: 2848
Description: Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 01:06:09
Duration: 0.3 s
Job IDs: [5119]

ID: 2849
Description: Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 01:06:10
Duration: 20 ms
Job IDs: (none)

ID: 2850
Description: Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 01:06:10
Duration: 0.5 s
Job IDs: [5120]

ID: 3204
Description: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:51
Duration: 0.2 s
Job IDs: [5820]

ID: 3205
Description: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:51
Duration: 0.3 s
Job IDs: [5821]

ID: 3206
Description: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:52
Duration: 47 ms
Job IDs: (none)

ID: 3207
Description: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:52
Duration: 0.5 s
Job IDs: [5822]

ID: 3469
Description: Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/21 01:07:47
Duration: 0.2 s
Job IDs: [6238]

ID: 3470
Description: Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/21 01:07:47
Duration: 0.4 s
Job IDs: [6239]

ID: 3471
Description: Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
Details (call stack):
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:48
|
20 ms
|
|
|
|
3472
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:48
|
0.4 s
|
[6240]
|
|
|
2985
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:05
|
0.2 s
|
[5349]
|
|
|
2986
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:05
|
0.6 s
|
[5350]
|
|
|
2987
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:06
|
23 ms
|
|
|
|
2988
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:06
|
0.8 s
|
[5351]
|
|
|
2997
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:14
|
0.4 s
|
[5372]
|
|
|
2998
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:14
|
0.2 s
|
[5373]
|
|
|
2999
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:14
|
94 ms
|
[5374]
|
|
|
3000
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:15
|
0.7 s
|
[5375]
|
|
|
3001
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:15
|
19 ms
|
|
|
|
3002
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:15
|
1 s
|
[5376]
|
|
|
3003
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:17
|
0.3 s
|
[5377]
|
|
|
3004
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:17
|
17 ms
|
|
|
|
3005
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:17
|
0.5 s
|
[5378]
|
|
|
3008
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:19
|
91 ms
|
[5382]
|
|
|
3009
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:19
|
0.2 s
|
[5383]
|
|
|
3010
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:20
|
18 ms
|
|
|
|
3011
|
Delta: replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 05:59:20
|
0.3 s
|
[5384]
|
|
|
2840
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/20 01:06:04
|
0.5 s
|
[5109]
|
|
|
2841
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/20 01:06:05
|
89 ms
|
[5110]
|
|
|
2842
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 01:06:06
|
0.3 s
|
[5111]
|
|
|
2843
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/20 01:06:06
|
0.4 s
|
[5112]
|
|
|
2844
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/20 01:06:06
|
0.1 s
|
[5113]
|
|
|
2851
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.Dataset.isEmpty(Dataset.scala:559)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.mapExternalProductPlantIds(InsightHandlerDataAccessImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.createUnknownDemandInsight(InsightHandlerImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.attachRawDemand(DemandSnapshotEnhancementImpl.java:65)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.withDemand(DemandSnapshotEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:56)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
|
2026/03/20 01:06:10
|
3 s
|
[5121][5122][5123][5124][5125][5126][5127][5128]
|
|
|
2852
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.Dataset.count(Dataset.scala:1499)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.mapExternalProductPlantIds(InsightHandlerDataAccessImpl.java:81)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.createUnknownDemandInsight(InsightHandlerImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.attachRawDemand(DemandSnapshotEnhancementImpl.java:65)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.withDemand(DemandSnapshotEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:56)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
|
2026/03/20 01:06:13
|
0.2 s
|
[5129][5130]
|
|
|
2853
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.Dataset.count(Dataset.scala:1499)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.mapExternalProductPlantIds(InsightHandlerDataAccessImpl.java:82)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.createUnknownDemandInsight(InsightHandlerImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.attachRawDemand(DemandSnapshotEnhancementImpl.java:65)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.withDemand(DemandSnapshotEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:56)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
|
2026/03/20 01:06:13
|
2 s
|
[5133][5134][5135][5136]
|
|
|
|
|
2854
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:14
|
19 ms
|
|
|
2855
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:14
|
0.7 s
|
[5131]
|
|
2856
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PreparedDeltaFileIndex.matchingFiles(PrepareDeltaScan.scala:389)
org.apache.spark.sql.delta.files.TahoeFileIndex.listAddFiles(TahoeFileIndex.scala:111)
org.apache.spark.sql.delta.files.TahoeFileIndex.listFiles(TahoeFileIndex.scala:103)
org.apache.spark.sql.execution.FileSourceScanLike.selectedPartitions(DataSourceScanExec.scala:297)
org.apache.spark.sql.execution.FileSourceScanLike.selectedPartitions$(DataSourceScanExec.scala:288)
org.apache.spark.sql.execution.FileSourceScanExec.selectedPartitions$lzycompute(DataSourceScanExec.scala:607)
org.apache.spark.sql.execution.FileSourceScanExec.selectedPartitions(DataSourceScanExec.scala:607)
org.apache.spark.sql.execution.FileSourceScanLike.dynamicallySelectedPartitions(DataSourceScanExec.scala:330)
org.apache.spark.sql.execution.FileSourceScanLike.dynamicallySelectedPartitions$(DataSourceScanExec.scala:309)
|
2026/03/20 01:06:14
|
0.1 s
|
[5132]
|
|
|
2857
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/20 01:06:15
|
0.2 s
|
[5137]
|
|
|
2859
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.Dataset.first(Dataset.scala:2687)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ConfigurationSnapshotEnhancementImpl.fetchEnableLostSalesStockPeriod(ConfigurationSnapshotEnhancementImpl.java:41)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ConfigurationSnapshotEnhancementImpl.withConfiguration(ConfigurationSnapshotEnhancementImpl.java:33)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:58)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 01:06:16
|
3 s
|
[5140][5141]
|
|
|
|
|
2860
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:16
|
0.9 s
|
[5138]
|
|
2861
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:17
|
19 ms
|
|
|
2862
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:17
|
2 s
|
[5139]
|
|
|
2863
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.Dataset.isEmpty(Dataset.scala:559)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.muListingCheck(ListingSnapshotEnhancementImpl.java:53)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.withListing(ListingSnapshotEnhancementImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:59)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 01:06:19
|
0.8 s
|
[5143][5144][5145][5146]
|
|
|
|
|
2864
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:19
|
0.2 s
|
[5142]
|
|
|
2865
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.Dataset.first(Dataset.scala:2687)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.muListingCheck(ListingSnapshotEnhancementImpl.java:54)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.withListing(ListingSnapshotEnhancementImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:59)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 01:06:20
|
0.8 s
|
[5148][5149]
|
|
|
|
|
2866
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:20
|
0.2 s
|
[5147]
|
|
|
2867
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:262)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:39)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 01:06:21
|
14 s
|
|
|
|
|
|
2868
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:262)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:39)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 01:06:21
|
13 s
|
[5160][5161][5162][5163][5164][5165][5166][5167][5168][5169][5170][5171][5172][5173][5174][5175][5176][5177][5178][5179][5180][5181][5182][5183][5184][5185][5186][5187][5188][5189][5190][5191][5192][5193][5194]
|
|
2869
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:22
|
0.2 s
|
[5150]
|
|
2870
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:22
|
0.1 s
|
[5151]
|
|
2871
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:22
|
0.2 s
|
[5152]
|
|
2872
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:22
|
17 ms
|
|
|
2873
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:22
|
0.6 s
|
[5153]
|
|
2874
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:23
|
0.2 s
|
[5154]
|
|
2875
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:24
|
84 ms
|
[5155]
|
|
2876
|
Delta: Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:24
|
0.3 s
|
[5156]
|
|
2877
|
Delta: Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:24
|
0.7 s
|
[5157][5158]
|
|
2878
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:25
|
78 ms
|
[5159]
|
|
2879
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1090)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1089)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.commands.WriteIntoDelta.writeAndReturnCommitData(WriteIntoDelta.scala:336)
org.apache.spark.sql.delta.commands.WriteIntoDelta.$anonfun$run$1(WriteIntoDelta.scala:109)
org.apache.spark.sql.delta.DeltaLog.withNewTransaction(DeltaLog.scala:247)
|
2026/03/20 01:06:34
|
29 ms
|
[5195]
|
|
2880
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.OptimisticTransaction.recordOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.OptimisticTransaction.recordDeltaOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$doCommitRetryIteratively$1(OptimisticTransaction.scala:2328)
org.apache.spark.sql.delta.OptimisticTransactionImpl.lockCommitIfEnabled(OptimisticTransaction.scala:2299)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively(OptimisticTransaction.scala:2315)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively$(OptimisticTransaction.scala:2311)
org.apache.spark.sql.delta.OptimisticTransaction.doCommitRetryIteratively(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$commitImpl$1(OptimisticTransaction.scala:1515)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:171)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:169)
org.apache.spark.sql.delta.OptimisticTransaction.recordFrameProfile(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
|
2026/03/20 01:06:34
|
61 ms
|
[5196]
|
|
|
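Entries 2879 and 2880 belong to the commit side of a Delta write rather than a read: WriteIntoDelta.writeAndReturnCommitData opens an optimistic transaction, filterFiles determines which existing files the write touches, and doCommitRetryIteratively retries the commit if a concurrent writer committed first. A rough sketch of a write that exercises this path; an overwrite with replaceWhere is only one example of a write that needs filterFiles, and the table path, column, and mode are hypothetical:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;

    public class DeltaOverwriteSketch {
        // Illustrative helper: replace the data of one replenishment run inside a Delta table.
        // Delta resolves which existing files match the replaceWhere predicate (filterFiles)
        // and then commits the new table version with optimistic retries.
        static void overwriteRun(Dataset<Row> rows, String tablePath, long runId) {
            rows.write()
                    .format("delta")
                    .mode(SaveMode.Overwrite)
                    .option("replaceWhere", "replenishmentRunId = " + runId)
                    .save(tablePath);
        }
    }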
2881
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:189)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.notifyShardFinished(CreateDataSnapshotForShardActivityImpl.java:58)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:40)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/03/20 01:06:35
|
3 s
|
[5197][5198][5199][5200][5201]
|
|
|
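Entry 2881 (about 3 s, jobs [5197] through [5201]) is the activity's own persistence step: FileStorageAdapterImpl.storePerformanceMetrics hands a DataFrame to DataFrameWriter.save from inside the Temporal activity bufferDataSnapShot. A hedged sketch of what such an adapter method could look like; the storage format, save mode, and target path are assumptions, not the service's actual implementation:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;

    public class FileStorageAdapterSketch {
        private final String basePath;

        public FileStorageAdapterSketch(String basePath) {
            this.basePath = basePath;
        }

        // Hypothetical counterpart of storePerformanceMetrics: append the per-shard
        // performance metrics to a table under the adapter's base path.
        public void storePerformanceMetrics(Dataset<Row> metrics) {
            metrics.write()
                    .format("delta")          // assumption: Delta, like the other writes in this log
                    .mode(SaveMode.Append)
                    .save(basePath + "/performance_metrics");
        }
    }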
2883
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:126)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.KafkaAdapter.publishMessageToBTPKafkaWithEncryption(KafkaAdapter.java:173)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.KafkaAdapter.publishInsights(KafkaAdapter.java:58)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.publishInsights(InsightHandlerDataAccessImpl.java:47)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.publish(InsightHandlerImpl.java:46)
java.base/java.lang.Iterable.forEach(Unknown Source)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:63)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
|
2026/03/20 01:06:38
|
2 s
|
[5205][5206][5207][5208][5209][5210][5211][5212][5213]
|
|
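Entry 2883 (jobs [5205] through [5213]) is the insight publication step: KafkaAdapter.publishMessageToBTPKafkaWithEncryption drives a DataFrameWriter.save, which suggests the batch is written through Spark's Kafka sink rather than to files. A sketch under that assumption; the broker list, topic, and JSON serialization are placeholders, the spark-sql-kafka connector is assumed to be on the classpath, and the encryption named in the method is not shown:

    import static org.apache.spark.sql.functions.col;
    import static org.apache.spark.sql.functions.struct;
    import static org.apache.spark.sql.functions.to_json;

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;

    public class KafkaPublishSketch {
        // Hypothetical counterpart of publishInsights: serialize each row to JSON
        // and write the whole batch through Spark's Kafka sink in one save() call.
        static void publishInsights(Dataset<Row> insights, String brokers, String topic) {
            insights.select(to_json(struct(col("*"))).alias("value"))
                    .write()
                    .format("kafka")
                    .option("kafka.bootstrap.servers", brokers)
                    .option("topic", topic)
                    .save();
        }
    }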
2884
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:38
|
91 ms
|
[5202]
|
|
2885
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:38
|
0.1 s
|
[5203]
|
|
2886
|
Delta: replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 01:06:39
|
0.2 s
|
[5204]
|
|
|
2887
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = cef5d1bf-9c10-3645-a9ca-4bab0d7169c0 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = 141c3efe-47f0-32a8-981f-09028ec902c6 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.Dataset.isEmpty(Dataset.scala:559)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storeOGRDocumentInformation(FileStorageAdapterImpl.java:347)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.TraceabilityDataAccessImpl.storeOGRDocumentInformation(TraceabilityDataAccessImpl.java:40)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.OpenGoodsMovementsTracerImpl.publish(OpenGoodsMovementsTracerImpl.java:51)
java.base/java.lang.Iterable.forEach(Unknown Source)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:63)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
|
2026/03/20 01:06:40
|
15 ms
|
|
|
|
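Entry 2887 (15 ms) is only an emptiness probe: storeOGRDocumentInformation calls Dataset.isEmpty before deciding whether to persist the shard's open goods movement documents, which explains the short duration. A sketch of that guard pattern; the method name, format, and target path are illustrative:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;

    public class OgrDocumentStoreSketch {
        // Hypothetical guard mirroring the isEmpty() call in the trace:
        // skip the write entirely when the shard produced no documents.
        static void storeOgrDocuments(Dataset<Row> documents, String targetPath) {
            if (documents.isEmpty()) {
                return; // nothing to persist for this shard
            }
            documents.write()
                    .format("delta")   // assumption: same storage format as the other writes
                    .mode(SaveMode.Append)
                    .save(targetPath);
        }
    }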
2824
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 9e1ec4a1-ddd5-3bbd-a75a-1c206f456548 workflowType = CalculateOrderProposalsWorkflow workflowId = 6776ab81-8705-353e-bb18-6ea8447555d8 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.Dataset.javaRDD(Dataset.scala:3270)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.CachePortImpl.<init>(CachePortImpl.java:66)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.OrderQuantityKpiProviderImpl.create(OrderQuantityKpiProviderImpl.java:38)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.OrderQuantityKpiProviderImpl.create(OrderQuantityKpiProviderImpl.java:11)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl$LifecycleControlledObjectHolder.<init>(CalculationObjectProviderImpl.java:23)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl.lambda$lookupOrCreate$0(CalculationObjectProviderImpl.java:83)
java.base/java.util.concurrent.ConcurrentHashMap.computeIfAbsent(Unknown Source)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl.lookupOrCreate(CalculationObjectProviderImpl.java:83)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl.get(CalculationObjectProviderImpl.java:57)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:47)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
|
2026/03/20 01:04:43
|
58 ms
|
|
|
|
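Entry 2824 shows the calculation objects for the PrepareStreams activity being created lazily: CalculationObjectProviderImpl.lookupOrCreate goes through ConcurrentHashMap.computeIfAbsent, so the constructor work in CachePortImpl (including the Dataset.javaRDD call visible in the trace) runs at most once per key. A generic sketch of that lookup-or-create pattern; the key and value types and the factory interface are assumptions:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Supplier;

    public class CalculationObjectProviderSketch<T> {
        // One entry per cache key; the expensive factory (which may itself trigger
        // Spark work such as Dataset.javaRDD) is invoked at most once per key.
        private final ConcurrentHashMap<String, T> objects = new ConcurrentHashMap<>();

        public T lookupOrCreate(String key, Supplier<T> factory) {
            return objects.computeIfAbsent(key, ignored -> factory.get());
        }
    }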
2825
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 9e1ec4a1-ddd5-3bbd-a75a-1c206f456548 workflowType = CalculateOrderProposalsWorkflow workflowId = 6776ab81-8705-353e-bb18-6ea8447555d8 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.writeToStorage(FileStorageAdapterImpl.java:129)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.prepareKpiCurveUpdateStream(FileStorageAdapterImpl.java:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:47)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 01:04:43
|
3 s
|
|
|
2826
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 9e1ec4a1-ddd5-3bbd-a75a-1c206f456548 workflowType = CalculateOrderProposalsWorkflow workflowId = 6776ab81-8705-353e-bb18-6ea8447555d8 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.writeToStorage(FileStorageAdapterImpl.java:129)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.prepareKpiCurveUpdateStream(FileStorageAdapterImpl.java:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:47)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 01:04:44
|
2 s
|
[5091]
|
|
2827
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 9e1ec4a1-ddd5-3bbd-a75a-1c206f456548 workflowType = CalculateOrderProposalsWorkflow workflowId = 6776ab81-8705-353e-bb18-6ea8447555d8 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1090)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1089)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.commands.WriteIntoDelta.writeAndReturnCommitData(WriteIntoDelta.scala:336)
org.apache.spark.sql.delta.commands.WriteIntoDelta.$anonfun$run$1(WriteIntoDelta.scala:109)
org.apache.spark.sql.delta.DeltaLog.withNewTransaction(DeltaLog.scala:247)
|
2026/03/20 01:04:45
|
0.1 s
|
[5092]
|
|
2828
|
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 9e1ec4a1-ddd5-3bbd-a75a-1c206f456548 workflowType = CalculateOrderProposalsWorkflow workflowId = 6776ab81-8705-353e-bb18-6ea8447555d8 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.OptimisticTransaction.recordOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.OptimisticTransaction.recordDeltaOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$doCommitRetryIteratively$1(OptimisticTransaction.scala:2328)
org.apache.spark.sql.delta.OptimisticTransactionImpl.lockCommitIfEnabled(OptimisticTransaction.scala:2299)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively(OptimisticTransaction.scala:2315)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively$(OptimisticTransaction.scala:2311)
org.apache.spark.sql.delta.OptimisticTransaction.doCommitRetryIteratively(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$commitImpl$1(OptimisticTransaction.scala:1515)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:171)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:169)
org.apache.spark.sql.delta.OptimisticTransaction.recordFrameProfile(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
Submitted: 2026/03/20 01:04:46 | Duration: 0.2 s | Job IDs: [5093]

ID: 2829
replenishmentRunId = 10000000348 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 9e1ec4a1-ddd5-3bbd-a75a-1c206f456548 workflowType = CalculateOrderProposalsWorkflow workflowId = 6776ab81-8705-353e-bb18-6ea8447555d8 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
Call site:
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:189)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:50)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/20 01:04:46 | Duration: 2 s | Job IDs: [5094][5095][5096][5097][5098]
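
Entry 2829 above ends in DataFrameWriter.save called from FileStorageAdapterImpl.storePerformanceMetrics, i.e. the PrepareStreams activity persisting its performance metrics as a plain DataFrame write. The following is only a minimal Java sketch of that kind of write, not the service's actual code; the format, target path and column names are assumptions for illustration.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class PerformanceMetricsWriteSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("performance-metrics-write-sketch")
                .master("local[*]")
                .getOrCreate();

        // Hypothetical stand-in for the metrics the activity collects; the real service
        // builds this DataFrame from the run's KPI data.
        Dataset<Row> metrics = spark.sql(
                "SELECT CAST(10000000348 AS BIGINT) AS replenishmentRunId, "
                        + "'PrepareStreams' AS activityType, 2.0 AS durationSeconds");

        // DataFrameWriter.save(path) is the frame that appears in the listing above.
        // The 'delta' format and the target path are assumptions for this sketch and
        // require the delta-spark package on the classpath.
        metrics.write()
                .format("delta")
                .mode(SaveMode.Append)
                .save("/tmp/kpi/performance-metrics");

        spark.stop();
    }
}

Because save() is an action, the write submits its own Spark jobs, which is consistent with the run of Job IDs recorded for this entry.
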
ID: 3197
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
Submitted: 2026/03/20 20:07:46 | Duration: 0.3 s | Job IDs: [5811]

ID: 3198
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
Submitted: 2026/03/20 20:07:46 | Duration: 0.4 s | Job IDs: [5812]

ID: 3199
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/20 20:07:47 | Duration: 0.3 s | Job IDs: [5813]

ID: 3200
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
Submitted: 2026/03/20 20:07:47 | Duration: 0.4 s | Job IDs: [5814]

ID: 3201
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
Submitted: 2026/03/20 20:07:48 | Duration: 78 ms | Job IDs: [5815]
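
Entries 3197, 3198, 3200 and 3201 all bottom out in DeltaLog$.apply behind a Guava LocalCache load: the first access to each Delta table path builds the table's DeltaLog and caches it before any data is scanned (entry 3199 appears to be the same initialization work handed off to Delta's internal thread pool). Below is a minimal Java sketch of the kind of read that triggers this; the path is a placeholder, not the service's real storage location.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class DeltaLogInitSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("delta-log-init-sketch")
                .master("local[*]")
                .getOrCreate();

        // The first read of a Delta path constructs the table's DeltaLog (log replay,
        // snapshot state) and stores it in an internal cache keyed by path -- the
        // LocalCache.get(...) frames in the entries above. The path here is a placeholder.
        Dataset<Row> bufferData = spark.read()
                .format("delta")
                .load("/tmp/kpi/buffer-data");

        bufferData.printSchema();
        spark.stop();
    }
}

Subsequent accesses to the same path reuse the cached DeltaLog, so this initialization cost is paid once per table per application.
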
ID: 3208
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.classic.Dataset.isEmpty(Dataset.scala:559)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.mapExternalProductPlantIds(InsightHandlerDataAccessImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.createUnknownDemandInsight(InsightHandlerImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.attachRawDemand(DemandSnapshotEnhancementImpl.java:65)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.withDemand(DemandSnapshotEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:56)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/03/20 20:07:52 | Duration: 2 s | Job IDs: [5823][5824][5825][5826][5827][5828][5829][5830]

ID: 3209
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.classic.Dataset.count(Dataset.scala:1499)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.mapExternalProductPlantIds(InsightHandlerDataAccessImpl.java:81)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.createUnknownDemandInsight(InsightHandlerImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.attachRawDemand(DemandSnapshotEnhancementImpl.java:65)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.withDemand(DemandSnapshotEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:56)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/03/20 20:07:55 | Duration: 0.3 s | Job IDs: [5831][5832]

ID: 3210
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.classic.Dataset.count(Dataset.scala:1499)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.mapExternalProductPlantIds(InsightHandlerDataAccessImpl.java:82)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.createUnknownDemandInsight(InsightHandlerImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.attachRawDemand(DemandSnapshotEnhancementImpl.java:65)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.withDemand(DemandSnapshotEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:56)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/03/20 20:07:55 | Duration: 1 s | Job IDs: [5834][5836][5837][5838]
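
Entries 3208-3210 are a Dataset.isEmpty followed by two Dataset.count calls issued from InsightHandlerDataAccessImpl.mapExternalProductPlantIds while building the unknown-demand insight. Each of these is a Spark action, so each submits its own jobs, which is why the rows carry separate Job ID lists. The following is a hedged Java sketch of that access pattern; the data and column names are invented for illustration.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class UnknownDemandInsightSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("unknown-demand-insight-sketch")
                .master("local[*]")
                .getOrCreate();

        // Invented stand-in for the demand rows whose external product/plant IDs need mapping.
        Dataset<Row> unknownDemand = spark.sql(
                "SELECT * FROM VALUES ('P-1', 'PL-1'), ('P-2', 'PL-2') AS t(productId, plantId)");

        // isEmpty() and count() are actions: each triggers at least one Spark job, which is
        // why entries 3208-3210 each carry their own Job ID list.
        if (!unknownDemand.isEmpty()) {
            long totalRows = unknownDemand.count();
            long distinctPlants = unknownDemand.select("plantId").distinct().count();
            System.out.printf("rows=%d distinctPlants=%d%n", totalRows, distinctPlants);
        }

        spark.stop();
    }
}
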
ID: 3211
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:55 | Duration: 17 ms | Job IDs: (none)

ID: 3212
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:55 | Duration: 0.7 s | Job IDs: [5833]

ID: 3213
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PreparedDeltaFileIndex.matchingFiles(PrepareDeltaScan.scala:389)
org.apache.spark.sql.delta.files.TahoeFileIndex.listAddFiles(TahoeFileIndex.scala:111)
org.apache.spark.sql.delta.files.TahoeFileIndex.listFiles(TahoeFileIndex.scala:103)
org.apache.spark.sql.execution.FileSourceScanLike.selectedPartitions(DataSourceScanExec.scala:297)
org.apache.spark.sql.execution.FileSourceScanLike.selectedPartitions$(DataSourceScanExec.scala:288)
org.apache.spark.sql.execution.FileSourceScanExec.selectedPartitions$lzycompute(DataSourceScanExec.scala:607)
org.apache.spark.sql.execution.FileSourceScanExec.selectedPartitions(DataSourceScanExec.scala:607)
org.apache.spark.sql.execution.FileSourceScanLike.dynamicallySelectedPartitions(DataSourceScanExec.scala:330)
org.apache.spark.sql.execution.FileSourceScanLike.dynamicallySelectedPartitions$(DataSourceScanExec.scala:309)
Submitted: 2026/03/20 20:07:56 | Duration: 0.3 s | Job IDs: [5835]
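
The "Filtering files for query" entries (3211, 3212 and the later ones with the same call site) and the matchingFiles/listFiles trace in entry 3213 come from Delta's data-skipping path: filesForScan prunes the table's data files against the query predicate before the scan is planned. Below is a minimal Java sketch of a filtered Delta read that goes through this path; the path and filter column are placeholders, not the service's actual table layout.

import static org.apache.spark.sql.functions.col;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class DeltaFileSkippingSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("delta-file-skipping-sketch")
                .master("local[*]")
                .getOrCreate();

        // A predicate on a Delta read goes through PrepareDeltaScan/filesForScan, which uses
        // per-file statistics to drop files that cannot match before the scan job is planned.
        // The path and filter column below are placeholders.
        Dataset<Row> listings = spark.read()
                .format("delta")
                .load("/tmp/kpi/listings")
                .filter(col("marketUnit").equalTo("STEPHI_MU"));

        // The action is what finally submits the scan job(s) shown in the Job IDs column.
        System.out.println("matching rows: " + listings.count());

        spark.stop();
    }
}

Entries whose Job IDs column is empty completed without launching a job, presumably because the file listing could be answered from snapshot state already in memory.
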
ID: 3214
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
Submitted: 2026/03/20 20:07:57 | Duration: 0.2 s | Job IDs: [5839]

ID: 3216
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.Dataset.first(Dataset.scala:2687)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ConfigurationSnapshotEnhancementImpl.fetchEnableLostSalesStockPeriod(ConfigurationSnapshotEnhancementImpl.java:41)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ConfigurationSnapshotEnhancementImpl.withConfiguration(ConfigurationSnapshotEnhancementImpl.java:33)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:58)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/20 20:07:57 | Duration: 3 s | Job IDs: [5842][5843]

ID: 3217
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:58 | Duration: 0.9 s | Job IDs: [5840]

ID: 3218
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:58 | Duration: 17 ms | Job IDs: (none)

ID: 3219
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:07:58 | Duration: 1 s | Job IDs: [5841]

ID: 3220
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.classic.Dataset.isEmpty(Dataset.scala:559)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.muListingCheck(ListingSnapshotEnhancementImpl.java:53)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.withListing(ListingSnapshotEnhancementImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:59)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/20 20:08:00 | Duration: 0.5 s | Job IDs: [5845][5846][5847][5848]

ID: 3221
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/20 20:08:00 | Duration: 0.2 s | Job IDs: [5844]

ID: 3222
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
Call site:
org.apache.spark.sql.Dataset.first(Dataset.scala:2687)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.muListingCheck(ListingSnapshotEnhancementImpl.java:54)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.withListing(ListingSnapshotEnhancementImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:59)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/20 20:08:01 | Duration: 0.5 s | Job IDs: [5850][5851]

ID: 3223
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:01
|
0.1 s
|
[5849]
|
|
|
3224
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:262)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:39)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 20:08:02
|
14 s
|
|
|
|
|
| ID | Description | Submitted | Duration | Job IDs |
|
3225
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:262)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:39)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 20:08:02
|
13 s
|
[5862][5863][5864][5865][5866][5867][5868][5869][5870][5871][5872][5873][5874][5875][5876][5877][5878][5879][5880][5881][5882][5883][5884][5885][5886][5887][5888][5889][5890][5891][5892][5893][5894]
|
|
3226
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:02
|
0.2 s
|
[5852]
|
|
3227
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:02
|
0.2 s
|
[5853]
|
|
3228
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:03
|
0.3 s
|
[5854]
|
|
3229
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:03
|
18 ms
|
|
|
3230
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:03
|
0.6 s
|
[5855]
|
|
3231
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:04
|
0.2 s
|
[5856]
|
|
3232
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:04
|
96 ms
|
[5857]
|
|
3233
|
Delta: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2
+details
Delta: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2 org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:04
|
0.2 s
|
[5858]
|
|
3234
|
Delta: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2
+details
Delta: Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2 org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:05
|
0.8 s
|
[5859][5860]
|
|
3235
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:05
|
79 ms
|
[5861]
|
|
3236
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1090)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1089)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.commands.WriteIntoDelta.writeAndReturnCommitData(WriteIntoDelta.scala:336)
org.apache.spark.sql.delta.commands.WriteIntoDelta.$anonfun$run$1(WriteIntoDelta.scala:109)
org.apache.spark.sql.delta.DeltaLog.withNewTransaction(DeltaLog.scala:247)
|
2026/03/20 20:08:15
|
26 ms
|
[5895]
|
|
3237
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.OptimisticTransaction.recordOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.OptimisticTransaction.recordDeltaOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$doCommitRetryIteratively$1(OptimisticTransaction.scala:2328)
org.apache.spark.sql.delta.OptimisticTransactionImpl.lockCommitIfEnabled(OptimisticTransaction.scala:2299)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively(OptimisticTransaction.scala:2315)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively$(OptimisticTransaction.scala:2311)
org.apache.spark.sql.delta.OptimisticTransaction.doCommitRetryIteratively(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$commitImpl$1(OptimisticTransaction.scala:1515)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:171)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:169)
org.apache.spark.sql.delta.OptimisticTransaction.recordFrameProfile(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
|
2026/03/20 20:08:15
|
53 ms
|
[5896]
|
|
|
3238
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:189)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.notifyShardFinished(CreateDataSnapshotForShardActivityImpl.java:58)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:40)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/03/20 20:08:16
|
1 s
|
[5897][5898][5899][5900][5901]
|
|
|
3240
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:126)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.KafkaAdapter.publishMessageToBTPKafkaWithEncryption(KafkaAdapter.java:173)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.KafkaAdapter.publishInsights(KafkaAdapter.java:58)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.publishInsights(InsightHandlerDataAccessImpl.java:47)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.publish(InsightHandlerImpl.java:46)
java.base/java.lang.Iterable.forEach(Unknown Source)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:63)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
|
2026/03/20 20:08:17
|
2 s
|
[5905][5906][5907][5908][5909][5910][5911][5912][5913]
|
|
|
|
| ID | Description | Submitted | Duration | Job IDs |
|
3241
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:17
|
87 ms
|
[5902]
|
|
3242
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:17
|
85 ms
|
[5903]
|
|
3243
|
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/20 20:08:18
|
0.2 s
|
[5904]
|
|
|
3244
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = 9ce6dde1-bee3-3e54-97ac-988588623651 workflowType = KpiPrepareDataSnapshotWorkflow workflowId = d6e60974-0b16-3a8e-9769-26b7657438e2 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.classic.Dataset.isEmpty(Dataset.scala:559)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storeOGRDocumentInformation(FileStorageAdapterImpl.java:347)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.TraceabilityDataAccessImpl.storeOGRDocumentInformation(TraceabilityDataAccessImpl.java:40)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.OpenGoodsMovementsTracerImpl.publish(OpenGoodsMovementsTracerImpl.java:51)
java.base/java.lang.Iterable.forEach(Unknown Source)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:63)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
|
2026/03/20 20:08:19
|
9 ms
|
|
|
|
3181
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.Dataset.javaRDD(Dataset.scala:3270)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.CachePortImpl.<init>(CachePortImpl.java:66)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.OrderQuantityKpiProviderImpl.create(OrderQuantityKpiProviderImpl.java:38)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.OrderQuantityKpiProviderImpl.create(OrderQuantityKpiProviderImpl.java:11)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl$LifecycleControlledObjectHolder.<init>(CalculationObjectProviderImpl.java:23)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl.lambda$lookupOrCreate$0(CalculationObjectProviderImpl.java:83)
java.base/java.util.concurrent.ConcurrentHashMap.computeIfAbsent(Unknown Source)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl.lookupOrCreate(CalculationObjectProviderImpl.java:83)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl.get(CalculationObjectProviderImpl.java:57)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:47)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
|
2026/03/20 20:06:37
|
54 ms
|
|
|
|
3182
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.writeToStorage(FileStorageAdapterImpl.java:129)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.prepareKpiCurveUpdateStream(FileStorageAdapterImpl.java:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:47)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 20:06:37
|
4 s
|
|
|
3183
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.writeToStorage(FileStorageAdapterImpl.java:129)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.prepareKpiCurveUpdateStream(FileStorageAdapterImpl.java:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:47)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 20:06:38
|
2 s
|
[5793]
|
|
3184
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1090)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1089)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.commands.WriteIntoDelta.writeAndReturnCommitData(WriteIntoDelta.scala:336)
org.apache.spark.sql.delta.commands.WriteIntoDelta.$anonfun$run$1(WriteIntoDelta.scala:109)
org.apache.spark.sql.delta.DeltaLog.withNewTransaction(DeltaLog.scala:247)
|
2026/03/20 20:06:40
|
0.2 s
|
[5794]
|
|
3185
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.OptimisticTransaction.recordOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.OptimisticTransaction.recordDeltaOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$doCommitRetryIteratively$1(OptimisticTransaction.scala:2328)
org.apache.spark.sql.delta.OptimisticTransactionImpl.lockCommitIfEnabled(OptimisticTransaction.scala:2299)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively(OptimisticTransaction.scala:2315)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively$(OptimisticTransaction.scala:2311)
org.apache.spark.sql.delta.OptimisticTransaction.doCommitRetryIteratively(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$commitImpl$1(OptimisticTransaction.scala:1515)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:171)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:169)
org.apache.spark.sql.delta.OptimisticTransaction.recordFrameProfile(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
|
2026/03/20 20:06:40
|
0.2 s
|
[5795]
|
|
|
3186
|
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD
+details
replenishmentRunId = 10000000349 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = ffc7fa2c-d8df-38fa-88f0-fd3124a069ad workflowType = CalculateOrderProposalsWorkflow workflowId = f1005e7f-1925-3940-8274-10d50a8b3333 attempt = 1 cornerstoneTenantId = 8469 marketUnit = STEPHI_MU scenario = STANDARD org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:189)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:50)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/20 20:06:41
|
3 s
|
[5796][5797][5798][5799][5800]
|
|
|
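The PrepareStreams entries above (3181-3186) reach Spark either through a Dataset action (javaRDD) or through org.apache.spark.sql.classic.DataFrameWriter.save, i.e. Delta-format writes issued from FileStorageAdapterImpl. As a minimal, hypothetical sketch of the call shape behind those DataFrameWriter.save frames (the class name, output path, and save mode are placeholders, not taken from this log):

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;

public class DeltaWriteSketch {
    // Hypothetical illustration of the write pattern seen in the traces above;
    // the target path and save mode are placeholders, not values from this log.
    static void writeKpiCurveUpdateStream(Dataset<Row> kpiCurves) {
        kpiCurves.write()
                .format("delta")        // Delta sink: this is what leads into the
                                        // OptimisticTransaction / filesForScan frames
                .mode(SaveMode.Append)  // assumed mode; the actual mode is not visible here
                .save("/mnt/kpi/curve_update_stream");  // placeholder path
    }
}

Each such save() call commits through Delta's optimistic transaction, which explains why the write entries are followed by filterFiles and doCommitRetryIteratively frames.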
3462
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/21 01:07:42
|
0.4 s
|
[6229]
|
|
|
3463
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/21 01:07:43
|
0.2 s
|
[6230]
|
|
|
3464
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/21 01:07:44
|
0.2 s
|
[6231]
|
|
|
3465
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/21 01:07:44
|
0.3 s
|
[6232]
|
|
|
3466
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/21 01:07:44
|
83 ms
|
[6233]
|
|
|
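Entries 3462-3466 above spend their time in DeltaLog$.apply resolving the table's Delta log through a Guava cache (LocalCache.get, lockedGetOrLoad, loadFuture), so repeated accesses to the same table path reuse one in-memory log instance. The frames correspond to the plain Guava Cache.get(key, loader) idiom; a self-contained sketch of that idiom follows, where the key type, value type, and loader body are placeholders rather than Delta internals:

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.ExecutionException;

public class LogCacheSketch {
    // Illustrative only: a Guava cache keyed by table path, mirroring the
    // LocalCache frames in the traces above. The value type and loading logic
    // are placeholders, not the actual DeltaLog implementation.
    private final Cache<String, Object> logCache =
            CacheBuilder.newBuilder().maximumSize(1_000).build();

    Object getOrLoad(String tablePath) throws ExecutionException {
        // Cache.get(key, loader) is the call that surfaces as
        // LocalCache$LocalManualCache.get / loadFuture in the stack traces.
        return logCache.get(tablePath, () -> expensiveInit(tablePath));
    }

    private Object expensiveInit(String tablePath) {
        return new Object();  // placeholder for reading the table's log and building a snapshot
    }
}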
3473
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.classic.Dataset.isEmpty(Dataset.scala:559)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.mapExternalProductPlantIds(InsightHandlerDataAccessImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.createUnknownDemandInsight(InsightHandlerImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.attachRawDemand(DemandSnapshotEnhancementImpl.java:65)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.withDemand(DemandSnapshotEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:56)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/03/21 01:07:48
|
3 s
|
[6241][6242][6243][6244][6245][6246][6247][6248]
|
|
|
3474
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.classic.Dataset.count(Dataset.scala:1499)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.mapExternalProductPlantIds(InsightHandlerDataAccessImpl.java:81)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.createUnknownDemandInsight(InsightHandlerImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.attachRawDemand(DemandSnapshotEnhancementImpl.java:65)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.withDemand(DemandSnapshotEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:56)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/03/21 01:07:51
|
0.3 s
|
[6249][6250]
|
|
|
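Entries 3473 and 3474 above are not writes but Dataset actions: Dataset.isEmpty at InsightHandlerDataAccessImpl.java:62 and Dataset.count at lines 81-82. Each such call is an action, so each launches the Spark job IDs listed beside the entry. A hypothetical sketch of that call pattern, with method and variable names as placeholders:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

public class ActionSketch {
    // Each call below is a Spark action and submits at least one job,
    // which is why a single activity attempt fans out into several job IDs.
    static void inspectUnknownDemand(Dataset<Row> unknownDemand) {
        if (!unknownDemand.isEmpty()) {         // action: job(s) to check for any row
            long rows = unknownDemand.count();  // action: further job(s) to count the rows
            System.out.println("unknown demand rows: " + rows);
        }
    }
}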
3475
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.classic.Dataset.count(Dataset.scala:1499)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.mapExternalProductPlantIds(InsightHandlerDataAccessImpl.java:82)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.createUnknownDemandInsight(InsightHandlerImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.attachRawDemand(DemandSnapshotEnhancementImpl.java:65)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DemandSnapshotEnhancementImpl.withDemand(DemandSnapshotEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:56)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/03/21 01:07:51
|
2 s
|
[6253][6254][6255][6256]
|
|
3476
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:51
|
20 ms
|
|
|
3477
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:51
|
0.7 s
|
[6251]
|
|
3478
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PreparedDeltaFileIndex.matchingFiles(PrepareDeltaScan.scala:389)
org.apache.spark.sql.delta.files.TahoeFileIndex.listAddFiles(TahoeFileIndex.scala:111)
org.apache.spark.sql.delta.files.TahoeFileIndex.listFiles(TahoeFileIndex.scala:103)
org.apache.spark.sql.execution.FileSourceScanLike.selectedPartitions(DataSourceScanExec.scala:297)
org.apache.spark.sql.execution.FileSourceScanLike.selectedPartitions$(DataSourceScanExec.scala:288)
org.apache.spark.sql.execution.FileSourceScanExec.selectedPartitions$lzycompute(DataSourceScanExec.scala:607)
org.apache.spark.sql.execution.FileSourceScanExec.selectedPartitions(DataSourceScanExec.scala:607)
org.apache.spark.sql.execution.FileSourceScanLike.dynamicallySelectedPartitions(DataSourceScanExec.scala:330)
org.apache.spark.sql.execution.FileSourceScanLike.dynamicallySelectedPartitions$(DataSourceScanExec.scala:309)
|
2026/03/21 01:07:52
|
0.1 s
|
[6252]
|
|
|
3479
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
|
2026/03/21 01:07:53
|
0.2 s
|
[6257]
|
|
|
3481
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
+details
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD org.apache.spark.sql.Dataset.first(Dataset.scala:2687)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ConfigurationSnapshotEnhancementImpl.fetchEnableLostSalesStockPeriod(ConfigurationSnapshotEnhancementImpl.java:41)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ConfigurationSnapshotEnhancementImpl.withConfiguration(ConfigurationSnapshotEnhancementImpl.java:33)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:58)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/21 01:07:54
|
2 s
|
[6260][6261]
|
|
3482
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:54
|
0.7 s
|
[6258]
|
|
3483
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:55
|
18 ms
|
|
|
3484
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
+details
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:55
|
1 s
|
[6259]
|
|
|
3485
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.Dataset.isEmpty(Dataset.scala:559)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.muListingCheck(ListingSnapshotEnhancementImpl.java:53)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.withListing(ListingSnapshotEnhancementImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:59)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/21 01:07:56
|
1 s
|
[6263][6264][6265][6266]
|
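Row 3485 above is the listing check: ListingSnapshotEnhancementImpl.muListingCheck calls Dataset.isEmpty on the listing data, and that isEmpty call launches its own small Spark job (job IDs 6263 to 6266), which is why the check appears as a separate row. A minimal sketch of that pattern, assuming hypothetical join keys since the actual implementation is not part of this log:

import org.apache.spark.sql.DataFrame

object ListingSnapshotSketch {
  // Hypothetical shape of withListing/muListingCheck as implied by the stack
  // trace of row 3485; only the isEmpty call is confirmed by the log.
  def withListing(snapshot: DataFrame, listing: DataFrame): DataFrame =
    if (listing.isEmpty) snapshot // Dataset.isEmpty runs a small job, visible as its own row
    else snapshot.join(listing, Seq("product", "location"), "left_semi") // join keys are assumptions
}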
|
3486
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:57
|
0.1 s
|
[6262]
|
|
|
3487
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.Dataset.first(Dataset.scala:2687)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.muListingCheck(ListingSnapshotEnhancementImpl.java:54)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.ListingSnapshotEnhancementImpl.withListing(ListingSnapshotEnhancementImpl.java:34)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:59)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/21 01:07:58
|
0.9 s
|
[6268][6269]
|
|
3488
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:58
|
0.1 s
|
[6267]
|
|
|
3489
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:262)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:39)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/21 01:07:59
|
14 s
|
|
|
3490
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:262)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:39)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/21 01:07:59
|
13 s
|
[6280][6281][6282][6283][6284][6285][6286][6287][6288][6289][6290][6291][6292][6293][6294][6295][6296][6297][6298][6299][6300][6301][6302][6303][6304][6305][6306][6307][6308][6309][6310][6311][6312][6313][6314][6315]
|
|
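Rows 3489 and 3490 above are the snapshot write itself: FileStorageAdapterImpl.storeDataSnapshot drives DataFrameWriter.save, which fans out into jobs 6280 through 6315 and takes about 13 to 14 seconds, the largest share of this activity. A minimal sketch of such a write, assuming a Delta-format overwrite to an object-store path; the real path, options and partitioning are not visible in this log:

import org.apache.spark.sql.{DataFrame, SaveMode}

object SnapshotWriteSketch {
  // Hypothetical stand-in for storeDataSnapshot from the trace; only the
  // DataFrameWriter.save call itself is confirmed by the log.
  def storeDataSnapshot(snapshot: DataFrame, targetPath: String): Unit =
    snapshot.write
      .format("delta")          // consistent with the surrounding Delta log entries
      .mode(SaveMode.Overwrite) // assumption: the snapshot is fully rewritten per run
      .save(targetPath)
}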
3491
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:07:59
|
0.1 s
|
[6270]
|
|
3492
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:08:00
|
0.2 s
|
[6271]
|
|
3493
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:08:00
|
0.2 s
|
[6272]
|
|
3494
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:08:00
|
17 ms
|
|
|
3495
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:08:00
|
0.6 s
|
[6273]
|
|
3496
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:08:02
|
0.1 s
|
[6274]
|
|
3497
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:08:02
|
77 ms
|
[6275]
|
|
3498
|
Delta: Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:08:02
|
0.2 s
|
[6276]
|
|
3499
|
Delta: Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:08:02
|
0.6 s
|
[6277][6278]
|
|
3500
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
|
2026/03/21 01:08:03
|
80 ms
|
[6279]
|
|
3501
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1090)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1089)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.commands.WriteIntoDelta.writeAndReturnCommitData(WriteIntoDelta.scala:336)
org.apache.spark.sql.delta.commands.WriteIntoDelta.$anonfun$run$1(WriteIntoDelta.scala:109)
org.apache.spark.sql.delta.DeltaLog.withNewTransaction(DeltaLog.scala:247)
|
2026/03/21 01:08:12
|
27 ms
|
[6316]
|
|
3502
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.OptimisticTransaction.recordOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.OptimisticTransaction.recordDeltaOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$doCommitRetryIteratively$1(OptimisticTransaction.scala:2328)
org.apache.spark.sql.delta.OptimisticTransactionImpl.lockCommitIfEnabled(OptimisticTransaction.scala:2299)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively(OptimisticTransaction.scala:2315)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively$(OptimisticTransaction.scala:2311)
org.apache.spark.sql.delta.OptimisticTransaction.doCommitRetryIteratively(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$commitImpl$1(OptimisticTransaction.scala:1515)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:171)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:169)
org.apache.spark.sql.delta.OptimisticTransaction.recordFrameProfile(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
|
2026/03/21 01:08:12
|
56 ms
|
[6317]
|
|
|
3503
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:189)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.notifyShardFinished(CreateDataSnapshotForShardActivityImpl.java:58)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:40)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:62)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/03/21 01:08:13
|
2 s
|
[6318][6319][6320][6321][6322]
|
|
|
3505
|
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:126)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.KafkaAdapter.publishMessageToBTPKafkaWithEncryption(KafkaAdapter.java:173)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.KafkaAdapter.publishInsights(KafkaAdapter.java:58)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.InsightHandlerDataAccessImpl.publishInsights(InsightHandlerDataAccessImpl.java:47)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.insights.InsightHandlerImpl.publish(InsightHandlerImpl.java:46)
java.base/java.lang.Iterable.forEach(Unknown Source)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:63)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
|
2026/03/21 01:08:15
|
2 s
|
[6326][6327][6328][6329][6330][6331][6332][6333][6334]
|
|
3506
|
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/21 01:08:15 | Duration: 0.1 s | Job IDs: [6323]

3507
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/21 01:08:15 | Duration: 0.1 s | Job IDs: [6324]

3508
Delta: replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1265)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:134)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:119)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:114)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:308)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:152)
Submitted: 2026/03/21 01:08:15 | Duration: 0.2 s | Job IDs: [6325]

3509
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = BufferDataSnapShot activityId = d11927ad-3153-358a-914c-ba06a15db40a workflowType = KpiPrepareDataSnapshotWorkflow workflowId = ae4415f9-75c3-3ec1-ab81-34528dde231c attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.Dataset.isEmpty(Dataset.scala:559)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storeOGRDocumentInformation(FileStorageAdapterImpl.java:347)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.TraceabilityDataAccessImpl.storeOGRDocumentInformation(TraceabilityDataAccessImpl.java:40)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.OpenGoodsMovementsTracerImpl.publish(OpenGoodsMovementsTracerImpl.java:51)
java.base/java.lang.Iterable.forEach(Unknown Source)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:63)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:44)
jdk.internal.reflect.GeneratedMethodAccessor1659.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
Submitted: 2026/03/21 01:08:17 | Duration: 9 ms

3446
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 272f2b55-32ed-342f-81a4-d6e310c1b3aa workflowType = CalculateOrderProposalsWorkflow workflowId = 2c47fb86-b292-3663-a679-aa9cf2542963 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.Dataset.javaRDD(Dataset.scala:3270)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.business.CachePortImpl.<init>(CachePortImpl.java:66)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.OrderQuantityKpiProviderImpl.create(OrderQuantityKpiProviderImpl.java:38)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.OrderQuantityKpiProviderImpl.create(OrderQuantityKpiProviderImpl.java:11)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl$LifecycleControlledObjectHolder.<init>(CalculationObjectProviderImpl.java:23)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl.lambda$lookupOrCreate$0(CalculationObjectProviderImpl.java:83)
java.base/java.util.concurrent.ConcurrentHashMap.computeIfAbsent(Unknown Source)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl.lookupOrCreate(CalculationObjectProviderImpl.java:83)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.cachelifecycle.CalculationObjectProviderImpl.get(CalculationObjectProviderImpl.java:57)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:47)
jdk.internal.reflect.GeneratedMethodAccessor1609.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
Submitted: 2026/03/21 01:06:11 | Duration: 59 ms

3447
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 272f2b55-32ed-342f-81a4-d6e310c1b3aa workflowType = CalculateOrderProposalsWorkflow workflowId = 2c47fb86-b292-3663-a679-aa9cf2542963 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.writeToStorage(FileStorageAdapterImpl.java:129)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.prepareKpiCurveUpdateStream(FileStorageAdapterImpl.java:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:47)
jdk.internal.reflect.GeneratedMethodAccessor1609.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/21 01:06:11 | Duration: 3 s

| ID | Description | Submitted | Duration | Job IDs |

3448
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 272f2b55-32ed-342f-81a4-d6e310c1b3aa workflowType = CalculateOrderProposalsWorkflow workflowId = 2c47fb86-b292-3663-a679-aa9cf2542963 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.writeToStorage(FileStorageAdapterImpl.java:129)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.prepareKpiCurveUpdateStream(FileStorageAdapterImpl.java:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:47)
jdk.internal.reflect.GeneratedMethodAccessor1609.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/21 01:06:12 | Duration: 2 s | Job IDs: [6211]

3449
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 272f2b55-32ed-342f-81a4-d6e310c1b3aa workflowType = CalculateOrderProposalsWorkflow workflowId = 2c47fb86-b292-3663-a679-aa9cf2542963 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:87)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1207)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1204)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:87)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1090)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1089)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransactionImpl.filterFiles$(OptimisticTransaction.scala:1086)
org.apache.spark.sql.delta.OptimisticTransaction.filterFiles(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.commands.WriteIntoDelta.writeAndReturnCommitData(WriteIntoDelta.scala:336)
org.apache.spark.sql.delta.commands.WriteIntoDelta.$anonfun$run$1(WriteIntoDelta.scala:109)
org.apache.spark.sql.delta.DeltaLog.withNewTransaction(DeltaLog.scala:247)
Submitted: 2026/03/21 01:06:13 | Duration: 0.1 s | Job IDs: [6212]

3450
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 272f2b55-32ed-342f-81a4-d6e310c1b3aa workflowType = CalculateOrderProposalsWorkflow workflowId = 2c47fb86-b292-3663-a679-aa9cf2542963 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.OptimisticTransaction.recordOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.OptimisticTransaction.recordDeltaOperation(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$doCommitRetryIteratively$1(OptimisticTransaction.scala:2328)
org.apache.spark.sql.delta.OptimisticTransactionImpl.lockCommitIfEnabled(OptimisticTransaction.scala:2299)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively(OptimisticTransaction.scala:2315)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively$(OptimisticTransaction.scala:2311)
org.apache.spark.sql.delta.OptimisticTransaction.doCommitRetryIteratively(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$commitImpl$1(OptimisticTransaction.scala:1515)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:171)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:169)
org.apache.spark.sql.delta.OptimisticTransaction.recordFrameProfile(OptimisticTransaction.scala:169)
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
Submitted: 2026/03/21 01:06:14 | Duration: 0.2 s | Job IDs: [6213]

3451
replenishmentRunId = 10000000350 tenantId = 7233423560970044043 activityType = PrepareStreams activityId = 272f2b55-32ed-342f-81a4-d6e310c1b3aa workflowType = CalculateOrderProposalsWorkflow workflowId = 2c47fb86-b292-3663-a679-aa9cf2542963 attempt = 1 cornerstoneTenantId = 8469 marketUnit = AUTO_ALL_ProdLoc scenario = STANDARD
org.apache.spark.sql.classic.DataFrameWriter.save(DataFrameWriter.scala:118)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:189)
com.sap.s4hana.eureka.business.crporderquantitykpiservice.core.controller.KpiCalculationStreamingActivityImpl.prepareStreams(KpiCalculationStreamingActivityImpl.java:50)
jdk.internal.reflect.GeneratedMethodAccessor1609.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/21 01:06:15 | Duration: 2 s | Job IDs: [6214][6215][6216][6217][6218]

2976
replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/20 05:58:59 | Duration: 0.3 s | Job IDs: [5338]

2977
replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
Submitted: 2026/03/20 05:58:59 | Duration: 0.5 s | Job IDs: [5339]

2978
replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:693)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:972)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:996)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4903)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3574)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2316)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2190)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2080)
com.google.common.cache.LocalCache.get(LocalCache.java:4017)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4898)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:995)
org.apache.spark.sql.delta.DeltaLog$.initializeDeltaLog$1(DeltaLog.scala:1006)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:1017)
Submitted: 2026/03/20 05:59:00 | Duration: 0.3 s | Job IDs: [5340]

2979
replenishmentRunId = 10000000678 tenantId = 6951945722030153353 activityType = BufferDataSnapShot activityId = 372339e3-e78f-3202-9d96-da74ae3e269f workflowType = KpiPrepareDataSnapshotWorkflow workflowId = df824559-c1c8-3a48-92fe-1082cc951e9b attempt = 1 cornerstoneTenantId = 8468 marketUnit = IW_MU_CRP-125444_1 scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/03/20 05:59:01 | Duration: 0.2 s | Job IDs: [5341]