| ID | Description | Submitted ▴ | Duration | Job IDs | Sub Execution IDs |
|---|---|---|---|---|---|
| 0 | Spark Connect - session_id: "968efe1c-80d2-4c9c-8f88-84933a24d214" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:22:29 | 0.9 s | [0] | |
| 1 | Spark Connect - session_id: "968efe1c-80d2-4c9c-8f88-84933a24d214" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:22:50 | 39 ms | | |
| 2 | Spark Connect - session_id: "968efe1c-80d2-4c9c-8f88-84933a24d214" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:22:54 | 4 s | | |
| 5 | Spark Connect - session_id: "968efe1c-80d2-4c9c-8f88-84933a24d214" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:25:36 | 18 ms | | |
| 8 | Spark Connect - session_id: "f15d8ea2-f375-4431-9a62-15dfcf3e3fdd" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:26:52 | 79 ms | [3] | |
| 10 | Spark Connect - session_id: "8c0de3c6-0219-44ff-b69c-319d15b20457" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:30:02 | 52 ms | [4] | |
| 11 | Spark Connect - session_id: "c72cf9f2-3595-4d09-adfc-629a083c596a" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:31:36 | 74 ms | [5] | |
| 12 | Spark Connect - session_id: "4ab3976b-dcbe-4020-b06b-15c63f8fd1c7" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:34:17 | 49 ms | [6] | |
| 13 | Spark Connect - session_id: "4ab3976b-dcbe-4020-b06b-15c63f8fd1c7" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:34:35 | 97 ms | | |
| 14 | Spark Connect - session_id: "4ab3976b-dcbe-4020-b06b-15c63f8fd1c7" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:34:40 | 2 s | | |
| 16 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:36:10 | 65 ms | [9] | |
| 17 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:36:22 | 78 ms | | |
| 18 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:36:24 | 0.5 s | | |
| 19 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:36:26 | 1.0 s | | |
| 21 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:36:30 | 1.0 s | [12][13] | |
| 22 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:38:04 | 71 ms | | |
| 23 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:38:39 | 49 ms | | |
| 24 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:38:39 | 2 ms | | |
| 26 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:41:33 | 1 s | | |
| 30 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:44:54 | 1.0 s | | |
| 32 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:44:57 | 0.3 s | [18] | |
| 33 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:45:25 | 20 ms | | |
| 34 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:45:25 | 2 ms | | |
| 36 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:45:41 | 1 s | | |
| 38 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:45:57 | 0.2 s | | |
| 39 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:45:57 | 2 ms | | |
| 40 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:46:02 | 48 ms | | |
| 41 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:46:19 | 18 ms | | |
| 42 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { root { common { pla... | 2026/05/11 04:46:19 | 2 ms | | |
| ID ▾ | Description | Submitted | Duration | Succeeded Job IDs | Failed Job IDs | Error Message | Sub Execution IDs |
|---|---|---|---|---|---|---|---|
| 35 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:45:28 | 1 ms | | | TABLE_OR_VIEW_NOT_FOUND[TABLE_OR_VIEW_NOT_FOUND] The table or view `bronze`.`test`.`test` cannot be found. Verify the spelling and correctness of the schema and catalog. If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog. To tolerate the error on drop use DROP VIEW IF EXISTS or DROP TABLE IF EXISTS. SQLSTATE: 42P01; line 1 pos 14; 'Project [*] +- 'UnresolvedRelation [bronze, test, test], [], false | |
| 29 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:44:40 | 1 ms | | | TABLE_OR_VIEW_NOT_FOUND[TABLE_OR_VIEW_NOT_FOUND] The table or view `bronze`.`test`.`test` cannot be found. Verify the spelling and correctness of the schema and catalog. If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog. To tolerate the error on drop use DROP VIEW IF EXISTS or DROP TABLE IF EXISTS. SQLSTATE: 42P01; line 1 pos 14; 'Project [*] +- 'UnresolvedRelation [bronze, test, test], [], false | |
| 28 |
Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71"
user_context {
user_id: "iqran"
}
plan {
command {
sql_command {
...
Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71"
user_context {
user_id: "iqran"
}
plan {
command {
sql_command {
...
|
2026/05/11 04:41:50 | 69 ms | java.lang.NoSuchMethodError: 'org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation.create(org.apache.spark.sql.connector.catalog.Table, scala.Option, scala.Option, org.apache.spark.sql.util.CaseInsensitiveStringMap)'java.lang.NoSuchMethodError: 'org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation.create(org.apache.spark.sql.connector.catalog.Table, scala.Option, scala.Option, org.apache.spark.sql.util.CaseInsensitiveStringMap)' at org.apache.iceberg.spark.SparkTableUtil.createRelation(SparkTableUtil.java:1000) at org.apache.iceberg.spark.SparkTableUtil.loadMetadataTable(SparkTableUtil.java:987) at org.apache.iceberg.spark.SparkTableUtil.loadMetadataTable(SparkTableUtil.java:980) at org.apache.iceberg.spark.actions.BaseSparkAction.loadMetadataTable(BaseSparkAction.java:224) at org.apache.iceberg.spark.actions.BaseSparkAction.manifestDF(BaseSparkAction.java:183) at org.apache.iceberg.spark.actions.BaseSparkAction.contentFileDS(BaseSparkAction.java:157) at org.apache.iceberg.spark.actions.BaseSparkAction.contentFileDS(BaseSparkAction.java:148) at org.apache.iceberg.spark.actions.DeleteReachableFilesSparkAction.reachableFileDS(DeleteReachableFilesSparkAction.java:121) at org.apache.iceberg.spark.actions.DeleteReachableFilesSparkAction.doExecute(DeleteReachableFilesSparkAction.java:106) at org.apache.iceberg.spark.JobGroupUtils.withJobGroupInfo(JobGroupUtils.java:59) at org.apache.iceberg.spark.JobGroupUtils.withJobGroupInfo(JobGroupUtils.java:51) at org.apache.iceberg.spark.actions.BaseSparkAction.withJobGroupInfo(BaseSparkAction.java:130) at org.apache.iceberg.spark.actions.DeleteReachableFilesSparkAction.execute(DeleteReachableFilesSparkAction.java:96) at org.apache.iceberg.spark.SparkCatalog.purgeTable(SparkCatalog.java:378) at 
org.apache.spark.sql.execution.datasources.v2.DropTableExec.run(DropTableExec.scala:39) at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43) at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43) at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49) at org.apache.spark.sql.execution.QueryExecution.$anonfun$eagerlyExecuteCommands$2(QueryExecution.scala:185) at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId0$8(SQLExecution.scala:177) at org.apache.spark.sql.execution.SQLExecution$.withSessionTagsApplied(SQLExecution.scala:285) at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId0$7(SQLExecution.scala:139) at org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:94) at org.apache.spark.sql.artifact.ArtifactManager.$anonfun$withResources$1(ArtifactManager.scala:112) at org.apache.spark.util.Utils$.withContextClassLoader(Utils.scala:185) at org.apache.spark.sql.artifact.ArtifactManager.withClassLoaderIfNeeded(ArtifactManager.scala:102) at org.apache.spark.sql.artifact.ArtifactManager.withResources(ArtifactManager.scala:111) at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId0$6(SQLExecution.scala:139) at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:308) at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId0$1(SQLExecution.scala:138) at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:804) at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId0(SQLExecution.scala:92) at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:250) at org.apache.spark.sql.execution.QueryExecution.$anonfun$eagerlyExecuteCommands$1(QueryExecution.scala:185) at 
org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:717) at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$eagerlyExecute$1(QueryExecution.scala:184) at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$3.applyOrElse(QueryExecution.scala:201) at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$3.applyOrElse(QueryExecution.scala:194) at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:491) at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:107) at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:491) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:37) at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:360) at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:356) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:37) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:37) at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:467) at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:194) at org.apache.spark.sql.execution.QueryExecution.$anonfun$lazyCommandExecuted$1(QueryExecution.scala:155) at scala.util.Try$.apply(Try.scala:217) at org.apache.spark.util.Utils$.doTryWithCallerStacktrace(Utils.scala:1392) at org.apache.spark.util.LazyTry.tryT$lzycompute(LazyTry.scala:46) at org.apache.spark.util.LazyTry.tryT(LazyTry.scala:46) at org.apache.spark.util.LazyTry.get(LazyTry.scala:58) at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:160) at org.apache.spark.sql.classic.Dataset.<init>(Dataset.scala:276) at org.apache.spark.sql.classic.Dataset$.$anonfun$ofRows$5(Dataset.scala:139) at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:804) at org.apache.spark.sql.classic.Dataset$.ofRows(Dataset.scala:135) at org.apache.spark.sql.classic.SparkSession.$anonfun$sql$4(SparkSession.scala:584) at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:804) at org.apache.spark.sql.classic.SparkSession.sql(SparkSession.scala:561) at org.apache.spark.sql.connect.planner.SparkConnectPlanner.executeSQL(SparkConnectPlanner.scala:3148) at org.apache.spark.sql.connect.planner.SparkConnectPlanner.handleSqlCommand(SparkConnectPlanner.scala:2996) at org.apache.spark.sql.connect.planner.SparkConnectPlanner.process(SparkConnectPlanner.scala:2830) at org.apache.spark.sql.connect.execution.SparkConnectPlanExecution.handlePlan(SparkConnectPlanExecution.scala:96) at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.$anonfun$executeInternal$1(ExecuteThreadRunner.scala:225) at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.$anonfun$executeInternal$1$adapted(ExecuteThreadRunner.scala:197) at org.apache.spark.sql.connect.service.SessionHolder.$anonfun$withSession$2(SessionHolder.scala:396) at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:804) at org.apache.spark.sql.connect.service.SessionHolder.$anonfun$withSession$1(SessionHolder.scala:396) at org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:94) at org.apache.spark.sql.artifact.ArtifactManager.$anonfun$withResources$1(ArtifactManager.scala:112) at org.apache.spark.util.Utils$.withContextClassLoader(Utils.scala:185) at org.apache.spark.sql.artifact.ArtifactManager.withClassLoaderIfNeeded(ArtifactManager.scala:102) at 
org.apache.spark.sql.artifact.ArtifactManager.withResources(ArtifactManager.scala:111) at org.apache.spark.sql.connect.service.SessionHolder.withSession(SessionHolder.scala:395) at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.executeInternal(ExecuteThreadRunner.scala:197) at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.org$apache$spark$sql$connect$execution$ExecuteThreadRunner$$execute(ExecuteThreadRunner.scala:126) at org.apache.spark.sql.connect.execution.ExecuteThreadRunner$ExecutionThread.run(ExecuteThreadRunner.scala:334) |
|
||
| 25 | Spark Connect - session_id: "61ec31aa-ec60-4847-8384-fe1f77644a71" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:40:37 | 20 ms | | | TABLE_OR_VIEW_NOT_FOUND[TABLE_OR_VIEW_NOT_FOUND] The table or view `bronze`.`test`.`verify_sts` cannot be found. Verify the spelling and correctness of the schema and catalog. If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog. To tolerate the error on drop use DROP VIEW IF EXISTS or DROP TABLE IF EXISTS. SQLSTATE: 42P01 | |
| 9 | Spark Connect - session_id: "f15d8ea2-f375-4431-9a62-15dfcf3e3fdd" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:27:17 | 0 ms | | | REQUIRES_SINGLE_PART_NAMESPACE[REQUIRES_SINGLE_PART_NAMESPACE] spark_catalog requires a single-part namespace, but got `landing`.`test`. SQLSTATE: 42K05 | |
| 7 | Spark Connect - session_id: "968efe1c-80d2-4c9c-8f88-84933a24d214" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:26:32 | 0 ms | | | REQUIRES_SINGLE_PART_NAMESPACE[REQUIRES_SINGLE_PART_NAMESPACE] spark_catalog requires a single-part namespace, but got `landing`.`test`. SQLSTATE: 42K05 | |
| 6 | Spark Connect - session_id: "968efe1c-80d2-4c9c-8f88-84933a24d214" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:25:42 | 0 ms | | | REQUIRES_SINGLE_PART_NAMESPACE[REQUIRES_SINGLE_PART_NAMESPACE] spark_catalog requires a single-part namespace, but got `test`.`test`. SQLSTATE: 42K05 | |
| 4 | Spark Connect - session_id: "968efe1c-80d2-4c9c-8f88-84933a24d214" user_context { user_id: "iqran" } plan { command { sql_command { ... | 2026/05/11 04:24:55 | 0 ms | | | REQUIRES_SINGLE_PART_NAMESPACE[REQUIRES_SINGLE_PART_NAMESPACE] spark_catalog requires a single-part namespace, but got `test`.`test`. SQLSTATE: 42K05 | |