
Commit f40bf4d

pan3793 authored and dongjoon-hyun committed
[SPARK-51386][CORE][SQL] Assign name to error conditions _LEGACY_ERROR_TEMP_3300-3302
### What changes were proposed in this pull request?

Assign names to the error conditions `_LEGACY_ERROR_TEMP_3300`-`3302` with sqlStates `82001`-`82003`; all of them apply to `SparkOutOfMemoryError`.

### Why are the changes needed?

Improve the error framework.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Pass GHA.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes apache#50149 from pan3793/SPARK-51386.

Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent 50e00b7 commit f40bf4d
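For reference, here is a minimal sketch (not part of this commit) of how a caller raises one of the newly named conditions. The constructor usage mirrors the `TaskMemoryManager` change below; the `SpillErrorExample` class and the `consumer`/`cause` parameters are invented for illustration.

```java
import java.util.HashMap;

import org.apache.spark.memory.SparkOutOfMemoryError;

public class SpillErrorExample {
  // Builds the error with the named condition instead of "_LEGACY_ERROR_TEMP_3300".
  static SparkOutOfMemoryError spillError(Object consumer, Exception cause) {
    HashMap<String, String> params = new HashMap<>();
    params.put("consumerToSpill", String.valueOf(consumer)); // fills <consumerToSpill>
    params.put("message", cause.getMessage());               // fills <message>
    return new SparkOutOfMemoryError("SPILL_OUT_OF_MEMORY", params);
  }
}
```

The error framework then renders the message template registered under `SPILL_OUT_OF_MEMORY` and reports SQLSTATE `82003`.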


6 files changed (+42, -22 lines)


common/utils/src/main/resources/error/error-conditions.json

+18-15
@@ -11,6 +11,12 @@
     ],
     "sqlState" : "42845"
   },
+  "AGGREGATE_OUT_OF_MEMORY" : {
+    "message" : [
+      "No enough memory for aggregation"
+    ],
+    "sqlState" : "82001"
+  },
   "ALL_PARAMETERS_MUST_BE_NAMED" : {
     "message" : [
       "Using name parameterized queries requires all parameters to be named. Parameters missing names: <exprs>."
@@ -4392,6 +4398,12 @@
     ],
     "sqlState" : "XXKD0"
   },
+  "POINTER_ARRAY_OUT_OF_MEMORY" : {
+    "message" : [
+      "Not enough memory to grow pointer array"
+    ],
+    "sqlState" : "82002"
+  },
   "PROTOBUF_DEPENDENCY_NOT_FOUND" : {
     "message" : [
       "Could not find dependency: <dependencyName>."
@@ -4645,6 +4657,12 @@
     ],
     "sqlState" : "42601"
   },
+  "SPILL_OUT_OF_MEMORY" : {
+    "message" : [
+      "Error while calling spill() on <consumerToSpill> : <message>"
+    ],
+    "sqlState" : "82003"
+  },
   "SQL_CONF_NOT_FOUND" : {
     "message" : [
       "The SQL config <sqlConf> cannot be found. Please verify that the config exists."
@@ -9327,21 +9345,6 @@
       "Doesn't support month or year interval: <interval>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3300" : {
-    "message" : [
-      "error while calling spill() on <consumerToSpill> : <message>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3301" : {
-    "message" : [
-      "Not enough memory to grow pointer array"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3302" : {
-    "message" : [
-      "No enough memory for aggregation"
-    ]
-  },
   "_LEGACY_ERROR_USER_RAISED_EXCEPTION" : {
     "message" : [
       "<errorMessage>"

common/utils/src/main/resources/error/error-states.json

+18-1
@@ -6504,7 +6504,24 @@
     "standard": "N",
     "usedBy": ["Oracle"]
   },
-
+  "82001": {
+    "description": "No enough memory for aggregation",
+    "origin": "Spark",
+    "standard": "N",
+    "usedBy": ["Spark"]
+  },
+  "82002": {
+    "description": "Not enough memory to grow pointer array",
+    "origin": "Spark",
+    "standard": "N",
+    "usedBy": ["Spark"]
+  },
+  "82003": {
+    "description": "Error while calling spill()",
+    "origin": "Spark",
+    "standard": "N",
+    "usedBy": ["Spark"]
+  },
   "82100": {
     "description": "out of memory (could not allocate)",
     "origin": "Oracle",

core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java

+3-3
@@ -278,15 +278,15 @@ private long trySpillAndAcquire(
       }
     } catch (ClosedByInterruptException | InterruptedIOException e) {
       // This called by user to kill a task (e.g: speculative task).
-      logger.error("error while calling spill() on {}", e,
+      logger.error("Error while calling spill() on {}", e,
         MDC.of(LogKeys.MEMORY_CONSUMER$.MODULE$, consumerToSpill));
       throw new RuntimeException(e.getMessage());
     } catch (IOException e) {
-      logger.error("error while calling spill() on {}", e,
+      logger.error("Error while calling spill() on {}", e,
         MDC.of(LogKeys.MEMORY_CONSUMER$.MODULE$, consumerToSpill));
       // checkstyle.off: RegexpSinglelineJava
       throw new SparkOutOfMemoryError(
-        "_LEGACY_ERROR_TEMP_3300",
+        "SPILL_OUT_OF_MEMORY",
         new HashMap<String, String>() {{
           put("consumerToSpill", consumerToSpill.toString());
           put("message", e.getMessage());

core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java

+1-1
@@ -216,7 +216,7 @@ public void expandPointerArray(LongArray newArray) {
     if (array != null) {
       if (newArray.size() < array.size()) {
         // checkstyle.off: RegexpSinglelineJava
-        throw new SparkOutOfMemoryError("_LEGACY_ERROR_TEMP_3301", new HashMap<>());
+        throw new SparkOutOfMemoryError("POINTER_ARRAY_OUT_OF_MEMORY", new HashMap<>());
         // checkstyle.on: RegexpSinglelineJava
       }
       Platform.copyMemory(

sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala

+1-1
@@ -682,7 +682,7 @@ case class HashAggregateExec(
       | $unsafeRowKeys, $unsafeRowKeyHash);
       | if ($unsafeRowBuffer == null) {
       |   // failed to allocate the first page
-      |   throw new $oomeClassName("_LEGACY_ERROR_TEMP_3302", new java.util.HashMap());
+      |   throw new $oomeClassName("AGGREGATE_OUT_OF_MEMORY", new java.util.HashMap());
       | }
       |}
     """.stripMargin

sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregationIterator.scala

+1-1
@@ -212,7 +212,7 @@ class TungstenAggregationIterator(
       if (buffer == null) {
         // failed to allocate the first page
         // scalastyle:off throwerror
-        throw new SparkOutOfMemoryError("_LEGACY_ERROR_TEMP_3302", new util.HashMap())
+        throw new SparkOutOfMemoryError("AGGREGATE_OUT_OF_MEMORY", new util.HashMap())
         // scalastyle:on throwerror
       }
     }
