Commit 73fcd35

Use throws annotation
1 parent 22bfb68 commit 73fcd35

3 files changed: +18 -9 lines changed
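
Background on the pattern (not part of the commit itself): the scaladoc `@throws` tag documents the exception in the generated API docs, while the `@throws(classOf[...])` annotation adds a `throws` clause to the compiled method signature, so Java callers see the exception declared. A minimal sketch of the same pattern, using a hypothetical `getRequired` helper rather than Spark's own code:

```scala
import java.util.NoSuchElementException

object ThrowsAnnotationSketch {
  // Hypothetical helper mirroring the pattern in this commit: the @throws
  // annotation records NoSuchElementException in the compiled method's
  // throws clause, and a scaladoc @throws tag would document it.
  @throws(classOf[NoSuchElementException])
  def getRequired(settings: Map[String, String], key: String): String =
    settings.getOrElse(key, throw new NoSuchElementException(key))

  def main(args: Array[String]): Unit = {
    val settings = Map("spark.app.name" -> "demo")
    println(getRequired(settings, "spark.app.name"))   // prints "demo"
    try getRequired(settings, "spark.missing.key")
    catch {
      case e: NoSuchElementException => println(s"not set: ${e.getMessage}")
    }
  }
}
```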

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 12 additions & 6 deletions
```diff
@@ -262,8 +262,9 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
   /**
    * Get a time parameter as seconds; throws a NoSuchElementException if it's not set. If no
    * suffix is provided then seconds are assumed.
-   * @note Throws `NoSuchElementException`
+   * @throws java.util.NoSuchElementException
    */
+  @throws(classOf[NoSuchElementException])
   def getTimeAsSeconds(key: String): Long = {
     Utils.timeStringAsSeconds(get(key))
   }
@@ -279,8 +280,9 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
   /**
    * Get a time parameter as milliseconds; throws a NoSuchElementException if it's not set. If no
    * suffix is provided then milliseconds are assumed.
-   * @note Throws `NoSuchElementException`
+   * @throws java.util.NoSuchElementException
    */
+  @throws(classOf[NoSuchElementException])
   def getTimeAsMs(key: String): Long = {
     Utils.timeStringAsMs(get(key))
   }
@@ -296,8 +298,9 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
   /**
    * Get a size parameter as bytes; throws a NoSuchElementException if it's not set. If no
    * suffix is provided then bytes are assumed.
-   * @note Throws `NoSuchElementException`
+   * @throws java.util.NoSuchElementException
    */
+  @throws(classOf[NoSuchElementException])
   def getSizeAsBytes(key: String): Long = {
     Utils.byteStringAsBytes(get(key))
   }
@@ -320,8 +323,9 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
   /**
    * Get a size parameter as Kibibytes; throws a NoSuchElementException if it's not set. If no
    * suffix is provided then Kibibytes are assumed.
-   * @note Throws `NoSuchElementException`
+   * @throws java.util.NoSuchElementException
    */
+  @throws(classOf[NoSuchElementException])
   def getSizeAsKb(key: String): Long = {
     Utils.byteStringAsKb(get(key))
   }
@@ -337,8 +341,9 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
   /**
    * Get a size parameter as Mebibytes; throws a NoSuchElementException if it's not set. If no
    * suffix is provided then Mebibytes are assumed.
-   * @note Throws `NoSuchElementException`
+   * @throws java.util.NoSuchElementException
    */
+  @throws(classOf[NoSuchElementException])
   def getSizeAsMb(key: String): Long = {
     Utils.byteStringAsMb(get(key))
   }
@@ -354,8 +359,9 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
   /**
    * Get a size parameter as Gibibytes; throws a NoSuchElementException if it's not set. If no
    * suffix is provided then Gibibytes are assumed.
-   * @note Throws `NoSuchElementException`
+   * @throws java.util.NoSuchElementException
    */
+  @throws(classOf[NoSuchElementException])
   def getSizeAsGb(key: String): Long = {
     Utils.byteStringAsGb(get(key))
   }
```
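
For reference, a hedged usage sketch of these getters (assuming a local Spark build on the classpath; the object name and config keys below are illustrative, not from the commit):

```scala
import org.apache.spark.SparkConf

object SparkConfGetterSketch {
  def main(args: Array[String]): Unit = {
    // loadDefaults = false keeps system properties out of this example.
    val conf = new SparkConf(false).set("spark.network.timeout", "120s")

    // Suffix provided, so it is parsed: "120s" -> 120 seconds.
    println(conf.getTimeAsSeconds("spark.network.timeout"))

    // Unset key: the NoSuchElementException is now part of the method
    // signature via @throws, not only a scaladoc note.
    try conf.getSizeAsBytes("spark.some.unset.key")   // hypothetical key
    catch {
      case e: java.util.NoSuchElementException =>
        println(s"not set: ${e.getMessage}")
    }
  }
}
```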

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 4 additions & 2 deletions
```diff
@@ -2061,8 +2061,9 @@ class SparkContext(config: SparkConf) extends Logging {
    * Cancel a given job if it's scheduled or running.
    *
    * @param jobId the job ID to cancel
-   * @note Throws `InterruptedException` if the cancel message cannot be sent
+   * @throws InterruptedException if the cancel message cannot be sent
    */
+  @throws(classOf[InterruptedException])
   def cancelJob(jobId: Int) {
     dagScheduler.cancelJob(jobId)
   }
@@ -2071,8 +2072,9 @@ class SparkContext(config: SparkConf) extends Logging {
    * Cancel a given stage and all jobs associated with it.
    *
    * @param stageId the stage ID to cancel
-   * @note Throws `InterruptedException` if the cancel message cannot be sent
+   * @throws InterruptedException if the cancel message cannot be sent
    */
+  @throws(classOf[InterruptedException])
   def cancelStage(stageId: Int) {
     dagScheduler.cancelStage(stageId)
   }
```
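
A small sketch of how a caller might handle the now-declared `InterruptedException` (the `cancelQuietly` helper, `sc`, and `jobId` are assumptions for illustration; they are not part of the commit):

```scala
import org.apache.spark.SparkContext

object CancelJobSketch {
  // Because cancelJob is annotated with @throws(classOf[InterruptedException]),
  // Java callers must now catch or declare the exception; in Scala it is
  // still worth handling explicitly so the interrupt status is preserved.
  def cancelQuietly(sc: SparkContext, jobId: Int): Unit =
    try sc.cancelJob(jobId)
    catch {
      case _: InterruptedException =>
        Thread.currentThread().interrupt() // restore the interrupt flag
    }
}
```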

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 2 additions & 1 deletion
```diff
@@ -2354,8 +2354,9 @@ private[spark] object Utils extends Logging {
    * A spark url (`spark://host:port`) is a special URI that its scheme is `spark` and only contains
    * host and port.
    *
-   * @note Throws `SparkException` if sparkUrl is invalid.
+   * @throws org.apache.spark.SparkException if sparkUrl is invalid.
    */
+  @throws(classOf[SparkException])
   def extractHostPortFromSparkUrl(sparkUrl: String): (String, Int) = {
     try {
       val uri = new java.net.URI(sparkUrl)
```
