Skip to content

Commit a5a1fbe

Browse files
committed
fixed up some wording
1 parent 7694b8f commit a5a1fbe

File tree

2 files changed

+6
-6
lines changed

2 files changed

+6
-6
lines changed

sql/core/src/main/scala/org/apache/spark/sql/api/python/PythonSQLUtils.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -37,16 +37,16 @@ private[sql] object PythonSQLUtils {
3737
* Python callable function to convert an RDD of serialized ArrowRecordBatches into
3838
* a [[DataFrame]].
3939
*
40-
* @param arrowStreamRDD A JavaRDD of Arrow record batches as byte arrays.
40+
* @param arrowBatchRDD A JavaRDD of serialized ArrowRecordBatches.
4141
* @param schemaString JSON Formatted Spark schema for Arrow batches.
4242
* @param sqlContext The active [[SQLContext]].
4343
* @return The converted [[DataFrame]].
4444
*/
4545
def arrowStreamToDataFrame(
46-
arrowStreamRDD: JavaRDD[Array[Byte]],
46+
arrowBatchRDD: JavaRDD[Array[Byte]],
4747
schemaString: String,
4848
sqlContext: SQLContext): DataFrame = {
49-
ArrowConverters.toDataFrame(arrowStreamRDD, schemaString, sqlContext)
49+
ArrowConverters.toDataFrame(arrowBatchRDD, schemaString, sqlContext)
5050
}
5151

5252
/**

sql/core/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowConverters.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -196,20 +196,20 @@ private[sql] object ArrowConverters {
196196
* Create a DataFrame from a JavaRDD of serialized ArrowRecordBatches.
197197
*/
198198
private[sql] def toDataFrame(
199-
arrowStreamRDD: JavaRDD[Array[Byte]],
199+
arrowBatchRDD: JavaRDD[Array[Byte]],
200200
schemaString: String,
201201
sqlContext: SQLContext): DataFrame = {
202202
val schema = DataType.fromJson(schemaString).asInstanceOf[StructType]
203203
val timeZoneId = sqlContext.sessionState.conf.sessionLocalTimeZone
204-
val rdd = arrowStreamRDD.rdd.mapPartitions { iter =>
204+
val rdd = arrowBatchRDD.rdd.mapPartitions { iter =>
205205
val context = TaskContext.get()
206206
ArrowConverters.fromBatchIterator(iter, schema, timeZoneId, context)
207207
}
208208
sqlContext.internalCreateDataFrame(rdd, schema)
209209
}
210210

211211
/**
212-
* Read a file as an Arrow stream and create an RDD of serialized ArrowRecordBatches.
212+
* Read a file as an Arrow stream and return an RDD of serialized ArrowRecordBatches.
213213
*/
214214
private[sql] def readArrowStreamFromFile(sqlContext: SQLContext, filename: String):
215215
JavaRDD[Array[Byte]] = {

0 commit comments

Comments (0)