docs/sql-data-sources-avro.md: 4 changes (2 additions & 2 deletions)
@@ -107,9 +107,9 @@ val df = spark
 // 2. Filter by column `favorite_color`;
 // 3. Encode the column `name` in Avro format.
 val output = df
-.select(from_avro('value, jsonFormatSchema) as 'user)
+.select(from_avro($"value", jsonFormatSchema) as $"user")
 .where("user.favorite_color == \"red\"")
-.select(to_avro($"user.name") as 'value)
+.select(to_avro($"user.name") as $"value")
 
 val query = output
 .writeStream
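For context, a minimal sketch of the pattern after this change, assuming the spark-avro and Kafka connector packages are on the classpath; the schema path, topic name, and bootstrap servers are placeholders, and the aliases are passed as plain strings here. The `$"col"` interpolator comes from `spark.implicits._`:

```scala
import java.nio.file.{Files, Paths}

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.avro.functions.{from_avro, to_avro}

val spark = SparkSession.builder().appName("avro-kafka-sketch").getOrCreate()
import spark.implicits._ // brings the $"col" interpolator into scope

// Placeholder path: the Avro schema of the Kafka value, read as a JSON string.
val jsonFormatSchema = new String(Files.readAllBytes(Paths.get("./user.avsc")))

// Placeholder Kafka source (requires the spark-sql-kafka connector).
val df = spark
  .readStream
  .format("kafka")
  .option("kafka.bootstrap.servers", "host1:9092")
  .option("subscribe", "topic1")
  .load()

// 1. Decode the Avro payload; 2. filter on favorite_color; 3. re-encode `name` as Avro.
val output = df
  .select(from_avro($"value", jsonFormatSchema).as("user"))
  .where("user.favorite_color == \"red\"")
  .select(to_avro($"user.name").as("value"))
```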
@@ -31,7 +31,7 @@ object SimpleTypedAggregator {
 .getOrCreate()
 
 import spark.implicits._
-val ds = spark.range(20).select(('id % 3).as("key"), 'id).as[(Long, Long)]
+val ds = spark.range(20).select(($"id" % 3).as("key"), $"id").as[(Long, Long)]
 println("input data:")
 ds.show()
 
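A self-contained sketch of the changed example line; as with the old symbol syntax, the `$"col"` interpolator requires `spark.implicits._` in scope (the app name and master below are placeholders):

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .appName("typed-aggregator-sketch") // placeholder
  .master("local[*]")                 // placeholder
  .getOrCreate()
import spark.implicits._ // provides $"col" and the tuple encoder used by .as[(Long, Long)]

// 20 rows of (id % 3, id) pairs as a typed Dataset[(Long, Long)].
val ds = spark.range(20).select(($"id" % 3).as("key"), $"id").as[(Long, Long)]
println("input data:")
ds.show()
```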
sql/core/src/main/scala/org/apache/spark/sql/Column.scala: 2 changes (1 addition & 1 deletion)
@@ -1108,7 +1108,7 @@ class Column(val expr: Expression) extends Logging {
 * Gives the column an alias.
 * {{{
 * // Renames colA to colB in select output.
-* df.select($"colA".as('colB))
+* df.select($"colA".as("colB"))
 * }}}
 *
 * If the current column has metadata associated with it, this metadata will be propagated
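A minimal sketch of the updated Scaladoc example, assuming a small DataFrame with a column `colA` built inline for illustration:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").getOrCreate()
import spark.implicits._

val df = Seq(1, 2, 3).toDF("colA") // sample data, for illustration only

// Renames colA to colB in the select output; the alias is now passed as a String.
df.select($"colA".as("colB")).show()
```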
sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala: 4 changes (2 additions & 2 deletions)
@@ -2307,7 +2307,7 @@ class Dataset[T] private[sql](
 * case class Book(title: String, words: String)
 * val ds: Dataset[Book]
 *
-* val allWords = ds.select('title, explode(split('words, " ")).as("word"))
+* val allWords = ds.select($"title", explode(split($"words", " ")).as("word"))
 *
 * val bookCountPerWord = allWords.groupBy("word").agg(count_distinct("title"))
 * }}}
@@ -2346,7 +2346,7 @@ class Dataset[T] private[sql](
 * `functions.explode()`:
 *
 * {{{
-* ds.select(explode(split('words, " ")).as("word"))
+* ds.select(explode(split($"words", " ")).as("word"))
 * }}}
 *
 * or `flatMap()`:
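A sketch of both forms the Scaladoc mentions, with the new column syntax; the `Book` rows are made up for illustration, `count_distinct` assumes Spark 3.2 or later, and the snippet is meant for something like spark-shell (a locally defined case class needs a top-level home in compiled code):

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{count_distinct, explode, split}

val spark = SparkSession.builder().master("local[*]").getOrCreate()
import spark.implicits._

case class Book(title: String, words: String)
val ds = Seq(Book("A", "spark and scala"), Book("B", "just spark")).toDS()

// Column-based form: one output row per word, keeping the title alongside it.
val allWords = ds.select($"title", explode(split($"words", " ")).as("word"))
val bookCountPerWord = allWords.groupBy("word").agg(count_distinct($"title"))
bookCountPerWord.show()

// Typed alternative mentioned in the Scaladoc: flatMap over the Dataset.
val words = ds.flatMap(_.words.split(" "))
words.show()
```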
@@ -136,8 +136,8 @@ object Window {
 * val df = Seq((1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b"))
 * .toDF("id", "category")
 * val byCategoryOrderedById =
-* Window.partitionBy('category).orderBy('id).rowsBetween(Window.currentRow, 1)
-* df.withColumn("sum", sum('id) over byCategoryOrderedById).show()
+* Window.partitionBy($"category").orderBy($"id").rowsBetween(Window.currentRow, 1)
+* df.withColumn("sum", sum($"id") over byCategoryOrderedById).show()
 *
 * +---+--------+---+
 * | id|category|sum|
@@ -188,8 +188,8 @@ object Window {
 * val df = Seq((1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b"))
 * .toDF("id", "category")
 * val byCategoryOrderedById =
-* Window.partitionBy('category).orderBy('id).rangeBetween(Window.currentRow, 1)
-* df.withColumn("sum", sum('id) over byCategoryOrderedById).show()
+* Window.partitionBy($"category").orderBy($"id").rangeBetween(Window.currentRow, 1)
+* df.withColumn("sum", sum($"id") over byCategoryOrderedById).show()
 *
 * +---+--------+---+
 * | id|category|sum|
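For context, the same sample data as a runnable sketch showing both frames with the new syntax; `rowsBetween` counts physical rows after ordering, while `rangeBetween` bounds the frame by the `id` values themselves. The local session is assumed:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.sum

val spark = SparkSession.builder().master("local[*]").getOrCreate()
import spark.implicits._

val df = Seq((1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b"))
  .toDF("id", "category")

// Frame over physical rows: the current row plus the next row within each category.
val byRows =
  Window.partitionBy($"category").orderBy($"id").rowsBetween(Window.currentRow, 1)
df.withColumn("sum", sum($"id") over byRows).show()

// Frame over values: rows whose id lies in [current id, current id + 1].
val byRange =
  Window.partitionBy($"category").orderBy($"id").rangeBetween(Window.currentRow, 1)
df.withColumn("sum", sum($"id") over byRange).show()
```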
@@ -100,8 +100,8 @@ class WindowSpec private[sql](
 * val df = Seq((1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b"))
 * .toDF("id", "category")
 * val byCategoryOrderedById =
-* Window.partitionBy('category).orderBy('id).rowsBetween(Window.currentRow, 1)
-* df.withColumn("sum", sum('id) over byCategoryOrderedById).show()
+* Window.partitionBy($"category").orderBy($"id").rowsBetween(Window.currentRow, 1)
+* df.withColumn("sum", sum($"id") over byCategoryOrderedById).show()
 *
 * +---+--------+---+
 * | id|category|sum|
@@ -168,8 +168,8 @@ class WindowSpec private[sql](
 * val df = Seq((1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b"))
 * .toDF("id", "category")
 * val byCategoryOrderedById =
-* Window.partitionBy('category).orderBy('id).rangeBetween(Window.currentRow, 1)
-* df.withColumn("sum", sum('id) over byCategoryOrderedById).show()
+* Window.partitionBy($"category").orderBy($"id").rangeBetween(Window.currentRow, 1)
+* df.withColumn("sum", sum($"id") over byCategoryOrderedById).show()
 *
 * +---+--------+---+
 * | id|category|sum|
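A brief sketch of the same idea from the WindowSpec side: the partitioning and ordering can be defined once and different frames attached afterwards with `rowsBetween` / `rangeBetween`. The data and variable name follow the Scaladoc example; the extra column names are illustrative:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.{avg, sum}

val spark = SparkSession.builder().master("local[*]").getOrCreate()
import spark.implicits._

val df = Seq((1, "a"), (1, "a"), (2, "a"), (1, "b"), (2, "b"), (3, "b"))
  .toDF("id", "category")

// Base spec: partitioning and ordering only; a frame is attached per column below.
val byCategoryOrderedById = Window.partitionBy($"category").orderBy($"id")

df
  .withColumn("rowSum", sum($"id") over byCategoryOrderedById.rowsBetween(Window.currentRow, 1))
  .withColumn("rangeAvg", avg($"id") over byCategoryOrderedById.rangeBetween(Window.currentRow, 1))
  .show()
```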