diff --git a/R/pkg/tests/fulltests/test_sparkSQL.R b/R/pkg/tests/fulltests/test_sparkSQL.R
index c93b92edbff8e..cada7813c950c 100644
--- a/R/pkg/tests/fulltests/test_sparkSQL.R
+++ b/R/pkg/tests/fulltests/test_sparkSQL.R
@@ -4000,7 +4000,7 @@ test_that("Call DataFrameWriter.save() API in Java without path and check argume
   # DataFrameWriter.save() without path.
   expect_error(write.df(df, source = "csv"),
                paste("Error in save : org.apache.spark.SparkIllegalArgumentException:",
-                     "Expected exactly one path to be specified"))
+                     "'path' is not specified."))
   expect_error(write.json(df, jsonPath),
                "Error in json : analysis error - \\[PATH_ALREADY_EXISTS\\].*")
   expect_error(write.text(df, jsonPath),
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
index 97c88d660b002..1412ba422f421 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
@@ -462,8 +462,10 @@ case class DataSource(
     val allPaths = paths ++ caseInsensitiveOptions.get("path")
     val outputPath = if (allPaths.length == 1) {
       makeQualified(new Path(allPaths.head))
-    } else {
+    } else if (allPaths.length > 1) {
       throw QueryExecutionErrors.multiplePathsSpecifiedError(allPaths)
+    } else {
+      throw QueryExecutionErrors.dataPathNotSpecifiedError()
     }

     val caseSensitive = sparkSession.sessionState.conf.caseSensitiveAnalysis