diff --git a/NEWS.md b/NEWS.md
index 1f749bac5..88314da09 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -3,6 +3,8 @@
 * `across(everything())` doesn't select grouping columns created via `.by` in
   `summarise()` (@mgirlich, #1493).
 * New translations of clock function `date_count_between()` for SQL server,
   Redshift, Snowflake, Postgres, and Spark (@edward-burn, #1495).
+* Spark SQL backend now supports persisting tables with
+  `compute(x, name = I("x.y.z"), temporary = FALSE)` (@zacdav-db, #1502).
 
 # dbplyr 2.5.0
 
diff --git a/R/backend-spark-sql.R b/R/backend-spark-sql.R
index c162f5ac0..10be0967d 100644
--- a/R/backend-spark-sql.R
+++ b/R/backend-spark-sql.R
@@ -138,12 +138,8 @@ simulate_spark_sql <- function() simulate_dbi("Spark SQL")
                                    analyze = TRUE,
                                    in_transaction = FALSE) {
 
-  if (temporary) {
-    sql <- sql_values_subquery(con, values, types = types, lvl = 1)
-    db_compute(con, table, sql, overwrite = overwrite)
-  } else {
-    NextMethod()
-  }
+  sql <- sql_values_subquery(con, values, types = types, lvl = 1)
+  db_compute(con, table, sql, overwrite = overwrite, temporary = temporary)
 }
 
 #' @export
@@ -158,14 +154,11 @@ simulate_spark_sql <- function() simulate_dbi("Spark SQL")
                                    analyze = TRUE,
                                    in_transaction = FALSE) {
 
-  if (!temporary) {
-    cli::cli_abort("Spark SQL only support temporary tables")
-  }
-
   sql <- glue_sql2(
     con,
     "CREATE ", if (overwrite) "OR REPLACE ",
-    "TEMPORARY VIEW {.tbl {table}} AS \n",
+    if (temporary) "TEMPORARY VIEW" else "TABLE",
+    " {.tbl {table}} AS \n",
     "{.from {sql}}"
  )
   DBI::dbExecute(con, sql)
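
For review context, a minimal usage sketch of the new behaviour. Nothing below is part of the diff: `con` stands in for a live Spark SQL connection (e.g. via odbc), and all table and column names are illustrative.

```r
library(dplyr)
library(dbplyr)

# Assumed setup: `con` is an open Spark SQL connection and this source
# table exists (both hypothetical, for illustration only).
trips <- tbl(con, I("samples.nyctaxi.trips"))

# Before this patch, temporary = FALSE aborted with
# "Spark SQL only support temporary tables". With the patch, db_compute()
# emits `CREATE TABLE main.sandbox.long_trips AS <subquery>`, so the
# result persists beyond the session.
long_trips <- trips |>
  filter(trip_distance > 10) |>
  compute(name = I("main.sandbox.long_trips"), temporary = FALSE)
```

The default `temporary = TRUE` still produces `CREATE TEMPORARY VIEW` as before, and in `db_compute()` the `overwrite` flag prepends `OR REPLACE` in either branch.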