Posted to commits@arrow.apache.org by th...@apache.org on 2023/06/15 12:39:13 UTC

[arrow-cookbook] branch main updated: [R] Fix test failures on main (#308)

This is an automated email from the ASF dual-hosted git repository.

thisisnic pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-cookbook.git


The following commit(s) were added to refs/heads/main by this push:
     new f07681a  [R] Fix test failures on main (#308)
f07681a is described below

commit f07681adf2d9415a03a748edd74e0ad9ca15d7a9
Author: Nic Crane <th...@gmail.com>
AuthorDate: Thu Jun 15 13:39:07 2023 +0100

    [R] Fix test failures on main (#308)
    
    Fixes #307
---
 r/content/datasets.Rmd                 |  2 +-
 r/content/reading_and_writing_data.Rmd | 16 ++++++++--------
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/r/content/datasets.Rmd b/r/content/datasets.Rmd
index c9baf02..b2f2a37 100644
--- a/r/content/datasets.Rmd
+++ b/r/content/datasets.Rmd
@@ -168,7 +168,7 @@ air_data
 ```{r, test_open_dataset, opts.label = "test"}
 test_that("open_dataset chunk works as expected", {
   expect_equal(nrow(air_data), 153)
-  expect_equal(arrange(collect(air_data), Month, Day), arrange(airquality, Month, Day), ignore_attr = TRUE)
+  expect_equal(tibble::as_tibble(arrange(collect(air_data), Month, Day)), arrange(tibble::as_tibble(airquality), Month, Day))
 })
 ```
 
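A minimal sketch of why this fix works, assuming testthat edition 3: `collect()` on a Dataset returns a tibble, while `airquality` is a plain `data.frame`, so `expect_equal()` flags the differing class and row-name attributes unless both sides are normalised to tibbles first.

```r
library(testthat)
library(tibble)

df  <- data.frame(x = 1:3)  # a plain data.frame, like airquality
tbl <- as_tibble(df)        # a tibble, like the result of collect()

# Fails under testthat 3e: the class attributes differ
# expect_equal(tbl, df)

# Passes once both sides are converted to tibbles
expect_equal(tbl, as_tibble(df))
```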
diff --git a/r/content/reading_and_writing_data.Rmd b/r/content/reading_and_writing_data.Rmd
index a089eb8..7110ab2 100644
--- a/r/content/reading_and_writing_data.Rmd
+++ b/r/content/reading_and_writing_data.Rmd
@@ -74,7 +74,7 @@ test_that("asdf_table chunk works as expected", {
 
 ### Discussion
 
-You can use either `as.data.frame()` or `dplyr::collect()` to do this.
+You can use `dplyr::collect()` to return a tibble or `as.data.frame()` to return a `data.frame`.
 
 ## Write a Parquet file
 
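As a short illustration of that distinction (a minimal sketch, assuming the arrow and dplyr packages are attached):

```r
library(arrow)
library(dplyr)

tbl <- arrow_table(data.frame(x = 1:3))

class(collect(tbl))        # "tbl_df" "tbl" "data.frame" -- a tibble
class(as.data.frame(tbl))  # "data.frame"
```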
@@ -84,7 +84,7 @@ You want to write a single Parquet file to disk.
 
 ```{r, write_parquet}
 # Create table
-my_table <- arrow_table(data.frame(group = c("A", "B", "C"), score = c(99, 97, 99)))
+my_table <- arrow_table(tibble::tibble(group = c("A", "B", "C"), score = c(99, 97, 99)))
 # Write to Parquet
 write_parquet(my_table, "my_table.parquet")
 ```
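For context, `write_parquet()` also accepts a plain `data.frame` or tibble directly and converts it to an Arrow Table internally, so the explicit `arrow_table()` call above is illustrative rather than required. A minimal sketch:

```r
library(arrow)

# Writing a data.frame directly; arrow converts it internally
write_parquet(data.frame(x = 1:3), "direct.parquet")
unlink("direct.parquet")
```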
@@ -106,18 +106,18 @@ parquet_tbl
 ```
 ```{r, test_read_parquet, opts.label = "test"}
 test_that("read_parquet works as expected", {
-  expect_identical(parquet_tbl, data.frame(group = c("A", "B", "C"), score = c(99, 97, 99)))
+  expect_equal(parquet_tbl, tibble::tibble(group = c("A", "B", "C"), score = c(99, 97, 99)))
 })
 ```
 
-As the argument `as_data_frame` was left set to its default value of `TRUE`, the file was read in as a `data.frame` object.
+As the argument `as_data_frame` was left set to its default value of `TRUE`, the file was read in as a tibble.
 
 ```{r, read_parquet_2}
 class(parquet_tbl)
 ```
 ```{r, test_read_parquet_2, opts.label = "test"}
 test_that("read_parquet_2 works as expected", {
-  expect_s3_class(parquet_tbl, "data.frame")
+  expect_s3_class(parquet_tbl, "tbl_df")
 })
 ```
 
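A sketch of the flip side, assuming the `my_table.parquet` file written earlier in the recipe still exists: with `as_data_frame = FALSE`, the file is read as an Arrow Table rather than a tibble.

```r
library(arrow)

tab <- read_parquet("my_table.parquet", as_data_frame = FALSE)
class(tab)           # "Table" "ArrowTabular" ...
dplyr::collect(tab)  # pull the data into R as a tibble when needed
```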
@@ -172,7 +172,7 @@ time_only
 ```
 ```{r, test_read_parquet_filter, opts.label = "test"}
 test_that("read_parquet_filter works as expected", {
-  expect_identical(time_only, data.frame(time = c(43, 44, 40)))
+  expect_identical(time_only, tibble::tibble(time = c(43, 44, 40)))
 })
 ```
 
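The `time_only` object checked above is produced by reading a subset of columns; the cookbook's actual file and column names are outside this hunk, so the following is a sketch of the pattern using the `my_table.parquet` file from earlier:

```r
library(arrow)

# col_select reads only the named columns from the Parquet file
score_only <- read_parquet("my_table.parquet", col_select = "score")
score_only
```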
@@ -258,7 +258,7 @@ my_ipc_stream <- arrow::read_ipc_stream("my_table.arrows")
 test_that("read_ipc_stream chunk works as expected", {
   expect_equal(
     my_ipc_stream,
-    data.frame(group = c("A", "B", "C"), score = c(99, 97, 99))
+    tibble::tibble(group = c("A", "B", "C"), score = c(99, 97, 99))
   )
 })
 unlink("my_table.arrows")
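For a self-contained view of this recipe, a sketch of the round trip behind `my_table.arrows` (the write step happens earlier in the Rmd, outside this hunk):

```r
library(arrow)

# Write a table in the Arrow IPC stream format, then read it back
write_ipc_stream(data.frame(group = c("A", "B", "C"), score = c(99, 97, 99)),
                 "my_table.arrows")
my_ipc_stream <- read_ipc_stream("my_table.arrows")
unlink("my_table.arrows")
```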
@@ -291,7 +291,7 @@ my_csv <- read_csv_arrow("cars.csv", as_data_frame = FALSE)
 
 ```{r, test_read_csv_arrow, opts.label = "test"}
 test_that("read_csv_arrow chunk works as expected", {
-  expect_equivalent(as.data.frame(my_csv), cars)
+  expect_equal(as.data.frame(my_csv), cars)
 })
 unlink("cars.csv")
 ```
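On this last change: `expect_equivalent()` is deprecated in testthat's 3rd edition, and `expect_equal()` is its replacement; where the old attribute-ignoring behaviour is still needed, `ignore_attr = TRUE` provides it. A minimal sketch:

```r
library(testthat)

x <- c(a = 1, b = 2)  # named vector
y <- c(1, 2)          # same values, no names attribute

# Deprecated in testthat 3e:
# expect_equivalent(x, y)

# Modern equivalent: compare values while ignoring attributes
expect_equal(x, y, ignore_attr = TRUE)
```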