
Commit

Merge branch 'main' into boolean-encoder-valid-and-true
mapleFU committed Aug 18, 2024
2 parents c1adbed + b7e618f commit 450fd95
Showing 4 changed files with 39 additions and 8 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/r.yml
@@ -133,6 +133,9 @@ jobs:
        with:
          fetch-depth: 0
          submodules: recursive
      - name: Free up disk space
        run: |
          ci/scripts/util_free_space.sh
      - name: Cache Docker Volumes
        uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
        with:
16 changes: 16 additions & 0 deletions c_glib/arrow-glib/file-system.cpp
@@ -56,6 +56,8 @@ G_BEGIN_DECLS
* #GArrowS3FileSystem is a class for S3-backed file system.
*
* #GArrowGCSFileSystem is a class for GCS-backed file system.
*
* #GArrowAzureFileSystem is a class for Azure-backed file system.
*/

/* arrow::fs::FileInfo */
@@ -1561,6 +1563,18 @@ garrow_gcs_file_system_class_init(GArrowGCSFileSystemClass *klass)
{
}

G_DEFINE_TYPE(GArrowAzureFileSystem, garrow_azure_file_system, GARROW_TYPE_FILE_SYSTEM)

static void
garrow_azure_file_system_init(GArrowAzureFileSystem *file_system)
{
}

static void
garrow_azure_file_system_class_init(GArrowAzureFileSystemClass *klass)
{
}

G_END_DECLS

GArrowFileInfo *
@@ -1592,6 +1606,8 @@ garrow_file_system_new_raw(std::shared_ptr<arrow::fs::FileSystem> *arrow_file_sy
file_system_type = GARROW_TYPE_S3_FILE_SYSTEM;
} else if (type_name == "gcs") {
file_system_type = GARROW_TYPE_GCS_FILE_SYSTEM;
} else if (type_name == "abfs") {
file_system_type = GARROW_TYPE_AZURE_FILE_SYSTEM;
} else if (type_name == "mock") {
file_system_type = GARROW_TYPE_MOCK_FILE_SYSTEM;
}
12 changes: 12 additions & 0 deletions c_glib/arrow-glib/file-system.h
@@ -337,4 +337,16 @@ struct _GArrowGCSFileSystemClass
GArrowFileSystemClass parent_class;
};

#define GARROW_TYPE_AZURE_FILE_SYSTEM (garrow_azure_file_system_get_type())
GARROW_AVAILABLE_IN_18_0
G_DECLARE_DERIVABLE_TYPE(GArrowAzureFileSystem,
garrow_azure_file_system,
GARROW,
AZURE_FILE_SYSTEM,
GArrowFileSystem)
struct _GArrowAzureFileSystemClass
{
GArrowFileSystemClass parent_class;
};

G_END_DECLS
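
As a usage note (not part of this commit): with the "abfs" mapping added to garrow_file_system_new_raw() and the GArrowAzureFileSystem type declared above, a file system created from an Azure URI should come back wrapped in the new class. The sketch below is a minimal illustration, assuming an Arrow build with Azure support; the URI, account, and container names are placeholders, and garrow_file_system_create() is the existing URI-based factory in arrow-glib.

#include <arrow-glib/arrow-glib.h>

int
main(void)
{
  GError *error = NULL;
  /* Hypothetical URI: replace the account/container with real ones. */
  GArrowFileSystem *file_system =
    garrow_file_system_create("abfs://myaccount/mycontainer", &error);
  if (!file_system) {
    g_print("failed to create file system: %s\n", error->message);
    g_error_free(error);
    return 1;
  }
  /* GARROW_IS_AZURE_FILE_SYSTEM() is generated by the
     G_DECLARE_DERIVABLE_TYPE() call added in file-system.h. */
  g_print("Azure-backed: %s\n",
          GARROW_IS_AZURE_FILE_SYSTEM(file_system) ? "yes" : "no");
  g_object_unref(file_system);
  return 0;
}

Built against arrow-glib (for example via pkg-config --cflags --libs arrow-glib), this should print "yes" only when the underlying arrow::fs::FileSystem reports the "abfs" type name handled by the new else-if branch.
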
16 changes: 8 additions & 8 deletions r/tests/testthat/test-extra-package-roundtrip.R
@@ -24,7 +24,7 @@ skip_on_cran()
# So that we can force these in CI
load_or_skip <- function(pkg) {
if (identical(tolower(Sys.getenv("ARROW_R_FORCE_EXTRA_PACKAGE_TESTS")), "true")) {
-# because of this indirection on the package name we also avoid a CHECK note and
+# because of this indirection on the package name we also avoid a CHECK note and
# we don't otherwise need to Suggest this
requireNamespace(pkg, quietly = TRUE)
} else {
@@ -46,11 +46,11 @@ test_that("readr read csvs roundtrip", {

# we should still be able to turn this into a table
new_df <- read_csv(tf, show_col_types = FALSE)
-expect_equal(new_df, as_tibble(arrow_table(new_df)))
+expect_equal(new_df, as_tibble(arrow_table(new_df)))

# we should still be able to turn this into a table
new_df <- read_csv(tf, show_col_types = FALSE, lazy = TRUE)
-expect_equal(new_df, as_tibble(arrow_table(new_df)))
+expect_equal(new_df, as_tibble(arrow_table(new_df)))

# and can roundtrip to a parquet file
pq_tmp_file <- tempfile()
@@ -65,11 +65,11 @@ test_that("data.table objects roundtrip", {
load_or_skip("data.table")

# https://github.com/Rdatatable/data.table/blob/83fd2c05ce2d8555ceb8ba417833956b1b574f7e/R/cedta.R#L25-L27
-.datatable.aware=TRUE
+.datatable.aware <- TRUE

DT <- as.data.table(example_data)

-# Table -> collect which is what writing + reading to parquet uses under the hood to roundtrip
+# Table to collect which is what writing + reading to parquet uses under the hood to roundtrip
tab <- as_arrow_table(DT)
DT_read <- collect(tab)

@@ -80,9 +80,9 @@ test_that("data.table objects roundtrip", {
# and we can set keys + indices + create new columns
setkey(DT, chr)
setindex(DT, dbl)
-DT[, dblshift := data.table::shift(dbl, 1)]
+DT[, dblshift := shift(dbl, 1)]

-# Table -> collect
+# Table to collect
tab <- as_arrow_table(DT)
DT_read <- collect(tab)

@@ -96,7 +96,7 @@ test_that("units roundtrip", {
tbl <- example_data
units(tbl$dbl) <- "s"

-# Table -> collect which is what writing + reading to parquet uses under the hood to roundtrip
+# Table to collect which is what writing + reading to parquet uses under the hood to roundtrip
tab <- as_arrow_table(tbl)
tbl_read <- collect(tab)

