Skip to content

Commit d7c0aa6

Browse files
ci: skip databricks tests on fork PRs, and update cross-compat tests for latest pins-R (#355)
* ci: skip databricks tests on fork PRs
* Trim whitespace and update all actions/checkout to v6
* Use native pipe in R integration tests
1 parent 754224c commit d7c0aa6

7 files changed

Lines changed: 30 additions & 26 deletions

File tree

.github/workflows/ci.yml

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ jobs:
3838
# ignore doctests
3939
pytest_opts: "-k pins/tests"
4040
steps:
41-
- uses: actions/checkout@v4
41+
- uses: actions/checkout@v6
4242
- uses: actions/setup-python@v4
4343
with:
4444
python-version: ${{ matrix.python }}
@@ -84,7 +84,7 @@ jobs:
8484
runs-on: ubuntu-latest
8585
if: ${{ !github.event.pull_request.head.repo.fork }}
8686
steps:
87-
- uses: actions/checkout@v4
87+
- uses: actions/checkout@v6
8888
- uses: actions/setup-python@v4
8989
with:
9090
python-version: "3.10"
@@ -112,7 +112,7 @@ jobs:
112112
runs-on: ubuntu-latest
113113
if: ${{ github.event.pull_request.head.repo.fork }}
114114
steps:
115-
- uses: actions/checkout@v4
115+
- uses: actions/checkout@v6
116116
- uses: actions/setup-python@v4
117117
with:
118118
python-version: "3.10"
@@ -123,15 +123,15 @@ jobs:
123123
python -m pip install -e .[test]
124124
- name: Run tests
125125
run: |
126-
# TODO: better way to disable all cloud backend tests?
127-
pytest pins -m 'not fs_rsc and not fs_s3 and not fs_gcs and not fs_abfs and not skip_on_github'
126+
# Skip all tests requiring credentials, which aren't available on fork PRs.
127+
pytest pins -m 'not fs_rsc and not fs_s3 and not fs_gcs and not fs_abfs and not fs_dbc and not skip_on_github'
128128
129129
130130
build-docs:
131131
name: "Build Docs"
132132
runs-on: ubuntu-latest
133133
steps:
134-
- uses: actions/checkout@v3
134+
- uses: actions/checkout@v6
135135
- uses: actions/setup-python@v4
136136
with:
137137
python-version: "3.10"
@@ -234,7 +234,7 @@ jobs:
234234
if: github.event_name == 'release'
235235
needs: [build-docs, tests]
236236
steps:
237-
- uses: actions/checkout@v4
237+
- uses: actions/checkout@v6
238238
- uses: actions/setup-python@v4
239239
with:
240240
python-version: "3.10"

.github/workflows/code-checks.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,15 +13,15 @@ jobs:
1313
name: "Run pre-commit"
1414
runs-on: ubuntu-latest
1515
steps:
16-
- uses: actions/checkout@v4
16+
- uses: actions/checkout@v6
1717
- uses: actions/setup-python@v4
1818
- uses: pre-commit/action@v3.0.1
1919

2020
pyright:
2121
name: "Run Pyright"
2222
runs-on: ubuntu-latest
2323
steps:
24-
- uses: actions/checkout@v4
24+
- uses: actions/checkout@v6
2525
- uses: actions/setup-python@v4
2626
with:
2727
python-version: 3.13 # Use the maximum version supported by python-pins

.github/workflows/cross-compat.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ jobs:
2222
name: "Check cross lib compatibility"
2323
runs-on: ubuntu-latest
2424
steps:
25-
- uses: actions/checkout@v4
25+
- uses: actions/checkout@v6
2626

2727
- name: Install libcurl on Linux
2828
run: sudo apt-get update -y && sudo apt-get install -y libcurl4-openssl-dev

pyproject.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -87,6 +87,10 @@ universal = 1
8787
[tool.pytest.ini_options]
8888
markers = [
8989
"fs_file: mark test to only run on local filesystem",
90+
# NOTE: if you add a new marker here for a backend that requires credentials,
91+
# be sure to exclude it from the test command in .github/workflows/ci.yml
92+
# in the "test-fork" job, to avoid failing tests where GitHub Actions
93+
# secrets aren't available.
9094
"fs_s3: mark test to only run on AWS S3 bucket filesystem",
9195
"fs_gcs: mark test to only run on Google Cloud Storage bucket filesystem",
9296
"fs_abfs: mark test to only run on Azure Datalake filesystem",
Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
library(pins)
2-
args <- commandArgs(trailingOnly=TRUE)
2+
args <- commandArgs(trailingOnly = TRUE)
33

44
board <- board_folder(args[1])
5-
board %>% pin_write(mtcars, "mtcars", type="csv")
5+
board |> pin_write(mtcars, "mtcars", type = "csv")
Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
library(pins)
22

3-
args <- commandArgs(trailingOnly=TRUE)
3+
args <- commandArgs(trailingOnly = TRUE)
44

55

66
# create board ----
@@ -13,13 +13,13 @@ board_r <- board_folder(args[2])
1313

1414
cat("Checking mtcars pin\n")
1515

16-
res_mtcars <- board_py %>% pin_read("mtcars")
17-
stopifnot(all.equal(res_mtcars, datasets::mtcars, check.attributes=FALSE))
16+
res_mtcars <- board_py |> pin_read("mtcars")
17+
stopifnot(all.equal(res_mtcars, datasets::mtcars, check.attributes = FALSE))
1818

19-
meta_mtcars_py <- board_py %>% pin_meta("mtcars")
19+
meta_mtcars_py <- board_py |> pin_meta("mtcars")
2020
cat("\nPython meta:\n\n")
2121
print(meta_mtcars_py)
2222

23-
meta_mtcars_r <- board_r %>% pin_meta("mtcars")
23+
meta_mtcars_r <- board_r |> pin_meta("mtcars")
2424
cat("\nR meta:\n\n")
2525
print(meta_mtcars_r)

script/stage_r_pins.R

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -3,22 +3,22 @@ library(pins)
33
df <- data.frame(x = 1:2, y = c("a", "b"))
44
df_v2 <- data.frame(x = 1:2, y = c("a", "b"), z = 3:4)
55

6-
#board <- board_s3("ci-pins", prefix = "r-pins-test")
7-
board <- board_folder("pins/tests/pins-compat", versioned=TRUE)
6+
# board <- board_s3("ci-pins", prefix = "r-pins-test")
7+
board <- board_folder("pins/tests/pins-compat", versioned = TRUE)
88

9-
all_pins <- board %>% pin_list()
10-
board %>% pin_delete(all_pins)
9+
all_pins <- board |> pin_list()
10+
board |> pin_delete(all_pins)
1111

1212
# write two versions of df as CSV ----
13-
board %>% pin_write(df, "df_csv", type="csv")
13+
board |> pin_write(df, "df_csv", type = "csv")
1414
Sys.sleep(2)
15-
board %>% pin_write(df_v2, "df_csv", type="csv")
15+
board |> pin_write(df_v2, "df_csv", type = "csv")
1616

1717
# write two versions of df as arrow ----
18-
board %>% pin_write(df, "df_arrow", type="arrow")
18+
board |> pin_write(df, "df_arrow", type = "arrow")
1919

2020
# write two versions of df as RDS ----
21-
board %>% pin_write(df, "df_rds", type="rds")
21+
board |> pin_write(df, "df_rds", type = "rds")
2222

2323
# write unversioned pin as CSV
24-
board %>% pin_write(df, "df_unversioned", versioned=FALSE)
24+
board |> pin_write(df, "df_unversioned", versioned = FALSE)

0 commit comments

Comments (0)