-
Notifications
You must be signed in to change notification settings - Fork 31
65 lines (54 loc) · 1.65 KB
/
sync-hf.yml
File metadata and controls
65 lines (54 loc) · 1.65 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
# Sync exported problem datasets to the HuggingFace Hub dataset repo.
name: Sync to HuggingFace

on:
  push:
    branches: [main]
    paths:
      - 'algorithmic/problems/**'
      - 'research/problems/**'
  release:
    types: [published]
  workflow_dispatch: # Allow manual trigger

# Serialize runs so two quick pushes cannot race overlapping Hub commits.
concurrency:
  group: sync-hf
  cancel-in-progress: false

jobs:
  sync:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install huggingface_hub
        run: pip install huggingface_hub

      - name: Export dataset
        run: python scripts/export_hf_dataset.py

      - name: Upload to HuggingFace
        # Quoted heredoc delimiter ('PYEOF') stops the shell from expanding
        # $, backticks, or quotes inside the Python source — safer than the
        # previous `python -c "..."` double-quoted form.
        run: |
          python <<'PYEOF'
          from huggingface_hub import HfApi, CommitOperationAdd
          import os
          from pathlib import Path

          api = HfApi(token=os.environ['HF_TOKEN'])

          # Collect every file into one commit so the dataset never appears
          # half-updated on the Hub.
          operations = []

          # Data files produced by scripts/export_hf_dataset.py.
          data_dir = Path('hf_export/data')
          for f in data_dir.iterdir():
              operations.append(CommitOperationAdd(
                  path_in_repo=f'data/{f.name}',
                  path_or_fileobj=str(f),
              ))

          # Dataset card.
          operations.append(CommitOperationAdd(
              path_in_repo='README.md',
              path_or_fileobj='hf_export/README.md',
          ))

          # Single atomic commit with all files.
          api.create_commit(
              repo_id='FrontierCS/Frontier-CS',
              repo_type='dataset',
              operations=operations,
              commit_message='Update dataset',
          )
          print(f'Uploaded {len(operations)} files')
          PYEOF
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}