Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
92 changes: 0 additions & 92 deletions .github/workflows/api-doc.yml

This file was deleted.

2 changes: 1 addition & 1 deletion .github/workflows/api-lint.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
submodules: recursive
- name: Prepare folio-tools
Expand Down
47 changes: 47 additions & 0 deletions .github/workflows/api-schema-lint.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
---
name: api-schema-lint

# https://dev.folio.org/guides/describe-schema/

# API_DIRECTORIES: string: The space-separated list of directories to search
# for JSON Schema files.
#   e.g. 'src/main/resources/openapi'
#   NOTE: -- Also add each separate path to each of the "on: paths:" sections.
#   e.g. 'src/main/resources/openapi/**'
#
# API_EXCLUDES: string: The space-separated list of directories and files
# to exclude from traversal, in addition to the default exclusions.
#   e.g. ''

env:
  API_DIRECTORIES: 'src/main/resources/openapi'
  API_EXCLUDES: 'example'

on:
  workflow_dispatch:
  push:
    paths:
      - 'src/main/resources/openapi/**'
  pull_request:
    paths:
      - 'src/main/resources/openapi/**'

jobs:
  api-schema-lint:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          submodules: recursive
      - name: Prepare folio-tools
        run: |
          git clone https://github.com/folio-org/folio-tools
          cd folio-tools/api-schema-lint \
            && yarn install \
            && pip3 install -r requirements.txt
      - name: Do api-schema-lint
        run: |
          python3 folio-tools/api-schema-lint/api_schema_lint.py \
            --loglevel info \
            --directories ${{ env.API_DIRECTORIES }} \
            --excludes ${{ env.API_EXCLUDES }}
27 changes: 27 additions & 0 deletions .github/workflows/k8s-deploy-latest.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
---
# Very simple workflow to deploy the *latest* published container image with the 'latest' tag.
# This does not post updated module descriptors to okapi, update permissions or enable
# new versions of a module with the tenant. If that is needed, it should be done manually
# via the Okapi API.

name: k8s-deploy-latest

env:
  K8S_NAMESPACE: 'folio-dev-new'
  K8S_DEPLOYMENT: 'mod-inventory-update-dev'

on: workflow_dispatch

jobs:
  k8s-deploy-latest:
    runs-on: ubuntu-latest
    steps:
      - name: Deploy latest to K8s
        uses: actions-hub/kubectl@v1.34.1
        env:
          # Kubeconfig for the target cluster's service account.
          KUBE_CONFIG: ${{ secrets.FOLIO_DEV_NEW_SA_KUBECONFIG }}
        with:
          # Folded scalar keeps the kubectl arguments on one logical line.
          args: >-
            -n ${{ env.K8S_NAMESPACE }} rollout restart deployment ${{ env.K8S_DEPLOYMENT }}
136 changes: 136 additions & 0 deletions .github/workflows/mvn-dev-build-deploy.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
---
name: mvn-dev-build-deploy

on:
  push:
  pull_request:
    types: [opened, synchronize, reopened]
  workflow_dispatch:

env:
  # Docker image and module descriptor are only published from this branch.
  PUBLISH_BRANCH: 'deployment'
  OKAPI_URL: 'https://folio-dev-new-okapi.folio-dev.indexdata.com'
  OKAPI_SECRET_USER: "${{ secrets.FOLIO_DEV_NEW_OKAPI_USER }}"
  OKAPI_SECRET_PASSWORD: "${{ secrets.FOLIO_DEV_NEW_OKAPI_PASSWORD }}"
  OK_SESSION: 'session1'

jobs:
  mvn-dev-build-deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0  # Shallow clones should be disabled for a better relevancy of analysis
          submodules: recursive

      - name: Set up JDK 21
        uses: actions/setup-java@v5
        with:
          java-version: 21
          distribution: 'temurin'  # Alternative distribution options are available.
          cache: 'maven'

      - name: Prepare okclient
        run: git clone https://github.com/indexdata/okclient

      - name: Ensure OK and FOLIO login
        # So do not proceed with other workflow steps if not available.
        run: |
          source okclient/ok.sh
          OK -S ${{ env.OK_SESSION }} \
            -h ${{ env.OKAPI_URL }} \
            -t "supertenant" \
            -u ${{ env.OKAPI_SECRET_USER }} \
            -p ${{ env.OKAPI_SECRET_PASSWORD }}
          OK -S ${{ env.OK_SESSION }} -x

      - name: Gather some variables
        run: |
          echo "MODULE_NAME=$(mvn help:evaluate -Dexpression=project.artifactId -q -DforceStdout)" >> $GITHUB_ENV
          echo "SHA_SHORT=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
          echo "CURRENT_BRANCH=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_ENV

      - name: Set module version
        run: |
          echo "MODULE_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)-${SHA_SHORT}" >> $GITHUB_ENV

      - name: Cache SonarCloud packages
        uses: actions/cache@v4
        with:
          path: ~/.sonar/cache
          key: ${{ runner.os }}-sonar
          restore-keys: ${{ runner.os }}-sonar

      - name: Cache Maven packages
        uses: actions/cache@v4
        with:
          path: ~/.m2
          key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
          restore-keys: ${{ runner.os }}-m2

      - name: Testcontainers Docker api version hack
        run: echo api.version=1.44 >> ~/.docker-java.properties

      - name: Maven build
        run: mvn clean org.jacoco:jacoco-maven-plugin:prepare-agent install org.jacoco:jacoco-maven-plugin:report

      - name: SQ analyze
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}  # Needed to get PR information, if any
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        run: mvn -B org.sonarsource.scanner.maven:sonar-maven-plugin:sonar -Dsonar.host.url=https://sonarcloud.io -Dsonar.organization=indexdata -Dsonar.projectKey=indexdata_${{ github.event.repository.name }}

      - name: Update ModuleDescriptor Id
        run: |
          if test -f "$MOD_DESCRIPTOR"; then
            echo "Found $MOD_DESCRIPTOR"
            cat <<< $(jq '.id = "${{ env.MODULE_NAME}}-${{ env.MODULE_VERSION }}"' $MOD_DESCRIPTOR) > $MOD_DESCRIPTOR
            echo "MODULE_DESCRIPTOR=$MOD_DESCRIPTOR" >> $GITHUB_ENV
          else
            echo "Could not find $MOD_DESCRIPTOR"
            exit 1
          fi
        env:
          MOD_DESCRIPTOR: './target/ModuleDescriptor.json'

      - name: Read ModuleDescriptor
        id: moduleDescriptor
        uses: juliangruber/read-file-action@v1
        with:
          path: ${{ env.MODULE_DESCRIPTOR }}

      - name: Login to Index Data Docker Hub account
        if: ${{ env.CURRENT_BRANCH == env.PUBLISH_BRANCH }}
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Build and publish Docker image
        if: ${{ env.CURRENT_BRANCH == env.PUBLISH_BRANCH }}
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: indexdata/${{ env.MODULE_NAME }}:${{ env.MODULE_VERSION }},indexdata/${{ env.MODULE_NAME }}:latest

      - name: Publish ModuleDescriptor to Okapi
        if: ${{ env.CURRENT_BRANCH == env.PUBLISH_BRANCH }}
        run: |
          source okclient/ok.sh
          echo "Do login ..."
          OK -S ${{ env.OK_SESSION }} \
            -h ${{ env.OKAPI_URL }} \
            -t "supertenant" \
            -u ${{ env.OKAPI_SECRET_USER }} \
            -p ${{ env.OKAPI_SECRET_PASSWORD }}
          echo "Post the MD and report the response status ..."
          OK -S ${{ env.OK_SESSION }} _/proxy/modules \
            -X post -f ${{ env.MODULE_DESCRIPTOR }}
          declare -n NAMEREF_STATUS=${{ env.OK_SESSION }}_HTTPStatus
          echo "Response status: $NAMEREF_STATUS"
          echo "Do logout ..."
          OK -S ${{ env.OK_SESSION }} -x

      - name: Print module version to job summary
        run: |
          echo "#### Module Version: ${{ env.MODULE_VERSION }}" >> $GITHUB_STEP_SUMMARY
Original file line number Diff line number Diff line change
Expand Up @@ -342,7 +342,12 @@ public Future<Void> createDatabase(TenantPgPool pool) {
+ dbColumnNameAndType(MESSAGE) + ", "
+ metadata.columnsDdl() + ") ",
"CREATE INDEX IF NOT EXISTS import_job_channel_id_idx "
+ " ON " + pool.getSchema() + "." + table() + "(" + dbColumnName(CHANNEL_ID) + ")"
+ " ON " + pool.getSchema() + "." + table() + "(" + dbColumnName(CHANNEL_ID) + ")",
"ALTER TABLE " + pool.getSchema() + "." + table() + " DROP CONSTRAINT import_job_channel_id_fkey",
"ALTER TABLE " + pool.getSchema() + "." + table()
+ " ADD CONSTRAINT import_job_channel_id_fkey FOREIGN KEY (" + dbColumnName(CHANNEL_ID) + ")"
+ " REFERENCES " + pool.getSchema() + "." + Tables.CHANNEL + " (" + new Channel().dbColumnName(ID) + ") "
+ " ON DELETE CASCADE"
).mapEmpty();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,13 @@ public Future<Void> createDatabase(TenantPgPool pool) {
+ metadata.columnsDdl()
+ ")",
"CREATE INDEX IF NOT EXISTS log_statement_import_job_id_idx "
+ " ON " + pool.getSchema() + "." + table() + "(" + dbColumnName(IMPORT_JOB_ID) + ")"
+ " ON " + pool.getSchema() + "." + table() + "(" + dbColumnName(IMPORT_JOB_ID) + ")",
"ALTER TABLE " + pool.getSchema() + "." + table() + " DROP CONSTRAINT log_statement_import_job_id_fkey",
"ALTER TABLE " + pool.getSchema() + "." + table()
+ " ADD CONSTRAINT log_statement_import_job_id_fkey FOREIGN KEY (" + dbColumnName(IMPORT_JOB_ID) + ")"
+ " REFERENCES " + pool.getSchema() + "." + Tables.IMPORT_JOB
+ " (" + new ImportJob().dbColumnName(ID) + ") "
+ " ON DELETE CASCADE"
).mapEmpty();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -266,7 +266,14 @@ public Future<Void> createDatabase(TenantPgPool pool) {
+ metadata.columnsDdl()
+ ")",
"CREATE INDEX IF NOT EXISTS record_failure_import_job_id_idx "
+ " ON " + pool.getSchema() + "." + table() + "(" + dbColumnName(IMPORT_JOB_ID) + ")"
+ " ON " + pool.getSchema() + "." + table() + "(" + dbColumnName(IMPORT_JOB_ID) + ")",
"ALTER TABLE " + pool.getSchema() + "." + table()
+ " DROP CONSTRAINT IF EXISTS record_failure_import_job_id_fkey",
"ALTER TABLE " + pool.getSchema() + "." + table()
+ " ADD CONSTRAINT record_failure_import_job_id_fkey FOREIGN KEY (" + dbColumnName(IMPORT_JOB_ID) + ")"
+ " REFERENCES " + pool.getSchema() + "." + Tables.IMPORT_JOB
+ " (" + new ImportJob().dbColumnName(ID) + ") "
+ " ON DELETE CASCADE"
).mapEmpty();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ public void cleanUp() {
.put("module_from", "mod-inventory-update-1.0.0")
.put("purge", true), null);
fakeFolioApis.settingsStorage.wipeMockRecords();
deleteFileQueues();
//deleteFileQueues();
super.cleanUp();
}

Expand Down
Loading